input stringlengths 2.65k 237k | output stringclasses 1
value |
|---|---|
axis and having a second given vector lying in a
specified coordinate plane.
"""
CSPYCE_DEFINITIONS["twovec"] = {
"axdef": "Vector defining a principal axis.",
"indexa": "Principal axis number of axdef (X=1, Y=2, Z=3).",
"plndef": "Vector defining (with axdef) a principal plane.",
"indexp": "Second axis number (with indexa) of principal plane.",
"mout": "Output rotation matrix.",
}
CSPYCE_URL["twovec"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/twovec_c.html"
#########################################
# Metadata tables for SPICE toolkit wrappers: call signature types, argument
# names, return types/names, abstract, per-argument definitions, and the NAIF
# documentation URL for each function.
CSPYCE_SIGNATURES ["tyear"] = []
CSPYCE_ARGNAMES   ["tyear"] = []
CSPYCE_RETURNS    ["tyear"] = ["float"]
CSPYCE_RETNAMES   ["tyear"] = ["value"]
CSPYCE_ABSTRACT   ["tyear"] = """
Return the number of seconds in a tropical year.
"""
CSPYCE_DEFINITIONS["tyear"] = {
    "value": "number of seconds in a tropical year",
}
CSPYCE_URL["tyear"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tyear_c.html"
#########################################
CSPYCE_SIGNATURES ["ucrss"] = 2*["float[3]"]
CSPYCE_ARGNAMES   ["ucrss"] = ["v1", "v2"]
CSPYCE_RETURNS    ["ucrss"] = ["float[3]"]
CSPYCE_RETNAMES   ["ucrss"] = ["vout"]
CSPYCE_ABSTRACT   ["ucrss"] = """
Compute the normalized cross product of two 3-vectors.
"""
CSPYCE_DEFINITIONS["ucrss"] = {
    "v1": "Left vector for cross product.",
    "v2": "Right vector for cross product.",
    "vout": "Normalized cross product (v1xv2) / |v1xv2|.",
}
CSPYCE_URL["ucrss"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ucrss_c.html"
#########################################
CSPYCE_SIGNATURES ["unitim"] = ["time", "string", "string"]
CSPYCE_ARGNAMES   ["unitim"] = ["epoch", "insys", "outsys"]
CSPYCE_RETURNS    ["unitim"] = ["float"]
CSPYCE_RETNAMES   ["unitim"] = ["value"]
CSPYCE_ABSTRACT   ["unitim"] = """
Transform time from one uniform scale to another. The uniform time
scales are TAI, TDT, TDB, ET, JED, JDTDB, JDTDT.
"""
CSPYCE_DEFINITIONS["unitim"] = {
    "epoch": "An epoch to be converted.",
    "insys": "The time scale associated with the input epoch.",
    "outsys": "The time scale associated with the function value.",
    "value": "the value in outsys that is equivalent to the epoch on the insys time scale.",
}
CSPYCE_URL["unitim"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unitim_c.html"
#########################################
CSPYCE_SIGNATURES ["unload"] = ["string"]
CSPYCE_ARGNAMES   ["unload"] = ["file"]
CSPYCE_RETURNS    ["unload"] = []
CSPYCE_RETNAMES   ["unload"] = []
CSPYCE_ABSTRACT   ["unload"] = """
Unload a SPICE kernel.
"""
CSPYCE_DEFINITIONS["unload"] = {
    "file": "The name of a kernel to unload.",
}
CSPYCE_URL["unload"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unload_c.html"
#########################################
CSPYCE_SIGNATURES ["unorm"] = ["float[3]"]
CSPYCE_ARGNAMES   ["unorm"] = ["v1"]
CSPYCE_RETURNS    ["unorm"] = ["float[3]", "float"]
CSPYCE_RETNAMES   ["unorm"] = ["vout", "vmag"]
CSPYCE_ABSTRACT   ["unorm"] = """
Normalize a double precision 3-vector and return its magnitude.
"""
CSPYCE_DEFINITIONS["unorm"] = {
    "v1": "Vector to be normalized.",
    "vout": "Unit vector v1 / |v1|.",
    "vmag": "Magnitude of v1, i.e. |v1|.",
}
CSPYCE_PS ["unorm"] = "If v1 is the zero vector, then vout will also be zero."
CSPYCE_URL["unorm"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unorm_c.html"
#########################################
CSPYCE_SIGNATURES ["unormg"] = ["float[*]"]
CSPYCE_ARGNAMES   ["unormg"] = ["v1"]
CSPYCE_RETURNS    ["unormg"] = ["float[*]", "float"]
CSPYCE_RETNAMES   ["unormg"] = ["vout", "vmag"]
CSPYCE_ABSTRACT   ["unormg"] = """
Normalize a double precision vector of arbitrary dimension and return
its magnitude.
"""
CSPYCE_DEFINITIONS["unormg"] = {
    "v1": "Vector to be normalized.",
    "vout": "Unit vector v1 / |v1|.",
    "vmag": "Magnitude of v1, that is, |v1|.",
}
CSPYCE_PS ["unormg"] = "If v1 is the zero vector, then vout will also be zero."
CSPYCE_URL["unormg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/unormg_c.html"
#########################################
# Metadata tables (continued): utc2et through vdot.
CSPYCE_SIGNATURES ["utc2et"] = ["string"]
CSPYCE_ARGNAMES   ["utc2et"] = ["utcstr"]
CSPYCE_RETURNS    ["utc2et"] = ["float"]
CSPYCE_RETNAMES   ["utc2et"] = ["et"]
CSPYCE_ABSTRACT   ["utc2et"] = """
Convert an input time from Calendar or Julian Date format, UTC, to
ephemeris seconds past J2000.
"""
CSPYCE_DEFINITIONS["utc2et"] = {
    "utcstr": "Input time string, UTC.",
    "et": "Output epoch, ephemeris seconds past J2000.",
}
CSPYCE_URL["utc2et"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/utc2et_c.html"
#########################################
CSPYCE_SIGNATURES ["vadd"] = 2*["float[3]"]
CSPYCE_ARGNAMES   ["vadd"] = ["v1", "v2"]
CSPYCE_RETURNS    ["vadd"] = ["float[3]"]
CSPYCE_RETNAMES   ["vadd"] = ["vout"]
CSPYCE_ABSTRACT   ["vadd"] = """
Add two 3-dimensional vectors.
"""
CSPYCE_DEFINITIONS["vadd"] = {
    "v1": "First vector to be added.",
    "v2": "Second vector to be added.",
    "vout": "Sum vector, v1 + v2.",
}
CSPYCE_URL["vadd"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vadd_c.html"
#########################################
CSPYCE_SIGNATURES ["vaddg"] = 2*["float[*]"]
CSPYCE_ARGNAMES   ["vaddg"] = ["v1", "v2"]
CSPYCE_RETURNS    ["vaddg"] = ["float[*]"]
CSPYCE_RETNAMES   ["vaddg"] = ["vout"]
CSPYCE_ABSTRACT   ["vaddg"] = """
Add two vectors of arbitrary dimension.
"""
CSPYCE_DEFINITIONS["vaddg"] = {
    "v1": "First vector to be added.",
    "v2": "Second vector to be added.",
    "vout": "Sum vector, v1 + v2.",
}
CSPYCE_URL["vaddg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vaddg_c.html"
#########################################
CSPYCE_SIGNATURES ["vcrss"] = 2*["float[3]"]
CSPYCE_ARGNAMES   ["vcrss"] = ["v1", "v2"]
CSPYCE_RETURNS    ["vcrss"] = ["float[3]"]
CSPYCE_RETNAMES   ["vcrss"] = ["vout"]
CSPYCE_ABSTRACT   ["vcrss"] = """
Compute the cross product of two 3-dimensional vectors.
"""
CSPYCE_DEFINITIONS["vcrss"] = {
    "v1": "Left hand vector for cross product.",
    "v2": "Right hand vector for cross product.",
    "vout": "Cross product v1xv2.",
}
CSPYCE_URL["vcrss"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vcrss_c.html"
#########################################
CSPYCE_SIGNATURES ["vdist"] = 2*["float[3]"]
CSPYCE_ARGNAMES   ["vdist"] = ["v1", "v2"]
CSPYCE_RETURNS    ["vdist"] = ["float"]
CSPYCE_RETNAMES   ["vdist"] = ["dist"]
CSPYCE_ABSTRACT   ["vdist"] = """
Return the distance between two three-dimensional vectors.
"""
CSPYCE_DEFINITIONS["vdist"] = {
    "v1": "The first of two 3-vectors.",
    "v2": "The second of two 3-vectors.",
    "dist": "The distance between v1 and v2.",
}
CSPYCE_URL["vdist"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdist_c.html"
#########################################
CSPYCE_SIGNATURES ["vdistg"] = 2*["float[*]"]
CSPYCE_ARGNAMES   ["vdistg"] = ["v1", "v2"]
CSPYCE_RETURNS    ["vdistg"] = ["float"]
CSPYCE_RETNAMES   ["vdistg"] = ["dist"]
CSPYCE_ABSTRACT   ["vdistg"] = """
Return the distance between two vectors of arbitrary dimension.
"""
CSPYCE_DEFINITIONS["vdistg"] = {
    "v1": "The first of two vectors of arbitrary dimension.",
    "v2": "The second of two vectors of arbitrary dimension.",
    "dist": "The distance between v1 and v2.",
}
CSPYCE_URL["vdistg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdistg_c.html"
#########################################
CSPYCE_SIGNATURES ["vdot"] = 2*["float[3]"]
CSPYCE_ARGNAMES   ["vdot"] = ["v1", "v2"]
CSPYCE_RETURNS    ["vdot"] = ["float"]
CSPYCE_RETNAMES   ["vdot"] = ["value"]
CSPYCE_ABSTRACT   ["vdot"] = """
Compute the dot product of two double precision, 3-dimensional vectors.
"""
CSPYCE_DEFINITIONS["vdot"] = {
    "v1": "First vector in the dot product.",
    "v2": "Second vector in the dot product.",
    "value": "The value of the dot product of v1 and v2.",
}
CSPYCE_URL["vdot"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdot_c.html"
#########################################
# Metadata tables (continued): vdotg through vlcomg. Each entry records the
# wrapper's signature types, argument names, return types/names, abstract,
# per-argument definitions, and the NAIF documentation URL.
CSPYCE_SIGNATURES ["vdotg"] = 2*["float[*]"]
CSPYCE_ARGNAMES ["vdotg"] = ["v1", "v2"]
CSPYCE_RETURNS ["vdotg"] = ["float"]
CSPYCE_RETNAMES ["vdotg"] = ["value"]
CSPYCE_ABSTRACT ["vdotg"] = """
Compute the dot product of two vectors of arbitrary dimension.
"""
CSPYCE_DEFINITIONS["vdotg"] = {
    "v1": "First vector in the dot product.",
    "v2": "Second vector in the dot product.",
    "value": "The value of the dot product of v1 and v2.",
}
CSPYCE_URL["vdotg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vdotg_c.html"
#########################################
CSPYCE_SIGNATURES ["vequ"] = ["float[3]"]
CSPYCE_ARGNAMES ["vequ"] = ["vin"]
CSPYCE_RETURNS ["vequ"] = ["float[3]"]
CSPYCE_RETNAMES ["vequ"] = ["vout"]
CSPYCE_ABSTRACT ["vequ"] = """
Make one double precision 3-dimensional vector equal to another.
"""
CSPYCE_DEFINITIONS["vequ"] = {
    "vin": "3-dimensional double precision vector.",
    "vout": "3-dimensional double precision vector set equal to vin.",
}
CSPYCE_URL["vequ"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vequ_c.html"
#########################################
CSPYCE_SIGNATURES ["vequg"] = ["float[*]"]
CSPYCE_ARGNAMES ["vequg"] = ["vin"]
CSPYCE_RETURNS ["vequg"] = ["float[*]"]
CSPYCE_RETNAMES ["vequg"] = ["vout"]
CSPYCE_ABSTRACT ["vequg"] = """
Make one double precision vector of arbitrary dimension equal to
another.
"""
CSPYCE_DEFINITIONS["vequg"] = {
    "vin": "double precision vector.",
    "vout": "double precision vector set equal to vin.",
}
CSPYCE_URL["vequg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vequg_c.html"
#########################################
CSPYCE_SIGNATURES ["vhat"] = ["float[3]"]
CSPYCE_ARGNAMES ["vhat"] = ["v1"]
CSPYCE_RETURNS ["vhat"] = ["float[3]"]
CSPYCE_RETNAMES ["vhat"] = ["vout"]
CSPYCE_ABSTRACT ["vhat"] = """
Find the unit vector along a double precision 3-dimensional vector.
"""
CSPYCE_DEFINITIONS["vhat"] = {
    "v1": "Vector to be unitized.",
    "vout": "Unit vector v1 / |v1|.",
}
CSPYCE_URL["vhat"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vhat_c.html"
#########################################
CSPYCE_SIGNATURES ["vhatg"] = ["float[*]"]
CSPYCE_ARGNAMES ["vhatg"] = ["v1"]
CSPYCE_RETURNS ["vhatg"] = ["float[*]"]
CSPYCE_RETNAMES ["vhatg"] = ["vout"]
CSPYCE_ABSTRACT ["vhatg"] = """
Find the unit vector along a double precision vector of arbitrary
dimension.
"""
CSPYCE_DEFINITIONS["vhatg"] = {
    "v1": "Vector to be normalized.",
    "vout": "Unit vector v1 / |v1|.",
}
CSPYCE_PS ["vhatg"] = "If v1 is the zero vector, then vout will also be zero."
CSPYCE_URL["vhatg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vhatg_c.html"
#########################################
CSPYCE_SIGNATURES ["vlcom3"] = 3*["float", "float[3]"]
CSPYCE_ARGNAMES ["vlcom3"] = ["a", "v1", "b", "v2", "c", "v3"]
CSPYCE_RETURNS ["vlcom3"] = ["float[3]"]
CSPYCE_RETNAMES ["vlcom3"] = ["sum"]
CSPYCE_ABSTRACT ["vlcom3"] = """
This subroutine computes the vector linear combination
a*v1 + b*v2 + c*v3 of double precision, 3-dimensional vectors.
"""
CSPYCE_DEFINITIONS["vlcom3"] = {
    "a": "Coefficient of v1",
    "v1": "Vector in 3-space",
    "b": "Coefficient of v2",
    "v2": "Vector in 3-space",
    "c": "Coefficient of v3",
    "v3": "Vector in 3-space",
    "sum": "Linear Vector Combination a*v1 + b*v2 + c*v3",
}
CSPYCE_URL["vlcom3"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vlcom3_c.html"
#########################################
CSPYCE_SIGNATURES ["vlcom"] = 2*["float", "float[3]"]
CSPYCE_ARGNAMES ["vlcom"] = ["a", "v1", "b", "v2"]
CSPYCE_RETURNS ["vlcom"] = ["float[3]"]
CSPYCE_RETNAMES ["vlcom"] = ["sum"]
CSPYCE_ABSTRACT ["vlcom"] = """
Compute a vector linear combination of two double precision,
3-dimensional vectors.
"""
CSPYCE_DEFINITIONS["vlcom"] = {
    "a": "Coefficient of v1",
    "v1": "Vector in 3-space",
    "b": "Coefficient of v2",
    "v2": "Vector in 3-space",
    "sum": "Linear Vector Combination a*v1 + b*v2",
}
CSPYCE_URL["vlcom"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vlcom_c.html"
#########################################
CSPYCE_SIGNATURES ["vlcomg"] = 2*["float", "float[*]"]
CSPYCE_ARGNAMES ["vlcomg"] = ["a", "v1", "b", "v2"]
CSPYCE_RETURNS ["vlcomg"] = ["float[*]"]
CSPYCE_RETNAMES ["vlcomg"] = ["sum"]
CSPYCE_ABSTRACT ["vlcomg"] = """
Compute a vector linear combination of two double precision vectors of
arbitrary dimension.
"""
CSPYCE_DEFINITIONS["vlcomg"] = {
    "a": "Coefficient of v1",
    "v1": "Vector in n-space",
    "b": "Coefficient of v2",
    "v2": "Vector in n-space",
    "sum": "Linear Vector Combination a*v1 + b*v2",
}
CSPYCE_URL["vlcomg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vlcomg_c.html"
#########################################
# Metadata tables (continued): vminug through vpack.
CSPYCE_SIGNATURES ["vminug"] = ["float[*]"]
CSPYCE_ARGNAMES   ["vminug"] = ["vin"]
CSPYCE_RETURNS    ["vminug"] = ["float[*]"]
CSPYCE_RETNAMES   ["vminug"] = ["vout"]
CSPYCE_ABSTRACT   ["vminug"] = """
Negate a double precision vector of arbitrary dimension.
"""
CSPYCE_DEFINITIONS["vminug"] = {
    "vin": "ndim-dimensional double precision vector to be negated.",
    "vout": "ndim-dimensional double precision vector equal to -vin.",
}
CSPYCE_URL["vminug"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vminug_c.html"
#########################################
CSPYCE_SIGNATURES ["vminus"] = ["float[3]"]
CSPYCE_ARGNAMES   ["vminus"] = ["v1"]
CSPYCE_RETURNS    ["vminus"] = ["float[3]"]
CSPYCE_RETNAMES   ["vminus"] = ["vout"]
CSPYCE_ABSTRACT   ["vminus"] = """
Negate a double precision 3-dimensional vector.
"""
CSPYCE_DEFINITIONS["vminus"] = {
    "v1": "Vector to be negated.",
    "vout": "Negated vector -v1.",
}
CSPYCE_URL["vminus"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vminus_c.html"
#########################################
CSPYCE_SIGNATURES ["vnorm"] = ["float[3]"]
CSPYCE_ARGNAMES   ["vnorm"] = ["v1"]
CSPYCE_RETURNS    ["vnorm"] = ["float"]
CSPYCE_RETNAMES   ["vnorm"] = ["value"]
CSPYCE_ABSTRACT   ["vnorm"] = """
Compute the magnitude of a double precision, 3-dimensional vector.
"""
CSPYCE_DEFINITIONS["vnorm"] = {
    "v1": "Vector whose magnitude is to be found.",
    "value": "The norm of v1.",
}
CSPYCE_URL["vnorm"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vnorm_c.html"
#########################################
CSPYCE_SIGNATURES ["vnormg"] = ["float[*]"]
CSPYCE_ARGNAMES   ["vnormg"] = ["v1"]
CSPYCE_RETURNS    ["vnormg"] = ["float"]
CSPYCE_RETNAMES   ["vnormg"] = ["value"]
CSPYCE_ABSTRACT   ["vnormg"] = """
Compute the magnitude of a double precision vector of arbitrary
dimension.
"""
CSPYCE_DEFINITIONS["vnormg"] = {
    "v1": "Vector whose magnitude is to be found.",
    "value": "The norm of v1.",
}
CSPYCE_URL["vnormg"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vnormg_c.html"
#########################################
CSPYCE_SIGNATURES ["vpack"] = 3*["float"]
CSPYCE_ARGNAMES   ["vpack"] = ["x", "y", "z"]
CSPYCE_RETURNS    ["vpack"] = ["float[3]"]
CSPYCE_RETNAMES   ["vpack"] = ["vout"]
CSPYCE_ABSTRACT   ["vpack"] = """
Pack three scalar components into a vector.
"""
CSPYCE_DEFINITIONS["vpack"] = {
    "x": "First scalar component of a 3-vector.",
    "y": "Second scalar component of a 3-vector.",
    "z": "Third scalar component of a 3-vector.",
    "vout": "Equivalent 3-vector.",
}
CSPYCE_URL["vpack"] = "https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vpack_c.html"
#########################################
CSPYCE_SIGNATURES ["vperp"] = 2*["float[3]"]
CSPYCE_ARGNAMES ["vperp"] = ["v1", "v2"]
CSPYCE_RETURNS ["vperp"] = ["float[3]"]
CSPYCE_RETNAMES ["vperp"] = ["perp"]
CSPYCE_ABSTRACT ["vperp"] = """
Find the component of a vector that is perpendicular to a second vector.
All vectors are 3-dimensional.
"""
CSPYCE_DEFINITIONS["vperp"] = {
"v1": "The vector whose orthogonal component is sought.",
"v2": "The vector used as the orthogonal reference.",
"perp": "The component of a | |
import numpy as np
import cv2
import random
'''
Board class for a box-stacking game.
Board data:
  pieces is an (n + m) x n int8 array.
  Rows 0..n-1 are the n x n placement area: 1 = occupied, 0 = empty.
  Rows n..n+m-1 hold the queue of pending boxes: a box of width w and
  height h is stored as the value h in the first w cells of its row.
Squares are stored and manipulated as (x, y) tuples.
x is the column, y is the row.
'''
class Board():
    """A box-stacking board.

    ``pieces`` is an (n + m) x n int8 array:

    * rows 0..n-1 form the n x n placement area (1 = occupied, 0 = empty);
    * rows n..n+m-1 hold the queue of pending boxes -- a box of width w and
      height h is stored by writing h into the first w cells of its row.

    Squares are (x, y) tuples where x is the column and y is the row.
    An action encodes a (box, square) pair as box_idx * n * n + y * n + x.
    """

    def __init__(self, n, m):
        """Set up the initial board configuration.

        n -- side length of the square placement area
        m -- maximum number of queued boxes
        """
        self.n = n
        self.m = m
        self.rows = n + m
        self.cols = n
        self.total_cells = self.rows * self.cols
        self.total_actions = self.n * self.n * self.m
        self.pieces = None
        self.reset()

    def reset(self):
        """Clear the board and generate a fresh random queue of boxes."""
        self.pieces = np.zeros((self.n + self.m, self.n), dtype=np.int8)
        # // keeps the bounds integral on both Python 2 and Python 3
        # (the original int(self.n / 2) broke nothing but read as float math).
        box_list = self.generate_boxes(min_width=1, max_width=self.n // 2 + 1,
                                       min_height=1, max_height=self.n // 3)
        self._fill_pieces_with_box_list(box_list)
        self.box_list_cells = self.calculate_box_list_area()

    def setBoard(self, board_pieces):
        """Replace the board state with an externally supplied array."""
        self.pieces = board_pieces
        self.box_list_cells = self.calculate_box_list_area()

    def _fill_pieces_with_box_list(self, box_list):
        """Write up to m (w, h) boxes into the queue rows below the play area."""
        start_y = self.n
        for ix, box in enumerate(box_list):
            if ix >= self.m:
                break  # the queue holds at most m boxes
            w, h = box
            self.pieces[start_y, :w] = h
            start_y += 1

    def calculate_box_list_area(self):
        """Return the total number of cells covered by all queued boxes."""
        return int(np.sum(self.pieces[self.n:]))

    def generate_boxes(self, min_width=1, max_width=4, min_height=1, max_height=2):
        """Generate random (w, h) boxes until their combined area reaches n*n
        or m boxes exist; return them sorted by area ascending, with ties
        ordered by width ascending."""
        boxes = []
        total_cells = self.n * self.n
        acc_cells = 0
        while acc_cells < total_cells and len(boxes) < self.m:
            w = random.randint(min_width, max_width)
            h = random.randint(min_height, max_height)
            acc_cells += w * h
            boxes.append((w, h))
        # Primary sort: smallest area first.
        boxes = sorted(boxes, key=lambda bx: bx[0] * bx[1])
        # Secondary sort: within each run of equal-area boxes, order by
        # increasing width.
        idx = 0
        total = len(boxes)
        sorted_boxes = []
        while idx < total:
            box1 = boxes[idx]
            cur_area = box1[0] * box1[1]
            same_area_boxes = [box1]
            for ix in range(idx + 1, total):
                box2 = boxes[ix]
                if box2[0] * box2[1] != cur_area:
                    break
                same_area_boxes.append(box2)
            sorted_boxes.extend(sorted(same_area_boxes, key=lambda bx: bx[0]))
            idx += len(same_area_boxes)
        return sorted_boxes

    def is_full(self):
        """True when every cell of the play area is occupied."""
        return np.all(self.pieces[:self.n] == 1)

    def get_score(self):
        """Return a score scaled so that filling half of the (capped) queued
        box area gives 0.0 and filling all of it gives 1.0."""
        occ_cnt = self.get_occupied_count()
        half_cnt = min(self.box_list_cells, self.n * self.n) / 2.
        if half_cnt == 0:
            return 0.0  # no queued boxes: nothing to score
        return float(occ_cnt - half_cnt) / half_cnt

    def get_occupied_count(self):
        """Number of occupied play-area cells (occupied cells hold 1)."""
        return int(np.sum(self.pieces[:self.n]))

    def __getitem__(self, index):
        # Allow board[y][x] / board[y, x] style indexing straight through
        # to the underlying array.
        return self.pieces[index]

    def is_valid_placement(self, square, box_size):
        """True if a box of box_size with its top-left corner at square fits
        inside the play area, overlaps no occupied cell, and either rests on
        the ground row or is fully supported by occupied cells beneath it."""
        x, y = square
        w, h = box_size
        assert w != 0 and h != 0
        assert x < self.n and y < self.n
        if self.pieces[y, x] == 0:  # anchor cell free
            if (x + w - 1) < self.n and (y + h - 1) < self.n:  # inside play area
                if np.sum(self.pieces[y:y + h, x:x + w]) == 0:  # footprint free
                    if (y + h) < self.n:  # not resting on the ground row
                        # Require all w cells directly below to be occupied.
                        return np.sum(self.pieces[y + h, x:x + w]) >= w
                    return True
        return False

    def get_legal_moves(self, box_size):
        """Return all squares where a box of box_size may legally be placed."""
        moves = set()
        for y in range(self.n):
            for x in range(self.n):
                if self.is_valid_placement((x, y), box_size):
                    moves.add((x, y))
        return list(moves)

    def get_legal_moves_all(self):
        """Return every legal action over all queued boxes."""
        legal_moves = []
        for box_idx in range(self.m):
            w, h = self.get_box_size_from_idx(box_idx)
            if w == 0:
                continue  # empty queue slot
            for mov in self.get_legal_moves((w, h)):
                legal_moves.append(self.get_action_from_square_and_box_idx(mov, box_idx))
        return legal_moves

    def has_legal_moves(self, box_size):
        """True if at least one legal placement exists for box_size."""
        assert len(box_size) == 2  # box_size: (w, h)
        for y in range(self.n):
            for x in range(self.n):
                if self.is_valid_placement((x, y), box_size):
                    return True
        return False

    def has_legal_moves_all(self):
        """True if any queued box has at least one legal placement."""
        for box_idx in range(self.m):
            w, h = self.get_box_size_from_idx(box_idx)
            if w > 0 and self.has_legal_moves((w, h)):
                return True
        return False

    def move_box(self, move, box_size):
        """Mark the w x h footprint anchored at move=(x, y) as occupied.

        Does not validate the move and does not touch the box queue.
        """
        x, y = move
        assert (x < self.n and y < self.n)
        w, h = box_size
        self.pieces[y:y + h, x:x + w] = 1

    def get_box_size_from_idx(self, box_idx):
        """Decode the (w, h) of queue slot box_idx; (0, 0) if the slot is empty."""
        box_cells = self.pieces[self.n + box_idx]
        w = int(np.sum(box_cells > 0))
        if w == 0:
            return (0, 0)
        h = int(box_cells[0])  # the first w cells all hold the height
        return (w, h)

    def get_square_and_box_size_from_action(self, action):
        """Decode an action into ((x, y), (w, h), box_idx).

        Square and size are None when the referenced queue slot is empty.
        """
        # Floor division: the original "action / (n*n)" produced a float
        # index on Python 3 and crashed the subsequent array lookup.
        box_idx = action // (self.n * self.n)
        square_idx = action % (self.n * self.n)
        w, h = self.get_box_size_from_idx(box_idx)
        if w == 0:
            return None, None, box_idx
        x, y = self.boardIndexToSquare(square_idx)
        return (x, y), (w, h), box_idx

    def get_action_from_square_and_box_idx(self, square, box_idx):
        """Encode (square, box_idx) into a flat action index."""
        x, y = square
        return box_idx * self.n * self.n + y * self.n + x

    def is_action_valid(self, action):
        """True if the action references a queued box that fits at its square."""
        sq, box_size, box_idx = self.get_square_and_box_size_from_action(action)
        if sq is None:
            return False
        return self.is_valid_placement(sq, box_size)

    def execute_move(self, action):
        """Apply an action: place its box, then pop it from the queue by
        shifting the later queue rows up one slot."""
        sq, box_size, box_idx = self.get_square_and_box_size_from_action(action)
        if sq is None:
            return  # empty slot: nothing to do
        x, y = sq
        w, h = box_size
        self.pieces[y:y + h, x:x + w] = 1
        # Shift queue rows up; destination rows precede their sources, so the
        # overlapping slice assignment copies cleanly.
        self.pieces[self.n + box_idx:-1] = self.pieces[self.n + box_idx + 1:]
        self.pieces[-1] = 0  # the last queue row becomes empty

    def boardIndexToSquare(self, idx):
        """Convert a flat play-area index into an (x, y) square."""
        x = int(idx % self.n)
        y = int(idx // self.n)
        return x, y
class BoardRenderer(object):
    """Render a Board into a BGR uint8 image (OpenCV channel order)."""

    def __init__(self, unit_res=30, grid_line_width=1, occupied_px=(0, 0, 255),
                 box_px=(255, 0, 0), grid_line_px=(0, 0, 0), text_px=(0, 0, 0)):
        """unit_res -- side length of one square in pixels;
        *_px arguments are BGR color tuples."""
        self.unit_res = unit_res
        self.grid_line_px = grid_line_px
        self.occupied_px = occupied_px
        self.grid_line_width = grid_line_width
        self.box_px = box_px
        self.text_px = text_px

    def display_board(self, board_obj):
        """Return an image of the whole board: play area plus box queue."""
        unit_res = self.unit_res
        grid_line_width = self.grid_line_width
        grid_line_px = self.grid_line_px
        n = board_obj.n
        m = board_obj.m
        nr = n + m  # total rows
        img_width = unit_res * n + n - grid_line_width
        img_height = unit_res * nr + nr - grid_line_width
        board_img = np.ones((img_height, img_width, 3), dtype=np.uint8)
        board_img *= 255  # start all white
        # Grid lines between squares.
        idx_x = 0
        idx_y = 0
        for x in range(n - 1):
            idx_x += unit_res + grid_line_width
            board_img[:, idx_x - 1] = grid_line_px
        for y in range(nr - 1):
            idx_y += unit_res + grid_line_width
            board_img[idx_y - 1, :] = grid_line_px
        # Occupied play-area squares.
        mr, mc = np.where(board_obj.pieces[:n] == 1)
        for x, y in zip(mc, mr):
            self.fill_board_img_square(board_img, (x, y), self.occupied_px)
        # Queued boxes, labelled with their stored height value.
        for ix in range(m):
            y = n + ix
            for x, cell in enumerate(board_obj.pieces[y]):
                if cell == 0:
                    continue
                self.fill_board_img_square(board_img, (x, y), self.box_px, "%d" % (cell))
        return board_img

    def fill_board_img_square(self, board_img, square, fill_px, text=None):
        """Fill one square with fill_px and optionally draw text inside it."""
        assert (type(board_img) == np.ndarray)
        assert (type(square) == tuple and type(fill_px) == tuple)
        r = self.unit_res
        gl_width = self.grid_line_width
        x, y = square
        start_x = x * r + gl_width * x
        start_y = y * r + gl_width * y
        board_img[start_y:start_y + r, start_x:start_x + r] = fill_px
        if text:
            # // keeps the point integral: cv2.putText requires int coordinates
            # (the original r / 2 yielded floats on Python 3).
            pos = (start_x + r // 2, start_y + r // 2)
            font_scale = r / 50.
            cv2.putText(board_img, text, pos, cv2.FONT_HERSHEY_COMPLEX, font_scale, self.text_px)

    def fill_board_squares(self, board_obj, square_list, fill_px):
        """Render the board, then overfill the given square(s) with fill_px."""
        board_img = self.display_board(board_obj)
        if type(square_list) != list:
            square_list = [square_list]  # accept a single square
        for sq in square_list:
            self.fill_board_img_square(board_img, sq, fill_px)
        return board_img

    def draw_action(self, board_obj, action, action_px=(0, 255, 0)):
        """Render the board with the footprint of an encoded action highlighted."""
        sq, box_sz, _ = board_obj.get_square_and_box_size_from_action(action)
        return self.draw_box_from_square(board_obj, sq, box_sz, action_px)

    def draw_box_from_square(self, board_obj, square, box_size, fill_px=(0, 255, 0)):
        """Render the board with a w x h box anchored at square highlighted."""
        box_w, box_h = box_size
        sq = square
        squares = [(sq[0] + dx, sq[1] + dy) for dx in range(box_w) for dy in range(box_h)]
        return self.fill_board_squares(board_obj, squares, fill_px)

    def get_square_from_pixel_pos(self, board_obj, pos):
        """Map a pixel position to its (x, y) square.

        A component is None when the pixel lies beyond the board on that axis.
        """
        unit_res = self.unit_res
        grid_line_width = self.grid_line_width
        square_x = None
        square_y = None
        x, y = pos
        idx_x = 0
        idx_y = 0
        for c in range(board_obj.cols):
            idx_x += unit_res + grid_line_width
            if x < idx_x:
                square_x = c
                break
        for r in range(board_obj.rows):
            idx_y += unit_res + grid_line_width
            if y < idx_y:
                square_y = r
                break
        return (square_x, square_y)
if __name__ == '__main__':
n = 6
m = 10
b = Board(n, m)
box_sizes = [(3,2),(1,1),(2,2),(3,2),(3,1),(3,1),(5,2),(5,2),(2,2)] # (w,h), ...
moves = [(0,0),(5,5),(5,4),(0,4),(3,4),(1,3),(2,1),(1,1),(1,2)] # (x,y), ...
for ix,move in enumerate(moves):
box_size = box_sizes[ix]
if b.is_valid_placement(move, box_size):
print("Move valid!", move, "Box size:", box_size)
b.move_box(move, box_size)
print(b.pieces[:n])
print(b.get_legal_moves((2,1)))
b.reset()
b_renderer = BoardRenderer(unit_res=30)
board_img = b_renderer.display_board(b)
cv2.imshow('board', board_img)
cv2.waitKey(0)
b.execute_move(30)
board_img = b_renderer.display_board(b)
cv2.imshow('board', board_img)
cv2.waitKey(0)
for action in sorted(b.get_legal_moves_all()):
board_img | |
<filename>pretix_capacity_reports/exporter.py
import json
import tempfile
from collections import OrderedDict
from datetime import timedelta, time, datetime
import pytz
from dateutil.parser import parse
from django import forms
from django.db.models import OuterRef, Exists, Prefetch, Count, Q, Sum, Min, Subquery, Max, F, CharField, Value
from django.db.models.functions import TruncDay, Coalesce, Cast, Concat
from django.utils.functional import cached_property
from django.utils.timezone import now, get_current_timezone, make_aware
from django.utils.translation import gettext_lazy as _
from i18nfield.strings import LazyI18nString
from openpyxl import Workbook
from openpyxl.cell.cell import KNOWN_TYPES
from openpyxl.utils import get_column_letter
from pretix.base.exporter import MultiSheetListExporter
from pretix.base.models import Quota, EventMetaValue, Order, OrderPosition, SubEvent, Checkin, LogEntry, Item, ItemVariation
class BaseMSLE(MultiSheetListExporter):
    """Shared base for the capacity report exporters: a vendored XLSX renderer
    plus a cached list of product/variation choices across all selected
    events."""

    def _render_xlsx(self, form_data, output_file=None):  # vendored pretix 3.16 version
        wb = Workbook(write_only=True)
        n_sheets = len(self.sheets)
        for i_sheet, (s, l) in enumerate(self.sheets):
            ws = wb.create_sheet(str(l))
            # Optional per-sheet preparation hook: prepare_xlsx_sheet_<id>(ws).
            if hasattr(self, 'prepare_xlsx_sheet_' + s):
                getattr(self, 'prepare_xlsx_sheet_' + s)(ws)
            total = 0
            counter = 0
            for i, line in enumerate(self.iterate_sheet(form_data, sheet=s)):
                if isinstance(line, self.ProgressSetTotal):
                    total = line.total
                    continue
                ws.append([
                    str(val) if not isinstance(val, KNOWN_TYPES) else val
                    for val in line
                ])
                if total:
                    counter += 1
                    # Report progress roughly every 1% (at least every 10 rows),
                    # scaled to this sheet's share of the total work.
                    if counter % max(10, total // 100) == 0:
                        self.progress_callback(counter / total * 100 / n_sheets + 100 / n_sheets * i_sheet)
        if output_file:
            wb.save(output_file)
            return self.get_filename() + '.xlsx', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', None
        else:
            with tempfile.NamedTemporaryFile(suffix='.xlsx') as f:
                wb.save(f.name)
                f.seek(0)
                return self.get_filename() + '.xlsx', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', f.read()

    @cached_property
    def product_choices(self):
        """Form choices for every product/variation across the selected events,
        encoded as "name#!#-" (no variation) or "name#!#variation"."""
        product_choices = [('', 'All')]
        # Products without variations.
        for r in Item.objects.filter(
            event__in=self.events, variations__isnull=True,
        ).annotate(
            n=Cast('name', output_field=CharField())
        ).values('n').distinct():
            product_choices.append((str(r['n']) + '#!#-', i18ntostring(r['n'])))
        # Product/variation pairs.
        for r in ItemVariation.objects.filter(
            item__event__in=self.events
        ).annotate(
            n=Cast('item__name', output_field=CharField()),
            v=Cast('value', output_field=CharField())
        ).values(
            'n', 'v'
        ).distinct():
            product_choices.append(('{}#!#{}'.format(r['n'], r['v']), '{} – {}'.format(
                i18ntostring(r['n']),
                i18ntostring(r['v']),
            )))
        product_choices.sort(key=lambda i: str(i[1]))
        return product_choices
def i18ntostring(v):
    """Turn a serialized value into something human-readable.

    Database values produced by casting an i18n name column to text may hold
    either a plain string or a JSON object mapping language codes to strings.
    The latter is wrapped in a LazyI18nString; anything that merely *looks*
    like JSON but does not parse is returned unchanged instead of raising.
    """
    if v.startswith('{'):
        try:
            parsed = json.loads(v)
        except ValueError:  # includes json.JSONDecodeError
            return v
        return LazyI18nString(parsed)
    return v
class CapacityUtilizationReport(BaseMSLE):
identifier = 'capacity_utilization'
verbose_name = 'Capacity & Utilization'
meta_name = 'AgencyNumber'
sheets = [
('date_agency_event', _('By date, agency, and event')),
('date_agency', _('By date and agency')),
('date_time_agency_event', _('By time slot, agency, event')),
('agency_date_day', _('By agency and day')),
('agency_date_week', _('By agency and week')),
]
    @property
    def export_form_fields(self):
        """Extend the inherited export form with a date window, an optional
        product/variation picker and (for multi-event export) a multi-choice
        filter on the AgencyNumber organizer meta property."""
        # Default window: today through six days from now (inclusive).
        defdate_start = now().astimezone(get_current_timezone()).date()
        defdate_end = now().astimezone(get_current_timezone()).date() + timedelta(days=6)
        f = OrderedDict(
            list(super().export_form_fields.items()) + [
                ('date_from',
                 forms.DateField(
                     label=_('Start date'),
                     widget=forms.DateInput(attrs={'class': 'datepickerfield'}),
                     initial=defdate_start,
                 )),
                ('date_to',
                 forms.DateField(
                     label=_('End date'),
                     widget=forms.DateInput(attrs={'class': 'datepickerfield'}),
                     initial=defdate_end,
                 )),
                ('product_name',
                 forms.ChoiceField(
                     label=_('Product and variation'),
                     choices=self.product_choices,
                     required=False
                 )),
            ]
        )
        if self.is_multievent and self.events.first():
            organizer = self.events.first().organizer
            for mp in organizer.meta_properties.prefetch_related('event_values'):
                if mp.name != self.meta_name:
                    continue  # only the agency property is exposed as a filter
                # Distinct values across all events, everything pre-selected.
                values = sorted(list({v.value for v in mp.event_values.all()}))
                f['meta:{}'.format(mp.name)] = forms.MultipleChoiceField(
                    label=mp.name,
                    choices=[(v, v) for v in values],
                    widget=forms.CheckboxSelectMultiple(
                        attrs={'class': 'scrolling-multiple-choice'}
                    ),
                    initial=values,
                )
        return f
    def iterate_sheet(self, form_data, sheet):
        """Resolve the date window, pre-aggregate per-day counts, and dispatch
        to the sheet-specific generator (iterate_<sheet>)."""
        # Bucket days in the first event's timezone; fall back to UTC.
        if self.events.first():
            self.tz = self.events.first().timezone
        else:
            self.tz = pytz.UTC
        self.date_from = form_data['date_from']
        self.date_until = form_data['date_to']
        if isinstance(self.date_from, str):
            self.date_from = parse(self.date_from).date()
        self.datetime_from = make_aware(datetime.combine(
            self.date_from,
            time(hour=0, minute=0, second=0, microsecond=0)
        ), self.tz)
        if isinstance(self.date_until, str):
            self.date_until = parse(self.date_until).date()
        # Exclusive upper bound: midnight after the last selected day.
        self.datetime_until = make_aware(datetime.combine(
            self.date_until + timedelta(days=1),
            time(hour=0, minute=0, second=0, microsecond=0)
        ), self.tz)
        self.cached_events = list(
            self.events.prefetch_related(
                'organizer', '_settings_objects', 'organizer___settings_objects', 'organizer__meta_properties',
                Prefetch(
                    'meta_values',
                    EventMetaValue.objects.select_related('property'),
                    to_attr='meta_values_cached'
                )
            )
        )
        # (local date, event id) -> number of subevents starting that day
        subevs = {
            (r['day'].astimezone(self.tz).date(), r['event_id']): r['c'] for r in
            self._subevent_qs(form_data).annotate(
                day=TruncDay('date_from', tzinfo=self.tz)
            ).order_by().values('day', 'event_id').annotate(c=Count('*'))
        }
        # (local date, event id) -> summed size of quotas with a defined size
        quotas = {
            (r['day'].astimezone(self.tz).date(), r['event_id']): r['c'] for r in
            self._base_quota_qs(form_data).filter(
                size__isnull=False,
            ).annotate(
                day=TruncDay(Coalesce('subevent__date_from', 'event__date_from'), tzinfo=self.tz)
            ).order_by().values('day', 'event_id').annotate(c=Sum('size'))
        }
        # (local date, event id) -> number of paid/pending order positions
        orders = {
            (r['day'].astimezone(self.tz).date(), r['order__event_id']): r['c'] for r in
            self._base_position_qs(form_data).annotate(
                day=TruncDay(Coalesce('subevent__date_from', 'order__event__date_from'), tzinfo=self.tz)
            ).order_by().values('day', 'order__event_id').annotate(c=Count('*'))
        }
        # (local date, event id) -> number of checked-in positions
        checkins = {
            (r['day'].astimezone(self.tz).date(), r['order__event_id']): r['c'] for r in
            self._base_position_qs(form_data, has_checkin=True).annotate(
                day=TruncDay(Coalesce('subevent__date_from', 'order__event__date_from'), tzinfo=self.tz)
            ).order_by().values('day', 'order__event_id').annotate(c=Count('*'))
        }
        # Selected agency values; single-event export uses that event's value.
        meta_values = form_data['meta:{}'.format(self.meta_name)] if self.is_multievent else [self.event.meta_data[self.meta_name]]
        if hasattr(self, 'iterate_' + sheet):
            yield from getattr(self, 'iterate_' + sheet)(form_data, meta_values, subevs, quotas, orders, checkins)
def _base_quota_qs(self, form_data):
qs = Quota.objects.filter(
Q(subevent__date_from__gte=self.datetime_from, subevent__date_from__lt=self.datetime_until) | Q(subevent__isnull=True, event__date_from__gte=self.datetime_from, event__date_from__lt=self.datetime_until),
event__in=self.events,
subevent__date_from__gte=self.datetime_from,
subevent__date_from__lt=self.datetime_until,
)
if form_data['product_name']:
qs = qs.annotate(
has_p=Exists(
Quota.items.through.objects.annotate(
n=Concat(Cast('item__name', output_field=CharField()), Value('#!#-')),
).filter(
item__variations__isnull=True,
quota=OuterRef('pk'),
n=form_data['product_name']
)
),
has_v=Exists(
Quota.variations.through.objects.annotate(
n=Concat(
Cast('itemvariation__item__name', output_field=CharField()),
Value('#!#'),
Cast('itemvariation__value', output_field=CharField()),
)
).filter(
quota=OuterRef('pk'),
n=form_data['product_name']
)
),
).filter(
Q(has_p=True) | Q(has_v=True)
)
for i, n in enumerate([self.meta_name]):
if 'meta:{}'.format(n) in form_data:
emv_with_value = EventMetaValue.objects.filter(
event=OuterRef('event_id'),
property__name=n,
value__in=form_data['meta:{}'.format(n)]
)
qs = qs.annotate(**{
'attr_{}'.format(i): Exists(emv_with_value)
}).filter(**{
'attr_{}'.format(i): True
})
# item, variation
return qs
def _base_position_qs(self, form_data, has_checkin=False):
qs = OrderPosition.objects.filter(
Q(subevent__date_from__gte=self.datetime_from, subevent__date_from__lt=self.datetime_until) | Q(subevent__isnull=True, order__event__date_from__gte=self.datetime_from, order__event__date_from__lt=self.datetime_until),
order__event__in=self.events,
order__status__in=(Order.STATUS_PAID, Order.STATUS_PENDING),
)
for i, n in enumerate([self.meta_name]):
if 'meta:{}'.format(n) in form_data:
emv_with_value = EventMetaValue.objects.filter(
event=OuterRef('order__event_id'),
property__name=n,
value__in=form_data['meta:{}'.format(n)]
)
qs = qs.annotate(**{
'attr_{}'.format(i): Exists(emv_with_value)
}).filter(**{
'attr_{}'.format(i): True
})
if has_checkin:
qs = qs.annotate(has_checkin=Exists(Checkin.objects.filter(position=OuterRef('pk')))).filter(has_checkin=True)
if form_data['product_name']:
qs = qs.annotate(
n=Concat(
Cast('item__name', output_field=CharField()),
Value('#!#'),
Coalesce(Cast('variation__value', output_field=CharField()), Value('-'))
)
).filter(
Q(n=form_data['product_name'])
)
return qs
def _subevent_qs(self, form_data):
qs = SubEvent.objects.filter(
event__in=self.events,
date_from__gte=self.datetime_from,
date_from__lt=self.datetime_until
)
for i, n in enumerate([self.meta_name]):
if 'meta:{}'.format(n) in form_data:
emv_with_value = EventMetaValue.objects.filter(
event=OuterRef('event_id'),
property__name=n,
value__in=form_data['meta:{}'.format(n)]
)
qs = qs.annotate(**{
'attr_{}'.format(i): Exists(emv_with_value)
}).filter(**{
'attr_{}'.format(i): True
})
if form_data['product_name']:
qs = qs.annotate(
has_p=Exists(
Quota.items.through.objects.annotate(
n=Concat(Cast('item__name', output_field=CharField()), Value('#!#-')),
).filter(
item__variations__isnull=True,
quota__subevent=OuterRef('pk'),
n=form_data['product_name']
)
),
has_v=Exists(
Quota.variations.through.objects.annotate(
n=Concat(
Cast('itemvariation__item__name', output_field=CharField()),
Value('#!#'),
Cast('itemvariation__value', output_field=CharField()),
)
).filter(
quota__subevent=OuterRef('pk'),
n=form_data['product_name']
)
),
).filter(
Q(has_p=True) | Q(has_v=True)
)
return qs
def _date_iter(self):
dt = self.date_from
while dt <= self.date_until:
yield dt
dt += timedelta(days=1)
def _week_iter(self):
dt = self.date_from
current_week = []
while dt <= self.date_until:
current_week.append(dt)
if dt.weekday() == 5: # saturday
yield current_week
current_week = []
dt += timedelta(days=1)
if current_week:
yield current_week
    def iterate_date_time_agency_event(self, form_data, meta_values, subevs, quotas, orders, checkins):
        """Yield one report row per (event, start datetime) time slot.

        The day-keyed ``quotas``/``orders``/``checkins`` parameters are
        deliberately shadowed below with datetime-keyed aggregates, because
        this sheet breaks the figures down per exact time slot rather than
        per day.
        """
        quotas = {
            (r['dt'].astimezone(self.tz), r['event_id']): r['c'] for r in
            self._base_quota_qs(form_data).filter(
                size__isnull=False,
            ).annotate(
                dt=Coalesce('subevent__date_from', 'event__date_from'),
            ).order_by().values('dt', 'event_id').annotate(c=Sum('size'))
        }
        orders = {
            (r['dt'].astimezone(self.tz), r['order__event_id']): r['c'] for r in
            self._base_position_qs(form_data).annotate(
                dt=Coalesce('subevent__date_from', 'order__event__date_from'),
            ).order_by().values('dt', 'order__event_id').annotate(c=Count('*'))
        }
        checkins = {
            (r['dt'].astimezone(self.tz), r['order__event_id']): r['c'] for r in
            self._base_position_qs(form_data, has_checkin=True).annotate(
                dt=Coalesce('subevent__date_from', 'order__event__date_from'),
            ).order_by().values('dt', 'order__event_id').annotate(c=Count('*'))
        }
        # Header row.
        yield [
            "Date of Event", "Time slot", self.meta_name, "Event ID", "Sum of Quota", "Sum of Orders", "Sum of Checked in"
        ]
        for mv in meta_values:
            events = sorted([e for e in self.cached_events if e.meta_data[self.meta_name] == mv], key=lambda e: str(e.name))
            for e in events:
                # A time slot is any datetime for which the event has a
                # quota, an order position or a check-in.
                time_slots = {r[0] for r in orders.keys() if r[1] == e.pk} | {r[0] for r in checkins.keys() if r[1] == e.pk} | {r[0] for r in quotas.keys() if r[1] == e.pk}
                time_slots = sorted(list(time_slots))
                for dt in time_slots:
                    yield [
                        dt.strftime('%m/%d/%Y'),
                        dt.strftime('%I:%M %p'),
                        mv,
                        e.slug,
                        quotas.get((dt, e.pk), 0) or 0,  # Sum() may be None
                        orders.get((dt, e.pk), 0),
                        checkins.get((dt, e.pk), 0),
                    ]
def iterate_date_agency_event(self, form_data, meta_values, subevs, quotas, orders, checkins):
yield [
"Date of Event", self.meta_name, "Event ID", "Number of Timeslots", "Sum of Quota", "Sum of Orders", "Sum of Checked in"
]
for mv in meta_values:
events = sorted([e for e in self.cached_events if e.meta_data[self.meta_name] == mv], key=lambda e: str(e.name))
for e in events:
for dt in self._date_iter():
subevcnt = subevs.get((dt, e.pk), 0)
if e.has_subevents and not subevcnt:
continue
yield [
dt.strftime('%m/%d/%Y'),
mv,
e.slug,
subevcnt,
quotas.get((dt, e.pk), 0) or 0,
orders.get((dt, e.pk), 0),
checkins.get((dt, e.pk), 0),
]
def prepare_xlsx_sheet_date_agency_event(self, ws):
ws.freeze_panes = 'A2'
ws.column_dimensions['A'].width = 20
ws.column_dimensions['B'].width = 30
ws.column_dimensions['C'].width = 20
ws.column_dimensions['D'].width = 15
ws.column_dimensions['E'].width = 15
ws.column_dimensions['F'].width = 15
ws.column_dimensions['G'].width = 15
def iterate_date_agency(self, form_data, meta_values, subevs, quotas, orders, checkins):
yield [
"Date of Event", self.meta_name, "Number of Events", "Sum of Quotas", "Sum of Orders", "Sum of Checked in"
]
for mv in meta_values:
events = sorted([e for e in self.cached_events if e.meta_data[self.meta_name] == mv], key=lambda e: str(e.name))
for dt in self._date_iter():
evcnt = sum((1 if not e.has_subevents or subevs.get((dt, e.pk), 0) else 0 for e in events), start=0)
if not evcnt:
continue
yield [
dt.strftime('%m/%d/%Y'),
mv,
evcnt,
sum((quotas.get((dt, e.pk), 0) or 0 for e in events), start=0),
sum((orders.get((dt, e.pk), 0) for e in events), start=0),
sum((checkins.get((dt, e.pk), 0) for e in events), start=0),
]
def prepare_xlsx_sheet_date_agency(self, ws):
ws.freeze_panes = 'A2'
ws.column_dimensions['A'].width = 20
ws.column_dimensions['B'].width = 30
ws.column_dimensions['C'].width = 15
ws.column_dimensions['D'].width = 15
ws.column_dimensions['E'].width = 15
ws.column_dimensions['F'].width = 15
def iterate_agency_date_day(self, form_data, meta_values, subevs, quotas, orders, checkins):
yield [
self.meta_name,
"",
] + [
dt.strftime('%m/%d/%Y') for dt in self._date_iter()
]
for mv in meta_values:
events = sorted([e for e in self.cached_events if e.meta_data[self.meta_name] == mv], key=lambda e: str(e.name))
yield [
mv, "Sum of Quotas",
] + [
sum((quotas.get((dt, e.pk), 0) or 0 for e in events), start=0) for dt in self._date_iter()
]
yield [
"", "Sum of Orders",
] + [
sum((orders.get((dt, e.pk), 0) or 0 for e in events), start=0) for dt in self._date_iter()
]
yield [
"", "Sum | |
counters details
interface_xr_tree = rpc_reply_etree.xpath(
".//int:interfaces/int:interface-xr/int:interface", namespaces=C.NS
)
for interface in interface_xr_tree:
interface_name = self._find_txt(
interface, "./int:interface-name", default="", namespaces=C.NS
)
if interface_name[:8] == "Loopback" and interface_name[8:].isdigit():
continue
interface_stats = {}
if (
self._find_txt(
interface,
"./int:interface-statistics/int:stats-type",
default="",
namespaces=C.NS,
)
== "basic"
):
interface_stats["tx_multicast_packets"] = ""
interface_stats["tx_discards"] = ""
interface_stats["tx_octets"] = ""
interface_stats["tx_errors"] = ""
interface_stats["rx_octets"] = ""
interface_stats["tx_unicast_packets"] = ""
interface_stats["rx_errors"] = ""
interface_stats["tx_broadcast_packets"] = ""
interface_stats["rx_multicast_packets"] = ""
interface_stats["rx_broadcast_packets"] = ""
interface_stats["rx_discards"] = ""
interface_stats["rx_unicast_packets"] = ""
else:
int_stats_xpath = "./int:interface-statistics/int:full-interface-stats/"
interface_stats["tx_multicast_packets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:multicast-packets-sent",
"0",
namespaces=C.NS,
),
)
interface_stats["tx_discards"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:output-drops",
"0",
namespaces=C.NS,
),
)
interface_stats["tx_octets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:bytes-sent",
"0",
namespaces=C.NS,
),
)
interface_stats["tx_errors"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:output-errors",
"0",
namespaces=C.NS,
),
)
interface_stats["rx_octets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:bytes-received",
"0",
namespaces=C.NS,
),
)
interface_stats["tx_unicast_packets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:packets-sent",
"0",
namespaces=C.NS,
),
)
interface_stats["rx_errors"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:input-errors",
"0",
namespaces=C.NS,
),
)
interface_stats["tx_broadcast_packets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:broadcast-packets-sent",
"0",
namespaces=C.NS,
),
)
interface_stats["rx_multicast_packets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:multicast-packets-received",
"0",
namespaces=C.NS,
),
)
interface_stats["rx_broadcast_packets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:broadcast-packets-received",
"0",
namespaces=C.NS,
),
)
interface_stats["rx_discards"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:input-drops",
"0",
namespaces=C.NS,
),
)
interface_stats["rx_unicast_packets"] = napalm.base.helpers.convert(
int,
self._find_txt(
interface,
int_stats_xpath + "int:packets-received",
"0",
namespaces=C.NS,
),
)
interface_counters[interface_name] = interface_stats
return interface_counters
    def get_bgp_neighbors(self):
        """Return BGP neighbors details.

        Result is a dict keyed by VRF name ('global' for the default VRF);
        each entry holds the VRF's router id and a 'peers' dict keyed by
        neighbor IP address.
        """

        def get_vrf_neighbors(rpc_reply_etree, xpath):
            """Return BGP neighbors details for a given VRF."""
            neighbors = {}
            for neighbor in rpc_reply_etree.xpath(xpath, namespaces=C.NS):
                this_neighbor = {}
                this_neighbor["local_as"] = napalm.base.helpers.convert(
                    int,
                    self._find_txt(
                        neighbor, "./bgp:local-as", default="", namespaces=C.NS
                    ),
                )
                this_neighbor["remote_as"] = napalm.base.helpers.convert(
                    int,
                    self._find_txt(
                        neighbor, "./bgp:remote-as", default="", namespaces=C.NS
                    ),
                )
                this_neighbor["remote_id"] = napalm.base.helpers.convert(
                    str,
                    self._find_txt(
                        neighbor, "./bgp:router-id", default="", namespaces=C.NS
                    ),
                )
                try:
                    this_neighbor["description"] = napalm.base.helpers.convert(
                        str,
                        self._find_txt(
                            neighbor, "./bgp:description", default="", namespaces=C.NS
                        ),
                    )
                except AttributeError:
                    logger.debug(
                        "No attribute 'description' for neighbor %s"
                        % (this_neighbor["remote_as"])
                    )
                    this_neighbor["description"] = ""
                this_neighbor["is_enabled"] = not (
                    self._find_txt(
                        neighbor,
                        "./bgp:is-administratively-shut-down",
                        default="",
                        namespaces=C.NS,
                    )
                    == "true"
                )
                # Uptime is only meaningful for established sessions;
                # otherwise report -1.
                if (
                    str(
                        self._find_txt(
                            neighbor,
                            "./bgp:connection-state",
                            default="",
                            namespaces=C.NS,
                        )
                    )
                    == "bgp-st-estab"
                ):
                    this_neighbor["is_up"] = True
                    this_neighbor["uptime"] = napalm.base.helpers.convert(
                        int,
                        self._find_txt(
                            neighbor,
                            "./bgp:connection-established-time",
                            default="",
                            namespaces=C.NS,
                        ),
                    )
                else:
                    this_neighbor["is_up"] = False
                    this_neighbor["uptime"] = -1
                this_neighbor["address_family"] = {}
                if (
                    self._find_txt(
                        neighbor,
                        "./bgp:connection-remote-address/\
                        bgp:afi",
                        default="",
                        namespaces=C.NS,
                    )
                    == "ipv4"
                ):
                    this_afi = "ipv4"
                elif (
                    self._find_txt(
                        neighbor,
                        "./bgp:connection-remote-address/bgp:afi",
                        default="",
                        namespaces=C.NS,
                    )
                    == "ipv6"
                ):
                    this_afi = "ipv6"
                else:
                    this_afi = self._find_txt(
                        neighbor,
                        "./bgp:connection-remote-address/bgp:afi",
                        default="",
                        namespaces=C.NS,
                    )
                this_neighbor["address_family"][this_afi] = {}
                try:
                    # received = accepted + denied prefixes.
                    this_neighbor["address_family"][this_afi][
                        "received_prefixes"
                    ] = napalm.base.helpers.convert(
                        int,
                        self._find_txt(
                            neighbor,
                            "./bgp:af-data/bgp:prefixes-accepted",
                            default="",
                            namespaces=C.NS,
                        ),
                        0,
                    ) + napalm.base.helpers.convert(
                        int,
                        self._find_txt(
                            neighbor,
                            "./bgp:af-data/bgp:prefixes-denied",
                            default="",
                            namespaces=C.NS,
                        ),
                        0,
                    )
                    this_neighbor["address_family"][this_afi][
                        "accepted_prefixes"
                    ] = napalm.base.helpers.convert(
                        int,
                        self._find_txt(
                            neighbor,
                            "./bgp:af-data/bgp:prefixes-accepted",
                            default="",
                            namespaces=C.NS,
                        ),
                        0,
                    )
                    this_neighbor["address_family"][this_afi][
                        "sent_prefixes"
                    ] = napalm.base.helpers.convert(
                        int,
                        self._find_txt(
                            neighbor,
                            "./bgp:af-data/\
                            bgp:prefixes-advertised",
                            default="",
                            namespaces=C.NS,
                        ),
                        0,
                    )
                except AttributeError:
                    # af-data missing: mark all counters as unknown.
                    this_neighbor["address_family"][this_afi]["received_prefixes"] = -1
                    this_neighbor["address_family"][this_afi]["accepted_prefixes"] = -1
                    this_neighbor["address_family"][this_afi]["sent_prefixes"] = -1
                neighbor_ip = napalm.base.helpers.ip(
                    self._find_txt(
                        neighbor, "./bgp:neighbor-address", default="", namespaces=C.NS
                    )
                )
                neighbors[neighbor_ip] = this_neighbor
            return neighbors

        # Fetch the whole BGP operational state with a single NETCONF get.
        rpc_reply = self.device.get(filter=("subtree", C.BGP_NEIGHBOR_REQ_FILTER)).xml
        # Converts string to tree
        rpc_reply_etree = ETREE.fromstring(rpc_reply)
        result = {}
        this_vrf = {}
        this_vrf["peers"] = {}
        # get neighbors and router id from default(global) VRF
        default_vrf_xpath = """.//bgp:bgp/bgp:instances/bgp:instance/
        bgp:instance-active/bgp:default-vrf/"""
        this_vrf["router_id"] = napalm.base.helpers.convert(
            str,
            self._find_txt(
                rpc_reply_etree,
                default_vrf_xpath
                + "bgp:global-process-info/\
                bgp:vrf/bgp:router-id",
                default="",
                namespaces=C.NS,
            ),
        )
        this_vrf["peers"] = get_vrf_neighbors(
            rpc_reply_etree, default_vrf_xpath + "bgp:neighbors/bgp:neighbor"
        )
        result["global"] = this_vrf
        # get neighbors and router id from other VRFs
        vrf_xpath = """.//bgp:bgp/bgp:instances/
        bgp:instance/bgp:instance-active/bgp:vrfs"""
        for vrf in rpc_reply_etree.xpath(vrf_xpath + "/bgp:vrf", namespaces=C.NS):
            this_vrf = {}
            this_vrf["peers"] = {}
            this_vrf["router_id"] = napalm.base.helpers.convert(
                str,
                self._find_txt(
                    vrf,
                    "./bgp:global-process-info/bgp:vrf/\
                    bgp:router-id",
                    default="",
                    namespaces=C.NS,
                ),
            )
            vrf_name = self._find_txt(
                vrf, "./bgp:vrf-name", default="", namespaces=C.NS
            )
            this_vrf["peers"] = get_vrf_neighbors(
                rpc_reply_etree,
                vrf_xpath
                + "/bgp:vrf[bgp:vrf-name='"
                + vrf_name
                + "']\
                /bgp:neighbors/bgp:neighbor",
            )
            result[vrf_name] = this_vrf
        return result
    def get_environment(self):
        """Return environment details.

        Collects fan, power and temperature data through the platform's
        ENVMON model (skipped for platforms in ``C.PLAT_NO_ENVMON``) and CPU
        and memory data through separate system-monitoring/memory models.
        """

        def env_ns_prefix():
            """Return prefix for ENVMON model in router capabilities."""
            for prefix in C.ENVMON_NAMESPACES:
                for capability in self.device.server_capabilities:
                    if C.ENVMON_NAMESPACES[prefix] in capability:
                        return prefix
            return None

        environment_status = {}
        environment_status["fans"] = {}
        environment_status["temperature"] = {}
        environment_status["power"] = {}
        environment_status["cpu"] = {}
        environment_status["memory"] = 0.0
        router_model = self.get_facts().get("model")
        if router_model not in C.PLAT_NO_ENVMON:
            nsp = env_ns_prefix()
            rpc_reply = self.device.get(
                filter=("subtree", C.ENVMON_RPC_REQ_FILTER[nsp])
            ).xml
            # Converts string to etree
            result_tree = ETREE.fromstring(rpc_reply)
            #
            # FAN
            #
            fans = {}
            fan_location_xpath = ".//{}:environment/{}:oper/{}:fan/\
                {}:location".format(
                nsp, nsp, nsp, nsp
            )
            for fan_location in result_tree.xpath(
                fan_location_xpath, namespaces=C.ENVMON_NAMESPACES
            ):
                fan_name = self._find_txt(
                    fan_location,
                    "./{}:location".format(nsp),
                    default="",
                    namespaces=C.ENVMON_NAMESPACES,
                ).lstrip("0/")
                # Only fan-tray entries ("FT") are reported; a present fan
                # tray is assumed healthy.
                if "FT" in fan_name:
                    fans[fan_name] = {"status": True}
            environment_status["fans"] = fans
            #
            # POWER
            #
            power = {}
            power_location_xpath = ".//{}:environment/{}:oper/{}:power/\
                {}:location".format(
                nsp, nsp, nsp, nsp
            )
            capacity = 0.0
            for power_location in result_tree.xpath(
                power_location_xpath, namespaces=C.ENVMON_NAMESPACES
            ):
                power_location_name = self._find_txt(
                    power_location,
                    "./{}:location".format(nsp),
                    default="",
                    namespaces=C.ENVMON_NAMESPACES,
                )
                # A purely numeric location carries the chassis-level usable
                # capacity, reused for the individual supplies that follow.
                if power_location_name.isdigit():
                    capacity = float(
                        self._find_txt(
                            power_location,
                            "./{}:pem_attributes/\
                            {}:usable_power_capacity".format(
                                nsp, nsp
                            ),
                            default="",
                            namespaces=C.ENVMON_NAMESPACES,
                        )
                    )
                    continue
                # Power-tray (PT) / power-module (PM) slots carry the
                # per-supply measurements.
                if (
                    re.search(r"\d/PT\d", power_location_name) is not None
                    or re.search(r"\d/PM\d", power_location_name) is not None
                ):
                    for pem_attr in power_location.xpath(
                        "./{}:pem_attributes".format(nsp),
                        namespaces=C.ENVMON_NAMESPACES,
                    ):
                        pem = self._find_txt(
                            pem_attr,
                            "./{}:pem".format(nsp),
                            default="",
                            namespaces=C.ENVMON_NAMESPACES,
                        )
                        status = self._find_txt(
                            pem_attr,
                            "./{}:status".format(nsp),
                            default="",
                            namespaces=C.ENVMON_NAMESPACES,
                        )
                        output_voltage = float(
                            self._find_txt(
                                pem_attr,
                                "./{}:output_voltage".format(nsp),
                                default="0.0",
                                namespaces=C.ENVMON_NAMESPACES,
                            )
                        )
                        output_current = float(
                            self._find_txt(
                                pem_attr,
                                "./{}:output_current".format(nsp),
                                default="0.0",
                                namespaces=C.ENVMON_NAMESPACES,
                            )
                        )
                        power[pem] = {
                            "status": status == "OK",
                            # Output power in watts = V * I.
                            "output": round(output_voltage * output_current, 2),
                            "capacity": capacity,
                        }
            environment_status["power"] = power
            #
            # TEMPERATURE
            #
            temperature = {}
            temp_location_xpath = ".//{}:environment/{}:oper/{}:temperatures/\
                {}:location".format(
                nsp, nsp, nsp, nsp
            )
            for temp_location in result_tree.xpath(
                temp_location_xpath, namespaces=C.ENVMON_NAMESPACES
            ):
                temp_location_name = self._find_txt(
                    temp_location,
                    "./{}:location".format(nsp),
                    default="",
                    namespaces=C.ENVMON_NAMESPACES,
                )
                for sensor_attributes in temp_location.xpath(
                    "./{}:sensor_attributes".format(nsp), namespaces=C.ENVMON_NAMESPACES
                ):
                    sensor_id = self._find_txt(
                        sensor_attributes,
                        "./{}:sensor_id".format(nsp),
                        default="",
                        namespaces=C.ENVMON_NAMESPACES,
                    )
                    # Only the first Inlet/Control sensor per location is
                    # used (see the break below).
                    if sensor_id in ["Inlet", "Control Sensor"]:
                        temp_value = float(
                            self._find_txt(
                                sensor_attributes,
                                "./{}:value".format(nsp),
                                default="",
                                namespaces=C.ENVMON_NAMESPACES,
                            )
                        )
                        major_lo = float(
                            self._find_txt(
                                sensor_attributes,
                                "./{}:major_lo".format(nsp),
                                default="",
                                namespaces=C.ENVMON_NAMESPACES,
                            )
                        )
                        major_hi = float(
                            self._find_txt(
                                sensor_attributes,
                                "./{}:major_hi".format(nsp),
                                default="",
                                namespaces=C.ENVMON_NAMESPACES,
                            )
                        )
                        critical_lo = float(
                            self._find_txt(
                                sensor_attributes,
                                "./{}:critical_lo".format(nsp),
                                default="",
                                namespaces=C.ENVMON_NAMESPACES,
                            )
                        )
                        critical_hi = float(
                            self._find_txt(
                                sensor_attributes,
                                "./{}:critical_hi".format(nsp),
                                default="",
                                namespaces=C.ENVMON_NAMESPACES,
                            )
                        )
                        is_alert = (temp_value <= major_lo) or (temp_value >= major_hi)
                        is_critical = (temp_value <= critical_lo) or (
                            temp_value >= critical_hi
                        )
                        temperature[temp_location_name] = {
                            "is_alert": is_alert,
                            "temperature": temp_value,
                            "is_critical": is_critical,
                        }
                        break
            environment_status["temperature"] = temperature
        #
        # CPU
        #
        cpu = {}
        rpc_reply = self.device.get(
            filter=("subtree", C.ENV_SYS_MON_RPC_REQ_FILTER)
        ).xml
        # Converts string to etree
        result_tree = ETREE.fromstring(rpc_reply)
        for module in result_tree.xpath(
            ".//sys:system-monitoring/sys:cpu-utilization", namespaces=C.NS
        ):
            this_cpu = {}
            this_cpu["%usage"] = napalm.base.helpers.convert(
                float,
                self._find_txt(
                    module, "./sys:total-cpu-five-minute", default="", namespaces=C.NS
                ),
            )
            node_name = self._find_txt(
                module, "./sys:node-name", default="", namespaces=C.NS
            )
            cpu[node_name] = this_cpu
        environment_status["cpu"] = cpu
        #
        # Memory
        #
        rpc_reply = self.device.get(filter=("subtree", C.ENV_MEM_RPC_REQ_FILTER)).xml
        # Converts string to etree
        result_tree = ETREE.fromstring(rpc_reply)
        for node in result_tree.xpath(
            ".//mem:memory-summary/mem:nodes/mem:node", namespaces=C.NS
        ):
            node_name = self._find_txt(
                node, "./mem:node-name", default="", namespaces=C.NS
            )
            slot = node_name.split("/")[1]
            if slot in ["RP0", "RSP0"]:
                available_ram = napalm.base.helpers.convert(
                    int,
                    self._find_txt(
                        node,
                        "./mem:summary/mem:system-ram-memory",
                        default="",
                        namespaces=C.NS,
                    ),
                )
                free_ram = napalm.base.helpers.convert(
                    int,
                    self._find_txt(
                        node,
                        "./mem:summary/\
                        mem:free-physical-memory",
                        default="",
                        namespaces=C.NS,
                    ),
                )
                if available_ram and free_ram:
                    used_ram = available_ram - free_ram
                    memory = {}
                    memory["available_ram"] = available_ram
                    memory["used_ram"] = used_ram
                    environment_status["memory"] = memory
                break  # we're only looking at one of the RSP's
        return environment_status
def get_lldp_neighbors(self):
"""Return LLDP neighbors details."""
# init result dict
lldp_neighbors = {}
rpc_reply = self.device.get(filter=("subtree", C.LLDP_RPC_REQ_FILTER)).xml
# Converts string to etree
result_tree = ETREE.fromstring(rpc_reply)
lldp_xpath = ".//lldp:lldp/lldp:nodes/lldp:node/lldp:neighbors\
/lldp:details/lldp:detail"
for neighbor in result_tree.xpath(
lldp_xpath + "/lldp:lldp-neighbor", namespaces=C.NS
):
interface_name = self._find_txt(
neighbor, "./lldp:receiving-interface-name", default="", namespaces=C.NS
)
system_name = napalm.base.helpers.convert(
str,
self._find_txt(
neighbor,
"./lldp:detail/lldp:system-name",
default="",
namespaces=C.NS,
),
)
port_id = napalm.base.helpers.convert(
str,
self._find_txt(
neighbor, "./lldp:port-id-detail", default="", namespaces=C.NS
),
)
if interface_name not in lldp_neighbors.keys():
lldp_neighbors[interface_name] = []
lldp_neighbors[interface_name].append(
{"hostname": system_name, "port": port_id}
)
return lldp_neighbors
def get_lldp_neighbors_detail(self, interface=""):
"""Detailed view of the LLDP neighbors."""
lldp_neighbors_detail = {}
rpc_reply = self.device.get(filter=("subtree", C.LLDP_RPC_REQ_FILTER)).xml
# Converts string to etree
result_tree = ETREE.fromstring(rpc_reply)
lldp_neighbor_xpath = ".//lldp:lldp/lldp:nodes/lldp:node/lldp:neighbors\
/lldp:details/lldp:detail/lldp:lldp-neighbor"
for neighbor in result_tree.xpath(lldp_neighbor_xpath, namespaces=C.NS):
interface_name = napalm.base.helpers.convert(
str,
self._find_txt(
neighbor,
"./lldp:receiving-interface-name",
default="",
namespaces=C.NS,
| |
item in combo box dropdown list.
item_idx = event.GetSelection()
# Get index of combo box that triggered the event.
current_box = event.GetEventObject()
for box_idx, box_instance in enumerate(self.inputs):
if current_box is box_instance:
break
print("Combo:", box_idx, item_idx, self.itemlist[box_idx][3][item_idx])
"""
# Run the validator bound to the combo box that has a selection event.
# This should not fail unless the combo options were setup incorrectly.
# If the validation fails, the validator will highlight the input field
# to alert the user of the error.
combo_box = event.GetEventObject()
combo_box.GetValidator().Validate(combo_box)
event.Skip()
#==============================================================================
class InputListDialog(wx.Dialog):
"""
This class implements a general purpose mechanism for obtaining and
validating user input from several fields in a pop-up dialog box.
(See InputListPanel that uses a scrolled window instead of a dialog box.)
It creates a pop-up dialog box in which to display one or more input fields
each preceded by a label. The input fields can be a combination of simple
data entry boxes or drop down combo boxes. Automatic validation of user
input is performed. OK and Cancel buttons are provided at the bottom of
the dialog box for the user to signal completion of data entry whereupon
the caller can use the GetResults() method to obtain the final results from
all fields in the form of a list of values. As with any dialog box, when
the user presses OK or Cancel the dialog disappears from the screen, but
the caller of this class is responsible for destroying the dialog box.
The dialog box is automatically sized to fit the fields and buttons with
reasonable spacing between the widgets. The layout is:
+-------------------------------------+
| Title [X] |
+-------------------------------------+
| |
| Label-1: [<drop down list> |V] |
| | Note that drop down lists and
| Label-2: [<data entry field-2>] | simple data entry fields can
| ... | be specified in any order.
| Label-n: [<data entry field-n>] |
| |
| [ OK ] [Cancel] |
| |
+-------------------------------------+
The itemlist parameter controls the display. It is a list of input field
description lists where each description list contains 5 or 6 elements and
the 6th element is optional. The items in the description list are:
[0] Label string prefix for the input field
[1] Default value
[2] Datatype for validation (see ItemListValidator docstring for details)
[3] flags parameter in the form of a string of characters as follows:
R - input is required; otherwise input is optional and can be blank
E - field is editable by the user; otherwise it is non-editable and box
is grayed-out; a non-editable field has its default value returned
C - field is a combobox; otherwise it is a simple data entry box
    L - field is preceded by a divider line; 'L' takes precedence over 'H'
H - field is preceded by a header given in the 6th element of the list;
the following header sub-options are valid only if 'H' is specified:
0 - header text size is same as label text size (default)
1 - header text size is label text size + 1 point (large)
2 - header text size is label text size + 2 points (x-large)
3 - header text size is label text size + 3 points (2x-large)
B - header text is bolded
U - header text is underlined
Options can be combined in the flags string such as 'REHB2' which means
    field is required, editable, and preceded by a bold, extra-large header
[4] List of values for a combo box or None for a simple data entry field
[5] Header string to be displayed above the label string of the input field;
if 'H' is not specified, this list element can be omitted or can be None
The align parameter determines whether input fields are aligned across when
the input fields are grouped into sections. If True, the widest text label
determines the space allocated for all labels; if False, the text label
width is determined separately for each section.
The fontsize parameter allows the caller to specify a font point size to
override the default point size.
See the AppTestFrame class for a comprehensive example.
"""
def __init__(self,
parent=None,
id=wx.ID_ANY,
title="Enter Data",
pos=wx.DefaultPosition,
size=(300, -1), # x is min_width; y will be calculated
style=wx.DEFAULT_DIALOG_STYLE,
name="",
itemlist=[],
align=False,
fontsize=None,
):
wx.Dialog.__init__(self, parent, id, title, pos, size, style, name)
self.align = align
self.itemlist = itemlist
self.item_cnt = len(self.itemlist)
if self.item_cnt == 0:
return
# Set the font for this window and all child windows (widgets) from the
# parent window, or from the system defaults if no parent is given.
# A dialog box does not inherit font info from its parent, so we will
# explicitly get it from the parent and apply it to the dialog box.
if parent is not None:
font = parent.GetFont()
self.SetFont(font)
# If the caller specifies a font size, override the default value.
if fontsize is not None:
font = self.GetFont()
font.SetPointSize(fontsize)
self.SetFont(font)
#print("Input Dialog box font ptsize =", self.GetFont().GetPointSize())
# Create the button controls (OK and Cancel) and bind their events.
ok_button = wx.Button(self, wx.ID_OK, "OK")
ok_button.SetDefault()
cancel_button = wx.Button(self, wx.ID_CANCEL, "Cancel")
self.Bind(wx.EVT_BUTTON, self.OnOk, ok_button)
# Specify the widget layout using sizers.
main_sizer = wx.BoxSizer(wx.VERTICAL)
# Create the text controls for labels and associated input fields and
# any optional headers.
self.add_items_to_dialog_box()
# Divide the input items into sections prefaced by header text (except
# that the first section is not required to have a header). A section
# list is created that contains the index of the item that starts a new
# section plus a final entry that is one beyond the last item.
sect = [0] # declare item 0 to be start of a new section
for i in range(self.item_cnt):
if i > 0 and self.headers[i] is not None:
sect.append(i)
sect.append(self.item_cnt)
#print("Section index list:", sect)
# Place the items for each section in its own flex grid sizer.
for i in range(len(sect)-1):
j, k = sect[i], sect[i+1] - 1
#print("Items per section:", j, "to", k)
fg_sizer = self.add_items_to_sizer(j, k)
# Add the flex grid sizer to the main sizer.
if self.headers[j] is not None: # self.headers[0] could be None
main_sizer.Add(self.headers[j], 0, border=10,
flag=wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT)
main_sizer.Add(fg_sizer, 0, border=10,
flag=wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT)
# Create the button sizer that will put the buttons in a row, right
# justified, and with a fixed amount of space between them. This
# emulates the Windows convention for placing a set of buttons at the
# bottom right of the window.
button_sizer = wx.BoxSizer(wx.HORIZONTAL)
button_sizer.Add((10, -1), 1) # stretchable whitespace
button_sizer.Add(ok_button, 0)
button_sizer.Add((10, -1), 0) # non-stretchable whitespace
button_sizer.Add(cancel_button, 0)
# Add a separator line before the buttons.
separator = wx.StaticLine(self, wx.ID_ANY, style=wx.LI_HORIZONTAL)
main_sizer.Add(separator, 0, border=10,
flag=wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT)
# Add the button sizer to the main sizer.
main_sizer.Add(button_sizer, 0, border=10,
flag=wx.EXPAND|wx.TOP|wx.BOTTOM|wx.RIGHT)
# Finalize the sizer and establish the dimensions of the dialog box.
# The minimum width is explicitly set because the sizer is not able to
# take into consideration the width of the enclosing frame's title.
self.SetSizer(main_sizer)
main_sizer.SetMinSize((size[0], -1))
main_sizer.Fit(self)
def add_items_to_dialog_box(self):
"""
For each input item, create a header (optional), label, and input box
widget to instantiate it. Put the handles for these widgets in the
headers, labels, and inputs lists where the length of each list is the
same as the number of input boxes.
"""
self.headers = []
self.labels = []
self.inputs = []
self.widest = 0
first_error_idx = None
for x in range(self.item_cnt):
params = len(self.itemlist[x])
if params == 6:
text, default, datatype, flags, plist, header = self.itemlist[x]
elif params == 5:
text, default, datatype, flags, plist = self.itemlist[x]
header = None
if default is None:
default = "" # display None as a null string
# Process the flags parameter.
required | |
<filename>server/pyfiles/sockets/socketHandler.py
""" Manages a socketIO connection to clients, and associated WebSocket events"""
import threading
import jsonpickle
import logging
import json
from flask import request
from flask_socketio import SocketIO, emit, join_room, leave_room
from pyfiles import playerController, characterController, userInput, sessionHandler
from pyfiles.model import overworld, characterClass
from pyfiles.db import database, attributes
from pyfiles.model.session import SESSION_ID_JSON_NAME, SESSION_USERNAME_JSON_NAME, SESSION_JSON_NAME
import socketio
class SocketHandler:
def __init__(self, flask_app, game_controller, **kwargs):
self.game_controller = game_controller
logging.info('Creating new SocketIO server.. ')
self.flask_socketio_server = SocketIO(flask_app, **kwargs)
self.flask_app = flask_app
    def run_server(self):
        """Start the SocketIO server for the wrapped Flask app (blocking)."""
        logging.info('Running SocketIO Application..')
        self.flask_socketio_server.run(self.flask_app)
    def get_rooms(self):
        """Return the rooms the current request's socket session has joined."""
        return self.flask_socketio_server.server.rooms(sid=request.sid)
"""
Socket 'connect' event handler
emits a welcome message to the client
also sets a 5min timeout to disconnect the session
"""
def handle_connect(self) -> None:
logging.info('IN| New connection with request SID: ' + request.sid)
sessionHandler.add_connected_session(request.sid)
logging.debug('OUT| connection-response to: '+request.sid)
emit('connection-response', {'chat-data': 'Welcome to AberWebMUD! Please create a character or login by typing \'user [username]\' ', SESSION_ID_JSON_NAME: request.sid}, room=request.sid)
#emit('status-response',statusResponse)
sessionHandler.list_sessions()
client_rooms = self.flask_socketio_server.server.rooms(sid=request.sid);
logging.info('Client rooms for ' + request.sid + ': ' + json.dumps(client_rooms))
for joined_room in client_rooms:
self.handle_join({'room': joined_room})
#5 minute (300 sec) session clearing timer
connection_timeout = threading.Timer(300, lambda: sessionHandler.remove_connected_session(request.sid))
def handle_disconnect(self) -> None:
logging.info('Client disconnected, request SID: ' + request.sid)
removed_connected_session = sessionHandler.remove_connected_session(request.sid)
removed_active_session = sessionHandler.remove_active_session(request.sid)
if removed_connected_session is True:
print('Connected session removed: '+request.sid)
if removed_active_session is True:
print('Active session removed: '+request.sid)
print('A session disconnected: '+request.sid)
def handle_join(self, data):
joined_room = data['room']
logging.info('Client with request SID: ' + request.sid + ' joined room: ' + joined_room + ' joining..')
join_room(joined_room)
def handle_leave(self, data):
left_room = data['room']
logging.info('Client with request SID: ' + request.sid + ' left room: ' + left_room + ' joining..')
leave_room(left_room)
def check_session_id(self, sid):
if not sessionHandler.is_sessionid_connected_or_active(sid):
logging.info('Invalid SID, current rooms: ')
logging.info(self.get_rooms())
emit('invalid-sid', sid, room=request.sid)
    def request_character_details(self, sid):
        """Ask the client to submit its character details.

        NOTE(review): the emit targets request.sid, not the `sid` argument —
        confirm this is intended for every caller.
        """
        emit('request-character-details', sid, room=request.sid)
def get_session_id(self, session_json):
session_json = sessionHandler.extract_session_json(session_json)
if session_json is not None and SESSION_ID_JSON_NAME in session_json:
return session_json[SESSION_ID_JSON_NAME]
else:
return None
    def is_valid_session_json(self, session_json):
        """Validate a session JSON blob against the stored player session.

        Raises:
            ValueError: if the username or session id key is missing.

        Returns:
            bool: True only if every check from valid_player_session passes.
        """
        if SESSION_USERNAME_JSON_NAME not in session_json:
            raise ValueError('Expected Session username in Session JSON: [{}]'.format(SESSION_USERNAME_JSON_NAME))
        if SESSION_ID_JSON_NAME not in session_json:
            raise ValueError('Expected Session ID in Session JSON: [{}]'.format(SESSION_ID_JSON_NAME))
        username = session_json[SESSION_USERNAME_JSON_NAME]
        session_id = session_json[SESSION_ID_JSON_NAME]
        # valid_player_session returns a sequence of checks (see parse_login:
        # "(userExists, logged_in)"); all() requires every check to pass.
        valid = sessionHandler.valid_player_session(username, session_id, playerController.find_player(username))
        return all(valid)
    def register_callbacks(self):
        """Wire up every SocketIO event this handler responds to."""
        self.flask_socketio_server.on_event('connect', self.handle_connect)
        self.flask_socketio_server.on_event('disconnect', self.handle_disconnect)
        self.flask_socketio_server.on_event('join', self.handle_join)
        self.flask_socketio_server.on_event('leave', self.handle_leave)
        self.flask_socketio_server.on_event('validate-sid', lambda sid: self.check_session_id(sid))
        # Messages can be received at any point, so no active session check
        self.flask_socketio_server.on_event('new-chat-message', lambda data: self.handle_message(data))
        # pass authentication directly to our handler
        self.flask_socketio_server.on_event('client-auth', lambda data: self.authenticate_user(data))
        # NOTE(review): this lambda ignores any event payload and reads
        # request.get_json() at event time — confirm the data really arrives
        # on the request object for this event.
        self.flask_socketio_server.on_event('map-data-request', lambda: sessionHandler.verify_active_and_call(self.send_map_data, request.get_json()))
        self.flask_socketio_server.on_event('movement-command', lambda json: sessionHandler.verify_active_and_call(self.handle_movement, json))
        #socket_server.on_event('request-character-details', send_char_details)
        self.flask_socketio_server.on_event('character-details', lambda json : sessionHandler.verify_active_and_call(self.handle_char_details, json))
        # self.flask_socketio_server.on('get-attribute-class-options', lambda json_data: sessionHandler.verify_active_and_call(self.handle_get_attribute_class_options, json_data))
def send_server_message(self, message : dict or str, toAll : bool) -> None:
""" Builds an ad-hoc sessionJson for the server and passes on the message data """
messageData = {'chat-data': message, SESSION_JSON_NAME: {[SESSION_USERNAME_JSON_NAME]:'server'}}
self.send_message(messageData, toAll)
    def send_message(self, messageData : dict, toAll : bool) -> None:
        """Emit a 'chat-message-response'; broadcast to every client when toAll is True."""
        logging.debug('OUT| chat message RESPONSE to all')
        emit('chat-message-response', messageData, broadcast=toAll)
""" Sends a login failure event, specifying whether or not that player exists """
def send_login_failure(self, session_id : str, found_player : bool) -> None:
logging.debug('OUT| login failure: '+str(request.sid))
emit('login-failure', {'playerExists' : found_player}, room=session_id)
def send_login_success(self, username : str, session_id : str) -> None:
""" Emits a login-success event to the client with the new session id
"""
logging.debug('OUT| login success: '+session_id)
emit('login-success', {'username': username, SESSION_ID_JSON_NAME: session_id}, room=session_id)
sessionHandler.list_sessions() # List sessions for debug/info
def send_help_message(self) -> None:
message = "Currently supported chat commands are:\n"
command_list = userInput.get_command_list()
for command in command_list:
message += command + '\n'
self.send_server_message(message, False) #Send a response back to the one client
logging.info(message)
"""
Socket 'map-data-request' event handler
"""
def send_map_data(self) -> None:
session_id = request.sid
theOverworld = overworld.getOverworld()
if len(theOverworld.map_tiles) > 0:
logging.debug('OUT| map-response')
emit('map-data-response', {'map-size-x': theOverworld.map_size_x,
'map-size-y': theOverworld.map_size_y,
'data': jsonpickle.encode(theOverworld.map_tiles)
},
room=session_id
)
def parse_login(self, session_id : str, username : str) -> None:
#user inputted username from client message
user_and_account = sessionHandler.valid_player_session(username, session_id, playerController.find_player(username)) # (userExists, logged_in)
#Exists but not logged in
if user_and_account[0] is True and user_and_account[1] is False:
#Send the password request for existing user
logging.debug('OUT| request-password for: '+username)
emit('request-password', username)
#Exists and logged in
if all(user_and_account):
logging.info('User '+username+' already logged in..')
#User does not exist
if not user_and_account[0]:
#Send the password creation request for a new account
logging.debug('OUT| request-new-password for: '+username)
emit('request-new-password', username, room=session_id)
    def handle_message(self, data: dict) -> None:
        """Parse an incoming 'new-chat-message' event and dispatch it.

        Routes to the login flow (choice 1), a chat rebroadcast (choice 2), or
        the help listing (choice 3). Returns False for malformed messages,
        otherwise None.
        """
        logging.info('IN| player message: '+str(data))
        #Store locally and Remove the sessionId so we don't rebroadcast it to anyone
        if SESSION_JSON_NAME in data and SESSION_ID_JSON_NAME in data[SESSION_JSON_NAME] :
            sid = data[SESSION_JSON_NAME][SESSION_ID_JSON_NAME]
            del data[SESSION_JSON_NAME][SESSION_ID_JSON_NAME]
        else:
            logging.info('Message missing sessionJson / sessionId! : ' + str(data))
            return False
        #Check the message for commands and choice
        # message_details looks like (user_choice, (parsed_ok, input_params)) —
        # inferred from the indexing below; confirm against userInput.
        message_details = userInput.check_message_params(data)
        #True if the message was properly formatted, #1st tuple in nested tuple
        if message_details[1][0] is True:
            input_params = message_details[1][1] #2nd tuple in nested tuple
            user_choice = message_details[0]
            #Login choice
            if user_choice == 1:
                username = input_params['chat-data'][SESSION_USERNAME_JSON_NAME] #Username is here for a login
                self.parse_login(sid, username)
            #Message choice
            elif user_choice == 2:
                #user inputted username from client message
                username = data[SESSION_JSON_NAME][SESSION_USERNAME_JSON_NAME] #Username from sessionJSON otherwise
                found_player = playerController.find_player(username)
                if found_player is not None:
                    logging.info('OUT| MESSAGE: '+str(data)+' Actual: '+str(input_params['chat-data']))
                    self.send_message(input_params, True) #Rebroadcast the message {data,sessionJson}
                else:
                    #Send an error message back to the user
                    self.send_server_message('User must be logged in to message', False)
            #Help command choice
            elif user_choice == 3:
                #Send back a list of commands if this session is authenticated/active
                if sessionHandler.active_session_exists(sid):
                    self.send_help_message()
        else:
            logging.info('Failed to parse message: ' + str(data))
            return False
# Alerts all clients to a character movement
def send_movement_update(self, username, old_position, new_position):
movement_update = {
[SESSION_USERNAME_JSON_NAME]: username,
'old_position': {
'pos_x': old_position[0],
'pos_y': old_position[1]
},
'position': {
'pos_x': new_position[0],
'pos_y': new_position[1]
}
}
logging.debug('OUT| movement UPDATE : ' + str(movement_update))
emit('movement-update', movement_update, broadcast=True)
    def handle_movement(self, message: dict) -> None:
        """ Handles a player movement command message sent over SocketIO """
        #If the movement returns True, all is good and we can send back a movement response
        #move_player also checks if the username exists for us
        logging.debug('IN| MOVEMENT MESSAGE: '+str(message))
        move_x = message['moveX']
        move_y = message['moveY']
        session_json = sessionHandler.extract_session_json(message)
        if self.is_valid_session_json(session_json):
            username = session_json[SESSION_USERNAME_JSON_NAME]
            player_pos = playerController.get_player_pos(username)
            # NOTE(review): when player_pos is None no movement-response is
            # emitted at all — confirm the client tolerates this.
            if player_pos is not None:
                movement_success = False
                if playerController.move_player(username, move_x, move_y) is True:
                    movement_success = True
                    new_pos = playerController.get_player_pos(username)
                    #Update every client to the new movement
                    self.send_movement_update(username, player_pos, new_pos)
                # The requester always gets a success/failure response.
                logging.debug('OUT| movement RESPONSE, success: '+str(movement_success))
                emit('movement-response', {'success': movement_success}, broadcast=False)
        else:
            logging.error('No valid session for movement attempt, request.sid: ' + request.sid)
def send_char_details_update(self, update_success, character_data, sid) -> None:
logging.info('OUT| character-details-update '+str(character_data))
character_details_update = {
'success': update_success,
'character': character_data
}
emit('character-details-update', character_details_update, room=request.sid)
    def handle_char_details(self, message: dict) -> None:
        """ Receives character data from the client, validates it, and updates the DB """
        logging.info('CHAR DETAILS: '+str(message))
        # The message must carry a session JSON with at least a username.
        if SESSION_JSON_NAME in message and SESSION_USERNAME_JSON_NAME in message[SESSION_JSON_NAME]:
            sessionData = [message[SESSION_JSON_NAME][SESSION_USERNAME_JSON_NAME], request.sid]
            username = sessionData[0]
            sessionId = sessionData[1]
            # Only a fully valid (existing, logged-in) session may save stats.
            if all(sessionHandler.valid_player_session(username, sessionId, playerController.find_player(username))):
                #Check the details and emit a response based on that
                if userInput.validate_character_update(message):
                    logging.info('Updating char details: '+str(message))
                    update_success = characterController.update_character_details(message)
                    logging.info('CHARACTER UPDATE SUCCESS: '+str(update_success))
                    # NOTE(review): username is re-read here from the same field
                    # extracted above — redundant but harmless.
                    username = message[SESSION_JSON_NAME][SESSION_USERNAME_JSON_NAME]
                    character_data = playerController.get_character_json(username)['character']
                    self.send_char_details_update(update_success, character_data, request.sid)
                else:
                    logging.info('Invalid character update data')
                    self.send_char_details_update(False, {}, request.sid)
            else:
                logging.info('IN| (CHAR-STATS) stats save attempted for invalid session. SID: ' + str(request.sid))
        else:
            logging.info('IN| Malformed protocol message for char details')
"""
Logs in the user (adds session to active list, sends success event, and checks that their details are filled in,
otherwise sending an event to prompt for them
"""
def login_user(self, sid : str, username : str) -> None:
logging.info('Logging in.. '+sid)
sessionHandler.add_active_session(sid, username)
player = playerController.find_player(username)
if player is not None:
self.send_login_success(username, sid)
# Once the player has logged in, we should check they've filled in their character details
try:
playerController.validate_character_details(username)
except ValueError:
logging.info('Player details are invalid. Requesting detail submission.')
self.request_character_details(sid)
""" Authenticates/logs in a user through username and password
Uses decoration for @socketio.on so we can directly invoke this instead of checking session validity
"""
def authenticate_user(self, jsonData) -> None:
# validation
if 'data' not in | |
substitute,
'@', '<@if ><@/if>', {})
    def test_empty_key3a(self):
        # An <@if> tag with no key name at all (no trailing space) is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@if><@/if>', {})
    def test_empty_key4(self):
        # A <@case> tag whose key name is empty (trailing space only) is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@case ><@/case>', {})
    def test_empty_key4a(self):
        # A <@case> tag with no key name at all is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@case><@/case>', {})
    def test_empty_key5(self):
        # A <@saveraw> tag whose key name is empty (trailing space only) is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@saveraw ><@/saveraw>', {})
    def test_empty_key5a(self):
        # A <@saveraw> tag with no key name at all is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@saveraw><@/saveraw>', {})
    def test_empty_key6(self):
        # A <@loop> tag whose key name is empty (trailing space only) is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@loop ><@/loop>', {})
    def test_empty_key6a(self):
        # A <@loop> tag with no key name at all is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@loop><@/loop>', {})
    def test_empty_key7(self):
        # A <@saveeval> tag whose key name is empty (trailing space only) is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@saveeval ><@/saveeval>', {})
    def test_empty_key7a(self):
        # A <@saveeval> tag with no key name at all is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@saveeval><@/saveeval>', {})
    def test_empty_key8(self):
        # A <@namespace> tag whose key name is empty (trailing space only) is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@namespace ><@/namespace>', {})
    def test_empty_key8a(self):
        # A <@namespace> tag with no key name at all is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@namespace><@/namespace>', {})
    def test_empty_key9(self):
        # A <@saveoverride> tag whose key name is empty (trailing space only) is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@saveoverride ><@/saveoverride>', {})
    def test_empty_key9a(self):
        # A <@saveoverride> tag with no key name at all is rejected.
        self.assertRaisesAndMatchesTraceback(InvalidTagKeyName,
                                             '1(1,1)',
                                             substitute,
                                             '@', '<@saveoverride><@/saveoverride>', {})
class tagsubTestOverflows(tagsub_TestCase):
    """Exercise the tag-stack depth limits and the overflow/syntax errors raised."""

    def setUp(self):
        # Template made of max_nested_tag_depth opening <@if n> tags; the
        # 1-based character position just past it is where an overflowing
        # extra tag is expected to be reported.
        self.nestedTagTemplate = ''.join(map(lambda i: '<@if %d>' % i, range(1, tagsub.max_nested_tag_depth + 1)))
        self.nestedTagPos = len(self.nestedTagTemplate) + 1
    def test_LoopStackOverflowError3(self):
        # This will not overflow, but with the bad structure
        # should get a SyntaxError instead
        tag = '<@loop test>'
        template = ''
        for i in range(tagsub.max_nested_tag_depth):
            template = '%s%s' % (tag, template)
        # Expected traceback position: start of the last unclosed loop tag.
        tb = '%d(1,%d)' % ((len(tag) * (tagsub.max_nested_tag_depth - 1) + 1,) * 2)
        self.assertRaisesAndMatchesTraceback(TagsubTemplateSyntaxError,
                                             tb,
                                             substitute, '@',
                                             template,
                                             {'test': [{}]}
                                             )
    def test_LoopStackOverflowError3a(self):
        # This is like the one above, but has proper syntax
        template = ''
        for i in range(tagsub.max_nested_tag_depth):
            template = '<@loop test>%s<@/loop>' % template
        result = substitute('@', template, {'test': [{}]})
        self.assertEqual(result, '')
    def test_LoopStackOverflowError4(self):
        # No matter what the tagchar, they share the same stack
        tagsubCharList = '@#$%^&*+='
        tag = '<%sloop test>'
        template = ''
        for i in range(tagsub.max_nested_tag_depth + 1):
            template = '%s%s' % (tag % tagsubCharList[i % 9], template)
        tb = '%d(1,%d)' % ((len(tag % '@') * tagsub.max_nested_tag_depth + 1,) * 2)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             tb,
                                             substitute, tagsubCharList,
                                             template,
                                             [{'test': [{}]}, ] * len(tagsubCharList)
                                             )
    def test_LoopStackOverflowError5(self):
        # Mixing loop and namespace tags still overflows the shared stack.
        tag1 = '<@loop test>'
        tag2 = '<@namespace test2>'
        tb = '%d(1,%d)' % ((len(tag1) * (tagsub.max_nested_tag_depth - 1) + len(tag2) + 1,) * 2)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             tb,
                                             substitute, '@',
                                             tag1 * (tagsub.max_nested_tag_depth - 1) + tag2 * 2,
                                             {'test': [{}], 'test2': {}}
                                             )
    def test_LoopStackOverflowError6(self):
        # This will not overflow, but with the bad structure
        # should get a SyntaxError instead
        tag1 = '<@loop test>'
        tag2 = '<@namespace test2>'
        tb = '%d(1,%d)' % ((len(tag1) * (tagsub.max_nested_tag_depth - 2) + len(tag2) + 1,) * 2)
        self.assertRaisesAndMatchesTraceback(TagsubTemplateSyntaxError,
                                             tb,
                                             substitute, '@',
                                             tag1 * (tagsub.max_nested_tag_depth - 2) + tag2 * 2,
                                             {'test': [{}], 'test2': {}}
                                             )
    def test_LoopStackOverflowError7(self):
        # No matter what the tagchar, they share the same stack
        tagsubCharList = '@#$%^&*+='
        tag = '<%snamespace test>'
        template = ''
        for i in range(tagsub.max_nested_tag_depth + 1):
            template = '%s%s' % (tag % tagsubCharList[i % 9], template)
        tb = '%d(1,%d)' % ((len(tag % '@') * tagsub.max_nested_tag_depth + 1,) * 2)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             tb,
                                             substitute, tagsubCharList,
                                             template,
                                             [{'test': {}}, ] * len(tagsubCharList)
                                             )
    ## Try the overflows with each nestable tag, since the testing for
    ## overflow happens independently for each tag group.
    def test_nestedTagOverflow1(self):
        # One <@if> past the limit overflows right after setUp's template.
        template = self.nestedTagTemplate + '<@if %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@',
                                             template,
                                             {})
    def test_nestedTagOverflow2(self):
        # Exactly at the limit but unclosed: syntax error at the last open tag.
        pos = self.nestedTagPos - (len('<@if >') + len(str(tagsub.max_nested_tag_depth)))
        self.assertRaisesAndMatchesTraceback(TagsubTemplateSyntaxError,
                                             '%d(1,%d)' % (pos, pos),
                                             substitute, '@',
                                             self.nestedTagTemplate,
                                             {})
    def test_nestedTagOverflow3(self):
        # Like the loop tag stack, multiple tagchars share the same stack
        template = self.nestedTagTemplate + '<#if %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@#',
                                             template,
                                             [{}, {}])
    # Test variations with each tag
    def test_nestedTagOverflow4(self):
        # <@case> as the overflowing tag.
        template = self.nestedTagTemplate + '<@case %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@',
                                             template,
                                             {})
    def test_nestedTagOverflow5(self):
        # <@loop> as the overflowing tag.
        template = self.nestedTagTemplate + '<@loop %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@',
                                             template,
                                             {})
    def test_nestedTagOverflow6(self):
        # <@saveraw> as the overflowing tag.
        template = self.nestedTagTemplate + '<@saveraw %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@',
                                             template,
                                             {})
    def test_nestedTagOverflow7(self):
        # <@saveeval> as the overflowing tag.
        template = self.nestedTagTemplate + '<@saveeval %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@',
                                             template,
                                             {})
    def test_nestedTagOverflow8(self):
        # <@namespace> as the overflowing tag.
        template = self.nestedTagTemplate + '<@namespace %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@',
                                             template,
                                             {})
    def test_nestedTagOverflow9(self):
        # <@saveoverride> as the overflowing tag.
        template = self.nestedTagTemplate + '<@saveoverride %d>' % (tagsub.max_nested_tag_depth + 1)
        self.assertRaisesAndMatchesTraceback(TagStackOverflowError,
                                             '%d(1,%d)' % (self.nestedTagPos, self.nestedTagPos),
                                             substitute, '@',
                                             template,
                                             {})
    def test_nestedSaveEvalOverflow2(self):
        # The current max_depth is 4, but the top level result takes one.
        self.assertEqual(tagsub.max_saveeval_depth, 4)
        self.assertRaisesAndMatchesTraceback(TagsubTemplateSyntaxError,
                                             '29(1,29)',
                                             substitute, '@',
                                             '<@saveeval t1><@saveeval t2><@saveeval t3>', {})
class tagsub_if_tests(tagsub_TestCase):
    def test_basic_if_else(self):
        # A truthy int selects the if branch; whitespace inside the open tag is tolerated.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': 1})
        self.assertEqual(result, 'true')
        result = substitute('@', '<@if\tisTrue >true<@else>false<@/if>', {'isTrue': 1})
        self.assertEqual(result, 'true')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': 1}, doStrictKeyLookup=True)
        self.assertEqual(result, 'true')
    def test_basic_if_else2(self):
        # Falsy int 0 selects the else branch, with and without strict key lookup.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': 0})
        self.assertEqual(result, 'false')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': 0}, doStrictKeyLookup=True)
        self.assertEqual(result, 'false')
    def test_basic_if_else3(self):
        # None is falsy and selects the else branch.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': None})
        self.assertEqual(result, 'false')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': None}, doStrictKeyLookup=True)
        self.assertEqual(result, 'false')
    def test_basic_if_else4(self):
        # A non-empty string is truthy.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': '1'})
        self.assertEqual(result, 'true')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': '1'}, doStrictKeyLookup=True)
        self.assertEqual(result, 'true')
    def test_basic_if_else5(self):
        # An empty string is falsy.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': ''})
        self.assertEqual(result, 'false')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': ''}, doStrictKeyLookup=True)
        self.assertEqual(result, 'false')
    def test_basic_if_else6(self):
        # With is0False=True the string '0' is treated as false.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': '0'}, is0False=True)
        self.assertEqual(result, 'false')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': '0'},
                            is0False=True, doStrictKeyLookup=True)
        self.assertEqual(result, 'false')
    def test_basic_if_else6a(self):
        # With is0False=False the non-empty string '0' stays truthy.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': '0'}, is0False=False)
        self.assertEqual(result, 'true')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': '0'},
                            is0False=False, doStrictKeyLookup=True)
        self.assertEqual(result, 'true')
    def test_basic_if_else7(self):
        # An empty list is falsy.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': []})
        self.assertEqual(result, 'false')
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': []}, doStrictKeyLookup=True)
        self.assertEqual(result, 'false')
    def test_basic_if_else8(self):
        # An empty dict is falsy (strict lookup only).
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {'isTrue': {}}, doStrictKeyLookup=True)
        self.assertEqual(result, 'false')
    def test_basic_if_else9(self):
        # A missing key is falsy by default but raises KeyError under strict lookup.
        result = substitute('@', '<@if isTrue>true<@else>false<@/if>', {})
        self.assertEqual(result, 'false')
        self.assertRaisesAndMatchesTraceback(KeyError,
                                             '1(1,1)',
                                             substitute, '@',
                                             '<@if isTrue>true<@else>false<@/if>', {}, doStrictKeyLookup=True)
## The next eight test all combinations of truth of three values
    def test_basic_if_else10(self):
        # All three conditions false -> else branch; whitespace variants parse the same.
        result = substitute('@',
                            '<@if isOne>one<@elif isTwo>two<@elif isThree>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 0, 'isThree': 0}
                            )
        self.assertEqual(result, 'none')
        result = substitute('@',
                            '<@if isOne >one<@elif\tisTwo\n>two<@elif isThree\n>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 0, 'isThree': 0}
                            )
        self.assertEqual(result, 'none')
    def test_basic_if_else11(self):
        # Only the last elif is true -> 'three'.
        result = substitute('@',
                            '<@if isOne>one<@elif isTwo>two<@elif isThree>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 0, 'isThree': 1}
                            )
        self.assertEqual(result, 'three')
        result = substitute('@',
                            '<@if isOne >one<@elif\tisTwo\n>two<@elif isThree\n>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 0, 'isThree': 1}
                            )
        self.assertEqual(result, 'three')
    def test_basic_if_else12(self):
        # Only the first elif is true -> 'two'.
        result = substitute('@',
                            '<@if isOne>one<@elif isTwo>two<@elif isThree>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 1, 'isThree': 0}
                            )
        self.assertEqual(result, 'two')
        result = substitute('@',
                            '<@if isOne >one<@elif\tisTwo\n>two<@elif isThree\n>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 1, 'isThree': 0}
                            )
        self.assertEqual(result, 'two')
    def test_basic_if_else13(self):
        # First matching elif wins even when a later one is also true -> 'two'.
        result = substitute('@',
                            '<@if isOne>one<@elif isTwo>two<@elif isThree>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 1, 'isThree': 1}
                            )
        self.assertEqual(result, 'two')
        result = substitute('@',
                            '<@if isOne >one<@elif\tisTwo\n>two<@elif isThree\n>three<@else>none<@/if>',
                            {'isOne': 0, 'isTwo': 1, 'isThree': 1}
                            )
        self.assertEqual(result, 'two')
    def test_basic_if_else14(self):
        # The if condition itself is true -> 'one'.
        result = substitute('@',
                            '<@if isOne>one<@elif isTwo>two<@elif isThree>three<@else>none<@/if>',
                            {'isOne': 1, 'isTwo': 0, 'isThree': 0}
                            )
        self.assertEqual(result, 'one')
        result = substitute('@',
                            '<@if isOne >one<@elif\tisTwo\n>two<@elif isThree\n>three<@else>none<@/if>',
                            {'isOne': 1, 'isTwo': 0, 'isThree': 0}
                            )
        self.assertEqual(result, 'one')
    def test_basic_if_else15(self):
        # The if branch wins over a later true elif -> 'one'.
        result = substitute('@',
                            '<@if isOne>one<@elif isTwo>two<@elif isThree>three<@else>none<@/if>',
                            {'isOne': 1, 'isTwo': 0, 'isThree': 1}
                            )
        self.assertEqual(result, 'one')
        result = substitute('@',
                            '<@if isOne >one<@elif\tisTwo\n>two<@elif isThree\n>three<@else>none<@/if>',
                            {'isOne': 1, 'isTwo': 0, 'isThree': 1}
                            )
        self.assertEqual(result, 'one')
def test_basic_if_else16(self):
result = substitute('@',
'<@if isOne>one<@elif isTwo>two<@elif isThree>three<@else>none<@/if>',
{'isOne': 1, 'isTwo': 1, 'isThree': 0}
)
self.assertEqual(result, 'one')
result = substitute('@',
'<@if isOne >one<@elif\tisTwo\n>two<@elif isThree\n>three<@else>none<@/if>',
{'isOne': 1, 'isTwo': 1, 'isThree': | |
<reponame>karachia/iOS-Home-Screen<filename>ios_home_screen.py
"""
A simplified data model of iOS Home Screen
General Design:
This design utilizes a Doubly Linked List (implemented in this file as well) for holding apps and folders within pages or dock, as well as pages within the Home Screen.
Using doubly linked list allows storing the home screen data (apps, folders) in the same order as they will be presented to the user in the UI in pages, dock, and folders.
It also enables moving the apps and folders around easily without any extra cleaning and re-ordering, or shifting items back and forth as one would do in an array list.
Pages are also stored in the doubly linked list to support adding and deleting pages in the middle of the list optimally, and to keep the same order as the pages are represented to the user in the UI.
Assumptions:
- App names are unique and are used as app ID's (or bundle ID's)
- Folder names are also unique
"""
from collections import deque
from pydoc import render_doc
import traceback
import uuid
from abc import ABC, abstractmethod
class Node:
    """A doubly-linked-list node whose `name` is its display identifier.

    The `next`/`prev` links are maintained by the list that owns the node.
    """

    def __init__(self, name):
        # Initialize per-instance link state in __init__ instead of relying on
        # shared class attributes; observable defaults are unchanged.
        self.next = None
        self.prev = None
        self.name = name

    def __repr__(self):
        return self.name
class Item(Node):
    """A named node that also tracks the container it currently lives in."""

    def __init__(self, name, parent=None):
        # parent is the Single_Page_Container (page/dock/folder) holding this
        # item, or None when the item is detached.
        super().__init__(name)
        self.parent = parent
class App(Item):
    """A launchable app; its name doubles as the unique app/bundle ID."""

    def __init__(self, name, parent=None, can_delete=True):
        """Constructor for App
        Args:
            name (str): name of the App. It also acts as app's unique ID / bundle ID
            parent (Single_Page_Container, optional): The page/container in which the app resides). Defaults to None.
            can_delete (bool, optional): False for apps that must not be removed. Defaults to True.
        """
        super().__init__(name, parent=parent)
        self.can_delete = can_delete

    def run(self):
        """Simulate launching the app by printing a banner around its name."""
        print("\n----------------------\n.\n.\n.\n")
        print(f"{self.name} is running...")
        print(".\n.\n.\n----------------------\n")
# Abstract base class
class Container(ABC):
    """Abstract base for anything that stores Items (pages, folders, dock)."""

    def __init__(self, container_type) -> None:
        super().__init__()
        # Kind tag for the concrete container; exposed read-only via property.
        self._container_type = container_type

    @property
    def container_type(self):
        """The container-kind tag set at construction time."""
        return self._container_type

    # Protected methods:
    @abstractmethod
    def _add_item(self, item):
        """Add an item to this container (subclass-defined placement)."""
        pass

    @abstractmethod
    def _remove_item(self, item):
        """Remove an item from this container."""
        pass

    @abstractmethod
    def _move_item(self, item, position):
        """Reposition an item within this container."""
        pass
class Single_Page_Container(Container):
    """A container class for Items"""

    def __init__(self, items, capacity, parent_container=None, container_type=None):
        """Build a fixed-capacity, ordered container of Items.

        Args:
            items ([Item]): initial Items to hold (may be None or empty).
            capacity (int): maximum number of items the container can hold.
            parent_container (Container, optional): the container owning this one.
            container_type (str, optional): kind tag; defaults to 'single_page_container'.
        """
        if container_type is None:
            container_type = "single_page_container"
        super().__init__(container_type=container_type)
        self._items = DoublyLinkedList(capacity=capacity)
        self.parent_container = parent_container
        for initial_item in (items or []):
            self._add_item(initial_item)

    @property
    def size(self):
        """Number of items currently held."""
        return self._items.size

    @property
    def capacity(self):
        """Maximum number of items this container can hold."""
        return self._items.capacity

    @property
    def items(self):
        """The underlying DoublyLinkedList of items."""
        return self._items

    def _remove_item(self, item):
        detached = self._items.remove(item)
        detached.parent = None
        return detached

    def _add_item(self, item):
        item.parent = self
        self._items.push(item)

    def _move_item(self, item, position=None):
        detached = self._items.remove(item)
        if position is None:
            self._items.push(detached)
        else:
            self._items.add_at_index(position, detached)

    def add_item(self, item, position=None):
        """Add item at the end, or at `position` when one is given."""
        if position is None:
            self._add_item(item)
        else:
            self.add_item_at_index(item, position)

    def add_item_at_index(self, item, index):
        """Insert item at a specific index, claiming ownership of it."""
        item.parent = self
        self._items.add_at_index(index, item)

    def add_item_to_end(self, item):
        """Append item at the end, claiming ownership of it."""
        self._add_item(item)

    def remove_item(self, item):
        """Detach item from this container and return it."""
        return self._remove_item(item)

    def remove_last_item(self):
        """Pop and return the last item, clearing its parent link."""
        last_item = self._items.pop()
        last_item.parent = None
        return last_item

    def move_item(self, item, position=None):
        """Move an already-held item to the end or to `position`."""
        self._move_item(item, position=position)
class Multi_Page_Container(Container):
"""A container class for Apps that supports organizing apps across multiple Pages"""
    def __init__(
        self,
        page_columns=3,
        page_rows=4,
        max_pages=4,
        container_type = None,
    ):
        """Create a paged container of apps/folders.

        Args:
            page_columns (int): item columns per page.
            page_rows (int): item rows per page.
            max_pages (int, optional): page limit; falsy means unlimited.
            container_type (str, optional): kind tag; defaults to 'multi_page_container'.
        """
        super().__init__(container_type="multi_page_container" if container_type is None else container_type)
        self.page_columns = page_columns
        self.page_rows = page_rows
        # Items that fit on one page.
        self.page_capacity = page_rows * page_columns
        self.max_pages = max_pages
        self._pages = DoublyLinkedList(capacity=self.max_pages)
        # Overall item capacity is only bounded when the page count is bounded.
        if max_pages:
            self.max_apps = int(self.page_capacity * max_pages)
        else:
            self.max_apps = None
    @property
    def pages(self):
        """The DoublyLinkedList of Page containers, in display order."""
        return self._pages
def get_paget_at_index(self, idx):
return self._pages.at_index(idx)
def _add_item(self, item, page=None, position_within_page=None):
if page is None:
position_within_page = None
# then push to the last page or create one if one does not exit
# if there are no pages yet or the last page has reached capacity of apps and folders within it
if self._pages.tail is None or self._pages.tail.size == self.page_capacity:
# try creating a new page and adding the app to it, only if the max number of pages has not been reached yet
if self.max_pages is None or self._pages.size < self.max_pages:
self._pages.push(Page(items=[item], capacity=self.page_capacity, parent_container=self))
return item
# Otherwise go from the last page to the first page and see if there is an opening to add the app to
else:
curr_page = self._pages.tail.prev # start the search from the page to the last
while curr_page is not None:
if curr_page.size() < curr_page.capacity:
curr_page.add_item(item)
return item
curr_page = curr_page.prev
# If no open position is found for this new app, return non
print(f"Error: Failed to add the new app '{item.name}'. Could not find an open position in any of the pages. ")
return None
else:
# add to the last page
page = self._pages.tail
page.add_item(item)
else:
self.add_item_at_page(item, page, position_within_page=position_within_page)
def add_item_at_page(self, item, page, position_within_page=None):
if page.parent_container != self:
print("Invalid request. The page does not belong to this container")
return
if page.size == page.capacity and (self.max_pages is None or self.pages.size < self.max_pages):
# Move the last item on the page to a new page right after the specified page.
last_item = page.remove_last_item()
self._pages.add_after(page, Page(items=[last_item], capacity=self.page_capacity, parent_container=self))
page.add_item(item, position=position_within_page)
def remove_page(self, page):
if page.parent_container != self:
return
if page.size > 0:
return None
page = self._pages.remove(page)
def _remove_item(self, item):
items_origianl_parent = item.parent
try:
if item is not None:
page = item.parent
item = page.remove_item(item)
# remove the page if there is no other items on it
if page.size == 0:
self._pages.remove(page)
return item
return None
finally: # if page is empty after remove, remove the page
# if items_origianl_parent:
# if items_origianl_parent.size == 0:
# if items_origianl_parent.parent_container is not None:
# items_origianl_parent.parent_container.remove_page(items_origianl_parent)
pass
def _move_item(self, item, page, position=None):
if item.parent.parent_container != self or page.parent_container != self:
print(f"Invalid request. '{item.name} and/or the specified page do not belong to the same container")
return
# Then the move is contained within the same page. No need to worry about page being full.
if item.parent.id == page.id:
page.move_item(item, position=position)
else:
_item = item.parent.remove_item(item)
# if the specified page is full see if we can open up room by pushing the last item to a new page right after
if page.size == page.capacity:
# Move the last item on the page to a new page right after the specified page.
if self._pages.size == self.max_pages:
print(f"The specified page is full and the folder has reach its max capacity for pages. Cannot move {_item.name}")
return
last_item = page.remove_last_item()
self._pages.add_after(page, Page(items=[last_item], capacity=self.page_capacity, parent_container=self))
if position is None:
page.add_item_to_end(_item)
else:
page.add_item_at_index(_item, position)
def get_page_at_index(self, page_idx):
return self._pages.at_index(page_idx)
class Dock(Single_Page_Container):
    """The iOS Dock: a single fixed page of items whose container_type is 'dock'."""

    def __init__(self, capacity, parent_container=None):
        """Create an empty dock that can hold up to ``capacity`` items."""
        super().__init__(
            container_type="dock",
            items=None,
            capacity=capacity,
            parent_container=parent_container,
        )
class Page(Node, Single_Page_Container):
    """A single screen of items that doubles as a node in the container's page list."""

    def __init__(self, items, capacity, parent_container=None):
        # The id is generated first because it is baked into the node name.
        self.id = str(uuid.uuid4())
        Node.__init__(self, name=f"page_{self.id}")
        Single_Page_Container.__init__(
            self,
            items=items,
            capacity=capacity,
            parent_container=parent_container,
            container_type="page",
        )
class Folder(Item, Multi_Page_Container):
    """A named multi-page container of apps that is itself a placeable Item."""

    def __init__(self, name, apps, page_columns=3, page_rows=3, max_pages=3, parent=None):
        """Create the folder and add the initial ``apps`` in order."""
        Item.__init__(self, name=name, parent=parent)
        Multi_Page_Container.__init__(self, page_columns, page_rows, max_pages, container_type="folder")
        if apps:
            for app in apps:
                self.add_item(app)

    def add_item(self, item, page=None, position_within_page=None):
        """Add ``item`` to the folder; propagates the container's return value."""
        return self._add_item(item, page=page, position_within_page=position_within_page)

    def remove_item(self, item):
        """Remove ``item`` from the folder; propagates the container's return value."""
        return self._remove_item(item)

    def move_item(self, item, page_idx, position=None):
        """Move ``item`` to the page at ``page_idx`` (0-based).

        ``page_idx == number_of_pages`` creates a new page at the end. The
        source page is removed afterwards if the move left it empty.
        """
        if item.parent.parent_container != self:
            # BUG FIX: message typo ("does is not").
            print(f"{item.name} is not in the current folder")
            return
        original_page = item.parent
        try:
            if 0 <= page_idx < self._pages.size:
                target_page = self.get_page_at_index(page_idx)
            elif page_idx == self._pages.size:
                # BUG FIX: the original pushed a new page without checking
                # max_pages, and detached the item before knowing the push
                # could succeed (which could orphan the item).
                if self.max_pages is not None and self._pages.size >= self.max_pages:
                    print(f"Cannot move {item.name}: the folder has reached its max number of pages.")
                    return
                item = original_page.remove_item(item)
                self._pages.push(Page(items=[item], capacity=self.page_capacity, parent_container=self))
                return
            else:
                print("Invalid Index")
                return
            self._move_item(item, target_page, position=position)
        finally:
            # Clean up the source page if the move emptied it.
            if original_page and original_page.size == 0 and original_page.parent_container is not None:
                original_page.parent_container.remove_page(original_page)
class Home(Multi_Page_Container):
"""
Simplified data model for iOS Home Screen. Extends Multi_Page_App_Container and support folders, dock, and running apps.
It also represents the same order of apps and folders in pages, folders, and dock as they will | |
quote_is_paused = False
quotes.append(' '.join(current_quote))
current_quote = []
else:
quote_is_paused = True
return quotes
def get_count_of_word(self, word):
"""
.. _get-count-of-word:
Returns the number of instances of a word in the text. Not case-sensitive.
If this is your first time running this method, this can be slow.
:param word: word to be counted in text
:return: Number of occurences of the word, as an int
>>> from gender_analysis import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_metadata = {'author': '<NAME>', 'title': 'Scarlet Letter',
... 'date': '2018', 'filename': 'test_text_2.txt',
... 'filepath': Path(TEST_DATA_DIR,
... 'document_test_files', 'test_text_2.txt')}
>>> scarlett = Document(document_metadata)
>>> scarlett.get_count_of_word("sad")
4
>>> scarlett.get_count_of_word('ThisWordIsNotInTheWordCounts')
0
"""
# If word_counts were not previously initialized, do it now and store it for the future.
if not self._word_counts_counter:
self._word_counts_counter = Counter(self.get_tokenized_text())
return self._word_counts_counter[word]
def get_count_of_words(self, words):
"""
A helper method for retrieving the number of occurrences of a given set of words within
a Document.
:param words: a list of strings.
:return: a Counter with each word in words keyed to its number of occurrences.
>>> from gender_analysis.text.document import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_filepath = Path(TEST_DATA_DIR, 'document_test_files', 'test_text_9.txt')
>>> document_metadata = {'filename': 'test_text_2.txt', 'filepath': document_filepath}
>>> test_document = Document(document_metadata)
>>> test_document.get_count_of_words(['sad', 'was', 'sadness', 'very'])
Counter({'was': 5, 'sad': 1, 'very': 1, 'sadness': 0})
"""
return Counter({word: self.get_count_of_word(word) for word in words})
def get_wordcount_counter(self):
"""
Returns a counter object of all of the words in the text.
If this is your first time running this method, this can be slow.
:return: Python Counter object
>>> from gender_analysis import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_metadata = {'author': '<NAME>', 'title': 'Scarlet Letter',
... 'date': '2018', 'filename': 'test_text_10.txt',
... 'filepath': Path(TEST_DATA_DIR,
... 'document_test_files', 'test_text_10.txt')}
>>> scarlett = Document(document_metadata)
>>> scarlett.get_wordcount_counter()
Counter({'was': 2, 'convicted': 2, 'hester': 1, 'of': 1, 'adultery': 1})
"""
# If word_counts were not previously initialized, do it now and store it for the future.
if not self._word_counts_counter:
self._word_counts_counter = Counter(self.get_tokenized_text())
return self._word_counts_counter
def words_associated(self, target_word):
"""
.. _words-associated:
Returns a Counter of the words found after a given word.
In the case of double/repeated words, the counter would include the word itself and the next
new word.
Note: words always return lowercase.
:param word: Single word to search for in the document's text
:return: a Python Counter() object with {associated_word: occurrences}
>>> from gender_analysis import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_metadata = {'author': '<NAME>', 'title': 'Scarlet Letter',
... 'date': '2018', 'filename': 'test_text_11.txt',
... 'filepath': Path(TEST_DATA_DIR,
... 'document_test_files', 'test_text_11.txt')}
>>> scarlett = Document(document_metadata)
>>> scarlett.words_associated("his")
Counter({'cigarette': 1, 'speech': 1})
"""
target_word = target_word.lower()
word_count = Counter()
check = False
text = self.get_tokenized_text()
for word in text:
if check:
word_count[word] += 1
check = False
if word == target_word:
check = True
return word_count
# pylint: disable=line-too-long
def get_word_windows(self, search_terms, window_size=2):
"""
.. _get-word-windows:
Finds all instances of `word` and returns a counter of the words around it.
window_size is the number of words before and after to return, so the total window is
2*window_size + 1.
This is not case sensitive.
:param search_terms: String or list of strings to search for
:param window_size: integer representing number of words to search for in either direction
:return: Python Counter object
>>> from gender_analysis import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_metadata = {'author': '<NAME>', 'title': 'Scarlet Letter',
... 'date': '2018', 'filename': 'test_text_12.txt',
... 'filepath': Path(TEST_DATA_DIR,
... 'document_test_files', 'test_text_12.txt')}
>>> scarlett = Document(document_metadata)
search_terms can be either a string...
>>> scarlett.get_word_windows("his", window_size=2)
Counter({'he': 1, 'lit': 1, 'cigarette': 1, 'and': 1, 'then': 1, 'began': 1, 'speech': 1, 'which': 1})
... or a list of strings.
>>> scarlett.get_word_windows(['purse', 'tears'])
Counter({'her': 2, 'of': 1, 'and': 1, 'handed': 1, 'proposal': 1, 'drowned': 1, 'the': 1})
"""
if isinstance(search_terms, str):
search_terms = [search_terms]
search_terms = set(i.lower() for i in search_terms)
counter = Counter()
for text_window in windowed(self.get_tokenized_text(), 2 * window_size + 1):
if text_window[window_size] in search_terms:
for surrounding_word in text_window:
if surrounding_word not in search_terms:
counter[surrounding_word] += 1
return counter
def get_word_freq(self, word):
"""
.. _get-word-freq:
Returns the frequency of appearance of a word in the document
:param word: str to search for in document
:return: float representing the portion of words in the text that are the parameter word
>>> from gender_analysis import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_metadata = {'author': '<NAME>', 'title': 'Scarlet Letter',
... 'date': '1900', 'filename': 'test_text_2.txt',
... 'filepath': Path(TEST_DATA_DIR,
... 'document_test_files', 'test_text_2.txt')}
>>> scarlett = Document(document_metadata)
>>> frequency = scarlett.get_word_freq('sad')
>>> frequency
0.13333333333333333
"""
word_frequency = self.get_count_of_word(word) / self.word_count
return word_frequency
def get_word_frequencies(self, words):
"""
A helper method for retreiving the frequencies of a given set of words within a Document.
:param words: a list of strings.
:return: a dictionary of words keyed to float frequencies.
>>> from gender_analysis.text.document import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_filepath = Path(TEST_DATA_DIR, 'document_test_files', 'test_text_9.txt')
>>> document_metadata = {'filename': 'test_text_2.txt', 'filepath': document_filepath}
>>> test_document = Document(document_metadata)
>>> test_document.get_word_frequencies(['peace', 'died', 'foobar'])
{'peace': 0.02702702702702703, 'died': 0.02702702702702703, 'foobar': 0.0}
"""
word_frequencies = {word: self.get_count_of_word(word) / self.word_count for word in words}
return word_frequencies
def get_part_of_speech_tags(self):
"""
.. _get-pos:
Returns the part of speech tags as a list of tuples. The first part of each tuple is the
term, the second one the part of speech tag.
Note: the same word can have a different part of speech tags. In the example below,
see "refuse" and "permit".
:return: List of tuples (term, speech_tag)
>>> from gender_analysis import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_metadata = {'author': '<NAME>', 'title': 'Scarlet Letter',
... 'date': '1900', 'filename': 'test_text_13.txt',
... 'filepath': Path(TEST_DATA_DIR,
... 'document_test_files', 'test_text_13.txt')}
>>> document = Document(document_metadata)
>>> document.get_part_of_speech_tags()[:4]
[('They', 'PRP'), ('refuse', 'VBP'), ('to', 'TO'), ('permit', 'VB')]
>>> document.get_part_of_speech_tags()[-4:]
[('the', 'DT'), ('refuse', 'NN'), ('permit', 'NN'), ('.', '.')]
"""
if self._part_of_speech_tags is not None:
return self._part_of_speech_tags
common.download_nltk_package_if_not_present('tokenizers/punkt')
common.download_nltk_package_if_not_present('taggers/averaged_perceptron_tagger')
text = nltk.word_tokenize(self.text)
pos_tags = nltk.pos_tag(text)
self._part_of_speech_tags = pos_tags
return pos_tags
def get_part_of_speech_words(self, words, remove_swords=True):
"""
A helper method for retrieving the number of occurrences of input words keyed to their
NLTK tag values (i.e., 'NN' for noun).
:param words: a list of strings.
:param remove_swords: optional boolean, remove stop words from return.
:return: a dictionary keying NLTK tag strings to Counter instances.
>>> from gender_analysis.text.document import Document
>>> from pathlib import Path
>>> from gender_analysis.testing.common import TEST_DATA_DIR
>>> document_filepath = Path(TEST_DATA_DIR, 'document_test_files', 'test_text_9.txt')
>>> document_metadata = {'filename': 'test_text_2.txt', 'filepath': document_filepath}
>>> test_document = Document(document_metadata)
>>> test_document.get_part_of_speech_words(['peace', 'died', 'beautiful', 'foobar'])
{'JJ': Counter({'beautiful': 3}), 'VBD': Counter({'died': 1}), 'NN': Counter({'peace': 1})}
"""
common.download_nltk_package_if_not_present('corpora/stopwords')
stop_words = set(nltk.corpus.stopwords.words('english'))
document_pos_tags = self.get_part_of_speech_tags()
words_set = {word.lower() for word in words}
output = {}
for token, tag in document_pos_tags:
lowered_token = token.lower()
if remove_swords is True and token in stop_words:
continue
if token not in words_set:
continue
if tag not in output:
output[tag] = Counter()
output[tag][lowered_token] += 1
return output
def update_metadata(self, new_metadata):
"""
Updates the metadata of the document without requiring a complete reloading
of the text and other properties.
'filename' cannot be updated with this method.
:param new_metadata: dict of new metadata to apply to the document
:return: None
This can be used to correct mistakes in the metadata:
>>> from gender_analysis import Document
>>> from gender_analysis.testing.common import TEST_CORPUS_PATH
>>> from pathlib import Path
>>> metadata = {'filename': 'aanrud_longfrock.txt',
... 'filepath': Path(TEST_CORPUS_PATH, 'aanrud_longfrock.txt'),
... 'date': '2098'}
>>> d = Document(metadata)
>>> new_metadata = {'date': '1903'}
>>> d.update_metadata(new_metadata)
>>> d.date
1903
Or it can be used to add completely new attributes:
>>> new_attribute = {'cookies': 'chocolate chip'}
>>> d.update_metadata(new_attribute)
>>> d.cookies
'chocolate chip'
"""
if not isinstance(new_metadata, dict):
raise ValueError(
f'new_metadata must be a dictionary of metadata keys, not | |
'kind', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'Sku'},
'description': {'key': 'properties.description', 'type': 'str'},
'parameters': {'key': 'properties.parameters', 'type': '[ParameterDeclaration]'},
'sources': {'key': 'properties.sources', 'type': '[SourceNodeBase]'},
'processors': {'key': 'properties.processors', 'type': '[ProcessorNodeBase]'},
'sinks': {'key': 'properties.sinks', 'type': '[SinkNodeBase]'},
}
def __init__(
self,
**kwargs
):
super(PipelineTopology, self).__init__(**kwargs)
self.kind = kwargs['kind']
self.sku = kwargs['sku']
self.description = kwargs.get('description', None)
self.parameters = kwargs.get('parameters', None)
self.sources = kwargs.get('sources', None)
self.processors = kwargs.get('processors', None)
self.sinks = kwargs.get('sinks', None)
class PipelineTopologyCollection(msrest.serialization.Model):
    """A collection of PipelineTopology items.

    :param value: A collection of PipelineTopology items.
    :type value: list[~video_analyzer.models.PipelineTopology]
    :param next_link: A link to the next page of the collection (when the collection contains too
     many results to return in one response).
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineTopology]'},
        'next_link': {'key': '@nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(PipelineTopologyCollection, self).__init__(**kwargs)
        # Both properties are optional.
        self.value = kwargs.get('value', None)
        self.next_link = kwargs.get('next_link', None)
class PipelineTopologyUpdate(ProxyResource):
    """Pipeline topology describes the processing steps to be applied when processing content for a particular outcome. The topology should be defined according to the scenario to be achieved and can be reused across many pipeline instances which share the same processing characteristics. For instance, a pipeline topology which captures content from a RTSP camera and archives the content can be reused across many different cameras, as long as the same processing is to be applied across all the cameras. Individual instance properties can be defined through the use of user-defined parameters, which allow for a topology to be parameterized. This allows individual pipelines refer to different values, such as individual cameras' RTSP endpoints and credentials. Overall a topology is composed of the following:

    * Parameters: list of user defined parameters that can be references across the topology nodes.
    * Sources: list of one or more data sources nodes such as an RTSP source which allows for content to be ingested from cameras.
    * Processors: list of nodes which perform data analysis or transformations.
    * Sinks: list of one or more data sinks which allow for data to be stored or exported to other destinations.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~video_analyzer.models.SystemData
    :param kind: Topology kind. Possible values include: "Live", "Batch".
    :type kind: str or ~video_analyzer.models.Kind
    :param sku: Describes the properties of a SKU.
    :type sku: ~video_analyzer.models.Sku
    :param description: An optional description of the pipeline topology. It is recommended that
     the expected use of the topology to be described here.
    :type description: str
    :param parameters: List of the topology parameter declarations. Parameters declared here can be
     referenced throughout the topology nodes through the use of "${PARAMETER_NAME}" string pattern.
     Parameters can have optional default values and can later be defined in individual instances of
     the pipeline.
    :type parameters: list[~video_analyzer.models.ParameterDeclaration]
    :param sources: List of the topology source nodes. Source nodes enable external data to be
     ingested by the pipeline.
    :type sources: list[~video_analyzer.models.SourceNodeBase]
    :param processors: List of the topology processor nodes. Processor nodes enable pipeline data
     to be analyzed, processed or transformed.
    :type processors: list[~video_analyzer.models.ProcessorNodeBase]
    :param sinks: List of the topology sink nodes. Sink nodes allow pipeline data to be stored or
     exported.
    :type sinks: list[~video_analyzer.models.SinkNodeBase]
    """

    # Read-only fields populated by the server; rejected on requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
    }

    # Maps Python attribute names to their wire-format keys (the 'properties.*'
    # prefix reflects the ARM resource-properties envelope).
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'kind': {'key': 'kind', 'type': 'str'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'parameters': {'key': 'properties.parameters', 'type': '[ParameterDeclaration]'},
        'sources': {'key': 'properties.sources', 'type': '[SourceNodeBase]'},
        'processors': {'key': 'properties.processors', 'type': '[ProcessorNodeBase]'},
        'sinks': {'key': 'properties.sinks', 'type': '[SinkNodeBase]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PipelineTopologyUpdate, self).__init__(**kwargs)
        # Unlike PipelineTopology, every property is optional on an update.
        self.kind = kwargs.get('kind', None)
        self.sku = kwargs.get('sku', None)
        self.description = kwargs.get('description', None)
        self.parameters = kwargs.get('parameters', None)
        self.sources = kwargs.get('sources', None)
        self.processors = kwargs.get('processors', None)
        self.sinks = kwargs.get('sinks', None)
class PrivateEndpoint(msrest.serialization.Model):
    """The Private Endpoint resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The ARM identifier for Private Endpoint.
    :vartype id: str
    """

    _validation = {
        'id': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(PrivateEndpoint, self).__init__(**kwargs)
        # Read-only; populated by the server on responses.
        self.id = None
class PrivateEndpointConnection(Resource):
    """The Private Endpoint Connection resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~video_analyzer.models.SystemData
    :param private_endpoint: The resource of private end point.
    :type private_endpoint: ~video_analyzer.models.PrivateEndpoint
    :param private_link_service_connection_state: A collection of information about the state of
     the connection between service consumer and provider.
    :type private_link_service_connection_state:
     ~video_analyzer.models.PrivateLinkServiceConnectionState
    :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
     Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
    :vartype provisioning_state: str or
     ~video_analyzer.models.PrivateEndpointConnectionProvisioningState
    """

    # Read-only fields populated by the server; rejected on requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }

    # Maps Python attribute names to their wire-format keys.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
        'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PrivateEndpointConnection, self).__init__(**kwargs)
        self.private_endpoint = kwargs.get('private_endpoint', None)
        self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
        # Read-only; populated by the server on responses.
        self.provisioning_state = None
class PrivateEndpointConnectionListResult(msrest.serialization.Model):
    """List of private endpoint connection associated with the specified storage account.

    :param value: Array of private endpoint connections.
    :type value: list[~video_analyzer.models.PrivateEndpointConnection]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'},
    }

    def __init__(self, **kwargs):
        super(PrivateEndpointConnectionListResult, self).__init__(**kwargs)
        # Optional; absent when the service returns no connections.
        self.value = kwargs.get('value', None)
class PrivateLinkResource(Resource):
    """A private link resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
    :vartype id: str
    :ivar name: The name of the resource.
    :vartype name: str
    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
     "Microsoft.Storage/storageAccounts".
    :vartype type: str
    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
     information.
    :vartype system_data: ~video_analyzer.models.SystemData
    :ivar group_id: The private link resource group id.
    :vartype group_id: str
    :ivar required_members: The private link resource required member names.
    :vartype required_members: list[str]
    :param required_zone_names: The private link resource Private link DNS zone name.
    :type required_zone_names: list[str]
    """

    # Read-only fields populated by the server; rejected on requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'system_data': {'readonly': True},
        'group_id': {'readonly': True},
        'required_members': {'readonly': True},
    }

    # Maps Python attribute names to their wire-format keys.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
        'group_id': {'key': 'properties.groupId', 'type': 'str'},
        'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
        'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PrivateLinkResource, self).__init__(**kwargs)
        # group_id and required_members are read-only; the server fills them in.
        self.group_id = None
        self.required_members = None
        self.required_zone_names = kwargs.get('required_zone_names', None)
class PrivateLinkResourceListResult(msrest.serialization.Model):
    """A list of private link resources.

    :param value: Array of private link resources.
    :type value: list[~video_analyzer.models.PrivateLinkResource]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
    }

    def __init__(self, **kwargs):
        super(PrivateLinkResourceListResult, self).__init__(**kwargs)
        # Optional; absent when the service returns no resources.
        self.value = kwargs.get('value', None)
class PrivateLinkServiceConnectionState(msrest.serialization.Model):
"""A collection of information about the state of the connection between service consumer and provider.
:param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
of the service. Possible values include: "Pending", "Approved", "Rejected".
:type status: str or ~video_analyzer.models.PrivateEndpointServiceConnectionStatus
:param description: The reason for approval/rejection of the connection.
:type description: str
:param actions_required: A message indicating if changes on the service provider require any
updates on the consumer.
:type actions_required: str
"""
_attribute_map = | |
# encoding: utf-8
# module GH_IO.Types calls itself Types
# from GH_IO, Version=1.0.0.0, Culture=neutral, PublicKeyToken=<KEY>
# by generator 1.145
""" NamespaceTracker represent a CLS namespace. """
# no imports
# no functions
# classes
class GH_BoundingBox(object):
    """
    Represents a 3D bounding box, denoted by two points.

    GH_BoundingBox(nMin: GH_Point3D, nMax: GH_Point3D)
    GH_BoundingBox(Minx: float, Miny: float, Minz: float, Maxx: float, Maxy: float, Maxz: float)
    """
    # Auto-generated stub mirroring the GH_IO .NET assembly: method bodies are
    # placeholders and the real implementations live in managed code.
    def ToString(self):
        """
        ToString(self: GH_BoundingBox) -> str
        Converts this structure to a human-readable string.
        Returns: A string representation of the box structure.
        """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *__args):
        """
        __new__[GH_BoundingBox]() -> GH_BoundingBox
        __new__(cls: type, nMin: GH_Point3D, nMax: GH_Point3D)
        __new__(cls: type, Minx: float, Miny: float, Minz: float, Maxx: float, Maxy: float, Maxz: float)
        """
        pass
    Max = None  # maximum corner point (see constructor arg nMax); stub placeholder
    Min = None  # minimum corner point (see constructor arg nMin); stub placeholder
class GH_Interval1D(object):
    """
    Represents two double precision floating point values.

    GH_Interval1D(na: float, nb: float)
    """
    # Auto-generated stub mirroring the GH_IO .NET assembly: method bodies are
    # placeholders and the real implementations live in managed code.
    def ToString(self):
        """
        ToString(self: GH_Interval1D) -> str
        Converts this structure to a human-readable string.
        Returns: A string representation of the Interval structure.
        """
        pass
    @staticmethod # known case of __new__
    def __new__(self, na, nb):
        """
        __new__[GH_Interval1D]() -> GH_Interval1D
        __new__(cls: type, na: float, nb: float)
        """
        pass
    a = None  # float: first interval value (see constructor arg na); stub placeholder
    b = None  # float: second interval value (see constructor arg nb); stub placeholder
class GH_Interval2D(object):
    """
    Represents two double precision domains.

    GH_Interval2D(nu: GH_Interval1D, nv: GH_Interval1D)
    GH_Interval2D(nu0: float, nu1: float, nv0: float, nv1: float)
    """
    # Auto-generated stub mirroring the GH_IO .NET assembly: method bodies are
    # placeholders and the real implementations live in managed code.
    def ToString(self):
        """
        ToString(self: GH_Interval2D) -> str
        Converts this structure to a human-readable string.
        Returns: A string representation of the two-dimensional Interval structure.
        """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *__args):
        """
        __new__[GH_Interval2D]() -> GH_Interval2D
        __new__(cls: type, nu: GH_Interval1D, nv: GH_Interval1D)
        __new__(cls: type, nu0: float, nu1: float, nv0: float, nv1: float)
        """
        pass
    u = None  # GH_Interval1D: first domain (see constructor arg nu); stub placeholder
    v = None  # GH_Interval1D: second domain (see constructor arg nv); stub placeholder
class GH_Item(object, GH_IBinarySupport, GH_IXmlSupport):
"""
Represents a single data item in a chunk.
GH_Item(item_name: str, item_data: bool)
GH_Item(item_name: str, item_index: int, item_data: bool)
GH_Item(item_name: str, item_data: Byte)
GH_Item(item_name: str, item_index: int, item_data: Byte)
GH_Item(item_name: str, item_data: int)
GH_Item(item_name: str, item_index: int, item_data: int)
GH_Item(item_name: str, item_data: Int64)
GH_Item(item_name: str, item_index: int, item_data: Int64)
GH_Item(item_name: str, item_data: Single)
GH_Item(item_name: str, item_index: int, item_data: Single)
GH_Item(item_name: str, item_data: float)
GH_Item(item_name: str, item_index: int, item_data: float)
GH_Item(item_name: str, item_data: Decimal)
GH_Item(item_name: str, item_index: int, item_data: Decimal)
GH_Item(item_name: str, item_data: DateTime)
GH_Item(item_name: str, item_index: int, item_data: DateTime)
GH_Item(item_name: str, item_data: Guid)
GH_Item(item_name: str, item_index: int, item_data: Guid)
GH_Item(item_name: str, item_data: str)
GH_Item(item_name: str, item_index: int, item_data: str)
GH_Item(item_name: str, item_data: Array[Byte])
GH_Item(item_name: str, item_index: int, item_data: Array[Byte])
GH_Item(item_name: str, item_data: Array[float])
GH_Item(item_name: str, item_index: int, item_data: Array[float])
GH_Item(item_name: str, item_data: Point)
GH_Item(item_name: str, item_index: int, item_data: Point)
GH_Item(item_name: str, item_data: PointF)
GH_Item(item_name: str, item_index: int, item_data: PointF)
GH_Item(item_name: str, item_data: Size)
GH_Item(item_name: str, item_index: int, item_data: Size)
GH_Item(item_name: str, item_data: SizeF)
GH_Item(item_name: str, item_index: int, item_data: SizeF)
GH_Item(item_name: str, item_data: Rectangle)
GH_Item(item_name: str, item_index: int, item_data: Rectangle)
GH_Item(item_name: str, item_data: RectangleF)
GH_Item(item_name: str, item_index: int, item_data: RectangleF)
GH_Item(item_name: str, item_data: Color)
GH_Item(item_name: str, item_index: int, item_data: Color)
GH_Item(item_name: str, item_data: Bitmap)
GH_Item(item_name: str, item_index: int, item_data: Bitmap)
GH_Item(item_name: str, item_data: GH_Point2D)
GH_Item(item_name: str, item_index: int, item_data: GH_Point2D)
GH_Item(item_name: str, item_data: GH_Point3D)
GH_Item(item_name: str, item_index: int, item_data: GH_Point3D)
GH_Item(item_name: str, item_data: GH_Point4D)
GH_Item(item_name: str, item_index: int, item_data: GH_Point4D)
GH_Item(item_name: str, item_data: GH_Interval1D)
GH_Item(item_name: str, item_index: int, item_data: GH_Interval1D)
GH_Item(item_name: str, item_data: GH_Interval2D)
GH_Item(item_name: str, item_index: int, item_data: GH_Interval2D)
GH_Item(item_name: str, item_data: GH_Line)
GH_Item(item_name: str, item_index: int, item_data: GH_Line)
GH_Item(item_name: str, item_data: GH_BoundingBox)
GH_Item(item_name: str, item_index: int, item_data: GH_BoundingBox)
GH_Item(item_name: str, item_data: GH_Plane)
GH_Item(item_name: str, item_index: int, item_data: GH_Plane)
GH_Item(item_name: str, item_data: GH_Version)
GH_Item(item_name: str, item_index: int, item_data: GH_Version)
"""
@staticmethod
def CreateFrom(*__args):
    """
    CreateFrom(node: XmlNode) -> GH_Item
    Creates a new instance of GH_Item and sets the fields from an Xml node object.
    node: Xml node object that defines the field data.
    Returns: The constructed and read item.
    CreateFrom(reader: BinaryReader) -> GH_Item
    Creates a new instance of GH_Item and sets the fields from a reader object.
    reader: Reader object that defines the field data.
    Returns: The constructed and read item.
    """
    # Stub only: the actual implementation lives in the .NET (CLR) assembly
    # and is dispatched through the interop layer at runtime.
    pass
def Read(self, *__args):
    """
    Read(self: GH_Item, node: XmlNode)
    Deserialize this item from an Xml node.
    node: Xml node to serialize from.
    Read(self: GH_Item, reader: BinaryReader)
    Deserialize this item from a binary stream.
    reader: Reader to deserialize with.
    """
    # Stub only: the CLR implementation performs the actual deserialization.
    pass
def ToString(self):
    """
    ToString(self: GH_Item) -> str
    Converts the struct into a human readable format.
    """
    # Stub only: the CLR implementation supplies the conversion at runtime.
    pass
def Write(self, writer):
    """
    Write(self: GH_Item, writer: XmlWriter)
    Serialize this item into an Xml stream.
    writer: Writer to serialize with.
    Write(self: GH_Item, writer: BinaryWriter)
    Serialize this item into a binary stream.
    writer: Writer to serialize with.
    """
    # Stub only: the CLR implementation performs the actual serialization;
    # the overload is selected by the runtime type of `writer`.
    pass
def __init__(self, *args): # cannot find CLR method
    # The original generated docstring repeated this sentence three times
    # with no separator; collapsed to a single readable sentence.
    """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
    pass
@staticmethod # known case of __new__
def __new__(self, item_name, *__args):
    # Stub only: lists every CLR constructor overload. Each data type is
    # available in a (name, data) and a (name, index, data) form.
    """
    __new__(cls: type, item_name: str, item_data: bool)
    __new__(cls: type, item_name: str, item_index: int, item_data: bool)
    __new__(cls: type, item_name: str, item_data: Byte)
    __new__(cls: type, item_name: str, item_index: int, item_data: Byte)
    __new__(cls: type, item_name: str, item_data: int)
    __new__(cls: type, item_name: str, item_index: int, item_data: int)
    __new__(cls: type, item_name: str, item_data: Int64)
    __new__(cls: type, item_name: str, item_index: int, item_data: Int64)
    __new__(cls: type, item_name: str, item_data: Single)
    __new__(cls: type, item_name: str, item_index: int, item_data: Single)
    __new__(cls: type, item_name: str, item_data: float)
    __new__(cls: type, item_name: str, item_index: int, item_data: float)
    __new__(cls: type, item_name: str, item_data: Decimal)
    __new__(cls: type, item_name: str, item_index: int, item_data: Decimal)
    __new__(cls: type, item_name: str, item_data: DateTime)
    __new__(cls: type, item_name: str, item_index: int, item_data: DateTime)
    __new__(cls: type, item_name: str, item_data: Guid)
    __new__(cls: type, item_name: str, item_index: int, item_data: Guid)
    __new__(cls: type, item_name: str, item_data: str)
    __new__(cls: type, item_name: str, item_index: int, item_data: str)
    __new__(cls: type, item_name: str, item_data: Array[Byte])
    __new__(cls: type, item_name: str, item_index: int, item_data: Array[Byte])
    __new__(cls: type, item_name: str, item_data: Array[float])
    __new__(cls: type, item_name: str, item_index: int, item_data: Array[float])
    __new__(cls: type, item_name: str, item_data: Point)
    __new__(cls: type, item_name: str, item_index: int, item_data: Point)
    __new__(cls: type, item_name: str, item_data: PointF)
    __new__(cls: type, item_name: str, item_index: int, item_data: PointF)
    __new__(cls: type, item_name: str, item_data: Size)
    __new__(cls: type, item_name: str, item_index: int, item_data: Size)
    __new__(cls: type, item_name: str, item_data: SizeF)
    __new__(cls: type, item_name: str, item_index: int, item_data: SizeF)
    __new__(cls: type, item_name: str, item_data: Rectangle)
    __new__(cls: type, item_name: str, item_index: int, item_data: Rectangle)
    __new__(cls: type, item_name: str, item_data: RectangleF)
    __new__(cls: type, item_name: str, item_index: int, item_data: RectangleF)
    __new__(cls: type, item_name: str, item_data: Color)
    __new__(cls: type, item_name: str, item_index: int, item_data: Color)
    __new__(cls: type, item_name: str, item_data: Bitmap)
    __new__(cls: type, item_name: str, item_index: int, item_data: Bitmap)
    __new__(cls: type, item_name: str, item_data: GH_Point2D)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Point2D)
    __new__(cls: type, item_name: str, item_data: GH_Point3D)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Point3D)
    __new__(cls: type, item_name: str, item_data: GH_Point4D)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Point4D)
    __new__(cls: type, item_name: str, item_data: GH_Interval1D)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Interval1D)
    __new__(cls: type, item_name: str, item_data: GH_Interval2D)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Interval2D)
    __new__(cls: type, item_name: str, item_data: GH_Line)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Line)
    __new__(cls: type, item_name: str, item_data: GH_BoundingBox)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_BoundingBox)
    __new__(cls: type, item_name: str, item_data: GH_Plane)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Plane)
    __new__(cls: type, item_name: str, item_data: GH_Version)
    __new__(cls: type, item_name: str, item_index: int, item_data: GH_Version)
    """
    pass
def __repr__(self, *args): # cannot find CLR method
    """ __repr__(self: object) -> str """
    # Stub only: the CLR implementation supplies the representation.
    pass
def __str__(self, *args): # cannot find CLR method
    """ __str__(self: object) -> str """
    pass
# Stub property: the placeholder lambdas stand in for the CLR
# getter/setter/deleter, which are resolved at runtime.
DebuggerDisplay = property(
    lambda self: object(), lambda self, v: None, lambda self: None
)  # default
"""Readonly property used during Debugging.
Get: DebuggerDisplay(self: GH_Item) -> str
"""
HasIndex = property(
lambda self: object(), lambda self, v: None, lambda self: None
) # default
"""Gets the index existence implication. The item is considered to | |
<reponame>EladGabay/pulumi-oci
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['IdpGroupMappingArgs', 'IdpGroupMapping']
@pulumi.input_type
class IdpGroupMappingArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen). The
    # @pulumi.input_type machinery introspects this class's structure
    # (constructor signature, getter/setter pairs), so do not restructure
    # it by hand; regeneration would clobber manual edits anyway.
    def __init__(__self__, *,
                 group_id: pulumi.Input[str],
                 identity_provider_id: pulumi.Input[str],
                 idp_group_name: pulumi.Input[str]):
        """
        The set of arguments for constructing a IdpGroupMapping resource.

        :param pulumi.Input[str] group_id: (Updatable) The OCID of the IAM Service [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/) you want to map to the IdP group.
        :param pulumi.Input[str] identity_provider_id: The OCID of the identity provider.
        :param pulumi.Input[str] idp_group_name: (Updatable) The name of the IdP group you want to map.
        """
        # All three properties are required; values are stored through
        # pulumi's property bag rather than plain attributes.
        pulumi.set(__self__, "group_id", group_id)
        pulumi.set(__self__, "identity_provider_id", identity_provider_id)
        pulumi.set(__self__, "idp_group_name", idp_group_name)

    # The @pulumi.getter(name=...) decorators map the snake_case Python
    # attribute onto the camelCase provider-side property name.
    @property
    @pulumi.getter(name="groupId")
    def group_id(self) -> pulumi.Input[str]:
        """
        (Updatable) The OCID of the IAM Service [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/) you want to map to the IdP group.
        """
        return pulumi.get(self, "group_id")

    @group_id.setter
    def group_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "group_id", value)

    @property
    @pulumi.getter(name="identityProviderId")
    def identity_provider_id(self) -> pulumi.Input[str]:
        """
        The OCID of the identity provider.
        """
        return pulumi.get(self, "identity_provider_id")

    @identity_provider_id.setter
    def identity_provider_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "identity_provider_id", value)

    @property
    @pulumi.getter(name="idpGroupName")
    def idp_group_name(self) -> pulumi.Input[str]:
        """
        (Updatable) The name of the IdP group you want to map.
        """
        return pulumi.get(self, "idp_group_name")

    @idp_group_name.setter
    def idp_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "idp_group_name", value)
@pulumi.input_type
class _IdpGroupMappingState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen). Unlike the
    # Args class, every field here is optional: this type is used for
    # looking up / importing existing resources, where any subset of
    # properties may be known. Do not restructure by hand.
    def __init__(__self__, *,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 group_id: Optional[pulumi.Input[str]] = None,
                 identity_provider_id: Optional[pulumi.Input[str]] = None,
                 idp_group_name: Optional[pulumi.Input[str]] = None,
                 inactive_state: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 time_created: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering IdpGroupMapping resources.

        :param pulumi.Input[str] compartment_id: The OCID of the tenancy containing the `IdentityProvider`.
        :param pulumi.Input[str] group_id: (Updatable) The OCID of the IAM Service [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/) you want to map to the IdP group.
        :param pulumi.Input[str] identity_provider_id: The OCID of the identity provider.
        :param pulumi.Input[str] idp_group_name: (Updatable) The name of the IdP group you want to map.
        :param pulumi.Input[str] inactive_state: The detailed status of INACTIVE lifecycleState.
        :param pulumi.Input[str] state: The mapping's current state.
        :param pulumi.Input[str] time_created: Date and time the mapping was created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        """
        # Only set properties the caller actually supplied; unset keys stay
        # absent from the property bag rather than being stored as None.
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if group_id is not None:
            pulumi.set(__self__, "group_id", group_id)
        if identity_provider_id is not None:
            pulumi.set(__self__, "identity_provider_id", identity_provider_id)
        if idp_group_name is not None:
            pulumi.set(__self__, "idp_group_name", idp_group_name)
        if inactive_state is not None:
            pulumi.set(__self__, "inactive_state", inactive_state)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if time_created is not None:
            pulumi.set(__self__, "time_created", time_created)

    # Getter/setter pairs below map snake_case attributes onto the
    # camelCase provider property names via @pulumi.getter(name=...).
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        The OCID of the tenancy containing the `IdentityProvider`.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter(name="groupId")
    def group_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of the IAM Service [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/) you want to map to the IdP group.
        """
        return pulumi.get(self, "group_id")

    @group_id.setter
    def group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_id", value)

    @property
    @pulumi.getter(name="identityProviderId")
    def identity_provider_id(self) -> Optional[pulumi.Input[str]]:
        """
        The OCID of the identity provider.
        """
        return pulumi.get(self, "identity_provider_id")

    @identity_provider_id.setter
    def identity_provider_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "identity_provider_id", value)

    @property
    @pulumi.getter(name="idpGroupName")
    def idp_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The name of the IdP group you want to map.
        """
        return pulumi.get(self, "idp_group_name")

    @idp_group_name.setter
    def idp_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "idp_group_name", value)

    @property
    @pulumi.getter(name="inactiveState")
    def inactive_state(self) -> Optional[pulumi.Input[str]]:
        """
        The detailed status of INACTIVE lifecycleState.
        """
        return pulumi.get(self, "inactive_state")

    @inactive_state.setter
    def inactive_state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "inactive_state", value)

    # "state" matches the provider name exactly, so no name= mapping is
    # needed on the getter.
    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The mapping's current state.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        Date and time the mapping was created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        """
        return pulumi.get(self, "time_created")

    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", value)
class IdpGroupMapping(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             group_id: Optional[pulumi.Input[str]] = None,
             identity_provider_id: Optional[pulumi.Input[str]] = None,
             idp_group_name: Optional[pulumi.Input[str]] = None,
             __props__=None):
    # Typing-only overload (keyword-argument form); the real dispatch
    # happens in the non-overloaded __init__ below.
    """
    This resource provides the Idp Group Mapping resource in Oracle Cloud Infrastructure Identity service.

    Creates a single mapping between an IdP group and an IAM Service
    [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_idp_group_mapping = oci.identity.IdpGroupMapping("testIdpGroupMapping",
        group_id=oci_identity_group["test_group"]["id"],
        identity_provider_id=oci_identity_identity_provider["test_identity_provider"]["id"],
        idp_group_name=var["idp_group_mapping_idp_group_name"])
    ```

    ## Import

    IdpGroupMappings can be imported using the `id`, e.g.

    ```sh
    $ pulumi import oci:identity/idpGroupMapping:IdpGroupMapping test_idp_group_mapping "identityProviders/{identityProviderId}/groupMappings/{mappingId}"
    ```

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] group_id: (Updatable) The OCID of the IAM Service [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/) you want to map to the IdP group.
    :param pulumi.Input[str] identity_provider_id: The OCID of the identity provider.
    :param pulumi.Input[str] idp_group_name: (Updatable) The name of the IdP group you want to map.
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: IdpGroupMappingArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    # Typing-only overload (bundled-args form); the real dispatch happens
    # in the non-overloaded __init__ below.
    """
    This resource provides the Idp Group Mapping resource in Oracle Cloud Infrastructure Identity service.

    Creates a single mapping between an IdP group and an IAM Service
    [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_idp_group_mapping = oci.identity.IdpGroupMapping("testIdpGroupMapping",
        group_id=oci_identity_group["test_group"]["id"],
        identity_provider_id=oci_identity_identity_provider["test_identity_provider"]["id"],
        idp_group_name=var["idp_group_mapping_idp_group_name"])
    ```

    ## Import

    IdpGroupMappings can be imported using the `id`, e.g.

    ```sh
    $ pulumi import oci:identity/idpGroupMapping:IdpGroupMapping test_idp_group_mapping "identityProviders/{identityProviderId}/groupMappings/{mappingId}"
    ```

    :param str resource_name: The name of the resource.
    :param IdpGroupMappingArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch between the two construction overloads.

    Accepts either a bundled ``IdpGroupMappingArgs`` object or individual
    keyword arguments, and forwards to ``_internal_init`` accordingly.
    """
    resource_args, opts = _utilities.get_resource_args_opts(IdpGroupMappingArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        # Keyword-argument form: forward the caller's arguments untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Bundled-args form: unpack the args object into keyword arguments.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   group_id: Optional[pulumi.Input[str]] = None,
                   identity_provider_id: Optional[pulumi.Input[str]] = None,
                   idp_group_name: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    """Shared constructor body for both public __init__ overloads.

    Validates options, enforces required properties (unless rehydrating an
    existing resource via ``opts.urn``), and registers the resource with
    the Pulumi engine.
    """
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new resource: __props__ must not be supplied and all
        # required inputs must be present (unless looked up by URN).
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = IdpGroupMappingArgs.__new__(IdpGroupMappingArgs)

        if group_id is None and not opts.urn:
            raise TypeError("Missing required property 'group_id'")
        __props__.__dict__["group_id"] = group_id
        if identity_provider_id is None and not opts.urn:
            raise TypeError("Missing required property 'identity_provider_id'")
        __props__.__dict__["identity_provider_id"] = identity_provider_id
        if idp_group_name is None and not opts.urn:
            raise TypeError("Missing required property 'idp_group_name'")
        __props__.__dict__["idp_group_name"] = idp_group_name
        # Output-only properties start as None and are populated by the
        # provider after creation.
        __props__.__dict__["compartment_id"] = None
        __props__.__dict__["inactive_state"] = None
        __props__.__dict__["state"] = None
        __props__.__dict__["time_created"] = None
    super(IdpGroupMapping, __self__).__init__(
        'oci:identity/idpGroupMapping:IdpGroupMapping',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        compartment_id: Optional[pulumi.Input[str]] = None,
        group_id: Optional[pulumi.Input[str]] = None,
        identity_provider_id: Optional[pulumi.Input[str]] = None,
        idp_group_name: Optional[pulumi.Input[str]] = None,
        inactive_state: Optional[pulumi.Input[str]] = None,
        state: Optional[pulumi.Input[str]] = None,
        time_created: Optional[pulumi.Input[str]] = None) -> 'IdpGroupMapping':
    """
    Get an existing IdpGroupMapping resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] compartment_id: The OCID of the tenancy containing the `IdentityProvider`.
    :param pulumi.Input[str] group_id: (Updatable) The OCID of the IAM Service [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/) you want to map to the IdP group.
    :param pulumi.Input[str] identity_provider_id: The OCID of the identity provider.
    :param pulumi.Input[str] idp_group_name: (Updatable) The name of the IdP group you want to map.
    :param pulumi.Input[str] inactive_state: The detailed status of INACTIVE lifecycleState.
    :param pulumi.Input[str] state: The mapping's current state.
    :param pulumi.Input[str] time_created: Date and time the mapping was created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
    """
    # Merging id into opts signals the engine to read an existing resource
    # rather than create a new one.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    __props__ = _IdpGroupMappingState.__new__(_IdpGroupMappingState)

    __props__.__dict__["compartment_id"] = compartment_id
    __props__.__dict__["group_id"] = group_id
    __props__.__dict__["identity_provider_id"] = identity_provider_id
    __props__.__dict__["idp_group_name"] = idp_group_name
    __props__.__dict__["inactive_state"] = inactive_state
    __props__.__dict__["state"] = state
    __props__.__dict__["time_created"] = time_created
    return IdpGroupMapping(resource_name, opts=opts, __props__=__props__)
# Output property: resolved by the provider after create/read.
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
    """
    The OCID of the tenancy containing the `IdentityProvider`.
    """
    return pulumi.get(self, "compartment_id")
# Output property: resolved by the provider after create/read.
@property
@pulumi.getter(name="groupId")
def group_id(self) -> pulumi.Output[str]:
    """
    (Updatable) The OCID of the IAM Service [group](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/Group/) you want to map to the IdP group.
    """
    return pulumi.get(self, "group_id")
@property
@pulumi.getter(name="identityProviderId")
def identity_provider_id(self) -> pulumi.Output[str]:
"""
The OCID of the identity provider.
| |
<gh_stars>0
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import Counter
import dataclasses
import functools
import numpy as np
from typing import Callable, Sequence, Tuple, Union, Mapping, Optional, List, Dict, NamedTuple
from jax import core
from jax._src.lib import xla_bridge as xb
from jax._src.lib import xla_client as xc
from jax.interpreters import pxla, xla
from jax._src.util import prod, safe_zip, cache
from jax._src.api import device_put
from jax.interpreters.sharded_jit import PartitionSpec
# Type aliases used throughout this module.
Shape = Tuple[int, ...]
# One entry per global-array dimension: the mesh axis (or tuple of axes)
# that dimension is partitioned over, or None for an unpartitioned dimension.
MeshAxes = Sequence[Union[str, Tuple[str], None]]
DeviceArray = xc.Buffer
Device = xc.Device
ArrayLike = Union[np.ndarray, DeviceArray]
# A per-device index into the global array: one slice per dimension.
Index = Tuple[slice, ...]

# Hash an Index by its (start, stop) endpoints, since slice objects are
# not hashable themselves.
_hashed_index = lambda x: hash(tuple((v.start, v.stop) for v in x))
def _convert_list_args_to_tuple(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
args = [tuple(a) if isinstance(a, list) else a for a in args]
kwargs = {k: (tuple(v) if isinstance(v, list) else v) for k, v in kwargs.items()}
return f(*args, **kwargs)
return wrapper
def _canonicalize_mesh_axes(mesh_axes):
  """Return ``mesh_axes`` as a PartitionSpec, wrapping plain sequences."""
  # Already canonical: hand it back unchanged.
  if isinstance(mesh_axes, PartitionSpec):
    return mesh_axes
  return PartitionSpec(*mesh_axes)
def _get_indices(global_shape: Shape, global_mesh: pxla.Mesh,
                 mesh_axes: MeshAxes) -> Tuple[Index, ...]:
  """Compute, for each device in the mesh, its slice of the global array.

  Returns one Index (tuple of slices) per device; callers below pair the
  result with ``global_mesh.devices.flat`` in order.
  """
  # Import here to avoid cyclic import error when importing gda in pjit.py.
  from jax.experimental.pjit import get_array_mapping, _prepare_axis_resources

  pspec = _canonicalize_mesh_axes(mesh_axes)
  parsed_pspec, _, _ = _prepare_axis_resources(pspec, "mesh_axes")
  array_mapping = get_array_mapping(parsed_pspec)
  # The dtype doesn't matter for creating sharding specs.
  aval = core.ShapedArray(global_shape, np.float32)
  sharding_spec = pxla.mesh_sharding_specs(
      global_mesh.shape, global_mesh.axis_names)(aval, array_mapping)
  indices = pxla.spec_to_indices(global_shape, sharding_spec)
  return indices # type: ignore
@_convert_list_args_to_tuple
@cache()
def get_shard_indices(global_shape: Shape, global_mesh: pxla.Mesh,
                      mesh_axes: MeshAxes) -> Mapping[Device, Index]:
  """Map every device of ``global_mesh`` to its index into the global array."""
  shard_indices = _get_indices(global_shape, global_mesh, mesh_axes)
  # The type: ignore is to ignore the type returned by `spec_to_indices`.
  return {device: index
          for device, index in safe_zip(global_mesh.devices.flat, shard_indices)}  # type: ignore
@_convert_list_args_to_tuple
@cache()
def get_shard_indices_replica_ids(
    global_shape: Shape, global_mesh: pxla.Mesh,
    mesh_axes: MeshAxes) -> Mapping[Device, Tuple[Index, int]]:
  """Cached wrapper around the uncached implementation below.

  The list->tuple decorator makes the arguments hashable for the cache.
  """
  return _get_shard_indices_replica_ids_uncached(global_shape, global_mesh, mesh_axes)
def _get_shard_indices_replica_ids_uncached(
    global_shape: Shape, global_mesh: pxla.Mesh,
    mesh_axes: MeshAxes) -> Mapping[Device, Tuple[Index, int]]:
  """Map each device to ``(index, replica_id)`` for the given sharding.

  ``replica_id`` counts how many earlier devices (in
  ``global_mesh.devices.flat`` order) hold the same shard, so it is 0 for
  the first holder of each distinct shard.
  """
  indices = _get_indices(global_shape, global_mesh, mesh_axes)
  # Per distinct (hashed) index: how many devices seen so far hold it.
  index_to_replica: Dict[int, int] = Counter()
  out = {}
  unique_shards = 0
  for device, index in safe_zip(global_mesh.devices.flat, indices):
    h_index = _hashed_index(index)
    replica_id = index_to_replica[h_index]
    if replica_id == 0:
      unique_shards += 1
    index_to_replica[h_index] += 1
    out[device] = (index, replica_id)

  shard_shape = get_shard_shape(global_shape, global_mesh, mesh_axes)
  # Sanity check: the count of distinct shards must match the number implied
  # by dividing the global shape by the shard shape (dimensions where both
  # sizes are zero are skipped to avoid 0 // 0).
  expected_unique_shards = prod(
      [g // s for g, s in safe_zip(global_shape, shard_shape) if g != 0 or s != 0])
  if expected_unique_shards != unique_shards:
    raise RuntimeError(
        f'Number of expected unique shards are: {expected_unique_shards} but '
        f'got {unique_shards}. Please file a bug at '
        'https://github.com/google/jax/issues.')
  return out
@_convert_list_args_to_tuple
@cache()
def get_shard_shape(global_shape, global_mesh, mesh_axes) -> Shape:
  """Compute the per-device shard shape for the given sharding.

  Each sharded dimension is divided by the total size of the mesh axis (or
  axes) it is partitioned over; unpartitioned dimensions keep their global
  size. Trailing dimensions not covered by ``mesh_axes`` are unpartitioned.
  """
  def _axis_chunk(size, axis_spec):
    if not axis_spec:
      # None / empty: dimension is not partitioned.
      return size
    if isinstance(axis_spec, tuple):
      # Partitioned over the product of several mesh axes.
      return size // prod([global_mesh.shape[name] for name in axis_spec])
    return size // global_mesh.shape[axis_spec]

  chunks = [_axis_chunk(size, axis_spec)
            for axis_spec, size in zip(mesh_axes, global_shape)]
  # mesh_axes may be shorter than global_shape; remaining dims are whole.
  chunks.extend(global_shape[len(chunks):])
  return tuple(chunks)
# frozen=True: shards are immutable records, safe to share between callers.
@dataclasses.dataclass(frozen=True)
class Shard:
  """A single data shard of a GlobalDeviceArray.

  Args:
    device : Which device this shard resides on.
    index : The index into the global array of this shard.
    replica_id : Integer id indicating which replica of the global array this
      shard is part of. Always 0 for fully sharded data
      (i.e. when there’s only 1 replica).
    data : The data of this shard. None if ``device`` is non-local.
  """
  device: Device
  index: Index
  replica_id: int
  # None if this `Shard` lives on a non-local device.
  data: Optional[DeviceArray] = None
class _GdaFastPathArgs(NamedTuple):
  """Optionally precomputed values passed to GlobalDeviceArray to skip recomputation."""
  global_indices_replica_ids: Mapping[Device, Tuple[Index, int]]
  local_devices: Sequence[Device]
class GlobalDeviceArray:
"""A logical array with data sharded across multiple devices and processes.
If you’re not already familiar with JAX’s multi-process programming model,
please read https://jax.readthedocs.io/en/latest/multi_process.html.
A GlobalDeviceArray (GDA) can be thought of as a view into a single logical
array sharded across processes. The logical array is the “global” array, and
each process has a GlobalDeviceArray object referring to the same global array
(similarly to how each process runs a multi-process pmap or pjit). Each process
can access the shape, dtype, etc. of the global array via the GDA, pass the
GDA into multi-process pjits, and get GDAs as pjit outputs (coming soon: xmap
and pmap). However, each process can only directly access the shards of the
global array data stored on its local devices.
GDAs can help manage the inputs and outputs of multi-process computations.
A GDA keeps track of which shard of the global array belongs to which device,
and provides callback-based APIs to materialize the correct shard of the data
needed for each local device of each process.
A GDA consists of data shards. Each shard is stored on a different device.
There are local shards and global shards. Local shards are those on local
devices, and the data is visible to the current process. Global shards are
those across all devices (including local devices), and the data isn’t visible
if the shard is on a non-local device with respect to the current process.
Please see the ``Shard`` class to see what information is stored inside that
data structure.
Note: to make pjit output GlobalDeviceArrays, set the environment variable
``JAX_PARALLEL_FUNCTIONS_OUTPUT_GDA=true`` or add the following to your code:
``jax.config.update('jax_parallel_functions_output_gda', True)``
Args:
global_shape : The global shape of the array.
global_mesh : The global mesh representing devices across multiple
processes.
mesh_axes : A sequence with length less than or equal to the rank of the
global array (i.e. the length of the global shape). Each element can be:
* An axis name of ``global_mesh``, indicating that the corresponding
global array axis is partitioned across the given device axis of
``global_mesh``.
* A tuple of axis names of ``global_mesh``. This is like the above option
except the global array axis is partitioned across the product of axes
named in the tuple.
* None indicating that the corresponding global array axis is not
partitioned.
For more information, please see:
https://jax.readthedocs.io/en/latest/jax-101/08-pjit.html#more-information-on-partitionspec
device_buffers: DeviceArrays that are on the local devices of ``global_mesh``.
Attributes:
shape : Global shape of the array.
dtype : Dtype of the global array.
local_shards : List of :class:`Shard` on the local devices of the current process.
Data is materialized for all local shards.
global_shards : List of all :class:`Shard` of the global array. Data isn’t
available if a shard is on a non-local device with respect to the current
process.
is_fully_replicated : True if the full array value is present on all devices
of the global mesh.
Example::
# Logical mesh is (hosts, devices)
assert global_mesh.shape == {'x': 4, 'y': 8}
global_input_shape = (64, 32)
mesh_axes = P('x', 'y')
# Dummy example data; in practice we wouldn't necessarily materialize global data
# in a single process.
global_input_data = np.arange(
np.prod(global_input_shape)).reshape(global_input_shape)
def get_local_data_slice(index):
# index will be a tuple of slice objects, e.g. (slice(0, 16), slice(0, 4))
# This method will be called per-local device from the GDA constructor.
return global_input_data[index]
gda = GlobalDeviceArray.from_callback(
global_input_shape, global_mesh, mesh_axes, get_local_data_slice)
f = pjit(lambda x: x @ x.T, out_axis_resources = P('y', 'x'))
with mesh(global_mesh.shape, global_mesh.axis_names):
out = f(gda)
print(type(out)) # GlobalDeviceArray
print(out.shape) # global shape == (64, 64)
print(out.local_shards[0].data) # Access the data on a single local device,
# e.g. for checkpointing
print(out.local_shards[0].data.shape) # per-device shape == (8, 16)
print(out.local_shards[0].index) # Numpy-style index into the global array that
# this data shard corresponds to
# `out` can be passed to another pjit call, out.local_shards can be used to
# export the data to non-jax systems (e.g. for checkpointing or logging), etc.
"""
def __init__(self, global_shape: Shape, global_mesh: pxla.Mesh,
mesh_axes: MeshAxes, device_buffers: Sequence[DeviceArray],
_gda_fast_path_args: Optional[_GdaFastPathArgs] = None):
self._global_shape = global_shape
self._global_mesh = global_mesh
self._mesh_axes = mesh_axes
self._device_buffers = device_buffers
# Optionally precomputed for performance.
self._gda_fast_path_args = _gda_fast_path_args
self._current_process = xb.process_index()
if self._gda_fast_path_args is None:
self._local_devices = self._global_mesh.local_devices
else:
self._local_devices = self._gda_fast_path_args.local_devices
for db, ld in safe_zip(device_buffers, self._local_devices):
if db.device() != ld:
raise ValueError(
"The `global_mesh.local_devices` | |
os.environ, required_env)
):
try:
from mpi4py import MPI
mpi_discovery(verbose=True)
except ImportError as e:
print(
e,
": mpi4py cannot be imported -- please install Stoke with the MPI option (pip install stoke[mpi])",
)
# Initialize call for DDP
torch.distributed.init_process_group(
backend=self._ddp_config.backend, init_method=self._ddp_config.init_method
)
def setup_distributed(self):
    """Handles any underlying DDP setup post init

    Returns
    -------
    None

    """
    # Bind this process to its assigned GPU first so that the subsequent
    # process-group initialization targets the correct device.
    # Set the device rank
    torch.cuda.set_device(self._device_id)
    # Call the init fnc here after device id is set
    self._call_init()
def wrap_distributed(
    self,
    model: torch.nn.Module,
    grad_accum: Optional[int],
    optimizer: Optional[Union[torch.optim.Optimizer, OSS]] = None,
) -> Tuple[torch.nn.Module, Union[torch.optim.Optimizer, OSS]]:
    """Overrides base implementation for wrapping with either DDP, Fairscale SDDP or Fairscale FSDP

    Parameters
    ----------
    model: torch.nn.Module
        current model object
    grad_accum: Optional[int]
        Number of gradient accumulation steps
    optimizer: Optional[Union[torch.optim.Optimizer, OSS]], default: None
        current optimizer object

    Returns
    -------
    model: torch.nn.Module
        Wrapped model object
    optimizer: Union[torch.optim.Optimizer, OSS]
        Wrapped optimizer object

    """
    self._print_device(f"{self._name} Class: {type(self._ddp_handler).__name__}")
    # Print info if verbose
    if self._verbose:
        self._print_info()
        self._print_device(
            [
                f"{self._name} -- Device ID: {torch.cuda.current_device()}",
                f"{self._name} -- Rank: {self.rank}",
            ]
        )
    if self._ddp_config.convert_to_sync_batch_norm:
        # BUGFIX: was `self.print_device(...)` -- every other call site in
        # this method uses the underscored helper `_print_device`.
        self._print_device(
            "Converting all BatchNorm*D layers to torch.nn.SyncBatchNorm layers..."
        )
        # BUGFIX: convert_sync_batchnorm returns the converted module; the
        # return value was previously discarded, so a model whose root module
        # is itself a BatchNorm layer would never be converted.
        model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(module=model)
    if self._verbose and isinstance(
        self._ddp_handler, (FairscaleSDDPExtension, FairscaleFSDPExtension)
    ):
        self._print_device(
            f"Wrapped PyTorch DDP with {type(self._ddp_handler).__name__}"
        )
    # Pass through to the handler for DDP wrappers
    model, optimizer = self._ddp_handler.handle_ddp(
        model=model, optimizer=optimizer, grad_accum=grad_accum, rank=self.rank
    )
    return model, optimizer
def detach_and_sync_loss(
    self,
    loss: Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]],
    device=None,
):
    """Detach loss(es) from the compute graph and sync across devices (all-reduce mean)

    Parameters
    ----------
    loss: Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]]
        current loss(es) on the device
    device: default: None
        output device of the sync call

    Returns
    -------
    Union[float, List[float], Tuple[float]]
        loss(es) synced across devices and detached from the graph
    """
    # Single tensor: handle directly
    if not isinstance(loss, (list, tuple)):
        return self._single_detach_and_sync_loss(loss, device)
    # Container of tensors: sync each element and preserve the container type
    synced = [self._single_detach_and_sync_loss(item, device) for item in loss]
    return type(loss)(synced)
def _single_detach_and_sync_loss(self, loss: torch.Tensor, device=None):
    """Detach a single loss from the compute graph and sync it across devices

    Parameters
    ----------
    loss: torch.Tensor
        current loss on the device
    device: default: None
        output device of the sync call (defaults to the loss's own device)

    Returns
    -------
    float
        detached loss, mean-reduced across all devices
    """
    # Default the sync device to wherever the loss already lives
    target_device = loss.device if device is None else device
    # .item() detaches the scalar value from the autograd graph
    detached_value = loss.item()
    with torch.no_grad():
        synced_tensor = torch.tensor(
            detached_value, device=target_device, dtype=loss.dtype
        )
        # Make sure everyone is synced before calling all reduce
        torch.distributed.barrier()
        # Loss tensor is worker specific so all_reduce (default SUM)
        torch.distributed.all_reduce(synced_tensor)
        # Divide the summed value by the world size to get the mean
        return synced_tensor.item() / self.world_size
def grad_accum_context(self, model: torch.nn.Module):
    """Return the context to wrap the gradient accumulation steps

    DDP: https://pytorch.org/tutorials/recipes/recipes/tuning_guide.html (Skip unnecessary all-reduce(s))
    SDDP: https://fairscale.readthedocs.io/en/latest/api/nn/sharded_ddp.html
    FSDP: https://fairscale.readthedocs.io/en/latest/api/nn/fsdp.html

    Parameters
    ----------
    model: torch.nn.Module
        current model object

    Returns
    -------
    model.no_sync() when the no_sync config flag is set (skips gradient
    communication during accumulation steps), else a nullcontext
    """
    skip_sync = self._ddp_config.no_sync
    if self._verbose and skip_sync:
        self._print_device("DDP Using no sync context")
    return model.no_sync() if skip_sync else nullcontext()
def barrier(self):
    """Block until all processes in the default process group reach this call
    (wraps torch.distributed.barrier)."""
    torch.distributed.barrier()
@property
def rank(self):
    """Current rank of this process (torch.distributed.get_rank)"""
    return torch.distributed.get_rank()
@property
def world_size(self):
    """Total number of processes in the group (torch.distributed.get_world_size)"""
    return torch.distributed.get_world_size()
@property
def initialized(self):
    """True if the torch.distributed backend has been initialized"""
    return torch.distributed.is_initialized()
def clean(self):
    """Cleans up at the end of a DDP run by tearing down the process group"""
    torch.distributed.destroy_process_group()
class DistributedDeepspeed(BaseDistributed):
"""Class for using Deepspeed as the distributed backend
This class handles common functionality for the deepspeed backend including setup, loss sync,
gradient accumulation context, step context and various properties/attributes
Attributes
----------
device_id
initialized
rank
world_size
_batch_size_per_device: int
batch size per device or for non-distributed the overall batch size
_deepspeed_config: DeepspeedConfig
Configuration object for Deepspeed backend
_device_id: int, default: None
Current device id
_info_rank: Union[int, List[int]]
Which device(s) to print information
_name: str
name of current backend
_verbose: bool, default: True
flag for Stoke print verbosity
"""
def __init__(
    self,
    batch_size_per_device: int,
    info_rank: Union[int, List[int]],
    verbose: bool = True,
    **kwargs,
):
    """Init call for DistributedDeepspeed

    Parameters
    ----------
    batch_size_per_device: int
        batch size per device or for non-distributed the overall batch size
    info_rank: Union[int, List[int]]
        Which device(s) to print information
    verbose: bool, default: True
        flag for Stoke print verbosity
    **kwargs: dict, optional
        Extra arguments passed to the __init__ call -- here deepspeed_config, grad_accum_steps or grad_clip
        might be passed in

    Raises
    ------
    KeyError
        if deepspeed_config, grad_accum_steps, or grad_clip is missing from kwargs
    """
    # Required kwarg -- a missing key raises KeyError here
    self._deepspeed_config = kwargs["deepspeed_config"]
    # Call init first to pass local rank to super
    # NOTE(review): this relies on deepspeed's init_distributed populating
    # LOCAL_RANK in the environment before the os.environ read below -- confirm
    self._call_init()
    # Forward device to super -- should be set from MPI lookup that is called
    super(DistributedDeepspeed, self).__init__(
        device_id=int(os.environ["LOCAL_RANK"]),
        batch_size_per_device=batch_size_per_device,
        info_rank=info_rank,
        name="Deepspeed",
        verbose=verbose,
    )
    # Build the merged deepspeed config dict consumed later by ds.initialize
    self._deepspeed_init_config = self._handle_deepspeed_configs(
        grad_accum_steps=kwargs["grad_accum_steps"], grad_clip=kwargs["grad_clip"]
    )
def _call_init(self):
    """Does any backend initialization work related to deepspeed setup

    Delegates to deepspeed's init_distributed using the backend, MPI discovery,
    port, verbosity, and init-method settings from the deepspeed config object.

    Returns
    -------
    None
    """
    ds.init_distributed(
        dist_backend=self._deepspeed_config.dist_backend,
        auto_mpi_discovery=self._deepspeed_config.auto_mpi_discovery,
        distributed_port=self._deepspeed_config.distributed_port,
        verbose=self._deepspeed_config.verbose,
        init_method=self._deepspeed_config.init_method,
    )
def setup_distributed(self):
    """Handles any underlying deepspeed setup post init

    Returns
    -------
    None
    """
    # Set the device rank -- bind this process to its assigned CUDA device
    torch.cuda.set_device(self._device_id)
def wrap_distributed(
    self,
    model: torch.nn.Module,
    grad_accum: Optional[int],
    optimizer: Optional[Union[torch.optim.Optimizer, OSS]] = None,
) -> Tuple[torch.nn.Module, Union[torch.optim.Optimizer, OSS]]:
    """Overrides base implementation for wrapping with Deepspeed

    Parameters
    ----------
    model: torch.nn.Module
        current model object
    grad_accum: Optional[int]
        Number of gradient accumulation steps
    optimizer: Optional[Union[torch.optim.Optimizer, OSS]], default: None
        current optimizer object

    Returns
    -------
    model: torch.nn.Module
        Wrapped model object
    optimizer: Union[torch.optim.Optimizer, OSS]
        Wrapped optimizer object
    """
    # Emit backend/device/rank info when verbose
    if self._verbose:
        self._print_info()
        self._print_device(
            "{} -- Device ID: {}".format(self._name, torch.cuda.current_device())
        )
        self._print_device("{} -- Rank: {}".format(self._name, self.rank))
    # Only trainable parameters are handed to deepspeed
    trainable_params = filter(lambda p: p.requires_grad, model.parameters())
    # ds.initialize returns four values; only the engine and optimizer are kept
    model, optimizer, _, _ = ds.initialize(
        model=model,
        optimizer=optimizer,
        model_parameters=trainable_params,
        config_params=self._deepspeed_init_config,
    )
    return model, optimizer
def _handle_deepspeed_configs(
    self,
    grad_accum_steps: int,
    grad_clip: Optional[Union[ClipGradConfig, ClipGradNormConfig]],
):
    """Handles building the dictionary of configs that the deepspeed initialize call expects

    https://www.deepspeed.ai/docs/config-json/

    Parameters
    ----------
    grad_accum_steps: int
        number of gradient accumulation steps
    grad_clip: Optional[Union[ClipGradConfig, ClipGradNormConfig]], default: None
        gradient clipping config objects

    Returns
    -------
    dict
        All deepspeed parameters merged together from individual pieces
    """
    # Each mapper emits an independent slice of the deepspeed config; the
    # optimizer and scheduler sections are intentionally skipped. Param/optimizer
    # offloading is mapped internally within the zero section.
    sections = (
        self._map_ds_batch_configs(grad_accum_steps=grad_accum_steps),
        self._map_ds_communication_configs(),
        self._map_ds_fp16_configs(),
        self._map_ds_grad_clip_configs(grad_clip=grad_clip),
        self._map_ds_zero_configs(),
        self._map_ds_aio_configs(),
        self._map_ds_logging_configs(),
        self._map_ds_flops_configs(),
        self._map_ds_activation_checkpointing_configs(),
        self._map_ds_tensorboard_config(),
        self._map_ds_pld_config(),
    )
    # Merge every slice into a single dict for ds.initialize
    ds_config = {}
    for section in sections:
        ds_config.update(section)
    return ds_config
def _map_ds_pld_config(self):
"""Maps progressive layer drop parameters
https://www.deepspeed.ai/tutorials/progressive_layer_dropping/
https://github.com/microsoft/DeepSpeed/blob/master/deepspeed/runtime/constants.py#L293
Returns
-------
dict
pld parameters or enabled false dict
"""
if self._deepspeed_config.progressive_layer_drop is not None:
map_dict = {
v.name: getattr(self._deepspeed_config.progressive_layer_drop, v.name)
for v in self._deepspeed_config.progressive_layer_drop.__attrs_attrs__
}
map_dict.update({"enabled": True})
return {"progressive_layer_drop": map_dict}
else:
return {"progressive_layer_drop": {"enabled": False}}
def _map_ds_tensorboard_config(self):
"""Maps tensorboard related parameters
https://github.com/microsoft/DeepSpeed/blob/master/deepspeed/runtime/constants.py#L268
Returns
-------
dict
tensorboard parameters or enabled false dict
"""
if self._deepspeed_config.tensorboard is not None:
map_dict = {
v.name: getattr(self._deepspeed_config.tensorboard, v.name)
for v in self._deepspeed_config.tensorboard.__attrs_attrs__
}
map_dict.update({"enabled": True})
return {"tensorboard": map_dict}
else:
return {"tensorboard": {"enabled": False}}
def _map_ds_grad_clip_configs(
    self, grad_clip: Optional[Union[ClipGradConfig, ClipGradNormConfig]]
):
    """Maps grad clipping related parameters

    https://www.deepspeed.ai/docs/config-json/#gradient-clipping

    Parameters
    ----------
    grad_clip: Optional[Union[ClipGradConfig, ClipGradNormConfig]], default: None
        gradient clipping config objects

    Returns
    -------
    dict
        gradient clipping parameters or empty dict

    Raises
    ------
    ValueError
        if a clipping config other than clip-by-norm is given (deepspeed only
        supports norm-based clipping here)
    """
    # No clipping requested -> contribute nothing to the config
    if grad_clip is None:
        return {}
    if isinstance(grad_clip, ClipGradNormConfig):
        return {"gradient_clipping": grad_clip.max_norm}
    raise ValueError(
        f"Deepspeed does not currently support "
        f'{type(grad_clip).__name__.replace("Config", "")}'
    )
def _map_ds_logging_configs(self):
"""Maps logging related parameters
https://www.deepspeed.ai/docs/config-json/#logging
Returns
-------
dict
logging parameters or empty dict
"""
return {
"steps_per_print": self._deepspeed_config.steps_per_print,
"dump_state": self._deepspeed_config.dump_state,
"wall_clock_breakdown": self._deepspeed_config.wall_clock_breakdown,
}
def _map_ds_activation_checkpointing_configs(self):
"""Maps activation checkpointing related parameters
https://www.deepspeed.ai/docs/config-json/#activation-checkpointing
Returns
-------
dict
activation checkpointing parameters or empty dict
"""
if self._deepspeed_config.activation_checkpointing is not None:
map_dict = {
v.name: getattr(self._deepspeed_config.activation_checkpointing, v.name)
for v in self._deepspeed_config.activation_checkpointing.__attrs_attrs__
}
return {"activation_checkpointing": map_dict}
else:
return {}
def _map_ds_flops_configs(self):
"""Maps flops related parameters
https://www.deepspeed.ai/docs/config-json/#flops-profiler
Returns
-------
dict
flops parameters or enabled false dict
"""
if self._deepspeed_config.flops_profiler is not None:
map_dict = {
v.name: getattr(self._deepspeed_config.flops_profiler, v.name)
for v in self._deepspeed_config.flops_profiler.__attrs_attrs__
}
map_dict.update({"enabled": True})
return {"flops_profiler": map_dict}
else:
return {"flops_profiler": {"enabled": False}}
def _map_ds_aio_configs(self):
"""Maps async i/o related parameters
| |
# Defines core methods shared amongst system test scripts
import sets
import unittest
import shlex
import subprocess
import os
import binascii
import requests
import logging
import random
import time
import datetime
import json
import pprint
import xml.etree.ElementTree as ET
from StringIO import StringIO
# Default group/policy names used throughout the system tests
DEFAULT_GROUP_NAME = "Default Group"
DEFAULT_TPM_POLICY = "Test TPM Policy"
DEFAULT_IMA_POLICY = "Test IMA Policy"
# Module-level cache of the most recently fetched raw XML report
# (populated by HIRSPortal.get_latest_report)
CACHED_XML_REPORT = None
APPRAISAL_SUCCESS_MESSAGE = "Appraisal passed"
class HIRSPortal:
    """Thin REST client for the HIRS Portal endpoints exercised by the system tests."""

    def __init__(self, hirs_server_url):
        self.server_url = hirs_server_url

    def request(self, method, path, params={}, data={}, files={}, expected_status_codes=[200], operation=None, verify=False):
        """Issue an HTTP request against the portal (delegates to web_request)."""
        return web_request(self.server_url, method, path, params, data, files, expected_status_codes, operation, verify)

    def set_default_policies(self, tpm_policy="No Policy",
                             ima_policy="No Policy"):
        """set the given policies to be the policies for the default group."""
        payload = {"description": "default group modified for systems tests",
                   "name": DEFAULT_GROUP_NAME}
        # TODO this will report failure if the group already exists. Not sure how to avoid this
        self.request("post", "portal/group/create", data=payload)
        self.set_tpm_ima_policy(DEFAULT_GROUP_NAME, tpm_policy, ima_policy)

    def set_tpm_ima_policy(self, group_name=DEFAULT_GROUP_NAME, tpm_policy=None, ima_policy=None):
        """set the TPM and IMA policy for the group"""
        # The portal applies one policy type per call, so post twice: once for
        # the existing IMA policy and once for the existing TPM policy
        payload = {"name": group_name,
                   "ima": ima_policy,
                   "tpm": tpm_policy,
                   "optionRadio" : "existingImaPolicy",
                   "policyName" : ""}
        self.request("post", "portal/group/update/policies", data=payload)
        payload = {"name": group_name,
                   "ima": ima_policy,
                   "tpm": tpm_policy,
                   "optionRadio" : "existingTpmPolicy",
                   "policyName" : ""}
        self.request("post", "portal/group/update/policies", data=payload)

    def set_group_appraisal_wait_setting(self, group_name=DEFAULT_GROUP_NAME,
                                         is_client_waiting='checked'):
        """set the specified group's client wait for appraisal setting to the specified value."""
        self.request("post", "portal/group/editWaitForAppraisalCompletion", data={"groupName": group_name, "enabled" : is_client_waiting})

    def get_latest_report(self):
        """Retrieves the latest report that was created for the given client.

        The retrieved report is cached. Calling run_hirs_report will clear the
        latest report from the cache.
        """
        global CACHED_XML_REPORT
        if CACHED_XML_REPORT:
            logging.info("found cached XML report")
            return CACHED_XML_REPORT
        # BUG FIX: added missing space between the concatenated string literals
        logging.info("cached XML report not found, retrieving latest report from "
                     "the server")
        latest_report_id = self.get_latest_report_summary()['report']['id']
        logging.info("requesting raw report")
        request_result = self.request("get", "portal/report/xml/raw?uuid=" + latest_report_id, operation="get latest report")
        CACHED_XML_REPORT = request_result.text
        return CACHED_XML_REPORT

    def get_alert_count_from_latest_report(self):
        """ Retrieves the alert count from the latest report. """
        return self.get_alerts_from_latest_report()['recordsTotal']

    def get_alerts_from_latest_report(self):
        """ Retrieves the alert list from the latest report. """
        latest_report_id = self.get_latest_report_summary()['report']['id']
        return self.request("get", "portal/alerts/list?report=" + latest_report_id).json()

    def start_on_demand(self, group_name="Default%20Group"):
        """Kick off an on-demand appraisal for the given (URL-encoded) group name."""
        self.request("get", "portal/on-demand/group/" + group_name)

    def get_latest_report_summary(self):
        """Pull the latest report summary from the Portal."""
        all_reports = self.request("get", "portal/report/list").json()['data']
        if len(all_reports) == 0:
            return None
        return max(all_reports, key=lambda report: report['timestamp'])

    def get_devices(self):
        """Get devices Portal."""
        return self.request("get", "portal/devices/list").json()

    def report_contains_ima_record(self, filename, sha_hash, report_id):
        """Check whether the report with the given id contains the given filename
        and hash.
        """
        logging.info("checking if report with ID {} contains file {} with hash {}".format(
            report_id, filename, sha_hash))
        ima_records = self.request("get", "portal/report/list/imaRecords", params={'scope': 'REPORT', 'id': report_id}).json()['data']

        def record_matcher(record):
            # check for IMA records with this hash, and if the filename is in the record's path
            # (works for full or partial path)
            return (record['hash']['digestString'] == sha_hash) and (filename in record['path'])

        # any() short-circuits and works identically on Python 2 and 3
        # (the old filter()+len() relied on filter returning a list)
        return any(record_matcher(record) for record in ima_records)

    def upload_payload(self, payload):
        """Write the payload dict to a temp JSON file, upload it as a policy, and return its name."""
        json_path = "tmp.json"
        with open(json_path, 'w') as json_file:
            json_file.write(json.dumps(payload))
        post_file = {'file': open(json_path, 'rb')}
        logging.debug("uploading policy:\n{0}".format(pprint.pformat(payload)))
        try:
            self.request("post", "portal/policies/import", files=post_file, operation="upload policy")
        finally:
            # always release the handle and remove the temp file, even on failure
            post_file['file'].close()
            os.remove(json_path)
        return payload["name"]

    def add_ima_policy(self, required_set=None, whitelist=None, blacklist=None, ignore=None, unknown_fail="false", delta_reports_enabled="false", policy_name_prefix=""):
        """Create and upload an IMA policy built from the given baseline sets; returns the policy name."""
        timestamp = get_current_timestamp()
        policy_name = "{0}_IMA_systems_test_policy_{1}".format(policy_name_prefix, timestamp)
        policy_description = "IMA policy for systems testing"
        payload = {"name": policy_name,
                   "description": policy_description,
                   "type": "IMA"}
        # Each optional baseline becomes a single-element list in the payload
        required_payload, whitelist_payload, ignore_payload, blacklist_payload = [], [], [], []
        if required_set is not None:
            required_payload.append(required_set)
        if whitelist is not None:
            whitelist_payload.append(whitelist)
        if blacklist is not None:
            blacklist_payload.append(blacklist)
        if ignore is not None:
            ignore_payload.append(ignore)
        ima_payload = {
            "deltaReportEnable": delta_reports_enabled,
            "failOnUnknowns": unknown_fail,
            "validatePcr": "false",
            "checkSubsequentBaselines": "true",
            "partialPathEnable": "true",
            "required": required_payload,
            "whitelist": whitelist_payload,
            "blacklist": blacklist_payload,
            "ignoreSet": ignore_payload
        }
        payload.update(ima_payload)
        return self.upload_payload(payload)

    def add_tpm_wl_policy(self, baseline, policy_name_prefix=""):
        """Create and upload a TPM white list policy for the given baseline; returns the policy name."""
        timestamp = get_current_timestamp()
        policy_name = "{0}_TPM_systems_test_wl_policy_{1}".format(policy_name_prefix, timestamp)
        policy_description = "TPM white list policy for systems testing"
        payload = {"name": policy_name,
                   "description": policy_description,
                   "type": "TPM"}
        tpm_payload = {"appraiserPcrMask": 0xffffff,
                       "reportPcrMask": 0xffffff,
                       "appraiseFullReport": "true",
                       "validateSignature": "true",
                       "white-list-baselines": [baseline]}
        payload.update(tpm_payload)
        return self.upload_payload(payload)

    def add_tpm_bl_policy(self, baseline, policy_name_prefix=""):
        """Create and upload a TPM black list policy for the given baseline; returns the policy name."""
        timestamp = get_current_timestamp()
        policy_name = "{0}_TPM_systems_test_bl_policy_{1}".format(policy_name_prefix, timestamp)
        policy_description = "TPM black list policy for systems testing"
        payload = {"name": policy_name,
                   "description": policy_description,
                   "type": "TPM"}
        tpm_payload = {"appraiserPcrMask": 0xffffff,
                       "reportPcrMask": 0xffffff,
                       "appraiseFullReport": "true",
                       "validateSignature": "true",
                       "black-list-baselines": [baseline]}
        payload.update(tpm_payload)
        return self.upload_payload(payload)

    def add_to_ima_baseline(self, baseline_name, file_path, file_hash):
        """Add a single path/hash record to the named IMA baseline."""
        self.request("post", "portal/baselines/record/ima/add", data={'name': baseline_name, 'path': file_path, 'hash': file_hash}, operation="add to IMA baseline")

    def upload_csv_baseline(self, baseline_path, appraiser_type):
        """Upload a CSV baseline file, delete the local copy, and return the generated baseline name."""
        post_file = {'file': open(baseline_path, 'rb')}
        current_time = datetime.datetime.now()
        baseline_name = baseline_path.split('.')[0] + '_' + str(current_time.hour) + '-' + str(current_time.minute) + '-' + str(current_time.second)
        try:
            # BUG FIX: the response was never captured before, so the status
            # check below referenced an undefined name (and compared a
            # Response object to the int 200)
            request_result = self.request("post", "uploadImaCsv", data={'baselineName': baseline_name, 'optionsRadios': appraiser_type}, files=post_file, operation="upload baseline")
            if request_result.status_code != 200:
                logging.error("upload baseline return code: {0}, response text:\n"
                              "{1}".format(request_result.status_code, request_result.text))
        finally:
            post_file['file'].close()
        # os.remove replaces the old shell-out to "rm"
        os.remove(baseline_path)
        return baseline_name

    def configure_yum_repository(self, baseline_name, base_url):
        """Creates a Yum repository, configures it with a URL, triggers an update,
        and waits for the update to complete via Portal endpoints."""
        self.request("post", "portal/repository/create", params={'name':baseline_name,'type':'Yum'}, operation="create Yum repository")
        self.request("post", "portal/repository/update/url", params={'name':baseline_name,'baseUrl':base_url}, operation="set URL of Yum repository")
        self.request("post", "portal/repository/job/trigger", params={'name':baseline_name}, operation="update Yum repository")
        # wait for update to finish, polling every few seconds up to a timeout
        update_complete = False
        max_wait_time_seconds = 240
        sleep_time_seconds = 5
        counter = 1
        while not update_complete:
            time.sleep(sleep_time_seconds)
            if counter * sleep_time_seconds >= max_wait_time_seconds:
                msg = "Timeout waiting for repository update: {0} seconds".format(max_wait_time_seconds)
                logging.error(msg)
                raise RuntimeError(msg)
            counter += 1
            request_result = self.request("get", "portal/repository/job/check", params={'name':baseline_name}, operation="check status of repo update job")
            update_complete = not json.loads(request_result.text)['jobCurrentlyRunning']

    def create_broad_ima_baseline(self, baseline_name, repository_name):
        """Creates a BroadRepoImaBaseline repository, configures it with a repository,
        and updates the baseline from the repository's contents via Portal endpoints."""
        self.request("post", "portal/baselines/create", params={'name':baseline_name,'type':'broad'}, operation="create broad baseline")
        self.request("post", "portal/baselines/update/repositories", params={'name':baseline_name,'repositories':[repository_name]}, operation="add repository to broad baseline")
        self.request("post", "portal/baselines/triggerupdate", params={'name':baseline_name}, operation="update broad repository from its repository")

    def create_policy(self, name, policy_type):
        """Creates a new Policy with the given type and name via Portal endpoints."""
        self.request("post", "portal/policies/create", params={'name':name,'type':policy_type}, operation="create new policy")

    def set_partial_paths_for_ima_policy(self, policy_name, enabled):
        """Enables or disables partial path checking for an IMA policy."""
        checked = 'checked' if enabled else 'unchecked'
        self.request("post", "portal/policies/update", params={'name':policy_name,'partial':checked}, operation="update policy's partial path setting")

    def set_kernel_setting(self, policy_name, kernel_detect_enabled, kernel_alert_enabled, kernel_alert_severity="UNSPECIFIED"):
        """Enables or disables kernel detection for a TPM policy."""
        kernel_detect_checked = 'true' if kernel_detect_enabled else 'false'
        kernel_alert_checked = 'true' if kernel_alert_enabled else 'false'
        self.request("post", "portal/policies/update/editKernelDetectSettings", params={'name':policy_name,'kernelDetectToggle':kernel_detect_checked,'kernelAlertToggle':kernel_alert_checked,'kernelAlertSeverity':kernel_alert_severity}, operation="update policy's kernel detection setting")

    def add_baseline_to_required_sets(self, policy_name, baseline_name):
        """Adds the named baseline to the policy's required sets via Portal endpoints."""
        self.request("post", "portal/policies/update", params={'name':policy_name,'required':[baseline_name]}, operation="add baseline to required sets")

    def get_alerts(self):
        """Retrieve the full alert list from the Portal."""
        return self.request("get", "portal/alerts/list").json()
class AttestationCAPortal:
    """Thin REST client for the Attestation CA Portal endpoints used by the system tests."""

    def __init__(self, hirs_server_url):
        self.server_url = hirs_server_url

    def request(self, method, path, params={}, data={}, files={}, expected_status_codes=[200], operation=None, verify=False):
        """Issue an HTTP request against the portal (delegates to web_request)."""
        return web_request(self.server_url, method, path, params, data, files, expected_status_codes, operation, verify)

    def check_is_online(self):
        """Hit a list endpoint to confirm the ACA portal is reachable."""
        return self.request("get", "portal/certificate-request/platform-credentials/list").json()

    def get_supply_chain_validation_summaries(self):
        """Get supply chain validation summaries from the ACA portal."""
        return self.request("get", "portal/validation-reports/list").json()

    def disable_supply_chain_validations(self):
        """Turn off EC, PC, and PC-attribute validation via the policy endpoints."""
        # the initial POST request goes through, but the redirect from the server is attempted which results in a 404,
        # or possibly a 200 on centos7, apparently.
        self.request("post", "portal/policy/update-ec-validation",
                     expected_status_codes=[404, 200], params={'ecValidate': "unchecked",})
        self.request("post", "portal/policy/update-pc-validation",
                     expected_status_codes=[404, 200], params={'pcValidate': 'unchecked'})
        self.request("post", "portal/policy/update-pc-attribute-validation",
                     expected_status_codes=[404, 200], params={'pcAttributeValidate': 'unchecked'})

    def enable_supply_chain_validations(self):
        """Turn on EC, PC, and PC-attribute validation via the policy endpoints."""
        # the initial POST request goes through, but the redirect from the server is attempted which results in a 404,
        # or possibly a 200 on centos7, apparently.
        self.request("post", "portal/policy/update-ec-validation",
                     expected_status_codes=[404, 200], params={'ecValidate': "checked",})
        self.request("post", "portal/policy/update-pc-validation",
                     expected_status_codes=[404, 200], params={'pcValidate': 'checked'})
        self.request("post", "portal/policy/update-pc-attribute-validation",
                     expected_status_codes=[404, 200], params={'pcAttributeValidate': 'checked'})

    def enable_ec_validation(self):
        """Turn on endorsement credential validation only."""
        self.request("post", "portal/policy/update-ec-validation",
                     expected_status_codes=[404, 200], params={'ecValidate': "checked",})

    def get_devices(self):
        """Get devices from ACA portal."""
        return self.request("get", "portal/devices/list").json()

    def get_ek_certs(self):
        """Get EK certs from ACA portal."""
        return self.request("get", "portal/certificate-request/endorsement-key-credentials/list").json()

    def get_pk_certs(self):
        """Get PK certs from ACA portal."""
        return self.request("get", "portal/certificate-request/platform-credentials/list").json()

    def get_trust_chains(self):
        """Get trust chains from ACA portal."""
        return self.request("get", "portal/certificate-request/trust-chain/list").json()

    def upload_ca_cert(self, ca_cert_file):
        """Upload a CA certificate into the ACA trust chain."""
        # BUG FIX: use a context manager so the file handle is closed (it was
        # leaked before), and avoid shadowing the `file` builtin
        with open(ca_cert_file, 'rb') as cert_file:
            self.request("post", "portal/certificate-request/trust-chain/upload", files={'file': cert_file}, operation="upload CA cert")

    def upload_pk_cert(self, pk_cert_file):
        """Upload a platform credential certificate to the ACA."""
        with open(pk_cert_file, 'rb') as cert_file:
            self.request("post", "portal/certificate-request/platform-credentials/upload", files={'file': cert_file}, operation="upload PK cert")
def web_request(server_url, method, path, params={}, data={}, files={}, expected_status_codes=[200], operation=None, verify=False):
    """Issue an HTTP GET or POST against server_url + path and validate the status code.

    Raises ValueError for any method other than 'get'/'post'; delegates status
    validation to check_request_response and returns the requests Response.
    """
    url = server_url + path
    if method not in ['get', 'post']:
        raise ValueError("Method " + method + " not valid.")
    request_response = getattr(requests, method)(url, params=params, data=data, files=files, verify=verify)
    # Build a human-readable operation label for error reporting
    request_msg = method + " " + url
    # BUG FIX (idiom): identity comparison with None, not ==
    if operation is None:
        operation = request_msg
    else:
        operation += " (" + request_msg + ")"
    check_request_response(expected_status_codes, request_response, operation)
    return request_response
"""Checks a requests response to see if its status code matches the expected status code. If it does, this method returns True. If it | |
bitmask:
if bitmask & 1:
if i <= _LAST_CAP.value:
res.add(Cap(i))
else:
warnings.warn(
"Unrecognized capability (number {}) found in capability set. This may result "
"in strange behavior. Are you using an old version of pyprctl on a newer "
"kernel?".format(i),
RuntimeWarning,
)
bitmask >>= 1
i += 1
return res
def _capset_to_bitmask(caps: Iterable[Cap]) -> int:
    """Fold an iterable of capabilities into a kernel-style bitmask
    (bit N set iff a capability with value N is present)."""
    mask = 0
    for capability in caps:
        mask = mask | (1 << capability.value)
    return mask
def _split_bitmask_32(bitmask: int) -> Tuple[int, int]:
return bitmask >> 32, bitmask & ((1 << 32) - 1)
def _combine_bitmask_32(upper: int, lower: int) -> int:
return (upper << 32) | lower
def _capstate_from_text(text: str) -> Tuple[Set[Cap], Set[Cap], Set[Cap]]:
    """Parse a cap_from_text(3)-style capability string.

    Each whitespace-separated clause has the form "<caps><action-list>": <caps>
    is a comma-separated list of capability names (empty or "all" means every
    capability), and <action-list> is a sequence of operator characters
    (+ add, - remove, = reset-then-add) each followed by flag characters
    (e=effective, i=inheritable, p=permitted).

    Raises ValueError for clauses with no operator, repeated operators, or
    unknown flag characters.
    """
    # Returns (effective, inheritable, permitted)
    effective: Set[Cap] = set()
    inheritable: Set[Cap] = set()
    permitted: Set[Cap] = set()
    for clause in text.split():
        # The first operator character splits the clause into names vs. actions
        match = re.search(r"[-+=]", clause)
        if match is None:
            raise ValueError("Invalid capability set clause")
        cap_names = clause[: match.start()]
        action_spec = clause[match.start():]
        # Empty name list (or "all") expands to every known capability
        caps = (
            list(Cap)
            if cap_names.lower() in ("", "all")
            else [Cap.from_name(name) for name in cap_names.split(",")]
        )
        # should_raise tracks whether subsequent flag chars add (+/=) or remove (-)
        should_raise = True
        last_ch = None
        for ch in action_spec:
            if ch in "+-=":
                # An operator is only legal at the start or right after a flag char
                if last_ch is not None and last_ch not in "eip":
                    raise ValueError("Repeated flag characters in capability set clause")
                if ch == "=":
                    # Drop the listed capabilities in all sets
                    effective.difference_update(caps)
                    inheritable.difference_update(caps)
                    permitted.difference_update(caps)
                    # Now only raise it in the specified sets
                    should_raise = True
                elif ch == "+":
                    should_raise = True
                elif ch == "-":
                    should_raise = False
            elif ch == "e":
                if should_raise:
                    effective.update(caps)
                else:
                    effective.difference_update(caps)
            elif ch == "i":
                if should_raise:
                    inheritable.update(caps)
                else:
                    inheritable.difference_update(caps)
            elif ch == "p":
                if should_raise:
                    permitted.update(caps)
                else:
                    permitted.difference_update(caps)
            else:
                raise ValueError("Invalid character {!r} in capability set clause".format(ch))
            last_ch = ch
    return effective, inheritable, permitted
def _capstate_to_text(*, effective: Set[Cap], inheritable: Set[Cap], permitted: Set[Cap]) -> str:
    """Serialize three capability sets into a compact cap_to_text(3)-style string.

    Inverse of _capstate_from_text(). Greedily emits clauses for capabilities
    common to multiple sets, possibly "overshooting" by batching to a full set
    and then emitting drop clauses for the extras.
    """
    # All three sets empty: "=" alone clears everything
    if not effective and not inheritable and not permitted:
        return "="

    def cap_set_to_text(caps: Set[Cap], prefix_ch: str) -> str:
        # An empty name list before "=" already means "all"; elsewhere it must
        # be spelled out explicitly
        if caps == _ALL_CAPS_SET:
            return "" if prefix_ch == "=" else "all"
        return ",".join(
            "cap_" + cap.name.lower() for cap in sorted(caps, key=lambda cap: cap.value)
        )

    # Keep the caller's sets for computing "extras" after batch-adds
    orig_effective = effective
    orig_inheritable = inheritable
    orig_permitted = permitted
    # These are the capabilities that need to be added.
    effective = set(effective)
    inheritable = set(inheritable)
    permitted = set(permitted)
    # These are the capabilities that need to be dropped (perhaps because we batch-added "extra"
    # capabilities, for example as in "=e cap_chown-e").
    drop_effective: Set[Cap] = set()
    drop_inheritable: Set[Cap] = set()
    drop_permitted: Set[Cap] = set()
    parts: List[str] = []

    def add_part(
        caps: Set[Cap],
        *,
        eff: bool = False,
        inh: bool = False,
        perm: bool = False,
        drop: bool = False,
    ) -> None:
        # Append one clause covering `caps` for the flagged sets, and update
        # the add/drop bookkeeping sets in the enclosing scope accordingly.
        if not caps:
            # Nothing to do!
            return
        # If we're pretty close to a full set, just use a full set.
        if not drop and caps != _ALL_CAPS_SET and len(_ALL_CAPS_SET - caps) <= 10:
            caps = _ALL_CAPS_SET
        if drop:
            prefix_ch = "-"
        elif not parts:
            # No previous values that the resetting behavior of "=" might mess up
            prefix_ch = "="
        else:
            prefix_ch = "+"
        parts.append(
            cap_set_to_text(caps, prefix_ch)
            + prefix_ch
            + ("e" if eff else "")
            + ("i" if inh else "")
            + ("p" if perm else "")
        )
        if drop:
            # We just dropped these capabilities; we don't need to keep track of them any more
            if eff:
                drop_effective.difference_update(caps)
            if inh:
                drop_inheritable.difference_update(caps)
            if perm:
                drop_permitted.difference_update(caps)
        else:
            if eff:
                # If there were any capabilities in "caps" that aren't in "orig_effective",
                # then those were extraneous and we need to remove them later.
                drop_effective.update(caps - orig_effective)
                # All of the capabilities in "caps" have been added; we don't need to
                # keep track of them in "effective" any more.
                effective.difference_update(caps)
            if inh:
                drop_inheritable.update(caps - orig_inheritable)
                inheritable.difference_update(caps)
            if perm:
                drop_permitted.update(caps - orig_permitted)
                permitted.difference_update(caps)

    # First, add the ones that are common to all 3 sets.
    add_part(effective & inheritable & permitted, eff=True, inh=True, perm=True)
    # If we "overshot" by adding too many capabilities (for example, if all three sets had every
    # capability except CAP_CHOWN), then we need to drop the "extra" ones -- at least, the "extras"
    # that are common to all 3 sets.
    add_part(
        drop_effective & drop_inheritable & drop_permitted, eff=True, inh=True, perm=True, drop=True
    )
    # Now, go through and add the various combinations (cap_chown+ei, cap_chown+ep, etc.).
    add_part(effective & inheritable, eff=True, inh=True)
    add_part(effective & permitted, eff=True, perm=True)
    add_part(inheritable & permitted, inh=True, perm=True)
    # Again remove any "extras" that are common to 2 sets.
    add_part(drop_effective & drop_inheritable, eff=True, inh=True, drop=True)
    add_part(drop_effective & drop_permitted, eff=True, perm=True, drop=True)
    add_part(drop_inheritable & drop_permitted, inh=True, perm=True, drop=True)
    # Now add the remaining ones that are set-specific.
    add_part(effective, eff=True)
    add_part(inheritable, inh=True)
    add_part(permitted, perm=True)
    # Nothing should be left to add
    assert not effective
    assert not inheritable
    assert not permitted
    # Finally, drop the ones that are set-specific.
    add_part(drop_effective, eff=True, drop=True)
    add_part(drop_inheritable, inh=True, drop=True)
    add_part(drop_permitted, perm=True, drop=True)
    # And now nothing should be left to remove
    assert not drop_effective
    assert not drop_inheritable
    assert not drop_permitted
    return " ".join(parts)
@enum.unique
class Secbits(enum.Flag):
    """
    Flag enumeration of the kernel "securebits", which alter how the kernel
    handles capabilities for UID 0.
    """

    #: When set, the kernel will not grant capabilities to set-user-ID-root
    #: programs, nor to processes with an effective or real user ID of 0 on
    #: ``exec()``. See capabilities(7) for more details.
    NOROOT = 1
    #: "Locks" the NOROOT securebit so it cannot be changed.
    NOROOT_LOCKED = 2
    #: Prevents the kernel from adjusting the process's
    #: permitted/effective/ambient capability sets when its effective and
    #: filesystem UIDs move between 0 and nonzero. See capabilities(7) for
    #: more details.
    NO_SETUID_FIXUP = 4
    #: "Locks" the NO_SETUID_FIXUP securebit so it cannot be changed.
    NO_SETUID_FIXUP_LOCKED = 8
    #: Provides the same functionality as :py:func:`get_keepcaps()` and
    #: :py:func:`set_keepcaps()`.
    #:
    #: Changes made via :py:func:`get_keepcaps()`/:py:func:`set_keepcaps()`
    #: are reflected in the value returned by :py:func:`get_securebits()`, and
    #: vice versa. Since changing the securebits requires CAP_SETPCAP, prefer
    #: those functions when this is the only securebit you need to change.
    KEEP_CAPS = 16
    #: "Locks" the KEEP_CAPS securebit so it cannot be changed.
    #:
    #: Note: even when KEEP_CAPS is set and locked with this flag, the kernel
    #: still clears it on ``exec()`` -- so this lock is only really useful for
    #: pinning the KEEP_CAPS securebit in the "off" state.
    KEEP_CAPS_LOCKED = 32
    #: Disables raising ambient capabilities (such as with
    #: :py:func:`cap_ambient_raise()`).
    NO_CAP_AMBIENT_RAISE = 64
    #: "Locks" the NO_CAP_AMBIENT_RAISE securebit so it cannot be changed.
    NO_CAP_AMBIENT_RAISE_LOCKED = 128
def get_securebits() -> Secbits:
    """
    Retrieve the calling thread's current secure bits as a
    :py:class:`Secbits` flag value.
    """
    raw_bits = ffi.prctl(ffi.PR_GET_SECUREBITS, 0, 0, 0, 0)
    return Secbits(raw_bits)
def set_securebits(secbits: Secbits) -> None:
    """
    Replace the calling thread's secure bits with ``secbits``.

    (This requires CAP_SETPCAP.)
    """
    ffi.prctl(ffi.PR_SET_SECUREBITS, secbits.value, 0, 0, 0)
class _SecurebitsAccessor: # pylint: disable=too-few-public-methods
    """Attribute-style read/write access to individual securebits.

    Reading an attribute (e.g. ``noroot``) calls get_securebits() and tests the
    corresponding flag; assigning to it re-reads the current bits, sets or
    clears just that flag, and writes the result back via set_securebits().
    """
    # Class-body helper, NOT a method: it is called during class creation to
    # build one property per securebit, and deleted below so it never leaks
    # into instances. That is why it takes no ``self`` parameter.
    def _make_property( # type: ignore # pylint: disable=no-self-argument
        secbit: Secbits,
    ) -> property:
        def getter(self: "_SecurebitsAccessor") -> bool: # pylint: disable=unused-argument
            # True if this securebit is currently set for the thread.
            return bool(get_securebits() & secbit)
        def setter(
            self: "_SecurebitsAccessor", value: bool # pylint: disable=unused-argument
        ) -> None:
            # Read-modify-write: flip only ``secbit``, preserving all others.
            cur_secbits = get_securebits()
            if value:
                cur_secbits |= secbit
            else:
                cur_secbits &= ~secbit # pylint: disable=invalid-unary-operand-type
            set_securebits(cur_secbits)
        return property(getter, setter)
    noroot = _make_property(Secbits.NOROOT)
    noroot_locked = _make_property(Secbits.NOROOT_LOCKED)
    no_setuid_fixup = _make_property(Secbits.NO_SETUID_FIXUP)
    no_setuid_fixup_locked = _make_property(Secbits.NO_SETUID_FIXUP_LOCKED)
    keep_caps = _make_property(Secbits.KEEP_CAPS)
    keep_caps_locked = _make_property(Secbits.KEEP_CAPS_LOCKED)
    no_cap_ambient_raise = _make_property(Secbits.NO_CAP_AMBIENT_RAISE)
    no_cap_ambient_raise_locked = _make_property(Secbits.NO_CAP_AMBIENT_RAISE_LOCKED)
    # Remove the helper so it is not exposed as a (broken, self-less) method.
    del _make_property
    # Maps each non-lock securebit to its *_LOCKED counterpart, derived from
    # the naming convention in Secbits; used by __str__ below.
    _lock_map = {
        secbit: getattr(Secbits, secbit.name + "_LOCKED")
        for secbit in Secbits
        if not secbit.name.endswith("_LOCKED")
    }
    def __repr__(self) -> str:
        return "<Securebits: {}>".format(self)
    def __str__(self) -> str:
        # Render every securebit with its current on/off state and whether its
        # lock bit is set, e.g. "secure-noroot: no (unlocked), ...".
        cur_secbits = get_securebits()
        return ", ".join(
            "secure-{}: {} ({})".format(
                secbit.name.lower().replace("_", "-"),
                "yes" if secbit in cur_secbits else "no",
                "locked" if lock_secbit in cur_secbits else "unlocked",
            )
            for secbit, lock_secbit in self._lock_map.items()
        )
# Module-level singleton; e.g. ``securebits.noroot = True`` sets the NOROOT bit.
securebits = _SecurebitsAccessor()
def capbset_read(cap: Cap) -> Optional[bool]:
"""
Check whether the given capability is present in the current thread's bounding capability set.
This | |
"""
IMPORTING LIBS
"""
import dgl
import numpy as np
import os
import socket
import time
import random
import glob
import argparse, json
import pickle
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader
from tensorboardX import SummaryWriter
from tqdm import tqdm
import matplotlib
import matplotlib.pyplot as plt
class DotDict(dict):
    """Dictionary whose items are also accessible as attributes.

    Generalized to accept the same constructor arguments as ``dict`` (an
    optional mapping or iterable of key/value pairs, plus keyword arguments);
    the original keyword-only form ``DotDict(a=1)`` keeps working unchanged.
    """
    def __init__(self, *args, **kwds):
        super().__init__()
        self.update(*args, **kwds)
        # Aliasing __dict__ to the dict itself makes d.key and d['key']
        # interchangeable for both reads and writes.
        self.__dict__ = self
"""
IMPORTING CUSTOM MODULES/METHODS
"""
from nets.ZINC_graph_regression.load_net import gnn_model # import all GNNS
from data.data import LoadData # import dataset
"""
GPU Setup
"""
def gpu_setup(use_gpu, gpu_id):
    """Select and return the torch device, pinning CUDA to ``gpu_id``.

    Sets CUDA_DEVICE_ORDER/CUDA_VISIBLE_DEVICES before probing CUDA so the
    requested GPU is the one torch sees as device 0.
    """
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)
    if not (torch.cuda.is_available() and use_gpu):
        print('cuda not available')
        return torch.device("cpu")
    print('cuda available with GPU:',torch.cuda.get_device_name(0))
    return torch.device("cuda")
"""
VIEWING MODEL CONFIG AND PARAMS
"""
def view_model_param(MODEL_NAME, net_params):
    """Instantiate the model once and report its total parameter count."""
    model = gnn_model(MODEL_NAME, net_params)
    print("MODEL DETAILS:\n")
    total_param = sum(np.prod(list(p.data.size())) for p in model.parameters())
    print('MODEL/Total parameters:', MODEL_NAME, total_param)
    return total_param
"""
TRAINING CODE
"""
def train_val_pipeline(MODEL_NAME, dataset, params, net_params, dirs):
    """Full train/validate/test loop for ZINC graph regression.

    Prepares positional encodings on the dataset, trains with Adam +
    ReduceLROnPlateau, logs to TensorBoard, checkpoints per epoch, optionally
    visualizes learned positional encodings, and writes a results summary.

    dirs is unpacked as (root_log_dir, root_ckpt_dir, write_file_name,
    write_config_file, viz_dir).
    """
    t0 = time.time()
    per_epoch_time = []
    DATASET_NAME = dataset.name
    # Initialize positional encodings on the dataset, depending on pe_init.
    if net_params['pe_init'] == 'lap_pe':
        tt = time.time()
        print("[!] -LapPE: Initializing graph positional encoding with Laplacian PE.")
        dataset._add_lap_positional_encodings(net_params['pos_enc_dim'])
        print("[!] Time taken: ", time.time()-tt)
    elif net_params['pe_init'] == 'rand_walk':
        tt = time.time()
        print("[!] -LSPE: Initializing graph positional encoding with rand walk features.")
        dataset._init_positional_encodings(net_params['pos_enc_dim'], net_params['pe_init'])
        print("[!] Time taken: ", time.time()-tt)
        # Also store Laplacian eigenvectors for the visualization step below.
        tt = time.time()
        print("[!] -LSPE (For viz later): Adding lapeigvecs to key 'eigvec' for every graph.")
        dataset._add_eig_vecs(net_params['pos_enc_dim'])
        print("[!] Time taken: ", time.time()-tt)
    # Transformer-style models may need full (dense) graph connectivity.
    if MODEL_NAME in ['SAN', 'GraphiT']:
        if net_params['full_graph']:
            st = time.time()
            print("[!] Adding full graph connectivity..")
            dataset._make_full_graph() if MODEL_NAME == 'SAN' else dataset._make_full_graph((net_params['p_steps'], net_params['gamma']))
            print('Time taken to add full graph connectivity: ',time.time()-st)
    trainset, valset, testset = dataset.train, dataset.val, dataset.test
    root_log_dir, root_ckpt_dir, write_file_name, write_config_file, viz_dir = dirs
    device = net_params['device']
    # Write the network and optimization hyper-parameters in folder config/
    with open(write_config_file + '.txt', 'w') as f:
        f.write("""Dataset: {},\nModel: {}\n\nparams={}\n\nnet_params={}\n\n\nTotal Parameters: {}\n\n""" .format(DATASET_NAME, MODEL_NAME, params, net_params, net_params['total_param']))
    log_dir = os.path.join(root_log_dir, "RUN_" + str(0))
    writer = SummaryWriter(log_dir=log_dir)
    # setting seeds (python, numpy, torch, and CUDA when applicable) for
    # reproducibility; cudnn is forced into deterministic mode.
    random.seed(params['seed'])
    np.random.seed(params['seed'])
    torch.manual_seed(params['seed'])
    if device.type == 'cuda':
        torch.cuda.manual_seed(params['seed'])
        torch.cuda.manual_seed_all(params['seed'])
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
    print("Training Graphs: ", len(trainset))
    print("Validation Graphs: ", len(valset))
    print("Test Graphs: ", len(testset))
    model = gnn_model(MODEL_NAME, net_params)
    model = model.to(device)
    optimizer = optim.Adam(model.parameters(), lr=params['init_lr'], weight_decay=params['weight_decay'])
    # LR is decayed when the validation loss plateaus (see scheduler.step below).
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min',
                                                     factor=params['lr_reduce_factor'],
                                                     patience=params['lr_schedule_patience'],
                                                     verbose=True)
    epoch_train_losses, epoch_val_losses = [], []
    epoch_train_MAEs, epoch_val_MAEs = [], []
    # import train functions for all GNNs
    from train.train_ZINC_graph_regression import train_epoch_sparse as train_epoch, evaluate_network_sparse as evaluate_network
    train_loader = DataLoader(trainset, num_workers=4, batch_size=params['batch_size'], shuffle=True, collate_fn=dataset.collate)
    val_loader = DataLoader(valset, num_workers=4, batch_size=params['batch_size'], shuffle=False, collate_fn=dataset.collate)
    test_loader = DataLoader(testset, num_workers=4, batch_size=params['batch_size'], shuffle=False, collate_fn=dataset.collate)
    # At any point you can hit Ctrl + C to break out of training early.
    try:
        with tqdm(range(params['epochs'])) as t:
            # NOTE(review): if params['epochs'] == 0 this loop never runs and
            # 'epoch' (used after the try block) is undefined -- confirm that
            # configs always specify epochs >= 1.
            for epoch in t:
                t.set_description('Epoch %d' % epoch)
                start = time.time()
                epoch_train_loss, epoch_train_mae, optimizer = train_epoch(model, optimizer, device, train_loader, epoch)
                epoch_val_loss, epoch_val_mae, __ = evaluate_network(model, device, val_loader, epoch)
                epoch_test_loss, epoch_test_mae, __ = evaluate_network(model, device, test_loader, epoch)
                del __
                epoch_train_losses.append(epoch_train_loss)
                epoch_val_losses.append(epoch_val_loss)
                epoch_train_MAEs.append(epoch_train_mae)
                epoch_val_MAEs.append(epoch_val_mae)
                # TensorBoard scalars for this epoch.
                writer.add_scalar('train/_loss', epoch_train_loss, epoch)
                writer.add_scalar('val/_loss', epoch_val_loss, epoch)
                writer.add_scalar('train/_mae', epoch_train_mae, epoch)
                writer.add_scalar('val/_mae', epoch_val_mae, epoch)
                writer.add_scalar('test/_mae', epoch_test_mae, epoch)
                writer.add_scalar('learning_rate', optimizer.param_groups[0]['lr'], epoch)
                t.set_postfix(time=time.time()-start, lr=optimizer.param_groups[0]['lr'],
                              train_loss=epoch_train_loss, val_loss=epoch_val_loss,
                              train_MAE=epoch_train_mae, val_MAE=epoch_val_mae,
                              test_MAE=epoch_test_mae)
                per_epoch_time.append(time.time()-start)
                # Saving checkpoint
                ckpt_dir = os.path.join(root_ckpt_dir, "RUN_")
                if not os.path.exists(ckpt_dir):
                    os.makedirs(ckpt_dir)
                torch.save(model.state_dict(), '{}.pkl'.format(ckpt_dir + "/epoch_" + str(epoch)))
                # Prune old checkpoints: keep only the previous and current epoch.
                files = glob.glob(ckpt_dir + '/*.pkl')
                for file in files:
                    epoch_nb = file.split('_')[-1]
                    epoch_nb = int(epoch_nb.split('.')[0])
                    if epoch_nb < epoch-1:
                        os.remove(file)
                scheduler.step(epoch_val_loss)
                # Early stop once the scheduler has decayed LR below min_lr.
                if optimizer.param_groups[0]['lr'] < params['min_lr']:
                    print("\n!! LR EQUAL TO MIN LR SET.")
                    break
                # Stop training after params['max_time'] hours
                if time.time()-t0 > params['max_time']*3600:
                    print('-' * 89)
                    print("Max_time for training elapsed {:.2f} hours, so stopping".format(params['max_time']))
                    break
    except KeyboardInterrupt:
        print('-' * 89)
        print('Exiting from training early because of KeyboardInterrupt')
    # Final evaluation on test and train sets with the last model state.
    test_loss_lapeig, test_mae, g_outs_test = evaluate_network(model, device, test_loader, epoch)
    train_loss_lapeig, train_mae, g_outs_train = evaluate_network(model, device, train_loader, epoch)
    print("Test MAE: {:.4f}".format(test_mae))
    print("Train MAE: {:.4f}".format(train_mae))
    print("Convergence Time (Epochs): {:.4f}".format(epoch))
    print("TOTAL TIME TAKEN: {:.4f}s".format(time.time()-t0))
    print("AVG TIME PER EPOCH: {:.4f}s".format(np.mean(per_epoch_time)))
    if net_params['pe_init'] == 'rand_walk':
        # Visualize actual and predicted/learned eigenvecs
        from utils.plot_util import plot_graph_eigvec
        if not os.path.exists(viz_dir):
            os.makedirs(viz_dir)
        # Fixed sample of graph indices to plot from the evaluation outputs.
        sample_graph_ids = [15,25,45]
        for f_idx, graph_id in enumerate(sample_graph_ids):
            # Test graphs
            g_dgl = g_outs_test[graph_id]
            f = plt.figure(f_idx, figsize=(12,6))
            plt1 = f.add_subplot(121)
            plot_graph_eigvec(plt1, graph_id, g_dgl, feature_key='eigvec', actual_eigvecs=True)
            plt2 = f.add_subplot(122)
            plot_graph_eigvec(plt2, graph_id, g_dgl, feature_key='p', predicted_eigvecs=True)
            f.savefig(viz_dir+'/test'+str(graph_id)+'.jpg')
            # Train graphs
            g_dgl = g_outs_train[graph_id]
            f = plt.figure(f_idx, figsize=(12,6))
            plt1 = f.add_subplot(121)
            plot_graph_eigvec(plt1, graph_id, g_dgl, feature_key='eigvec', actual_eigvecs=True)
            plt2 = f.add_subplot(122)
            plot_graph_eigvec(plt2, graph_id, g_dgl, feature_key='p', predicted_eigvecs=True)
            f.savefig(viz_dir+'/train'+str(graph_id)+'.jpg')
    writer.close()
    """
        Write the results in out_dir/results folder
    """
    with open(write_file_name + '.txt', 'w') as f:
        f.write("""Dataset: {},\nModel: {}\n\nparams={}\n\nnet_params={}\n\n{}\n\nTotal Parameters: {}\n\n
    FINAL RESULTS\nTEST MAE: {:.4f}\nTRAIN MAE: {:.4f}\n\n
    Convergence Time (Epochs): {:.4f}\nTotal Time Taken: {:.4f} hrs\nAverage Time Per Epoch: {:.4f} s\n\n\n"""\
                .format(DATASET_NAME, MODEL_NAME, params, net_params, model, net_params['total_param'],
                        test_mae, train_mae, epoch, (time.time()-t0)/3600, np.mean(per_epoch_time)))
def main():
"""
USER CONTROLS
"""
parser = argparse.ArgumentParser()
parser.add_argument('--config', help="Please give a config.json file with training/model/data/param details")
parser.add_argument('--gpu_id', help="Please give a value for gpu id")
parser.add_argument('--model', help="Please give a value for model name")
parser.add_argument('--dataset', help="Please give a value for dataset name")
parser.add_argument('--out_dir', help="Please give a value for out_dir")
parser.add_argument('--seed', help="Please give a value for seed")
parser.add_argument('--epochs', help="Please give a value for epochs")
parser.add_argument('--batch_size', help="Please give a value for batch_size")
parser.add_argument('--init_lr', help="Please give a value for init_lr")
parser.add_argument('--lr_reduce_factor', help="Please give a value for lr_reduce_factor")
parser.add_argument('--lr_schedule_patience', help="Please give a value for lr_schedule_patience")
parser.add_argument('--min_lr', help="Please give a value for min_lr")
parser.add_argument('--weight_decay', help="Please give a value for weight_decay")
parser.add_argument('--print_epoch_interval', help="Please give a value for print_epoch_interval")
parser.add_argument('--L', help="Please give a value for L")
parser.add_argument('--hidden_dim', help="Please give a value for hidden_dim")
parser.add_argument('--out_dim', help="Please give a value for out_dim")
parser.add_argument('--residual', help="Please give a value for residual")
parser.add_argument('--edge_feat', help="Please give a value for edge_feat")
parser.add_argument('--readout', help="Please give a value for readout")
parser.add_argument('--in_feat_dropout', help="Please give a value for in_feat_dropout")
parser.add_argument('--dropout', help="Please give a value for dropout")
parser.add_argument('--layer_norm', help="Please give a value for layer_norm")
parser.add_argument('--batch_norm', help="Please give a value for batch_norm")
parser.add_argument('--max_time', help="Please give a value for max_time")
parser.add_argument('--pos_enc_dim', help="Please give a value for pos_enc_dim")
parser.add_argument('--pos_enc', help="Please give a value for pos_enc")
parser.add_argument('--alpha_loss', help="Please give a value for alpha_loss")
parser.add_argument('--lambda_loss', help="Please give a value for lambda_loss")
parser.add_argument('--pe_init', help="Please give a value for pe_init")
args = parser.parse_args()
with open(args.config) as f:
config = json.load(f)
# device
if args.gpu_id is not None:
config['gpu']['id'] = int(args.gpu_id)
config['gpu']['use'] = True
device = gpu_setup(config['gpu']['use'], config['gpu']['id'])
# model, dataset, out_dir
if args.model is not None:
MODEL_NAME = args.model
else:
MODEL_NAME = config['model']
if args.dataset is not None:
DATASET_NAME = args.dataset
else:
DATASET_NAME = config['dataset']
dataset = LoadData(DATASET_NAME)
if args.out_dir is not None:
out_dir = args.out_dir
else:
out_dir = config['out_dir']
# parameters
params = config['params']
if args.seed is not None:
params['seed'] = int(args.seed)
if args.epochs is not None:
params['epochs'] = int(args.epochs)
if args.batch_size is not None:
params['batch_size'] = int(args.batch_size)
if args.init_lr is not None:
params['init_lr'] = float(args.init_lr)
if args.lr_reduce_factor is not None:
params['lr_reduce_factor'] = float(args.lr_reduce_factor)
if args.lr_schedule_patience is not None:
params['lr_schedule_patience'] = int(args.lr_schedule_patience)
if args.min_lr is not None:
params['min_lr'] = float(args.min_lr)
if args.weight_decay is not None:
params['weight_decay'] = float(args.weight_decay)
if args.print_epoch_interval is not None:
params['print_epoch_interval'] = int(args.print_epoch_interval)
if args.max_time is not None:
params['max_time'] = float(args.max_time)
# network parameters
net_params = config['net_params']
net_params['device'] = device
net_params['gpu_id'] = config['gpu']['id']
net_params['batch_size'] = params['batch_size']
if args.L is not None:
net_params['L'] = int(args.L)
if args.hidden_dim is not None:
net_params['hidden_dim'] = int(args.hidden_dim)
if args.out_dim is not None:
net_params['out_dim'] = int(args.out_dim)
if args.residual is not None:
net_params['residual'] = True if args.residual=='True' else False
if args.edge_feat is not None:
net_params['edge_feat'] = True if args.edge_feat=='True' else False
if args.readout is not None:
net_params['readout'] = args.readout
if args.in_feat_dropout is not None:
net_params['in_feat_dropout'] = float(args.in_feat_dropout)
if args.dropout is not None:
net_params['dropout'] = float(args.dropout)
if args.layer_norm is not None:
net_params['layer_norm'] = True if args.layer_norm=='True' else False
if args.batch_norm is not None:
net_params['batch_norm'] = True if args.batch_norm=='True' else False
if args.pos_enc is not None:
net_params['pos_enc'] = True if args.pos_enc=='True' else False
if args.pos_enc_dim is not None:
net_params['pos_enc_dim'] = int(args.pos_enc_dim)
if args.alpha_loss is not None:
net_params['alpha_loss'] = float(args.alpha_loss)
if args.lambda_loss is not None:
net_params['lambda_loss'] | |
import collections
import collections.abc
import itertools

from mhdata import cfg
from mhdata.io import DataMap
from mhdata.util import ensure_warn

from . import datafn
def validate(mhdata):
    """Run every validator, print any errors, and return True when all pass."""
    validators = (
        validate_items,
        validate_locations,
        validate_monsters,
        validate_monster_rewards,
        validate_skills,
        validate_armor,
        validate_weapons,
        validate_charms,
        validate_quests,
    )
    errors = [error for check in validators for error in check(mhdata)]
    for error in errors:
        print("ERROR: " + error)
    return not errors
def validate_items(mhdata):
    """Return error strings for item combinations that reference unknown items."""
    errors = []
    for combo in mhdata.item_combinations:
        # result/first/second may each be null; only check the ones present.
        for item in (combo['result'], combo['first'], combo['second']):
            if item and item not in mhdata.item_map.names('en'):
                errors.append(f"{item} in combinations doesn't exist")
    return errors
def validate_locations(mhdata):
    """Return error strings for location item drops that reference unknown items."""
    errors = []
    for location_entry in mhdata.location_map.values():
        for item_entry in location_entry['items']:
            name = item_entry['item']
            lang = item_entry['item_lang']
            # The item name is stored in a specific language; look it up there.
            if name not in mhdata.item_map.names(lang):
                errors.append(f"{name} in location items doesn't exist")
    return errors
def validate_monsters(mhdata):
    """Return errors for monster weakness data; warn on missing hitzones."""
    errors = []
    # Pass 1: every monster ought to carry hitzone data (warning only).
    for entry in mhdata.monster_map.values():
        ensure_warn('hitzones' in entry, f"Monster {entry.name('en')} missing hitzones")
    # Pass 2: large monsters must declare weaknesses, including the 'normal' form.
    for entry in mhdata.monster_map.values():
        if entry['size'] == 'small':
            continue
        name = entry.name('en')
        if 'weaknesses' not in entry:
            print(f"Warning: Large monster {name} does not contain a weakness entry")
            continue
        forms = [weakness['form'] for weakness in entry['weaknesses']]
        if 'normal' not in forms:
            errors.append(f"Invalid weaknesses in {name}, normal is a required state")
    return errors
def validate_monster_rewards(mhdata):
    """Validates monster rewards for sane values.

    Certain fields (like carve) must sum to exactly 100%; others (like quest
    rewards) must sum to at least 100%. Returns a set of error strings;
    percentage problems are surfaced as warnings via ensure_warn instead.
    """
    # Conditions listed here are validated as >= 100% rather than exactly
    # 100%, because quest rewards sometimes contain a guaranteed extra reward.
    # We should probably separate those, but most databases don't.
    # BUGFIX: the trailing comma is required -- without it this was a plain
    # string, and `condition not in uncapped_conditions` performed a substring
    # test (so e.g. the condition "Bronze" was wrongly treated as uncapped).
    uncapped_conditions = ("Quest Reward (Bronze)",)
    errors = set()
    for monster_id, entry in mhdata.monster_map.items():
        if 'rewards' not in entry:
            continue
        monster_name = entry.name('en') # used for error display
        # First pass: every reward row must reference a known condition, item,
        # and rank. Skip the percentage check if any row is invalid.
        valid = True
        for reward in entry['rewards']:
            condition = reward['condition_en']
            rank = reward['rank']
            # ensure condition exists
            if condition not in mhdata.monster_reward_conditions_map.names('en'):
                errors.add(f"Invalid condition {condition} in monster {monster_name}")
                valid = False
            if reward['item_en'] not in mhdata.item_map.names('en'):
                errors.add(f"Monster reward item {reward['item_en']} doesn't exist")
                valid = False
            if rank not in cfg.supported_ranks:
                errors.add(f"Unsupported rank {rank} in {monster_name} rewards")
                valid = False
        if not valid:
            continue
        # Second pass: group rewards by (rank, condition) and check that each
        # group's percentages sum to 100 (or >= 100 for uncapped conditions).
        rank_reward_key_fn = lambda r: (r['rank'], r['condition_en'])
        sorted_rewards = sorted(entry['rewards'], key=rank_reward_key_fn)
        for (rank, condition), items in itertools.groupby(sorted_rewards, rank_reward_key_fn):
            items = list(items)
            # Check if any item is considered unknown. If any are, all must be unknown.
            num_unknown_percent = len([i for i in items if i['percentage'] is None])
            if num_unknown_percent == len(items):
                continue
            elif num_unknown_percent > 0:
                errors.add(f"Error with {monster_name} rewards" +
                    f" - entries for ({rank}, {condition}) must all be blank or must all have a percentage.")
                continue
            percentage_sum = sum((int(r['percentage']) for r in items), 0)
            key_str = f"(rank {rank} condition {condition})"
            error_start = f"Rewards %'s for monster {monster_name} {key_str}"
            if condition not in uncapped_conditions:
                ensure_warn(
                    percentage_sum == 100,
                    f"{error_start} does not sum to 100")
            else:
                ensure_warn(
                    percentage_sum >= 100,
                    f"{error_start} does not sum to at least 100")
    return errors
def validate_skills(mhdata):
    """Return errors for skill level ranges and secret-skill unlock links."""
    errors = []
    all_unlocked = set()
    for skill in mhdata.skill_map.values():
        skill_name = skill['name']['en']
        expected_max = len(skill['levels'])
        # Collect the level numbers actually present; reject out-of-range ones.
        seen_levels = set()
        for level in skill['levels']:
            value = level['level']
            if 0 <= value <= expected_max:
                seen_levels.add(value)
            else:
                errors.append(f"Skill {skill_name} has out of range effect {value}")
        if len(seen_levels) != expected_max:
            errors.append(f"Skill {skill_name} is missing effect levels")
        # An 'unlocks' link must point at an existing secret skill.
        unlocked = skill['unlocks']
        if not unlocked:
            continue
        unlocked_skill = mhdata.skill_map.entry_of('en', unlocked)
        if not unlocked_skill:
            errors.append(f"Skill {skill_name} unlocks invalid skill {unlocked}")
        elif not unlocked_skill['secret']:
            errors.append(f"Skill {skill_name} unlocks skill {unlocked}, but that skill is not a secret skill")
        else:
            all_unlocked.add(unlocked)
    # second pass, make sure that all secret skills are unlocked by something
    for skill in mhdata.skill_map.values():
        skill_name = skill['name']['en']
        if skill['secret'] and skill_name not in all_unlocked:
            errors.append(f"Skill {skill_name} has locked slots, but has no associated secret skill")
    return errors
def validate_armor(mhdata):
    """Return errors for armor sets, armor pieces, and armorset bonuses."""
    errors = []
    # Every armor piece may belong to at most one set; track the ids seen.
    seen_armor_ids = set()
    # Validate armorsets
    for setentry in mhdata.armorset_map.values():
        setname = setentry.name('en')
        monster_name = setentry['monster']
        if monster_name and monster_name not in mhdata.monster_map.names('en'):
            errors.append(f"Armorset {setname} has invalid monster {monster_name}")
        # All armor pieces in the set (null parts are skipped).
        piece_names = [setentry[part] for part in cfg.armor_parts]
        piece_names = [name for name in piece_names if name]
        if not piece_names:
            print(f"Warning: {setname} has no armor entries")
        for armor_name in piece_names:
            armor_id = mhdata.armor_map.id_of('en', armor_name)
            if not armor_id:
                errors.append(f"Armorset {setname} has invalid armor {armor_name}")
            elif armor_id in seen_armor_ids:
                errors.append(f"Armorset {setname} has duplicated armor {armor_name}")
            else:
                seen_armor_ids.add(armor_id)
    # Validate Armor
    for armor_entry in mhdata.armor_map.values():
        # Ensure that all armor items were encountered in some set
        if armor_entry.id not in seen_armor_ids:
            errors.append(f"Armor {armor_entry.name('en')} is not in an armor set")
        # Ensure recipe items exist
        for item_name, _ in datafn.iter_armor_recipe(armor_entry):
            if item_name not in mhdata.item_map.names('en'):
                errors.append(f"Item {item_name} in armors does not exist")
        # Ensure skills exist
        for skill_name, _ in datafn.iter_skill_points(armor_entry):
            if skill_name not in mhdata.skill_map.names('en'):
                errors.append(f"Skill {skill_name} in armors does not exist")
    # Validate Armorset bonuses
    for bonus_entry in mhdata.armorset_bonus_map.values():
        for skill_name, _ in datafn.iter_setbonus_skills(bonus_entry):
            if skill_name not in mhdata.skill_map.names('en'):
                errors.append(f"Skill {skill_name} in set bonuses does not exist")
    return errors
def validate_weapons(mhdata):
    """Return errors for weapon entries and weapon ammo configurations.

    Checks recipes, melee sharpness, hunting-horn notes, bow/gun data,
    element/elderseal consistency, and (for gun ammo) that bullet types with a
    zero clip size carry no rapid/recoil/reload values while usable ones do.
    """
    errors = []
    for entry in mhdata.weapon_map.values():
        name = entry.name('en')
        weapon_type = entry['weapon_type']
        # Kulve weapons are excluded from the recipe requirement.
        if entry['category'] != 'Kulve':
            if not entry.get('craft', {}):
                errors.append(f"Weapon {name} does not have any recipes")
            else:
                # Check if items in the recipe exist
                for recipe in entry['craft']:
                    for item, quantity in datafn.iter_recipe(recipe):
                        if item not in mhdata.item_map.names('en'):
                            errors.append(f"Weapon {name} has invalid item {item} in a recipe")
        if weapon_type in cfg.weapon_types_melee and not entry.get('sharpness', None):
            errors.append(f"Melee weapon {name} does not have a sharpness value")
        if weapon_type == cfg.HUNTING_HORN and not entry.get('notes', None):
            errors.append(f"Hunting horn {name} is missing a notes value")
        if weapon_type == cfg.BOW and not entry.get('bow', None):
            errors.append(f"Weapon {name} is missing bow data")
        if weapon_type in cfg.weapon_types_gun:
            if not entry.get('ammo_config', None):
                errors.append(f"Weapon {name} is missing ammo config")
            elif entry['ammo_config'] not in mhdata.weapon_ammo_map:
                errors.append(f"Weapon {name} has invalid ammo config")
        if entry['element1'] and (entry['element1_attack'] or 0) == 0:
            errors.append(f"Weapon {name} has an element but is missing an attack value")
        # Test that dragon has elderseal and vice versa
        has_elderseal = entry['elderseal'] is not None
        is_dragon = entry['element1'] == 'Dragon' or entry['element2'] == 'Dragon' or entry['phial'] == 'dragon'
        if has_elderseal and not is_dragon:
            errors.append(f"Weapon {name} has elderseal but no dragon element")
        if is_dragon and not has_elderseal:
            errors.append(f"Weapon {name} has a dragon element but no elderseal")
    # Validate weapon ammo settings. Bullet types with clip size zero must have "null state" other attributes.
    for name, ammo_entry in mhdata.weapon_ammo_map.items():
        for key, data in ammo_entry.items():
            # BUGFIX: collections.Mapping (a deprecated alias) was removed in
            # Python 3.10; the ABC lives in collections.abc.
            if not isinstance(data, collections.abc.Mapping): continue
            if 'clip' not in data: continue
            if data['clip'] == 0:
                # This bullet is unusable by the weapon, so the other
                # parameters must be unset.
                if data.get('rapid', False) == True:
                    errors.append(f"{name} has invalid rapid value for {key}")
                if data.get('recoil', None):
                    errors.append(f"{name} has invalid recoil value for {key}")
                if data.get('reload', None):
                    errors.append(f"{name} has invalid reload value for {key}")
            else:
                # Usable bullet: recoil (when present) and reload must be set.
                if 'recoil' in data and not data.get('recoil', None):
                    errors.append(f"{name} is missing recoil value for {key}")
                if not data.get('reload', None):
                    errors.append(f"{name} is missing reload value for {key}")
    return errors
def validate_charms(mhdata):
    """Return errors for charms whose 'previous' charm does not exist."""
    errors = []
    known_names = mhdata.charm_map.names("en")
    for entry in mhdata.charm_map.values():
        previous = entry['previous_en']
        # previous_en is optional; when present it must name a real charm.
        if previous is not None and previous not in known_names:
            errors.append(f"Charm {previous} for previous_en does not exist")
    return errors
def validate_quests(mhdata):
# todo: use an alternative schema validation scheme that allows null checking to be separate from type coerce
errors = []
for entry in mhdata.quest_map.values():
name = entry.name('en')
if not entry['quest_type']:
errors.append(f"Quest {name} needs a quest type")
if entry['location_en'] not in mhdata.location_map.names('en'):
errors.append(f"Quest {name} has invalid location {entry['location_en']}")
monsters = set()
for monster in entry['monsters']:
monsters.add(monster['monster_en'])
if monster['monster_en'] not in mhdata.monster_map.names('en'):
errors.append(f"Quest {name} has invalid monster {monster['monster_en']}")
if len(monsters) < len(entry['monsters']):
errors.append(f"Quest {name} has duplicate monsters")
for reward in entry['rewards']:
if reward['item_en'] not | |
<gh_stars>0
#!/usr/bin/env python
"""
Copies all the Zend tests to a temporary directory, runs them in interp mode,
then copies the good ones to test/zend/good and the bad ones to test/zend/bad.
"""
import argparse
import glob
import json
import os
import re
import shutil
import subprocess
import sys
import tarfile
import urllib2
# The PHP version that we will be importing the Zend tests from.
# Must be a valid, released version from the php download site
# (presumably used to build the tarball URL fetched via urllib2 — confirm
# against the download code later in this script).
zend_version = "5.6.1"
# Don't even pull these into the repo.
# We want running the bad tests to still complete.
# NOTE: every path is repo-absolute (leading '/'); two entries previously
# lacked the slash, which made them inconsistent with every other list here.
no_import = (
    # these hang forever
    '/ext/standard/tests/array/array_pad_variation2.phpt',
    '/ext/standard/tests/general_functions/sleep_error.phpt',
    '/ext/standard/tests/general_functions/usleep_error.phpt',
    '/ext/zlib/tests/gzgetc_basic.phpt',
    '/ext/zlib/tests/gzgets_basic.phpt',
    # too large input
    '/tests/lang/024.phpt',
    # cause of failure uninvestigated:
    '/ext/zlib/tests/gzreadgzwriteplain.php',
    '/ext/spl/tests/SplFileObject_setCsvControl_variation001.php',
    # spews files until they work
    '/ext/standard/tests/file/readfile_variation4.phpt',
    '/ext/xmlwriter/tests/001.phpt',
    '/ext/xmlwriter/tests/004.phpt',
    '/ext/xmlwriter/tests/005.phpt',
    '/ext/xmlwriter/tests/006.phpt',
    '/ext/xmlwriter/tests/OO_001.phpt',
    '/ext/xmlwriter/tests/OO_004.phpt',
    '/ext/xmlwriter/tests/OO_005.phpt',
    '/ext/xmlwriter/tests/OO_006.phpt',
    # not tests
    '/ext/spl/examples/',
    '/ext/xmlwriter/examples/',
    # not implemented extensions
    '/ext/com_dotnet',
    '/ext/dba',
    '/ext/enchant',
    '/ext/ereg',
    '/ext/interbase',
    '/ext/mssql',
    '/ext/mysqlnd',
    '/ext/oci8',
    '/ext/odbc',
    '/ext/pdo_dblib',
    '/ext/pdo_firebird',
    '/ext/pdo_oci',
    '/ext/pdo_odbc',
    '/ext/pdo_pgsql',
    '/ext/pspell',
    '/ext/recode',
    '/ext/shmop',
    '/ext/skeleton',
    '/ext/snmp',
    '/ext/sybase_ct',
    '/ext/sysvmsg',
    '/ext/sysvsem',
    '/ext/sysvshm',
    '/ext/tidy',
    '/ext/wddx',
    '/ext/xmlrpc',
    '/sapi',
    # conscious decision not to match these
    '/ext/spl/tests/arrayObject_getIteratorClass_basic1.phpt',
    # Zend's "run-tests" uses the cgi-fcgi sapi to run tests
    # our implementation is simply different by design
    '/ext/standard/versioning/php_sapi_name_variation001.phpt',
)
# For marking tests as always failing. Used to keep flaky tests in flaky/.
# NOTE: every entry must end with a trailing comma — a missing comma silently
# concatenates adjacent string literals into one bogus path, dropping BOTH
# tests from the tuple (this previously happened to getmxrr.php/ipv6loop.php).
flaky_tests = (
    # line number is inconsistent on stack overflow
    '/Zend/tests/bug41633_3.php',
    # "ls" sometimes prints on stderr
    '/ext/standard/tests/streams/bug64770.php',
    # our build machines have no members in group 0...
    '/ext/posix/tests/posix_getgrgid.php',
    # Checking stat of ' ' races with mkdir_variation1
    '/ext/standard/tests/file/lstat_stat_variation22.php',
    '/ext/standard/tests/file/copy_variation3.php',
    '/ext/standard/tests/file/file_exists_variation1.php',
    # concurrency issues
    '/ext/mysql/tests/001.php',
    '/ext/mysql/tests/bug47438.php',
    '/ext/mysql/tests/mysql_client_encoding.php',
    '/ext/mysql/tests/mysql_select_db.php',
    '/ext/sqlite3/tests/sqlite3_08_udf.php',
    '/ext/sqlite3/tests/sqlite3_25_create_aggregate.php',
    '/ext/standard/tests/file/bug41655_2.php',
    '/ext/standard/tests/file/disk_free_space_basic.php',
    '/ext/standard/tests/file/disk_total_space_basic.php',
    '/ext/standard/tests/file/disk_total_space_variation.php',
    '/ext/standard/tests/file/fread_socket_variation1.php',
    '/ext/standard/tests/file/symlink_link_linkinfo_is_link_variation3.php',
    '/ext/standard/tests/network/fsockopen_variation1.php',
    '/ext/standard/tests/network/shutdown.php',
    '/ext/standard/tests/strings/fprintf_variation_001.php',
    '/ext/spl/tests/RecursiveDirectoryIterator_getSubPathname_basic.php',
    '/ext/spl/tests/SplFileObject_fgetcsv_basic.php',
    '/ext/spl/tests/SplFileObject_fgetcsv_delimiter_basic.php',
    '/ext/spl/tests/SplFileObject_fgetcsv_escape_basic.php',
    '/ext/spl/tests/SplFileObject_fgetcsv_escape_default.php',
    '/ext/spl/tests/SplFileObject_fgetcsv_enclosure_basic.php',
    '/ext/spl/tests/SplFileObject_fgetcsv_enclosure_error.php',
    '/ext/spl/tests/SplFileObject_setCsvControl_error001.php',
    '/ext/spl/tests/SplFileObject_setCsvControl_error002.php',
    '/ext/spl/tests/SplFileObject_setCsvControl_error003.php',
    '/ext/phar/tests/019.php',
    '/ext/spl/tests/dit_006.php',
    # these tests use each other's data
    '/ext/standard/tests/file/bug38086.php',
    '/ext/standard/tests/file/stream_copy_to_stream.php',
    # these all write to temp3.txt.gz
    '/ext/zlib/tests/gzseek_basic2.php',
    '/ext/zlib/tests/gzseek_variation1.php',
    '/ext/zlib/tests/gzseek_variation4.php',
    '/ext/zlib/tests/gzseek_variation5.php',
    # broken on ubuntu 12.04
    '/ext/date/tests/DateTimeZone_listAbbreviations_basic1.php',
    '/ext/date/tests/bug52290.php',
    '/ext/date/tests/timezone_abbreviations_list_basic1.php',
    '/ext/standard/tests/streams/bug61115-2.php',
    # timing dependent
    '/ext/date/tests/bug48187.php',
    # works in interp but fails in JIT
    '/ext/standard/tests/array/array_next_error2.php',
    '/ext/standard/tests/array/prev_error3.php',
    '/ext/standard/tests/class_object/get_object_vars_variation_003.php',
    '/tests/lang/038.php',
    # flaky for various reasons
    '/ext/sockets/tests/socket_getpeername_ipv6loop.php',
    # segfaults on contbuild in opt
    '/ext/standard/tests/strings/explode_bug.php',
    # XSL
    '/ext/xsl/tests/bug49634.php',
    '/ext/xsl/tests/bug54446_with_ini.php',
    '/ext/xsl/tests/xsl-phpinfo.php',
    '/ext/xsl/tests/xslt009.php',
    '/ext/xsl/tests/xsltprocessor_getParameter-wrongparam.php',
    '/ext/xsl/tests/xsltprocessor_removeParameter-wrongparams.php',
    # flaky: t3619770
    '/ext/zlib/tests/gzfile_basic.php',
    '/ext/zlib/tests/readgzfile_basic.php',
    # flaky: t3817758
    '/ext/ftp/tests/ftp_nb_fget_basic1.php',
    # flaky: t3851970
    '/ext/sockets/tests/socket_bind_params.php',
    '/ext/sockets/tests/socket_getpeername.php',
    '/ext/session/tests/009.php',
    '/ext/standard/tests/file/bug39538.php',
    '/ext/standard/tests/file/bug53848.php',
    '/ext/standard/tests/general_functions/proc_open02.php',
    '/ext/ftp/tests/bug39458.php',
    '/ext/standard/tests/file/rename_variation3.php',
    '/ext/standard/tests/file/mkdir-003.php',
    '/ext/standard/tests/file/symlink_link_linkinfo_is_link_variation9.php',
    '/ext/zlib/tests/gzfile_basic.php',
    '/ext/standard/tests/file/fopen_variation12.php',
    '/ext/standard/tests/file/mkdir-002.php',
    '/ext/standard/tests/file/readlink_realpath_variation1.php',
    '/ext/session/tests/027.php',
    '/ext/standard/tests/file/lchgrp_basic.php',
    '/ext/standard/tests/file/mkdir-001.php',
    '/ext/sockets/tests/ipv4loop.php',
    '/ext/ftp/tests/ftp_alloc_basic1.php',
    '/ext/ftp/tests/ftp_alloc_basic2.php',
    # flaky on Travis: t4088096
    '/ext/curl/tests/curl_copy_handle_basic_006.php',
    '/ext/curl/tests/curl_copy_handle_basic_007.php',
    # a new process can crop up
    '/ext/posix/tests/posix_kill_basic.php',
    # Using PHP7 versions of these tests
    '/ext/session/tests/session_set_save_handler_class_002.php',
    '/ext/session/tests/session_set_save_handler_class_016.php',
    '/ext/session/tests/session_set_save_handler_iface_001.php',
    # unsure why
    '/ext/standard/tests/file/symlink_link_linkinfo_is_link_variation4.php',
    # don't use the internet in tests...
    '/ext/standard/tests/network/gethostbyname_error004.php',
    '/ext/standard/tests/network/getmxrr.php',
    # php coderz are so l33t
    '/ext/sockets/tests/ipv6loop.php',
    '/ext/sockets/tests/socket_getpeername_ipv4loop.php',
    '/ext/standard/tests/network/fsockopen_variation2.php',
    '/ext/sockets/tests/socket_create_listen.php',
    '/ext/sockets/tests/socket_create_listen-win32.php',
    # it references a whole directory with *
    '/ext/standard/tests/file/copy_variation6.php',
    # Tests for a bug in PHP
    '/ext/gmp/tests/014.php',
    # Another process can be created in the meantime
    '/ext/posix/tests/posix_errno_variation2.php',
    # duplicate of a test in test/slow
    '/ext/gmp/tests/002.php',
    # Something could be on sending on that UDP port
    '/ext/standard/tests/network/bug20134.php',
    # These pass with a proper proxy setup, which our build machines don't seem
    # to have. i.e., the internet is attempted to be used and that is bad
    '/ext/soap/tests/bugs/bug40609.php',
    '/ext/soap/tests/schema/schema060.php',
    '/ext/soap/tests/schema/schema084.php',
    '/ext/soap/tests/schema/schema008.php',
    '/ext/soap/tests/schema/schema070.php',
    '/ext/soap/tests/schema/schema076.php',
    '/ext/soap/tests/schema/schema034.php',
    '/ext/soap/tests/schema/schema027.php',
    '/ext/soap/tests/schema/schema037.php',
    '/ext/soap/tests/schema/schema028.php',
    '/ext/soap/tests/schema/schema080.php',
    '/ext/soap/tests/schema/schema033.php',
    '/ext/soap/tests/schema/schema031.php',
    '/ext/soap/tests/schema/schema075.php',
    '/ext/soap/tests/schema/schema015.php',
    '/ext/soap/tests/schema/schema018.php',
    '/ext/soap/tests/schema/schema069.php',
    '/ext/soap/tests/schema/schema065.php',
    '/ext/soap/tests/schema/schema019.php',
    '/ext/soap/tests/schema/schema061.php',
    '/ext/soap/tests/schema/schema074.php',
    '/ext/soap/tests/schema/schema071.php',
    '/ext/soap/tests/schema/schema077.php',
    '/ext/soap/tests/schema/schema017.php',
    '/ext/soap/tests/schema/schema005.php',
    '/ext/soap/tests/schema/schema058.php',
    '/ext/soap/tests/schema/schema003.php',
    '/ext/soap/tests/schema/schema079.php',
    '/ext/soap/tests/schema/schema032.php',
    '/ext/soap/tests/schema/schema047.php',
    '/ext/soap/tests/schema/schema004.php',
    '/ext/soap/tests/schema/schema016.php',
    '/ext/soap/tests/schema/schema045.php',
    '/ext/soap/tests/schema/schema039.php',
    '/ext/soap/tests/schema/schema026.php',
    '/ext/soap/tests/schema/schema038.php',
    '/ext/soap/tests/schema/schema001.php',
    '/ext/soap/tests/schema/schema050.php',
    '/ext/soap/tests/schema/schema041.php',
    '/ext/soap/tests/schema/schema083.php',
    '/ext/soap/tests/schema/schema011.php',
    '/ext/soap/tests/schema/schema062.php',
    '/ext/soap/tests/schema/schema029.php',
    '/ext/soap/tests/schema/schema073.php',
    '/ext/soap/tests/schema/schema025.php',
    '/ext/soap/tests/schema/schema044.php',
    '/ext/soap/tests/schema/schema023.php',
    '/ext/soap/tests/schema/schema014.php',
    '/ext/soap/tests/schema/schema052.php',
    '/ext/soap/tests/schema/schema024.php',
    '/ext/soap/tests/schema/schema072.php',
    '/ext/soap/tests/schema/schema006.php',
    '/ext/soap/tests/schema/schema082.php',
    '/ext/soap/tests/schema/schema053.php',
    '/ext/soap/tests/schema/schema085.php',
    '/ext/soap/tests/schema/schema049.php',
    '/ext/soap/tests/schema/schema063.php',
    '/ext/soap/tests/schema/schema040.php',
    '/ext/soap/tests/schema/schema043.php',
    '/ext/soap/tests/schema/schema066.php',
    '/ext/soap/tests/schema/schema048.php',
    '/ext/soap/tests/schema/schema046.php',
    '/ext/soap/tests/schema/schema007.php',
    '/ext/soap/tests/schema/schema056.php',
    '/ext/soap/tests/schema/schema067.php',
    '/ext/soap/tests/schema/schema042.php',
    '/ext/soap/tests/schema/schema059.php',
    '/ext/soap/tests/schema/schema054.php',
    '/ext/soap/tests/schema/schema036.php',
    '/ext/soap/tests/schema/schema057.php',
    '/ext/soap/tests/schema/schema002.php',
    '/ext/soap/tests/schema/schema013.php',
    '/ext/soap/tests/schema/schema051.php',
    '/ext/soap/tests/schema/schema009.php',
    '/ext/soap/tests/schema/schema078.php',
    '/ext/soap/tests/schema/schema081.php',
    '/ext/soap/tests/schema/schema030.php',
    '/ext/soap/tests/schema/schema055.php',
    '/ext/soap/tests/schema/schema021.php',
    '/ext/soap/tests/schema/schema035.php',
    '/ext/standard/tests/network/http-stream.php',
)
# Tests that work but not in repo mode.
# NOTE: all paths are repo-absolute (leading '/'); the DOMNode_hasChildNodes
# entry previously lacked the slash, making it inconsistent with every other
# entry and likely never matching.
norepo_tests = (
    # TODO: See if any of these should work in repo mode
    '/Zend/tests/014.php',
    '/Zend/tests/035.php',
    '/Zend/tests/bug26697.php',
    '/Zend/tests/bug28444.php',
    '/Zend/tests/bug30519.php',
    '/Zend/tests/bug30922.php',
    '/Zend/tests/bug34064.php',
    '/Zend/tests/bug36006.php',
    '/Zend/tests/bug36759.php',
    '/Zend/tests/bug39542.php',
    '/Zend/tests/bug43651.php',
    '/Zend/tests/bug44141.php',
    '/Zend/tests/bug47593.php',
    '/Zend/tests/bug55007.php',
    '/Zend/tests/bug55578.php',
    '/Zend/tests/bug60771.php',
    '/Zend/tests/bug63741.php',
    '/Zend/tests/bug67436/bug67436.php',
    '/Zend/tests/class_alias_013.php',
    '/Zend/tests/class_constants_003.php',
    '/Zend/tests/class_exists_001.php',
    '/Zend/tests/closure_040.php',
    '/Zend/tests/closure_042.php',
    '/Zend/tests/constants_005.php',
    '/Zend/tests/errmsg_007.php',
    '/Zend/tests/errmsg_026.php',
    '/Zend/tests/errmsg_035.php',
    '/Zend/tests/errmsg_036.php',
    '/Zend/tests/error_reporting03.php',
    '/Zend/tests/error_reporting04.php',
    '/Zend/tests/error_reporting08.php',
    '/Zend/tests/error_reporting09.php',
    '/Zend/tests/halt_compiler2.php',
    '/Zend/tests/jump14.php',
    '/Zend/tests/lsb_013.php',
    '/Zend/tests/lsb_021.php',
    '/Zend/tests/lsb_022.php',
    '/Zend/tests/ns_041.php',
    '/Zend/tests/objects_029.php',
    '/Zend/tests/objects_030.php',
    '/Zend/tests/traits/bug55554b.php',
    '/Zend/tests/traits/bug60369.php',
    '/Zend/tests/traits/bug60809.php',
    '/Zend/tests/traits/bugs/overridding-conflicting-property-initializer.php',
    '/Zend/tests/traits/error_003.php',
    '/Zend/tests/traits/property003.php',
    '/Zend/tests/traits/property004.php',
    '/Zend/tests/unset_cv01.php',
    '/ext/bz2/tests/with_strings.php',
    '/ext/pcre/tests/preg_replace.php',
    '/ext/pdo_mysql/tests/pdo_mysql_connect_charset.php',
    '/ext/pdo_sqlite/tests/bug33841.php',
    '/ext/pdo_sqlite/tests/bug46139.php',
    '/ext/pdo_sqlite/tests/bug52487.php',
    '/ext/phar/tests/012.php',
    # missing undefined variable notice
    '/ext/posix/tests/posix_kill_variation1.php',
    '/ext/posix/tests/posix_kill_variation2.php',
    '/ext/posix/tests/posix_strerror_variation1.php',
    '/ext/posix/tests/posix_getgrgid_variation.php',
    '/ext/posix/tests/posix_getpwuid_variation.php',
    ####################################
    '/ext/reflection/tests/bug64936.php',
    '/ext/reflection/tests/bug29268.php',
    '/ext/reflection/tests/traits005.php',
    '/ext/sqlite3/tests/bug47159.php',
    '/ext/sqlite3/tests/sqlite3_01_open.php',
    '/ext/sqlite3/tests/sqlite3_02_create.php',
    '/ext/sqlite3/tests/sqlite3_03_insert.php',
    '/ext/sqlite3/tests/sqlite3_04_update.php',
    '/ext/sqlite3/tests/sqlite3_05_delete.php',
    '/ext/sqlite3/tests/sqlite3_09_blob_bound_param.php',
    '/ext/sqlite3/tests/sqlite3_13_skip_all_cleanup.php',
    '/ext/sqlite3/tests/sqlite3_14_querysingle.php',
    '/ext/sqlite3/tests/sqlite3_16_select_no_results.php',
    '/ext/sqlite3/tests/sqlite3_18_changes.php',
    '/ext/sqlite3/tests/sqlite3_19_columninfo.php',
    '/ext/sqlite3/tests/sqlite3_20_error.php',
    '/ext/sqlite3/tests/sqlite3_22_loadextension.php',
    '/ext/sqlite3/tests/sqlite3_23_escape_string.php',
    '/ext/sqlite3/tests/sqlite3_24_last_insert_rowid.php',
    '/ext/sqlite3/tests/sqlite3stmt_paramCount_basic.php',
    '/ext/sqlite3/tests/sqlite3stmt_paramCount_error.php',
    '/ext/standard/tests/array/001.php',
    '/ext/standard/tests/array/003.php',
    '/ext/standard/tests/array/sizeof_variation4.php',
    '/ext/standard/tests/assert/assert.php',
    '/ext/standard/tests/class_object/class_exists_basic_001.php',
    '/ext/standard/tests/class_object/get_declared_classes_variation1.php',
    '/ext/standard/tests/class_object/get_declared_interfaces_variation1.php',
    '/ext/standard/tests/class_object/get_declared_traits_variation1.php',
    '/ext/standard/tests/class_object/get_class_methods_variation_001',
    '/ext/standard/tests/class_object/interface_exists_variation3.php',
    '/ext/standard/tests/class_object/interface_exists_variation4.php',
    '/ext/standard/tests/class_object/is_a_variation_001.php',
    '/ext/standard/tests/class_object/is_subclass_of_variation_002',
    '/ext/standard/tests/class_object/method_exists_variation_002',
    '/ext/standard/tests/class_object/property_exists_variation1',
    '/ext/standard/tests/file/file_get_contents_basic.php',
    '/ext/standard/tests/file/file_get_contents_file_put_contents_basic.php',
    '/ext/standard/tests/file/file_get_contents_file_put_contents_variation1.php',
    '/ext/standard/tests/file/file_get_contents_file_put_contents_variation2.php',
    '/ext/standard/tests/file/file_get_contents_variation1.php',
    '/ext/standard/tests/file/readfile_variation6.php',
    '/ext/standard/tests/file/unlink_variation8.php',
    '/ext/standard/tests/file/unlink_variation10.php',
    '/ext/standard/tests/general_functions/is_callable_error.php',
    '/ext/standard/tests/general_functions/is_numeric.php',
    '/ext/standard/tests/math/abs.php',
    '/ext/standard/tests/math/acos_basic.php',
    '/ext/standard/tests/math/acosh_basic.php',
    '/ext/standard/tests/math/asin_basic.php',
    '/ext/standard/tests/math/asinh_basic.php',
    '/ext/standard/tests/math/atan_basic.php',
    '/ext/standard/tests/math/atanh_basic.php',
    '/ext/standard/tests/math/cos_basic.php',
    '/ext/standard/tests/math/cosh_basic.php',
    '/ext/standard/tests/math/deg2rad_basic.php',
    '/ext/standard/tests/math/log10_basic.php',
    '/ext/standard/tests/math/pow.php',
    '/ext/standard/tests/math/rad2deg_basic.php',
    '/ext/standard/tests/math/sin_basic.php',
    '/ext/standard/tests/math/sinh_basic.php',
    '/ext/standard/tests/math/tan_basic.php',
    '/ext/standard/tests/math/tanh_basic.php',
    '/ext/standard/tests/serialize/bug30234.php',
    '/ext/standard/tests/streams/bug64770.php',
    '/ext/standard/tests/streams/stream_resolve_include_path.php',
    '/ext/standard/tests/strings/bug44242.php',
    '/ext/standard/tests/strings/trim.php',
    '/ext/standard/tests/strings/wordwrap.php',
    '/ext/standard/tests/url/base64_encode_variation_001.php',
    '/ext/standard/tests/url/parse_url_basic_001.php',
    '/ext/standard/tests/url/parse_url_basic_002.php',
    '/ext/standard/tests/url/parse_url_basic_003.php',
    '/ext/standard/tests/url/parse_url_basic_004.php',
    '/ext/standard/tests/url/parse_url_basic_005.php',
    '/ext/standard/tests/url/parse_url_basic_006.php',
    '/ext/standard/tests/url/parse_url_basic_007.php',
    '/ext/standard/tests/url/parse_url_basic_008.php',
    '/ext/standard/tests/url/parse_url_basic_009.php',
    '/ext/standard/tests/url/parse_url_variation_001.php',
    '/ext/standard/tests/url/parse_url_variation_002_64bit.php',
    '/ext/standard/tests/url/rawurldecode_variation_001.php',
    '/ext/standard/tests/url/rawurlencode_variation_001.php',
    '/ext/standard/tests/url/urldecode_variation_001.php',
    '/ext/standard/tests/url/urlencode_variation_001.php',
    '/ext/tokenizer/tests/token_get_all_variation19.php',
    '/ext/xsl/tests/bug48221.php',
    '/ext/xsl/tests/bug54446.php',
    '/ext/xsl/tests/xslt001.php',
    '/ext/xsl/tests/xslt002.php',
    '/ext/xsl/tests/xslt003.php',
    '/ext/xsl/tests/xslt004.php',
    '/ext/xsl/tests/xslt005.php',
    '/ext/xsl/tests/xslt006.php',
    '/ext/xsl/tests/xslt007.php',
    '/ext/xsl/tests/xsltprocessor_getParameter-invalidparam.php',
    '/ext/xsl/tests/xsltprocessor_getParameter.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-allfuncs.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-array-multiple.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-array-notallowed.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-array.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-funcnostring.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-funcundef.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-null.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-string-multiple.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-string-notallowed.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-string.php',
    '/ext/xsl/tests/xsltprocessor_removeParameter-invalidparam.php',
    '/ext/xsl/tests/xsltprocessor_removeParameter.php',
    '/ext/xsl/tests/xsltprocessor_setparameter-errorquote.php',
    '/ext/xsl/tests/xsltprocessor_setparameter-nostring.php',
    '/ext/zip/tests/bug53579.php',
    '/ext/zip/tests/bug64342_1.php',
    '/ext/zip/tests/bug7658.php',
    '/ext/zip/tests/oo_addemptydir.php',
    '/ext/zip/tests/oo_addfile.php',
    '/ext/zip/tests/oo_extract.php',
    '/ext/zip/tests/oo_getcomment.php',
    '/ext/zip/tests/oo_getnameindex.php',
    '/ext/zip/tests/oo_namelocate.php',
    '/ext/zip/tests/oo_rename.php',
    '/ext/zip/tests/oo_setcomment.php',
    '/ext/zip/tests/oo_stream.php',
    '/ext/zlib/tests/gzcompress_basic1.php',
    '/ext/zlib/tests/gzcompress_variation1.php',
    '/ext/zlib/tests/gzdeflate_basic1.php',
    '/ext/zlib/tests/gzdeflate_variation1.php',
    '/ext/zlib/tests/gzencode_basic1.php',
    '/ext/zlib/tests/gzencode_variation1-win32.php',
    '/ext/zlib/tests/gzencode_variation1.php',
    '/ext/zlib/tests/gzinflate_error1.php',
    '/ext/zlib/tests/gzuncompress_basic1.php',
    '/tests/classes/autoload_001.php',
    '/tests/classes/autoload_002.php',
    '/tests/classes/autoload_003.php',
    '/tests/classes/autoload_004.php',
    '/tests/classes/autoload_005.php',
    '/tests/classes/autoload_006.php',
    '/tests/classes/autoload_010.php',
    '/tests/classes/autoload_018.php',
    '/tests/classes/constants_scope_001.php',
    '/tests/classes/unset_properties.php',
    '/tests/lang/019.php',
    '/tests/lang/034.php',
    '/tests/lang/bug25922.php',
    '/tests/lang/bug32924.php',
    '/tests/lang/include_variation3.php',
    '/tests/lang/static_variation_001.php',
    '/tests/lang/static_variation_002.php',
    # This test passes "by accident".
    '/ext/dom/tests/DOMNode_hasChildNodes.php',
    # These tests use eval(), which is banned in repo mode.
    '/Zend/tests/bug31102.php',
    '/Zend/tests/bug33116.php',
    '/Zend/tests/bug36513.php',
    '/Zend/tests/bug43128.php',
    '/Zend/tests/bug47714.php',
    '/Zend/tests/bug54624.php',
    '/Zend/tests/bug60444.php',
    '/Zend/tests/bug62907.php',
    '/Zend/tests/bug63305.php',
    '/Zend/tests/bug65254.php',
    '/Zend/tests/constants/dir-constant-eval.php',
    '/Zend/tests/generators/bug67497.php',
    '/Zend/tests/is_a.php',
    '/Zend/tests/method_static_var.php',
    '/ext/session/tests/bug53141.php',
    '/ext/spl/tests/spl_autoload_014.php',
    '/ext/spl/tests/spl_autoload_bug48541.php',
    '/ext/standard/tests/class_object/is_a.php',
    '/ext/standard/tests/general_functions/bug35229.php',
    '/ext/standard/tests/serialize/bug62836_1.php',
    '/tests/classes/constants_basic_006.php',
    '/tests/lang/013.php',
    '/tests/lang/014.php',
    '/tests/lang/018.php',
    '/tests/lang/bug21961.php',
    '/tests/lang/foreachLoop.012.php',
    # XSL: 'include "prepare.inc"' makes repo mode fail.
    '/ext/xsl/tests/bug48221.php',
    '/ext/xsl/tests/bug54446.php',
    '/ext/xsl/tests/bug54446_with_ini.php',
    '/ext/xsl/tests/xslt001.php',
    '/ext/xsl/tests/xslt002.php',
    '/ext/xsl/tests/xslt003.php',
    '/ext/xsl/tests/xslt004.php',
    '/ext/xsl/tests/xslt005.php',
    '/ext/xsl/tests/xslt006.php',
    '/ext/xsl/tests/xslt007.php',
    '/ext/xsl/tests/xslt008.php',
    '/ext/xsl/tests/xslt009.php',
    '/ext/xsl/tests/xsltprocessor_getParameter-invalidparam.php',
    '/ext/xsl/tests/xsltprocessor_getParameter.php',
    '/ext/xsl/tests/xsltprocessor_getParameter-wrongparam.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-allfuncs.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-array-multiple.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-array-notallowed.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-array.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-funcnostring.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-funcundef.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-null.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-string-multiple.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-string-notallowed.php',
    '/ext/xsl/tests/xsltprocessor_registerPHPFunctions-string.php',
    '/ext/xsl/tests/xsltprocessor_removeParameter-invalidparam.php',
    '/ext/xsl/tests/xsltprocessor_removeParameter.php',
    '/ext/xsl/tests/xsltprocessor_removeParameter-wrongparams.php',
    '/ext/xsl/tests/xsltprocessor_setparameter-errorquote.php',
    '/ext/xsl/tests/xsltprocessor_setparameter-nostring.php',
    # These tests use create_function, which is basically eval.
    '/Zend/tests/anonymous_func_001.php',
    '/Zend/tests/anonymous_func_002.php',
    '/Zend/tests/anonymous_func_003.php',
    '/Zend/tests/closure_025.php',
    '/Zend/tests/instanceof_001.php',
    '/ext/spl/tests/bug61697.php',
    '/ext/standard/tests/array/array_filter_variation7.php',
    '/ext/standard/tests/array/array_map_variation10.php',
    '/ext/standard/tests/array/array_walk_recursive_variation7.php',
    '/ext/standard/tests/array/array_walk_variation7.php',
    '/ext/standard/tests/array/uasort_variation7.php',
    '/ext/standard/tests/array/usort_variation7.php',
    '/ext/standard/tests/strings/bug37262.php',
    '/tests/lang/bug17115.php',
    '/tests/lang/bug22690.php',
    '/tests/lang/bug24926.php',
    # This creates an interface with the same name as a builtin, which
    # hphpc doesn't correctly support AttrUnique flags on.
    '/Zend/tests/inter_06.php',
    # Tests use banned reflection features
    '/ext/reflection/tests/bug30146.php',
)
# Random other files that zend wants
other_files = (
'/ext/bz2/tests/004_1.txt.bz2',
'/ext/bz2/tests/004_2.txt.bz2',
'/ext/calendar/tests/skipif.inc',
'/ext/ctype/tests/skipif.inc',
'/ext/curl/tests/curl_testdata1.txt',
'/ext/curl/tests/curl_testdata2.txt',
'/ext/curl/tests/responder/get.php',
'/ext/curl/tests/server.inc',
'/ext/curl/tests/skipif.inc',
'/ext/date/tests/DateTime_data-absolute.inc',
'/ext/date/tests/DateTime_data-dates.inc',
'/ext/date/tests/DateTime_data-fall-type2-type2.inc',
'/ext/date/tests/DateTime_data-fall-type2-type3.inc',
'/ext/date/tests/DateTime_data-fall-type3-type2.inc',
'/ext/date/tests/DateTime_data-fall-type3-type3.inc',
'/ext/date/tests/DateTime_data-february.inc',
'/ext/date/tests/DateTime_data-massive.inc',
'/ext/date/tests/DateTime_data-spring-type2-type2.inc',
'/ext/date/tests/DateTime_data-spring-type2-type3.inc',
'/ext/date/tests/DateTime_data-spring-type3-type2.inc',
'/ext/date/tests/DateTime_data-spring-type3-type3.inc',
'/ext/date/tests/examine_diff.inc',
'/ext/dba/tests/skipif.inc',
'/ext/dom/tests/book.xml',
'/ext/dom/tests/book.xml.gz',
'/ext/dom/tests/book.xsd',
'/ext/dom/tests/book-attr.xml',
'/ext/dom/tests/book-non-conforming-schema.xsd',
'/ext/dom/tests/book-not-a-schema.xsd',
'/ext/dom/tests/bug67081_0.xml',
'/ext/dom/tests/bug67081_1.xml',
'/ext/dom/tests/bug67081_2.xml',
'/ext/dom/tests/dom_test.inc',
'/ext/dom/tests/dom.xml',
'/ext/dom/tests/dom.ent',
'/ext/dom/tests/empty.html',
'/ext/dom/tests/note.xml',
'/ext/dom/tests/not_well.html',
'/ext/dom/tests/nsdoc.xml',
'/ext/dom/tests/skipif.inc',
'/ext/dom/tests/test.html',
'/ext/dom/tests/xinclude.xml',
'/ext/exif/tests/bug34704.jpg',
'/ext/exif/tests/bug48378.jpeg',
'/ext/exif/tests/bug60150.jpg',
'/ext/exif/tests/bug62523_1.jpg',
'/ext/exif/tests/bug62523_2.jpg',
'/ext/exif/tests/bug62523_3.jpg',
'/ext/exif/tests/exif_encoding_crash.jpg',
'/ext/exif/tests/image007.jpg',
'/ext/exif/tests/image008.jpg',
'/ext/exif/tests/image009.jpg',
'/ext/exif/tests/image010.jpg',
'/ext/exif/tests/image011.jpg',
'/ext/exif/tests/image012.jpg',
'/ext/exif/tests/image013.jpg',
'/ext/exif/tests/image014.jpg',
'/ext/exif/tests/image015.jpg',
'/ext/exif/tests/image016.tiff',
'/ext/exif/tests/image017.tiff',
'/ext/exif/tests/image018.tiff',
'/ext/exif/tests/image020.tiff',
'/ext/exif/tests/image021.tiff',
'/ext/exif/tests/image022.tiff',
'/ext/exif/tests/image023.tiff',
'/ext/exif/tests/image024.jpg',
'/ext/exif/tests/image025.jpg',
'/ext/exif/tests/image026.tiff',
'/ext/exif/tests/image027.tiff',
'/ext/exif/tests/test1.jpg',
'/ext/exif/tests/test2.jpg',
'/ext/exif/tests/test5.jpg',
'/ext/fileinfo/tests/magic',
'/ext/fileinfo/tests/resources/dir.zip',
'/ext/fileinfo/tests/resources/test.awk',
'/ext/fileinfo/tests/resources/test.bmp',
'/ext/fileinfo/tests/resources/test.gif',
'/ext/fileinfo/tests/resources/test.jpg',
'/ext/fileinfo/tests/resources/test.mp3',
'/ext/fileinfo/tests/resources/test.pdf',
'/ext/fileinfo/tests/resources/test.png',
'/ext/fileinfo/tests/resources/test.ppt',
'/ext/fileinfo/tests/skipif.inc',
'/ext/ftp/tests/cert.pem',
'/ext/ftp/tests/server.inc',
'/ext/ftp/tests/skipif.inc',
'/ext/gd/tests/bug37346.gif',
'/ext/gd/tests/bug38112.gif',
'/ext/gd/tests/bug43121.gif',
'/ext/gd/tests/conv_test.gif',
'/ext/gd/tests/conv_test.jpeg',
'/ext/gd/tests/conv_test.png',
'/ext/gd/tests/conv_test.xbm',
'/ext/gd/tests/crafted.gd2',
'/ext/gd/tests/php.gif',
'/ext/gd/tests/Rochester-Regular.otf',
'/ext/gd/tests/Rochester-Regular.otf.LICENSE.txt',
'/ext/gd/tests/src.gd2',
'/ext/gd/tests/src.wbmp',
'/ext/gd/tests/test8859.ttf',
'/ext/gd/tests/test.png',
'/ext/gd/tests/Tuffy.ttf',
'/ext/gettext/tests/locale/en/LC_CTYPE/dgettextTest.mo',
'/ext/gettext/tests/locale/en/LC_CTYPE/dgettextTest.po',
'/ext/gettext/tests/locale/en/LC_CTYPE/dgettextTest_switched.po',
'/ext/gettext/tests/locale/en/LC_CTYPE/dgettextTest_switch.mo',
'/ext/gettext/tests/locale/en/LC_CTYPE/dgettextTest_switch.po',
'/ext/gettext/tests/locale/en/LC_CTYPE/dngettextTest.mo',
'/ext/gettext/tests/locale/en/LC_CTYPE/dngettextTest.po',
'/ext/gettext/tests/locale/en/LC_MESSAGES/dgettextTest.mo',
'/ext/gettext/tests/locale/en/LC_MESSAGES/dgettextTest.po',
'/ext/gettext/tests/locale/en/LC_MESSAGES/dgettextTest_switch.mo',
'/ext/gettext/tests/locale/en/LC_MESSAGES/dgettextTest_switch.po',
'/ext/gettext/tests/locale/en/LC_MESSAGES/dngettextTest.mo',
'/ext/gettext/tests/locale/en/LC_MESSAGES/dngettextTest.po',
'/ext/gettext/tests/locale/en/LC_MESSAGES/messages.mo',
'/ext/gettext/tests/locale/en/LC_MESSAGES/messages.po',
'/ext/gettext/tests/locale/fi/LC_MESSAGES/messages.mo',
'/ext/gettext/tests/locale/fi/LC_MESSAGES/messages.po',
'/ext/iconv/tests/skipif.inc',
'/ext/imap/tests/skipif.inc',
'/ext/interbase/tests/skipif.inc',
'/ext/intl/tests/ut_common.inc',
'/ext/ldap/tests/connect.inc',
'/ext/ldap/tests/skipifbindfailure.inc',
'/ext/ldap/tests/skipif.inc',
'/ext/mbstring/tests/common.inc',
'/ext/mbstring/tests/skipif.inc',
'/ext/mcrypt/tests/vectors.txt',
'/ext/mysqli/tests/clean_table.inc',
'/ext/mysqli/tests/connect.inc',
'/ext/mysqli/tests/skipifconnectfailure.inc',
'/ext/mysqli/tests/skipifemb.inc',
'/ext/mysqli/tests/skipif.inc',
'/ext/mysqli/tests/skipifnotemb.inc',
'/ext/mysqli/tests/skipifunicode.inc',
'/ext/mysqli/tests/table.inc',
'/ext/mysql/tests/connect.inc',
'/ext/mysql/tests/skipif.inc',
'/ext/mysql/tests/table.inc',
'/ext/oci8/tests/skipif.inc',
'/ext/odbc/tests/skipif.inc',
'/ext/openssl/tests/005_crt.txt',
'/ext/openssl/tests/bug28382cert.txt',
'/ext/openssl/tests/bug37820cert.pem',
'/ext/openssl/tests/bug37820key.pem',
'/ext/openssl/tests/bug39217cert1.txt',
'/ext/openssl/tests/bug39217cert2.txt',
'/ext/openssl/tests/bug41033.pem',
'/ext/openssl/tests/bug41033pub.pem',
'/ext/openssl/tests/cert.crt',
'/ext/openssl/tests/openssl.cnf',
'/ext/openssl/tests/private.key',
'/ext/openssl/tests/public.key',
'/ext/openssl/tests/ServerClientTestCase.inc',
'/ext/pdo_firebird/tests/skipif.inc',
'/ext/pdo_mysql/tests/common.phpt',
'/ext/pdo_mysql/tests/config.inc',
'/ext/pdo_mysql/tests/skipif.inc',
'/ext/pdo_sqlite/tests/common.phpt',
'/ext/pdo/tests/pdo_test.inc',
'/ext/phar/tests/files/phar_test.inc',
'/ext/phar/tests/files/stuboflength1041.phar',
'/ext/phar/tests/tar/files/P1-1.0.0.tgz',
'/ext/reflection/tests/bug64936.inc',
'/ext/reflection/tests/included4.inc',
'/ext/session/tests/save_handler.inc',
'/ext/session/tests/skipif.inc',
'/ext/simplexml/tests/book.xml',
'/ext/simplexml/tests/bug24392.xml',
'/ext/snmp/tests/skipif.inc',
'/ext/soap/tests/bugs/bug27722.wsdl',
'/ext/soap/tests/bugs/bug28985.wsdl',
'/ext/soap/tests/bugs/bug29109.wsdl',
'/ext/soap/tests/bugs/bug29236.wsdl',
'/ext/soap/tests/bugs/bug29795.wsdl',
'/ext/soap/tests/bugs/bug30106.wsdl',
'/ext/soap/tests/bugs/bug30175.wsdl',
'/ext/soap/tests/bugs/bug30928.wsdl',
'/ext/soap/tests/bugs/bug34643.wsdl',
'/ext/soap/tests/bugs/bug36614.wsdl',
'/ext/soap/tests/bugs/bug36908.wsdl',
'/ext/soap/tests/bugs/bug36999.wsdl',
'/ext/soap/tests/bugs/bug37013.wsdl',
'/ext/soap/tests/bugs/bug37083.wsdl',
'/ext/soap/tests/bugs/bug38004.wsdl',
'/ext/soap/tests/bugs/bug38055.wsdl',
'/ext/soap/tests/bugs/bug38067.wsdl',
'/ext/soap/tests/bugs/bug38536.wsdl',
'/ext/soap/tests/bugs/bug40609.wsdl',
'/ext/soap/tests/bugs/bug40609.phpt',
'/ext/soap/tests/bugs/bug41337.wsdl',
'/ext/soap/tests/bugs/bug42326.wsdl',
'/ext/soap/tests/bugs/bug42692.wsdl',
'/ext/soap/tests/bugs/skipif.inc',
'/ext/soap/tests/classmap.wsdl',
'/ext/soap/tests/server030.wsdl',
'/ext/soap/tests/interop/Round2/Base/skipif.inc',
'/ext/soap/tests/interop/Round2/GroupB/skipif.inc',
'/ext/soap/tests/interop/Round3/GroupD/skipif.inc',
'/ext/soap/tests/interop/Round3/GroupE/skipif.inc',
'/ext/soap/tests/interop/Round3/GroupF/round3_groupF_extreq.wsdl',
'/ext/soap/tests/interop/Round3/GroupF/skipif.inc',
'/ext/soap/tests/interop/Round4/GroupG/skipif.inc',
'/ext/soap/tests/interop/Round4/GroupH/skipif.inc',
'/ext/soap/tests/interop/Round4/GroupI/skipif.inc',
'/ext/soap/tests/schema/skipif.inc',
'/ext/soap/tests/schema/test_schema.inc',
'/ext/soap/tests/server025.wsdl',
'/ext/soap/tests/skipif.inc',
'/ext/soap/tests/soap12/skipif.inc',
'/ext/soap/tests/soap12/soap12-test.inc',
'/ext/soap/tests/soap12/soap12-test.wsdl',
'/ext/spl/tests/fileobject_001a.txt',
'/ext/spl/tests/fileobject_001b.txt',
'/ext/spl/tests/SplFileObject_testinput.csv',
'/ext/spl/tests/testclass.class.inc',
'/ext/sqlite3/tests/new_db.inc',
'/ext/sqlite3/tests/skipif.inc',
'/ext/sqlite3/tests/stream_test.inc',
'/ext/standard/tests/array/compare_function.inc',
'/ext/standard/tests/array/data.inc',
'/ext/standard/tests/class_object/AutoInterface.inc',
'/ext/standard/tests/class_object/AutoLoaded.inc',
'/ext/standard/tests/class_object/AutoTest.inc',
'/ext/standard/tests/class_object/AutoTrait.inc',
'/ext/standard/tests/file/bug30362.txt',
'/ext/standard/tests/file/bug38086.txt',
'/ext/standard/tests/file/bug40501.csv',
'/ext/standard/tests/file/file.inc',
'/ext/standard/tests/file/fopen_include_path.inc',
'/ext/standard/tests/file/stream_rfc2397_003.gif',
'/ext/standard/tests/file/test2.csv',
'/ext/standard/tests/file/test3.csv',
'/ext/standard/tests/file/test.csv',
'/ext/standard/tests/general_functions/004.data',
'/ext/standard/tests/general_functions/bug49692.ini',
'/ext/standard/tests/general_functions/bug52138.data',
'/ext/standard/tests/general_functions/get_included_files_inc1.inc',
'/ext/standard/tests/general_functions/get_included_files_inc2.inc',
'/ext/standard/tests/general_functions/get_included_files_inc3.inc',
'/ext/standard/tests/general_functions/parse_ini_basic.data',
'/ext/standard/tests/general_functions/parse_ini_booleans.data',
'/ext/standard/tests/image/200x100.bmp',
'/ext/standard/tests/image/200x100.gif',
'/ext/standard/tests/image/200x100.jpg',
'/ext/standard/tests/image/200x100.png',
'/ext/standard/tests/image/200x100.swf',
'/ext/standard/tests/image/200x100.tif',
'/ext/standard/tests/image/200x100_unknown.unknown',
'/ext/standard/tests/image/246x247.png',
'/ext/standard/tests/image/2x2mm.tif',
'/ext/standard/tests/image/384x385.png',
'/ext/standard/tests/image/75x50.wbmp',
'/ext/standard/tests/image/75x50.xbm',
'/ext/standard/tests/image/blank_file.bmp',
'/ext/standard/tests/image/bug13213.jpg',
'/ext/standard/tests/image/skipif_imagetype.inc',
'/ext/standard/tests/image/test13pix.swf',
'/ext/standard/tests/image/test1bpix.bmp',
'/ext/standard/tests/image/test-1pix.bmp',
'/ext/standard/tests/image/test1pix.bmp',
'/ext/standard/tests/image/test1pix.jp2',
'/ext/standard/tests/image/test1pix.jpc',
'/ext/standard/tests/image/test1pix.jpg',
'/ext/standard/tests/image/test2pix.gif',
'/ext/standard/tests/image/test4pix.gif',
'/ext/standard/tests/image/test4pix.iff',
'/ext/standard/tests/image/test4pix.png',
'/ext/standard/tests/image/test4pix.psd',
'/ext/standard/tests/image/test4pix.swf',
'/ext/standard/tests/image/test4pix.tif',
'/ext/standard/tests/image/testAPP.jpg',
'/ext/standard/tests/image/test.gif',
'/ext/standard/tests/image/test.txt',
'/ext/standard/tests/math/allowed_rounding_error.inc',
'/ext/standard/tests/serialize/autoload_implements.p5c',
'/ext/standard/tests/serialize/autoload_interface.p5c',
'/ext/standard/tests/url/urls.inc',
'/ext/sybase_ct/tests/skipif.inc',
'/ext/xmlreader/tests/012.dtd',
'/ext/xmlreader/tests/012.xml',
'/ext/xmlreader/tests/dtdexample.dtd',
'/ext/xmlreader/tests/relaxNG2.rng',
'/ext/xmlreader/tests/relaxNG3.rng',
'/ext/xmlreader/tests/relaxNG.rng',
'/ext/xml/tests/skipif.inc',
'/ext/xml/tests/xmltest.xml',
'/ext/xsl/tests/53965/collection.xml',
'/ext/xsl/tests/53965/collection.xsl',
'/ext/xsl/tests/53965/include.xsl',
'/ext/xsl/tests/area_list.xsl',
'/ext/xsl/tests/area_name.xml',
'/ext/xsl/tests/bug49634.xml',
'/ext/xsl/tests/documentxpath.xsl',
'/ext/xsl/tests/exslt.xml',
'/ext/xsl/tests/exslt.xsl',
'/ext/xsl/tests/phpfunc-nostring.xsl',
'/ext/xsl/tests/phpfunc-undef.xsl',
'/ext/xsl/tests/phpfunc.xsl',
'/ext/xsl/tests/prepare.inc',
'/ext/xsl/tests/skipif.inc',
'/ext/xsl/tests/streamsinclude.xsl',
'/ext/xsl/tests/xslt011.xml',
'/ext/xsl/tests/xslt011.xsl',
'/ext/xsl/tests/xslt012.xsl',
'/ext/xsl/tests/xslt.xml',
'/ext/xsl/tests/xslt.xsl',
'/ext/xsl/tests/xslt.xsl.gz',
'/ext/zip/tests/utils.inc',
'/ext/zip/tests/test_with_comment.zip',
'/ext/zlib/tests/004.txt.gz',
'/ext/zlib/tests/bug_52944_corrupted_data.inc',
'/ext/zlib/tests/data.inc',
'/ext/zlib/tests/gzopen_include_path.inc',
'/ext/zlib/tests/reading_include_path.inc',
'/tests/classes/autoload_derived.p5c',
'/tests/classes/autoload_implements.p5c',
'/tests/classes/autoload_interface.p5c',
'/tests/classes/autoload_root.p5c',
'/tests/classes/constants_basic_003.inc',
'/tests/classes/interface_optional_arg_003.inc',
'/tests/lang/015.inc',
'/tests/lang/016.inc',
'/tests/lang/023-1.inc',
'/tests/lang/023-2.inc',
'/tests/lang/inc.inc',
'/tests/lang/include_files/echo.inc',
'/tests/lang/include_files/eval.inc',
'/tests/lang/include_files/function.inc',
'/tests/lang/inc_throw.inc',
'/tests/quicktester.inc',
'/Zend/tests/014.inc',
'/Zend/tests/bug39542/bug39542.php',
'/Zend/tests/bug46665_autoload.inc',
'/Zend/tests/bug54804.inc',
'/Zend/tests/bug67436/a.php',
| |
<reponame>strickyak/pythonine
## compile_pyth09.py -- tokenizer, parser, and code generator for Pythonine.
#if UNIX
import re
import sys
Stdin = sys.stdin
Stdout = sys.stdout
Stderr = sys.stderr
def is_in(a, b):
    """Membership test (UNIX build; the COCO build supplies its own)."""
    return a in b
def is_not_in(a, b):
    """Negated membership test (UNIX build)."""
    return a not in b
import _generated_proto as T # Tags.
import py_pb as P # Protocol buffers.
#endif
E = Stderr
#if COCO
def is_in(a, b):
    # COCO build: membership is a hand-rolled linear scan.
    # NOTE(review): presumably the Pythonine/COCO runtime lacks the 'in'
    # operator in this position -- confirm against the runtime.
    for e in b:
        if e == a: return True
    return False
def is_not_in(a, b):
    return not is_in(a, b)
def reversed(vec):
    # COCO build: deliberately shadows the builtin name; builds a reversed
    # copy of vec with only forward range() iteration.
    z = []
    n = len(vec)
    for i in range(len(vec)):
        z.append(vec[n - i - 1])
    return z
#include "_generated_proto.py"
#include "py_pb.py"
#endif
def Inside(x):
    """Debug-render x: lists/sets as '[* e , e , *]', objects via vars(),
    anything else via str()."""
    if type(x) == list or type(x) == set:
        out = '[* '
        for item in x:
            out = out + Inside(item) + ' , '
        return out + '*]'
    try:
        return str(vars(x))
    except:
        # vars() fails on objects without __dict__; fall back to str().
        return str(x)
## Bytecode-object header slots (indices into a compiled function's header).
BC_NUM_ARGS = 0
BC_NUM_LOCALS = 1
BC_NUM_TEMPS = 2
BC_MODULE = 3
BC_CLASS = 4
BC_NAME = 5
BC_HEADER_SIZE = 6
## Created by Lexer:
L_EOF = 0
L_INT = 1
L_STR = 2
L_IDENTIFIER = 3
L_MULTI = 4  # two-char operators: == != <= >= << >>
L_PUNC = 5
L_BOL = 6  # beginning of line; token value is the indent column
## Created by Parser::Advance:
P_INDENT = 7
P_DEDENT = 8
P_EOL = 9
# Punctuation that terminates an expression list.
STOPPERS = [']', '}', ')', ';']
#if UNIX
BytecodeNumbers = {}  # opcode name -> number, filled by GetBytecodeNumbers().
SerialCounter = [0]   # one-cell mutable counter shared by SerialName().
def SerialName():
    """Return a fresh generated identifier of the form '__N' (N starts at 1)."""
    SerialCounter[0] += 1
    return '__%d' % SerialCounter[0]
def GetBytecodeNumbers():
    """Fill BytecodeNumbers from _generated_prim.h.

    Matches lines of the form 'BC_Print = 7,' and records name -> number.
    """
    pattern = re.compile(r'BC_([A-Za-z0-9_]+) = ([0-9]+),')
    with open('_generated_prim.h') as fd:
        for raw_line in fd:
            hit = pattern.match(raw_line.strip())
            if hit:
                BytecodeNumbers[hit.group(1)] = int(hit.group(2))
#endif
def LexKind(a):
    """Map a token-kind number (L_* / P_* constants) to its symbolic name,
    for debug traces; unknown kinds become 'L_UNKNOWN'."""
    if a == L_EOF: return 'L_EOF'
    elif a == L_INT: return 'L_INT'
    elif a == L_STR: return 'L_STR'
    elif a == L_IDENTIFIER: return 'L_IDENTIFIER'
    elif a == L_MULTI: return 'L_MULTI'
    elif a == L_PUNC: return 'L_PUNC'
    elif a == L_BOL: return 'L_BOL'
    elif a == P_INDENT: return 'P_INDENT'
    elif a == P_DEDENT: return 'P_DEDENT'
    elif a == P_EOL: return 'P_EOL'
    else: return 'L_UNKNOWN'
def IsDigit(c):
    """True iff c is an ASCII digit character."""
    return ('0' <= c) and (c <= '9')
def IsAlfa(c):
    """True iff c can appear in an identifier start: letter or underscore."""
    return ('A' <= c and c <= 'Z') or ('a' <= c and c <= 'z') or (c == '_')
def IsWhite(c):
    """Truthy iff c is whitespace (any char <= ' '); falsy for None or ''."""
    return (c) and (c <= ' ')
def ShowLex(kind, what):
    """Package a token as a (kind, value) pair; single hook for tracing tokens."""
    return (kind, what)
class Lexer(object):
    """Character-level tokenizer; Next() yields (kind, value) pairs built
    with ShowLex, using the L_* constants for kinds."""
    def __init__(self, program):
        # program: entire source text; i: cursor index into it.
        self.program = program
        self.i = 0
    def UnGetC(self):
        # Push back the most recently read character.
        self.i = self.i - 1
    def GetC(self):
        # Return the next character, or None at end of input.
        if self.i >= len(self.program):
            return None
        z = self.program[self.i]
        self.i = self.i + 1
        return z
    def Next(self):
        ## Next only returns L_BOL (with the indent #) and L_BOL
        ## as framing tokens. Parser::Advance changes L_BOL to
        ## P_EOL and P_INDENT and P_DEDENT tokens.
        c = self.GetC()
        if not c: return ShowLex(L_EOF, None)
        col, eol = 0, False
        # Skip comments and whitespace, tracking the indent column (col)
        # and whether a line boundary was crossed (eol).
        while c == '#' or IsWhite(c):
            if c == '#':
                # Consume the comment through to end of line.
                while c != '\n' and c != '\r':
                    c = self.GetC()
                    if not c: return ShowLex(L_EOF, None)
                col, eol = 0, True
            elif c == '\n' or c == '\r':
                col, eol = 0, True
            else:
                if c == '\t':
                    # A tab advances the column to the next multiple of 4.
                    col = ((col + 4) >> 2) << 2
                else:
                    col = col + 1
            c = self.GetC()
        return self.Next2(c, col, eol)
    def Next2(self, c, col, eol):
        # Second stage: framing token or string literal, else Next3.
        if not c: return ShowLex(L_EOF, None)
        if eol:
            # Report the new line's indent; re-read the char next call.
            self.UnGetC()
            return ShowLex(L_BOL, col)
        if c == '"':
            # NOTE(review): no escape handling and no EOF guard inside the
            # literal -- an unterminated string hits c == None. Confirm this
            # is acceptable for the Pythonine language.
            s = ''
            c = self.GetC()  # after the initial '"'
            while c != '"':
                s = s + c
                c = self.GetC()
            return ShowLex(L_STR, s)
        if c == "'":
            s = ''
            c = self.GetC()  # after the initial "'"
            while c != "'":
                s = s + c
                c = self.GetC()
            return ShowLex(L_STR, s)
        return self.Next3(c)
    def Next3(self, c):
        # Third stage: numbers, identifiers, multi-char and single-char
        # punctuation.
        if IsDigit(c):
            x = 0
            while IsDigit(c):
                x = x * 10 + ord(c) - 48
                c = self.GetC()
            self.UnGetC()
            return ShowLex(L_INT, x)
        if IsAlfa(c):
            x = ''
            while IsAlfa(c) or IsDigit(c):
                x = x + c
                c = self.GetC()
            self.UnGetC()
            return ShowLex(L_IDENTIFIER, x)
        if is_in(c, ['=', '!', '<', '>']):
            # Possible two-char operator: == != <= >= <> << >>.
            d = self.GetC()
            if is_in(d, ['=', '<', '>']):
                return ShowLex(L_MULTI, c + d)
            else:
                self.UnGetC()
                ## and fallthrough
        return ShowLex(L_PUNC, c)
## Python Precedence:
## await x
## x**...
## +x, -x, ~x
## ...**x
## * @ / // %
## + -
## << >>
## &
## ^
## |
## in, not in, is, is not, <, <=, >, >=, !=, ==
## not x
## and
## or
## if ... else
## lambda
## assignment
class Parser(object):
    def __init__(self, program):
        # indents: stack of active indentation columns; the bottom entry is
        # always column 0. Advance() primes self.t / self.x.
        self.program = program
        self.lex = Lexer(program)
        self.indents = [0]
        self.pending_indent = False
        self.pending_dedents = 0
        self.Advance()
    def Advance(self):
        # Advance to the next token, tracing it to stderr.
        self.Advance_()
        ## print >>E, 'Advance', '::', self.t, '::', repr(self.x), '::', repr(self.lex.program[:self.lex.i])
        # NOTE(review): Python-2 print syntax; this traces every token.
        print >>E, 'Advance', '::', self.t, '::', repr(self.x)
    def Advance_(self):
        ## Lexer::Next only returns L_BOL (with the indent column) and L_BOL
        ## as framing tokens. Advance changes L_BOL to P_EOL
        ## P_INDENT and P_DEDENT tokens.
        # Emit any queued INDENT first, then queued DEDENTs, then pull a new
        # lexer token. self.t is the token kind; self.x its value.
        if self.pending_indent:
            self.pending_indent = False
            self.t, self.x = P_INDENT, None
            return
        if self.pending_dedents:
            self.pending_dedents = self.pending_dedents - 1
            self.t, self.x = P_DEDENT, None
            return
        self.t, self.x = self.lex.Next()
        if self.t == L_BOL:
            if self.x > self.indents[-1]:
                # Deeper line: report EOL now, queue one INDENT.
                self.indents.append(self.x)
                self.pending_indent = True
                self.t, self.x = P_EOL, None
                return
            if self.x < self.indents[-1]:
                # Shallower line: pop to the matching column, queueing one
                # DEDENT per popped level; the column must match exactly.
                self.indents.pop()
                if is_not_in(self.x, self.indents):
                    raise Exception('bad DEDENT: %d %s' % (self.x, self.indents))
                self.pending_dedents = 1
                while self.indents[-1] != self.x:
                    self.pending_dedents = self.pending_dedents + 1
                    self.indents.pop()
                self.t, self.x = P_EOL, None
                return
            ## So self.x == self.indents[-1]
            self.t, self.x = P_EOL, None
    def ParsePrim(self):
        """Parse one primary expression: int/str literal, identifier,
        list, dict, or a parenthesized comma list."""
        val = self.x
        if self.t == L_INT:
            self.Advance()
            return TInt(val)
        if self.t == L_STR:
            self.Advance()
            return TStr(val)
        if self.t == L_IDENTIFIER:
            return self.ParseIdentifier()
        if self.t == L_PUNC and self.x == '[':  # ']'
            return self.ParseList()
        if self.t == L_PUNC and self.x == '{':  # '}'
            return self.ParseDict()
        if self.t == L_PUNC and self.x == '(':  # ')'
            self.ConsumeX('(')
            x = self.ParseCommaList(False)
            self.ConsumeX(')')
            return x
        raise Exception('bad prim: %s %s' % (self.t, val))
    def ParseDict(self):
        """Parse '{ k : v , ... }' into a TDict; trailing comma allowed."""
        dic = []
        self.ConsumeX('{')
        while self.t != L_PUNC or self.x != '}':
            k = self.ParseSingle()
            self.ConsumeX(':')
            v = self.ParseSingle()
            dic.append((k, v))
            if self.t == L_PUNC and self.x == ',':
                self.Advance()
            elif self.t != L_PUNC or self.x != '}':
                raise Exception('expected `,` or `}` after dict item')
        # Consume the closing '}'.
        self.Advance()
        return TDict(dic)
    def ParseList(self):
        """Parse '[ a , b , ... ]' into a TList; trailing comma allowed."""
        vec = []
        self.ConsumeX('[')
        while self.t != L_PUNC or self.x != ']':
            a = self.ParseSingle()
            vec.append(a)
            if self.t == L_PUNC and self.x == ',':
                self.Advance()
            elif self.t != L_PUNC or self.x != ']':
                raise Exception('expected `,` or `]` after list item')
        # Consume the closing ']'.
        self.Advance()
        return TList(vec)
    def ParsePrimEtc(self):
        """Parse a primary followed by any chain of trailers: calls '()',
        subscripts '[]', and member access '.'."""
        a = self.ParsePrim()
        while True:
            if self.t == L_PUNC and self.x == '(':  # FunCall
                self.Advance()
                xlist = self.ParseCommaList(True).vec
                self.ConsumeX(')')
                a = TFunCall(a, xlist)
            elif self.t == L_PUNC and self.x == '[':  # GetItem
                self.Advance()
                key = self.ParseCommaList(False)
                self.ConsumeX(']')
                a = TGetItem(a, key)
            elif self.t == L_PUNC and self.x == '.':  # Member
                self.Advance()
                if self.t != L_IDENTIFIER:
                    raise Exception('expected identifier after `.`, got `%s`' % self.x)
                a = TMember(a, self.x)
                self.Advance()
            else:
                break
        return a
def ParseIdentifier(self):
var = self.ConsumeT(L_IDENTIFIER)
if var=='in' or var=='if' or var=='is':
raise Exception('bad var: %s', var)
if var=='True' or var=='False' or var=='None':
return TSpecial(var)
## if var=='Stdin' or var=='Stdout' or var=='Stderr':
## return TSpecial(var)
return TIdent(var)
    def ParseUnary(self):
        """Parse unary minus (the only supported unary op), else a primary chain."""
        if self.t == L_PUNC and self.x == '-':
            op = self.x
            self.Advance()
            a = self.ParsePrimEtc()
            return TUnaryOp(a, op)
        return self.ParsePrimEtc()
    def ParseProduct(self):
        """Parse a left-associative chain of '*', '%', '/'."""
        p = self.ParseUnary()
        op = self.x
        while op == '*' or op == '%' or op == '/':
            self.Advance()
            p2 = self.ParseUnary()
            p = TBinaryOp(p, op, p2)
            op = self.x
        return p
    def ParseSum(self):
        """Parse a left-associative chain of '+' and '-'."""
        p = self.ParseProduct()
        op = self.x
        while op == '+' or op == '-':
            self.Advance()
            p2 = self.ParseProduct()
            p = TBinaryOp(p, op, p2)
            op = self.x
        return p
    def ParseMiscop(self):
        """Parse one shift/bitwise op ('<<' '>>' '&' '|' '^'); chaining these
        without parentheses is deliberately rejected."""
        p = self.ParseSum()
        op = self.x
        p2 = None
        while op == '<<' or op == '>>' or op == '&' or op == '|' or op == '^':
            if p2:  # do not cascade!
                raise Exception('cascade_misc_op')
            self.Advance()
            p2 = self.ParseSum()
            p = TBinaryOp(p, op, p2)
            op = self.x
        return p
def ParseRelop(self):
p = self.ParseMiscop()
op = self.x
p2 = None
while op == '==' or op == '!=' or op == '<' or op == '>' or op == '<=' or | |
max_var
m_2 = abs(kl_df[var_2_f]) < max_var
m_filter = (m_1 & m_2)
return m_filter
@ribo_deprecated
def get_mean_filter(kl_df, condition_1, condition_2, field, min_mean=1):
    """Boolean mask of kl_df rows whose |mean| exceeds min_mean in both
    conditions.

    For field == "log_translational_efficiency" the row must pass on both
    ribo_abundance and rna_abundance in both conditions; otherwise only on
    '<field>_mean_loc_<condition>'.
    """
    def mean_mask(name, condition):
        column = "{}_mean_loc_{}".format(name, condition)
        return abs(kl_df[column]) > min_mean
    if field == "log_translational_efficiency":
        # TE has no mean columns of its own: require both abundances.
        masks = [mean_mask(name, cond)
                 for name in ("ribo_abundance", "rna_abundance")
                 for cond in (condition_1, condition_2)]
        m_filter = masks[0] & masks[1] & masks[2] & masks[3]
    else:
        m_filter = mean_mask(field, condition_1) & mean_mask(field, condition_2)
    return m_filter
@ribo_deprecated
def get_random_kl_divergence(kl_df, mean_1_f, scale_1_f, mean_2_f, scale_2_f, strategy='sampling'):
    """Draw two (mean, scale) pairs from kl_df and return their symmetric KL
    divergence, for building a permutation-test background distribution.

    strategy:
        'filtering': the second row is drawn uniformly from rows whose mean
            and scale are within a factor of 2 of the first row's
        'sampling' (default): the second row is drawn with probability given
            by a Cauchy (t with df=1) likelihood centered on the first row
        'random': both rows are drawn uniformly and independently

    Returns (kl, p, q), where p and q are the sampled (mean, scale) pairs.
    Raises ValueError for an unrecognized strategy.
    """
    import numpy as np
    import scipy.stats
    import misc.math_utils as math_utils
    if strategy == 'filtering':
        # Re-draw the anchor row until at least one comparable partner exists.
        m_filter = [False] * len(kl_df)
        while sum(m_filter) == 0:
            x = np.random.randint(len(kl_df))
            row = kl_df.iloc[x]
            mean_1 = row[mean_1_f]
            scale_1 = row[scale_1_f]
            p = (mean_1, scale_1)
            mean_2 = row[mean_2_f]
            scale_2 = row[scale_2_f]
            # "comparable" == both scale and mean within (0.5x, 2x).
            m_min_scale = kl_df[scale_2_f] > 0.5*scale_2
            m_max_scale = kl_df[scale_2_f] < 2*scale_2
            m_scale = m_min_scale & m_max_scale
            m_min_mean = kl_df[mean_2_f] > 0.5*mean_2
            m_max_mean = kl_df[mean_2_f] < 2*mean_2
            m_mean = m_min_mean & m_max_mean
            m_filter = m_mean & m_scale
        indices = np.where(m_filter)[0]
        y = np.random.choice(indices)
        #y = np.random.randint(len(kl_df))
        row = kl_df.iloc[y]
        mean_2 = row[mean_2_f]
        scale_2 = row[scale_2_f]
        q = (mean_2, scale_2)
    elif strategy == 'sampling':
        x = np.random.randint(len(kl_df))
        row = kl_df.iloc[x]
        mean_1 = row[mean_1_f]
        scale_1 = row[scale_1_f]
        p = (mean_1, scale_1)
        mean_2 = row[mean_2_f]
        scale_2 = row[scale_2_f]
        means = kl_df[mean_2_f]
        # we take the sqrt because scipy uses std, but we use var
        #unnormalized_likelihoods = scipy.stats.norm.pdf(means, loc=mean_1, scale=np.sqrt(scale_1))
        #unnormalized_likelihoods = scipy.stats.cauchy.pdf(means, loc=mean_1, scale=np.sqrt(scale_1))
        # df=1 is the same as a cauchy
        df = 1
        unnormalized_likelihoods = scipy.stats.t.pdf(means, df, loc=mean_1, scale=np.sqrt(scale_1))
        normalized_likelihoods = unnormalized_likelihoods / np.sum(unnormalized_likelihoods)
        y = np.random.choice(len(normalized_likelihoods), p=normalized_likelihoods)
        row = kl_df.iloc[y]
        mean_2 = row[mean_2_f]
        scale_2 = row[scale_2_f]
        q = (mean_2, scale_2)
    elif strategy == "random":
        x = np.random.randint(len(kl_df))
        row = kl_df.iloc[x]
        mean_1 = row[mean_1_f]
        scale_1 = row[scale_1_f]
        p = (mean_1, scale_1)
        y = np.random.randint(len(kl_df))
        row = kl_df.iloc[y]
        mean_2 = row[mean_2_f]
        scale_2 = row[scale_2_f]
        q = (mean_2, scale_2)
    else:
        msg = "Unrecognized permutation test strategy: {}".format(strategy)
        raise ValueError(msg)
    kl = math_utils.calculate_symmetric_kl_divergence(p, q, math_utils.calculate_univariate_gaussian_kl)
    return kl, p, q
@ribo_deprecated
def get_background_kl_distribution(batch, filtered_kl_df, condition_1, condition_2, field,
        num_random_samples=10000, seed=8675309, use_progress_bar=False):
    """Sample a background distribution of symmetric KL divergences.

    Args:
        batch: group index supplied by the parallel driver; unused here,
            kept only to satisfy the expected callback signature.
        filtered_kl_df (pd.DataFrame): rows eligible for sampling
        condition_1, condition_2 (str): condition names in the column labels
        field (str): base field name used to build the column names
        num_random_samples (int): number of divergences to draw
        seed (int or None): numpy RNG seed; None leaves the global state alone
        use_progress_bar (bool): wrap the sampling loop in tqdm

    Returns:
        list of float: the sampled KL divergences.
    """
    import numpy as np
    import tqdm
    if seed is not None:
        np.random.seed(seed)
    # BUG FIX: the original also created random_ps/random_qs lists that were
    # never appended to nor returned; they have been removed, and the
    # per-sample p/q pairs are discarded explicitly below.
    random_kls = []
    # first, get the field names for which we want significances
    if field == "log_translational_efficiency":
        mean_1_f = "{}_loc_{}".format(field, condition_1)
        scale_1_f = "{}_scale_{}".format(field, condition_1)
        mean_2_f = "{}_loc_{}".format(field, condition_2)
        scale_2_f = "{}_scale_{}".format(field, condition_2)
    else:
        mean_1_f = "{}_mean_loc_{}".format(field, condition_1)
        scale_1_f = "{}_var_loc_{}".format(field, condition_1)
        mean_2_f = "{}_mean_loc_{}".format(field, condition_2)
        scale_2_f = "{}_var_loc_{}".format(field, condition_2)
    if use_progress_bar:
        iter_range = tqdm.trange(num_random_samples)
    else:
        iter_range = np.arange(num_random_samples)
    for i in iter_range:
        kl, p, q = get_random_kl_divergence(filtered_kl_df, mean_1_f, scale_1_f, mean_2_f, scale_2_f)
        random_kls.append(kl)
    return random_kls
@ribo_deprecated
def get_transcript_pvalues(kl_df, condition_1, condition_2, field,
        min_mean=1, max_var=None, var_power=None,
        num_random_samples=10000, seed=8675309, num_cpus=1, num_groups=500):
    """Estimate permutation-test pvalues for the KL divergence of `field`
    between two conditions.

    Args:
        kl_df (pd.DataFrame): frame with the '*_kl_divergence' and filter columns
        condition_1, condition_2 (str): condition names
        field (str): base field name
        min_mean, max_var, var_power: thresholds selecting the rows used for
            the background distribution (None disables that filter)
        num_random_samples (int): total background samples to draw
        seed (int): seed for the (global) numpy RNG
        num_cpus (int): worker count for misc.parallel
        num_groups (int): number of sampling groups

    Returns:
        (m_filter, pvals, random_kls, random_ps, random_qs); the last two
        are always empty lists (see the note at the return statement).
    """
    import numpy as np
    import misc.parallel as parallel
    import misc.utils as utils
    np.random.seed(seed)
    m_mean_filter = get_mean_filter(kl_df, condition_1, condition_2,
        field, min_mean=min_mean)
    m_var_filter = True
    if max_var is not None:
        m_var_filter = get_variance_filter(kl_df, condition_1, condition_2,
            field, max_var=max_var)
    m_var_power_filter = True
    if var_power is not None:
        m_var_power_filter = get_variance_power_filter(kl_df, condition_1, condition_2,
            field, power=var_power)
    m_filter = m_mean_filter & m_var_filter & m_var_power_filter
    msg = "Total transcripts: {}. Use for sampling: {}".format(len(kl_df), sum(m_filter))
    logger.debug(msg)
    # BUG FIX: use an int so downstream consumers (tqdm.trange) receive an
    # integer count; np.ceil returns a float.
    samples_per_group = int(np.ceil(num_random_samples / num_groups))
    # We do not need to use a seed for each group; otherwise, they all end up
    # sampling exactly the same thing.
    group_seed = None
    it = np.arange(num_cpus)
    random_kls = parallel.apply_parallel_iter(
        it,
        num_cpus,
        get_background_kl_distribution,
        kl_df[m_filter],
        condition_1, condition_2, field, samples_per_group, group_seed,
        progress_bar=True, num_groups=num_groups)
    random_kls = utils.flatten_lists(random_kls)
    kls = np.array(sorted(random_kls))
    kl_field_name = "{}_{}_{}_kl_divergence".format(field, condition_1, condition_2)
    kl_field = kl_df[kl_field_name]
    pvals = kl_field.apply(get_pvalue, args=(kls,))
    # BUG FIX: the original ended with `..., random_ps.tolist(),
    # random_qs.tolist()`, but no such names exist in this scope, so the
    # function always raised NameError here. The sampled p/q pairs are not
    # collected by get_background_kl_distribution, so empty lists keep the
    # advertised 5-tuple shape without inventing data.
    return m_filter, pvals, random_kls, [], []
@ribo_deprecated
def get_significant_differences(condition_1, condition_2, pval_df,
        alpha=0.05, min_rpkm_mean=None, max_rpkm_var=None, var_power=None):
    """ Extract the transcripts in pval_df which are significantly
    differentially "expressed" between the two given conditions.

    For each of the three "expression" types (log_translational_efficiency,
    rna_abundance, ribo_abundance) a basic filter is built from the optional
    thresholds (min_rpkm_mean, max_rpkm_var, var_power; None disables that
    threshold), and the significance mask is the basic filter AND-ed with
    (pvalue < alpha).

    Meant to be used with the output of the estimate-kl-pvalues script from
    the ribo-te package; those pvalues come from a permutation test, so
    multiple-test correction *is not* required.

    Args:
        condition_1, condition_2 (strings): the name of the conditions
        pval_df (pd.DataFrame): output of the estimate-kl-pvalues script
        alpha (float): the significance value for filtering
        min_rpkm_mean, max_rpkm_var, var_power (floats): filter thresholds,
            or None if the relevant filter should not be applied

    Returns:
        list of six boolean masks (np.array) of pval_df, in order:
            m_te_filter, m_rna_filter, m_ribo_filter,
            m_te_sig, m_rna_sig, m_ribo_sig
    """
    import numpy as np
    te_kl_field = "log_translational_efficiency_{}_{}_kl_divergence".format(
        condition_1, condition_2)
    kl = pval_df[te_kl_field]  # kept from the original; unused below
    field_names = ("log_translational_efficiency", "rna_abundance",
                   "ribo_abundance")
    def basic_filter(field):
        # AND together whichever threshold filters were requested; a plain
        # True stands in for "no filtering", as in the original.
        mask = True
        if min_rpkm_mean is not None:
            mask = mask & get_mean_filter(pval_df, condition_1, condition_2,
                field, min_mean=min_rpkm_mean)
        if max_rpkm_var is not None:
            mask = mask & get_variance_filter(pval_df, condition_1, condition_2,
                field, max_var=max_rpkm_var)
        if var_power is not None:
            mask = mask & get_variance_power_filter(pval_df, condition_1, condition_2,
                field, power=var_power)
        return mask
    basic = [basic_filter(f) for f in field_names]
    sig = []
    for field, mask in zip(field_names, basic):
        pval_field = "{}_{}_{}_pvalue".format(field, condition_1, condition_2)
        sig.append((pval_df[pval_field] < alpha) & mask)
    return [np.array(f) for f in basic + sig]
@ribo_deprecated
def get_significance_filter(filters, field, significant_only=True):
""" This function returns the appropriate mask to filter on significance
of the given field. It assumes the filters are in the same order as the
output of get_significant_differences.
Parameters
----------
filters : tuple
The result of the call to get_significant_differences
field : string
The name of the field on which to filter. Valid options are:
* ribo
* rna
* te
is_significant : bool
Whether to return the "significant" filter (True, default) or
the "basic" filter
Returns
-------
significant_only : boolean mask
The appropriate mask for filtering for significance based on the
given field.
"""
# just | |
<reponame>makistsantekidis/opendr<gh_stars>100-1000
# MIT License
#
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import torch
import torch.optim as optim
import pickle
from tqdm import tqdm
import os
import time
import numpy as np
def get_cosine_lr_scheduler(init_lr, final_lr):
    """Return a schedule(n_epoch, epoch_idx) callable that cosine-anneals
    the learning rate from init_lr (epoch 0) down to final_lr (epoch n_epoch)."""
    def schedule(n_epoch, epoch_idx):
        # Cosine factor goes 1 -> 0 as epoch_idx goes 0 -> n_epoch.
        cosine = 0.5 * (1 + np.cos(np.pi * epoch_idx / n_epoch))
        return final_lr + (init_lr - final_lr) * cosine
    return schedule
def get_multiplicative_lr_scheduler(init_lr, drop_at, multiplicative_factor):
    """Return a schedule(n_epoch, epoch_idx) callable that multiplies init_lr
    by multiplicative_factor once for every epoch in drop_at that has been
    reached (1-based: the drop happens when epoch_idx + 1 >= epoch)."""
    def schedule(n_epoch, epoch_idx):
        n_drops = sum(1 for epoch in drop_at if epoch_idx + 1 >= epoch)
        return init_lr * multiplicative_factor ** n_drops
    return schedule
class ClassifierTrainer:
n_test_minibatch = 10
def __init__(self,
n_epoch,
epoch_idx,
lr_scheduler,
optimizer,
weight_decay,
temp_dir,
checkpoint_freq=1,
print_freq=1,
use_progress_bar=False,
test_mode=False):
assert epoch_idx < n_epoch,\
'epoch_idx ("{}") must be lower than number of epochs ("{}")'.format(epoch_idx, n_epoch)
self.n_epoch = n_epoch
self.epoch_idx = epoch_idx
self.lr_scheduler = lr_scheduler
self.optimizer = optimizer
self.weight_decay = weight_decay
self.temp_dir = temp_dir
self.checkpoint_freq = checkpoint_freq
self.print_freq = print_freq
self.use_progress_bar = use_progress_bar
self.test_mode = test_mode
self.metrics = ['cross_entropy', 'acc']
self.monitor_metric = 'acc'
self.monitor_direction = 'higher'
    def fit(self, model, train_loader, val_loader, test_loader, device, tensorboard_logger=None, logger_prefix=''):
        """Train `model` until self.n_epoch, checkpointing and logging.

        Resumes from the latest checkpoint in temp_dir (if any), then loops
        epochs: optimize, optionally print/log metrics, optionally pickle a
        checkpoint. Afterwards the best checkpoint (by monitor_metric) is
        loaded back into `model`.

        Returns:
            dict: metric name -> list of per-epoch values (non-empty only).
        """
        self.start_time = time.time()
        n_epoch_done = 0
        model.float()
        model.to(device)
        optimizer = self.get_optimizer(model)
        # NOTE(review): load_from_checkpoint / optimize_epoch are defined
        # later in this class (outside this view).
        self.load_from_checkpoint(model, optimizer, device)
        while self.epoch_idx < self.n_epoch:
            # optimize one epoch
            self.optimize_epoch(model,
                                optimizer,
                                train_loader,
                                val_loader,
                                test_loader,
                                device)
            n_epoch_done += 1
            if self.print_freq > 0 and (self.epoch_idx + 1) % self.print_freq == 0:
                self.print_metrics(n_epoch_done)
                self.update_tensorboard(tensorboard_logger, logger_prefix)
            # save checkpoint
            if self.checkpoint_freq > 0 and (self.epoch_idx + 1) % self.checkpoint_freq == 0:
                checkpoint = {'epoch_idx': self.epoch_idx,
                              'model_state_dict': model.state_dict(),
                              'optimizer_state_dict': optimizer.state_dict(),
                              'metric_values': self.metric_values}
                checkpoint_file = os.path.join(self.temp_dir, 'checkpoint_{:09d}.pickle'.format(self.epoch_idx))
                fid = open(checkpoint_file, 'wb')
                pickle.dump(checkpoint, fid)
                fid.close()
            self.epoch_idx += 1
        # load the best model based on validation performance if exist, or train performance
        self.load_best(model)
        # return non-empty performance metrics
        performance = {}
        for metric in self.metric_values.keys():
            if len(self.metric_values[metric]) > 0:
                performance[metric] = self.metric_values[metric]
        return performance
    def load_best(self, model):
        """Load into `model` the checkpointed weights with the best monitored
        metric (val_<metric> when recorded, else train_<metric>)."""
        # Seed the comparison with the current (latest) metric value, so the
        # in-memory weights win ties against older checkpoints.
        if len(self.metric_values['val_' + self.monitor_metric]) > 0:
            best_value = self.metric_values['val_' + self.monitor_metric][-1]
        else:
            best_value = self.metric_values['train_' + self.monitor_metric][-1]
        state_dict = model.state_dict()
        checkpoint_files = [os.path.join(self.temp_dir, f) for f in os.listdir(self.temp_dir)
                            if f.startswith('checkpoint_')]
        for filename in checkpoint_files:
            # NOTE(review): checkpoints are unpickled from temp_dir; this
            # assumes the directory is trusted (pickle executes arbitrary code).
            fid = open(filename, 'rb')
            checkpoint = pickle.load(fid)
            fid.close()
            if len(checkpoint['metric_values']['val_' + self.monitor_metric]) > 0:
                metric_value = checkpoint['metric_values']['val_' + self.monitor_metric][-1]
            else:
                metric_value = checkpoint['metric_values']['train_' + self.monitor_metric][-1]
            if (self.monitor_direction == 'lower' and metric_value < best_value) or\
               (self.monitor_direction == 'higher' and metric_value > best_value):
                best_value = metric_value
                state_dict = checkpoint['model_state_dict']
        model.load_state_dict(state_dict)
    def get_optimizer(self, model):
        """Build the optimizer ('adam' or 'sgd') with the scheduled learning
        rate for the current epoch.

        If the model implements get_parameters() -> (bn_params, other_params),
        batch-norm parameters get weight_decay 0 and the rest get
        self.weight_decay; otherwise all parameters share self.weight_decay.
        """
        assert self.optimizer in ['adam', 'sgd'], 'Given optimizer "{}" is not supported'.format(self.optimizer)
        # get current learning rate
        lr = self.lr_scheduler(self.n_epoch, self.epoch_idx)
        # get separate batchnorm parameters and other parameters
        # if .get_parameters() is implemented in the model
        if hasattr(model, 'get_parameters') and callable(model.get_parameters):
            bn_params, other_params = model.get_parameters()
            if len(bn_params) > 0:
                params = [{'params': bn_params, 'weight_decay': 0},
                          {'params': other_params, 'weight_decay': self.weight_decay}]
            else:
                params = [{'params': other_params, 'weight_decay': self.weight_decay}]
            if self.optimizer == 'adam':
                optimizer = optim.Adam(params, lr=lr)
            else:
                optimizer = optim.SGD(params, lr=lr, momentum=0.9, nesterov=True)
        else:
            if self.optimizer == 'adam':
                optimizer = optim.Adam(model.parameters(), weight_decay=self.weight_decay, lr=lr)
            else:
                optimizer = optim.SGD(model.parameters(),
                                      weight_decay=self.weight_decay,
                                      lr=lr,
                                      momentum=0.9,
                                      nesterov=True)
        return optimizer
def eval(self, model, loader, device):
if loader is None:
return {}
model.eval()
L = torch.nn.CrossEntropyLoss()
n_correct = 0
n_sample = 0
loss = 0
if self.test_mode:
total_minibatch = min(self.n_test_minibatch, len(loader))
else:
total_minibatch = len(loader)
with torch.no_grad():
for minibatch_idx, (inputs, targets) in enumerate(loader):
if minibatch_idx == total_minibatch:
break
inputs = inputs.to(device)
targets = targets.to(device).long().flatten()
predictions = model(inputs)
n_sample += inputs.size(0)
loss += L(predictions, targets).item()
n_correct += (predictions.argmax(dim=-1) == targets).sum().item()
metrics = {'cross_entropy': loss / n_sample,
'acc': n_correct / n_sample}
return metrics
def update_lr(self,
optimizer):
# update learning rate using lr_scheduler
lr = self.lr_scheduler(self.n_epoch, self.epoch_idx)
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def update_loop(self, model, loader, optimizer, device):
L = torch.nn.CrossEntropyLoss()
if self.test_mode:
total_minibatch = min(self.n_test_minibatch, len(loader))
else:
total_minibatch = len(loader)
minibatch_idx = 0
if self.use_progress_bar:
loader = tqdm(loader, desc='#Epoch {}/{}: '.format(self.epoch_idx + 1, self.n_epoch), ncols=80, ascii=True)
else:
loader = loader
for inputs, targets in loader:
optimizer.zero_grad()
self.update_lr(optimizer)
inputs = inputs.to(device)
targets = targets.to(device).long().flatten()
predictions = model(inputs)
loss = L(predictions, targets)
loss.backward()
optimizer.step()
minibatch_idx += 1
if minibatch_idx > total_minibatch:
break
def update_metrics(self, train_metrics, val_metrics, test_metrics):
for metric in train_metrics.keys():
if 'train_' + metric in self.metric_values.keys():
self.metric_values['train_' + metric].append(train_metrics[metric])
else:
self.metric_values['train_' + metric] = [train_metrics[metric]]
for metric in val_metrics.keys():
if 'val_' + metric in self.metric_values.keys():
self.metric_values['val_' + metric].append(val_metrics[metric])
else:
self.metric_values['val_' + metric] = [val_metrics[metric]]
for metric in test_metrics.keys():
if 'test_' + metric in self.metric_values.keys():
self.metric_values['test_' + metric].append(test_metrics[metric])
else:
self.metric_values['test_' + metric] = [test_metrics[metric]]
def print_metrics(self, n_epoch_done):
start_time = self.start_time
current_time = time.time()
n_epoch_remain = self.n_epoch - n_epoch_done
# compute the time taken
time_taken = current_time - start_time
hour_taken = int(time_taken / 3600)
minute_taken = int((time_taken - hour_taken * 3600) / 60)
second_taken = int((time_taken - hour_taken * 3600 - minute_taken * 60))
# compute estimated time remain
time_left = (time_taken / n_epoch_done) * n_epoch_remain
hour_left = int(time_left / 3600)
minute_left = int((time_left - hour_left * 3600) / 60)
second_left = int((time_left - hour_left * 3600 - minute_left * 60))
msg = '#Epoch {}/{}, '.format(self.epoch_idx + 1, self.n_epoch) +\
'total time taken: {:d}:{:02d}:{:02d}, '.format(hour_taken, minute_taken, second_taken) +\
'time remain: {:d}:{:02d}:{:02d}'.format(hour_left, minute_left, second_left)
print(msg)
names = list(self.metric_values.keys())
names.sort()
for name in names:
if len(self.metric_values[name]) > 0:
value = self.metric_values[name][-1]
if isinstance(value, (int, float)):
print('--- {}: {:.6f}'.format(name, value))
def update_tensorboard(self, tensorboard_logger, logger_prefix):
names = list(self.metric_values.keys())
names.sort()
if tensorboard_logger is not None:
for name in names:
if len(self.metric_values[name]) > 0:
value = self.metric_values[name][-1]
if isinstance(value, (int, float)):
tensorboard_logger.add_scalar(tag='{}/{}'.format(logger_prefix, name),
scalar_value=value,
global_step=self.epoch_idx + 1)
tensorboard_logger.flush()
    def optimize_epoch(self,
                       model,
                       optimizer,
                       train_loader,
                       val_loader,
                       test_loader,
                       device):
        """Run one training epoch, then evaluate on train/val/test splits.

        The freshly computed metrics are appended to `self.metric_values`
        via `update_metrics`.  `val_loader`/`test_loader` may be None
        (eval returns {} in that case).
        """
        model.train()
        # perform parameter updates
        self.update_loop(model, train_loader, optimizer, device)
        # evaluate
        train_metrics = self.eval(model, train_loader, device)
        val_metrics = self.eval(model, val_loader, device)
        test_metrics = self.eval(model, test_loader, device)
        # append current performance to performance list
        self.update_metrics(train_metrics, val_metrics, test_metrics)
def load_from_checkpoint(self, model, optimizer, device):
if self.epoch_idx == -1:
# load from latest checkpoint
files = [os.path.join(self.temp_dir, f) for f in os.listdir(self.temp_dir)
if f.startswith('checkpoint_')]
files.sort()
if len(files) > 0:
fid = open(files[-1], 'rb')
checkpoint = pickle.load(fid)
fid.close()
else:
checkpoint = None
elif self.epoch_idx == 0:
# train from scratch
checkpoint = None
else:
# load specific checkpoint
filename = os.path.join(self.temp_dir, 'checkpoint_{:9d}.pickle'.format(self.epoch_idx))
assert os.path.exists(filename),\
'checkpoint "{}" does not exist'.format(filename)
fid = open(filename, 'rb')
checkpoint = pickle.load(fid)
fid.close()
if checkpoint is None:
self.epoch_idx = 0
self.metric_values = {}
prefixes = ['train', 'val', 'test']
for prefix in prefixes:
for m in self.metrics:
self.metric_values['{}_{}'.format(prefix, m)] = []
else:
# set the epoch index and previous metric values
self.epoch_idx = checkpoint['epoch_idx'] + 1
self.metric_values = checkpoint['metric_values']
# load model state dict
model.load_state_dict(checkpoint['model_state_dict'])
# load optimizer state dict
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
for state in optimizer.state.values():
for k, v in state.items():
if torch.is_tensor(v):
state[k] = v.to(device)
class AutoRegressionTrainer(ClassifierTrainer):
    def __init__(self,
                 n_epoch,
                 epoch_idx,
                 lr_scheduler,
                 optimizer,
                 weight_decay,
                 temp_dir,
                 checkpoint_freq=1,
                 print_freq=1,
                 use_progress_bar=False,
                 test_mode=False):
        """Trainer for regression targets.

        Identical to ClassifierTrainer except that it tracks and monitors
        mean squared error, where lower is better.
        """
        super().__init__(n_epoch, epoch_idx, lr_scheduler, optimizer,
                         weight_decay, temp_dir, checkpoint_freq, print_freq,
                         use_progress_bar, test_mode)
        self.metrics = ['mean_squared_error']
        self.monitor_metric = 'mean_squared_error'
        self.monitor_direction = 'lower'
def update_loop(self, model, loader, optimizer, device):
L = torch.nn.MSELoss()
if self.test_mode:
total_minibatch = min(self.n_test_minibatch, len(loader))
else:
total_minibatch = len(loader)
minibatch_idx = 0
if self.use_progress_bar:
| |
import hail as hl
from ukbb_common.resources.generic import *
# Allele-count thresholds used for binning/filtering association results.
AC_CUTOFFS = list(range(0, 6)) + [10, 20, 50, 100]
# Allele-frequency thresholds: a 1/2/5 ladder from 1e-4 up to 0.5, plus 0 and 0.99.
AF_CUTOFFS = sorted([0] + [y * 10 ** x for y in (1, 2, 5) for x in range(-4, 0)] + [0.99])
# Conventional genome-wide significance threshold.
SIG_THRESHOLD = 5e-8
def format_pheno_dir(pheno):
    """Make a phenotype name filesystem-safe by replacing '/' with '_'."""
    return '_'.join(pheno.split('/'))
def get_top_p_from_mt(mt, p, return_ht = True):
    """Annotate each row of `mt` with its lowest-`p` entry (plus col fields).

    `p` is an entry expression; missing and NaN values are excluded.  When
    `return_ht` is True, return the rows as a Table with the top-hit
    fields flattened in; otherwise return the annotated MatrixTable.
    """
    # take the single entry (with its column fields merged in) ordered by p
    top_p_hit = hl.agg.filter(hl.is_defined(p) & ~hl.is_nan(p),
                              hl.agg.take(mt.entry.annotate(**mt.col), 1, ordering=p))
    mt = mt.annotate_rows(top_p=hl.or_missing(hl.len(top_p_hit) > 0, top_p_hit[0]))
    if return_ht:
        ht = mt.rows()
        return ht.transmute(**ht.top_p)
    else:
        return mt
def get_vep_formatted_data(ukb_vep_path: str, legacy_annotations: bool = False):
    """Build a (locus, alleles)-keyed table of (gene, annotation) from VEP.

    The worst-consequence-by-gene records are exploded, so a variant
    overlapping several genes yields one row per gene.
    """
    from ukb_common.utils.annotations import annotation_case_builder, annotation_case_builder_ukb_legacy
    from gnomad.utils.vep import process_consequences
    ht = hl.read_table(ukb_vep_path)
    ht = process_consequences(ht)
    ht = ht.explode(ht.vep.worst_csq_by_gene_canonical)
    # the legacy flag selects the older annotation-collapsing scheme
    annotation_func = annotation_case_builder_ukb_legacy if legacy_annotations else annotation_case_builder
    return ht.select(
        gene=ht.vep.worst_csq_by_gene_canonical.gene_symbol,
        annotation=annotation_func(ht.vep.worst_csq_by_gene_canonical))
def load_variant_data(directory: str, pheno_key_dict, ukb_vep_path: str, extension: str = 'single.txt',
                      n_cases: int = -1, n_controls: int = -1, heritability: float = -1.0,
                      saige_version: str = 'NA', inv_normalized: str = 'NA', overwrite: bool = False, legacy_annotations: bool = False,
                      num_partitions: int = 1000):
    """Import SAIGE single-variant result files into a keyed Hail Table.

    Rows are keyed by (locus, alleles, *pheno_key_dict); sentinel
    arguments (-1 / 'NA') become missing globals.  VEP gene/annotation
    columns are joined in, and the result is checkpointed to
    f'{directory}/variant_results.ht'.
    """
    output_ht_path = f'{directory}/variant_results.ht'
    ht = hl.import_table(f'{directory}/*.{extension}', delimiter=' ', impute=True)
    print(f'Loading: {directory}/*.{extension} ...')
    # marker ids look like '<locus>_<ref>/<alt>'
    marker_id_col = 'markerID' if extension == 'single.txt' else 'SNPID'
    locus_alleles = ht[marker_id_col].split('_')
    # convert sentinel values to hail missing before storing as globals
    if n_cases == -1: n_cases = hl.null(hl.tint)
    if n_controls == -1: n_controls = hl.null(hl.tint)
    if heritability == -1.0: heritability = hl.null(hl.tfloat)
    if saige_version == 'NA': saige_version = hl.null(hl.tstr)
    if inv_normalized == 'NA': inv_normalized = hl.null(hl.tstr)
    ht = ht.key_by(locus=hl.parse_locus(locus_alleles[0]), alleles=locus_alleles[1].split('/'),
                   **pheno_key_dict).distinct().naive_coalesce(num_partitions)
    if marker_id_col == 'SNPID':
        # these columns are redundant with the parsed key for SNPID files
        ht = ht.drop('CHR', 'POS', 'rsid', 'Allele1', 'Allele2')
    ht = ht.transmute(Pvalue=ht['p.value']).annotate_globals(
        n_cases=n_cases, n_controls=n_controls, heritability=heritability, saige_version=saige_version, inv_normalized=inv_normalized)
    ht = ht.drop('varT', 'varTstar', 'N', 'Tstat')
    ht = ht.annotate(**get_vep_formatted_data(ukb_vep_path, legacy_annotations=legacy_annotations)[
        hl.struct(locus=ht.locus, alleles=ht.alleles)])  # TODO: fix this for variants that overlap multiple genes
    ht = ht.checkpoint(output_ht_path, overwrite=overwrite, _read_if_exists=not overwrite).drop('n_cases', 'n_controls', 'heritability')
    # mt = ht.to_matrix_table(['locus', 'alleles'], list(pheno_key_dict.keys()),
    #                         [marker_id_col, 'gene', 'annotation'], []).annotate_cols(
    #     n_cases=n_cases, n_controls=n_controls, heritability=heritability)
    # mt.checkpoint(output_ht_path.replace('.ht', '.mt'), overwrite=overwrite, _read_if_exists=not overwrite)
def load_gene_data(directory: str, pheno_key_dict, gene_ht_map_path: str,
                   n_cases: int = -1, n_controls: int = -1, heritability: float = -1.0, saige_version: str = 'NA',
                   inv_normalized: str = 'NA', overwrite: bool = False):
    """Import SAIGE gene-based (SKAT/Burden) result files into a Hail Table.

    Rows are keyed by (gene_id, gene_symbol, annotation, *pheno_key_dict);
    sentinel arguments (-1 / 'NA') become missing globals.  Each gene's
    interval is joined in from `gene_ht_map_path`, and the result is
    checkpointed to f'{directory}/gene_results.ht'.
    """
    output_ht_path = f'{directory}/gene_results.ht'
    print(f'Loading: {directory}/*.gene.txt ...')
    # force numeric types for columns import_table may not impute correctly
    types = {f'Nmarker_MACCate_{i}': hl.tint32 for i in range(1, 9)}
    types.update({x: hl.tfloat64 for x in ('Pvalue', 'Pvalue_Burden', 'Pvalue_SKAT', 'Pvalue_skato_NA', 'Pvalue_burden_NA', 'Pvalue_skat_NA')})
    ht = hl.import_table(f'{directory}/*.gene.txt', delimiter=' ', impute=True, types=types)
    # convert sentinel values to hail missing before storing as globals
    if n_cases == -1: n_cases = hl.null(hl.tint)
    if n_controls == -1: n_controls = hl.null(hl.tint)
    if heritability == -1.0: heritability = hl.null(hl.tfloat)
    if saige_version == 'NA': saige_version = hl.null(hl.tstr)
    if inv_normalized == 'NA': inv_normalized = hl.null(hl.tstr)
    # the Gene column packs '<gene_id>_<gene_symbol>_<annotation>'
    fields = ht.Gene.split('_')
    gene_ht = hl.read_table(gene_ht_map_path).select('interval').distinct()
    ht = ht.key_by(gene_id=fields[0], gene_symbol=fields[1], annotation=fields[2],
                   **pheno_key_dict).drop('Gene').naive_coalesce(10).annotate_globals(
        n_cases=n_cases, n_controls=n_controls, heritability=heritability, saige_version=saige_version, inv_normalized=inv_normalized)
    # total_variants = sum of the per-MAC-category marker counts
    ht = ht.annotate(total_variants=hl.sum([v for k, v in list(ht.row_value.items()) if 'Nmarker' in k]),
                     interval=gene_ht.key_by('gene_id')[ht.gene_id].interval)
    ht = ht.checkpoint(output_ht_path, overwrite=overwrite, _read_if_exists=not overwrite).drop('n_cases', 'n_controls')
    # mt = ht.to_matrix_table(['gene_symbol', 'gene_id', 'annotation', 'interval'],
    #                         list(pheno_key_dict.keys()), [], []).annotate_cols(
    #     n_cases=n_cases, n_controls=n_controls, heritability=heritability)
    # mt.checkpoint(output_ht_path.replace('.ht', '.mt'), overwrite=overwrite, _read_if_exists=not overwrite)
def get_cases_and_controls_from_log(log_format):
    """Parse case/control counts from SAIGE per-chromosome log files.

    :param log_format: format string with a ``{chrom}`` placeholder, e.g.
        'gs://path/to/result_chr{chrom}_000000001.variant.log'
    :return: (cases, controls); either may remain -1 if not found.
    """
    cases = controls = -1
    # NOTE(review): starts at chromosome 10, not 1 — presumably any single
    # chromosome's log suffices and these are more likely to exist; confirm
    # against the pipeline that writes these logs.
    for chrom in range(10, 23):
        try:
            with hl.hadoop_open(log_format.format(chrom=chrom)) as f:
                for line in f:
                    line = line.strip()
                    if line.startswith('Analyzing'):
                        fields = line.split()
                        if len(fields) == 6:
                            try:
                                cases = int(fields[1])
                                controls = int(fields[4])
                                break
                            except ValueError:
                                # `warning`, not the deprecated `warn` alias
                                logger.warning(f'Could not load number of cases or controls from {line}.')
                    elif line.endswith('samples were used in fitting the NULL glmm model and are found in sample file') or \
                            line.endswith('samples have been used to fit the glmm null model'):
                        # This is ahead of the case/control count line ("Analyzing ...") above so this should be ok
                        fields = line.split()
                        try:
                            cases = int(fields[0])
                        except ValueError:
                            logger.warning(f'Could not load number of cases or controls from {line}.')
            return cases, controls
        except Exception:
            # This chromosome's log may be missing/unreadable; try the next.
            # (Narrowed from a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit.)
            pass
    return cases, controls
def get_heritability_from_log(log_file, quantitative_trait: bool = False):
    """Parse a heritability estimate from a SAIGE null-GLMM log.

    Looks for the 'Final ...' line with 4 fields; returns -1 when no
    estimate could be parsed.
    """
    import math
    heritability = -1
    with hl.hadoop_open(log_file) as f:
        for line in f:
            if line.startswith('Final'):
                fields = line.strip().split()
                if len(fields) == 4:
                    try:
                        tau = float(fields[2])
                        if quantitative_trait:
                            tau1 = float(fields[1])
                            heritability = tau / (tau1 + tau)
                        else:
                            # binary trait: logistic-link residual variance pi^2/3
                            heritability = tau / (tau + math.pi ** 2 / 3)
                        break
                    except Exception:
                        # narrowed from a bare `except:`; covers float parse
                        # failures and a zero denominator alike
                        logger.warning(f'Could not load heritability from {line}.')
    return heritability
def get_saige_version_from_log(null_glmm_log):
    """Extract the SAIGE package version string from a null-GLMM log.

    The version appears on the line following 'other attached packages:'.
    Returns 'NA' when it cannot be found.
    """
    version = 'NA'
    with hl.hadoop_open(null_glmm_log) as f:
        for line in f:
            if line.startswith('other attached packages:'):
                # keep line2 bound even if readline() itself raises,
                # otherwise the error message below hits a NameError
                line2 = ''
                try:
                    line2 = f.readline()
                    packages = line2.strip().split()
                    version = [x for x in packages if 'SAIGE' in x][0]
                except Exception:
                    logger.warning(f'Could not load version number from {line2} in {null_glmm_log}.')
    return version
def get_inverse_normalize_status(null_glmm_log):
    """Read the inverse-normalization flag from a SAIGE null-GLMM log.

    The value follows the '$invNormalize' marker line.  Returns the
    capitalized status string, or 'Unknown' if it cannot be parsed.
    """
    status = 'Unknown'
    with hl.hadoop_open(null_glmm_log) as f:
        for line in f:
            if line.startswith('$invNormalize'):
                try:
                    status = f.readline().strip().split()[1]
                except Exception:
                    # narrowed from a bare `except:` (readline/index failures)
                    logger.warning(f'Could not load inv_norm status from {line} in {null_glmm_log}.')
    return status.capitalize()
def get_saige_timing_grep(all_files):
    """Yield ('chrom:pos', seconds) timing pairs grepped from SAIGE logs.

    Greps all files for 'Analysis took' lines; exits silently if the grep
    itself fails with a hail FatalError.
    """
    try:
        grep_results = hl.grep('Analysis took', all_files, max_count=int(1e8), show=False)
    except hl.utils.java.FatalError:
        return
    if sum([len(x) for x in grep_results.values()]) > 5e7:
        logger.warning(f'Got more than 5e7 values in {all_files[0]}, etc. Check this!')
    for log, result in grep_results.items():
        try:
            # third whitespace-separated token is the elapsed seconds
            timing = float(result[0].split()[2])
        except:
            logger.warning(f'Could not load timing from {result} in {log}.')
            continue
        # file names end in ..._<chrom>_<pos>.<ext>; recover both parts
        chrom, pos = log.rsplit('.', 2)[0].rsplit('_', 2)[1:3]
        yield f'{chrom}:{pos}', timing
def get_null_model_timing(null_glmm_log):
    """Extract (cpu, wall) timing strings from a SAIGE null-GLMM log.

    Timings appear two lines after the 't_end - t_begin' marker.  Returns
    ('NA', 'NA') when they cannot be parsed.
    """
    cpu = wall = 'NA'
    with hl.hadoop_open(null_glmm_log) as f:
        for line in f:
            if line.startswith('t_end - t_begin'):
                # keep line2 bound even if readline() raises, otherwise the
                # error message below hits a NameError
                line2 = ''
                try:
                    f.readline()  # skip the header row between marker and values
                    line2 = f.readline()
                    cpu, _, wall = line2.strip().split()
                except Exception:
                    logger.warning(f'Could not load null model timings from {line2} in {null_glmm_log}.')
    return cpu, wall
def union_mts_by_tree(all_mts, temp_dir, debug=False):
    """Union many MatrixTables column-wise in two levels (~sqrt(n) fan-in).

    Chunks of about sqrt(n) MTs are unioned and checkpointed to
    `temp_dir`, then the checkpointed intermediates are unioned.  With
    `debug`, the offending MTs are described before the exception is
    re-raised.
    """
    chunk_size = int(len(all_mts) ** 0.5) + 1
    outer_mts = []
    for i in range(chunk_size):
        if i * chunk_size >= len(all_mts): break
        mt = all_mts[i * chunk_size]
        for j in range(1, chunk_size):
            if i * chunk_size + j >= len(all_mts): break
            try:
                mt = mt.union_cols(all_mts[i * chunk_size + j], row_join_type='outer')
            except:
                if debug:
                    print(f'problem with {i * chunk_size} and {i * chunk_size + j}')
                    mt.describe()
                    all_mts[i * chunk_size + j].describe()
                raise
        # checkpoint each chunk so the final union reads materialized data
        outer_mts.append(mt.checkpoint(f'{temp_dir}/temp_output_{i}.mt', overwrite=True))
    mt = outer_mts[0]
    for next_mt in outer_mts[1:]:
        mt = mt.union_cols(next_mt, row_join_type='outer')
    return mt
def union_hts_by_tree(all_hts, temp_dir, debug=False, inner_mode = 'overwrite'):
    """Union many Tables in two levels (~sqrt(n) fan-in per union).

    `all_hts` may hold Tables or paths (paths are read on demand).  Each
    chunk union is checkpointed to `temp_dir`, passing `inner_mode` as the
    checkpoint keyword (e.g. 'overwrite' or '_read_if_exists').  With
    `debug`, the chunk's tables are described before re-raising.
    """
    chunk_size = int(len(all_hts) ** 0.5) + 1
    outer_hts = []
    for i in range(chunk_size):
        if i * chunk_size >= len(all_hts): break
        hts = all_hts[i * chunk_size:(i + 1) * chunk_size]
        try:
            if isinstance(hts[0], str):
                hts = list(map(lambda x: hl.read_table(x), hts))
            ht = hts[0].union(*hts[1:], unify=True)
        except:
            if debug:
                print(f'problem in range {i * chunk_size}-{i * chunk_size + chunk_size}')
                _ = [ht.describe() for ht in hts]
            raise
        outer_hts.append(ht.checkpoint(f'{temp_dir}/temp_output_{i}.ht', **{inner_mode: True}))
    return outer_hts[0].union(*outer_hts[1:], unify=True)
def get_files_in_parent_directory(parent_dir, fname: str = 'variant_results.ht'):
    """Collect `fname` paths under each subdirectory that finished writing.

    `parent_dir` is an iterable of listing entries (dicts with 'path' and
    'is_dir'); a result is included only when its `fname` directory
    contains a _SUCCESS marker.
    """
    return [f'{entry["path"]}/{fname}'
            for entry in parent_dir
            if entry['is_dir'] and hl.hadoop_exists(f'{entry["path"]}/{fname}/_SUCCESS')]
def union_ht(all_hts, col_fields, pheno_dict, temp_dir, inner_mode: str = 'overwrite'):
    """Union per-phenotype Tables and annotate rows from `pheno_dict`.

    `pheno_dict` is keyed by the `col_fields` subset of each row's key.
    """
    print(f'Unioning {len(all_hts)} HTs...')
    ht = union_hts_by_tree(all_hts, temp_dir, inner_mode=inner_mode)
    return ht.annotate(**pheno_dict[ht.key.select(*col_fields)])
def pull_out_col_keys(all_hts, row_keys, col_keys):
    """Move `col_keys` from each table's key into its globals.

    For every HT, the `col_keys` values are read from the first row
    (they are assumed constant per table — TODO confirm) and stored as
    globals; the table is then re-keyed by `row_keys` only.
    """
    rekeyed_hts = []
    for ht in all_hts:
        ht2 = ht.head(1)
        # capture the col-key values lazily (no local materialization)
        glob = ht2.aggregate(hl.agg.take(hl.struct(**{x: ht2[x] for x in col_keys}), 1)[0], _localize=False)
        rekeyed_hts.append(ht.key_by(*row_keys).drop(*col_keys).annotate_globals(**glob))
    return rekeyed_hts
def join_pheno_hts_to_mt(all_hts, row_keys, col_keys, temp_dir = None, inner_mode: str = 'overwrite',
                         repartition_final: int = None):
    """Combine per-phenotype HTs into one MatrixTable (phenotypes as cols).

    Col-key fields are first moved into globals via `pull_out_col_keys`,
    then the tables are multi-way zip-joined by `mwzj_hts_by_tree`
    (expected to come from a star import in this module).
    """
    rekeyed_hts = pull_out_col_keys(all_hts, row_keys, col_keys)
    mt = mwzj_hts_by_tree(rekeyed_hts, temp_dir, col_keys, debug=True,
                          inner_mode=inner_mode, repartition_final=repartition_final)
    print(f'Unioned MTs...')
    return mt
def unify_saige_ht_schema(ht, patch_case_control_count: str = ''):
    """Coerce a SAIGE result Table to a single common field set and order.

    Older result tables lack the 'AF.Cases'/'AF.Controls' (and related)
    fields; these are inserted as missing so all tables can be unioned.

    :param Table ht:
    :param str patch_case_control_count: Path to file (hack to get cases and controls back if loading later)
    :return:
    :rtype: Table
    """
    # sanity check: the first row's annotation is expected to be missing here
    # — NOTE(review): reason not evident from this function alone; confirm.
    assert ht.head(1).annotation.collect()[0] is None, f'failed at {patch_case_control_count}'
    if 'AF.Cases' not in list(ht.row):
        # old schema: add the newer fields as missing values
        ht = ht.select('AC_Allele2', 'AF_Allele2', 'imputationInfo', 'N', 'BETA', 'SE', 'Tstat',
                       **{'p.value.NA': hl.null(hl.tfloat64), 'Is.SPA.converge': hl.null(hl.tint32),
                          'varT': ht.varT, 'varTstar': ht.varTstar, 'AF.Cases': hl.null(hl.tfloat64),
                          'AF.Controls': hl.null(hl.tfloat64), 'Pvalue': ht.Pvalue,
                          'gene': hl.or_else(ht.gene, ''), 'annotation': hl.or_else(ht.annotation, '')})
    else:
        # new schema: just reorder/select into the canonical field order
        ht = ht.select('AC_Allele2', 'AF_Allele2', 'imputationInfo', 'N', 'BETA', 'SE', 'Tstat',
                       'p.value.NA', 'Is.SPA.converge', 'varT', 'varTstar', 'AF.Cases',
                       'AF.Controls', 'Pvalue', gene=hl.or_else(ht.gene, ''), annotation=hl.or_else(ht.annotation, ''))
    ht2 = ht.head(1)
    pheno_key_dict = dict(ht2.aggregate(hl.agg.take(ht2.key, 1)[0]))
    if patch_case_control_count:
        # if n_cases is missing, re-derive case/control counts from the
        # original SAIGE log for this phenotype
        if not ht.n_cases.collect()[0]:
            directory, tpc, _ = patch_case_control_count.rsplit('/', 2)
            pheno_results_dir = get_pheno_output_path(directory, pheno_key_dict, '', legacy=True)
            prefix = get_results_prefix(pheno_results_dir, pheno_key_dict, '{chrom}', 1, legacy=True)
            saige_log = f'{prefix}.variant.log'
            cases, controls = get_cases_and_controls_from_log(saige_log)
            print(f'Patched pheno: {tpc}. Got {cases} cases and {controls} controls.')
            if cases == -1: cases = hl.null(hl.tint)
            if controls == -1: controls = hl.null(hl.tint)
            ht = ht.annotate_globals(n_cases=cases, n_controls=controls)
    # ensure optional globals exist so unions have a consistent schema
    if 'heritability' not in list(ht.globals):
        ht = ht.annotate_globals(heritability=hl.null(hl.tfloat64))
    if 'saige_version' not in list(ht.globals):
        ht = ht.annotate_globals(saige_version=hl.null(hl.tstr))
    return ht
def stringify_pheno_key_dict(pheno_key_dict, format_phenocode_field: bool = False, delimiter='-'):
    """Join the pheno-key fields into a single delimited string.

    Fields follow PHENO_KEY_FIELDS order (missing ones are skipped); the
    phenocode field is made filesystem-safe when
    `format_phenocode_field` is set.
    """
    parts = []
    for field in PHENO_KEY_FIELDS:
        if field not in pheno_key_dict:
            continue
        value = pheno_key_dict[field]
        if field == 'phenocode' and format_phenocode_field:
            value = format_pheno_dir(value)
        parts.append(value)
    return delimiter.join(parts)
def get_results_prefix(pheno_results_dir, pheno_key_dict, chromosome, start_pos, legacy: bool = False):
prefix = f'{pheno_results_dir}/result_'
if legacy:
prefix | |
<reponame>gigglearrows/anniesbot
import json
import time
import logging
from collections import UserDict
import argparse
import datetime
import re
from pajbot.tbutil import find
from pajbot.models.db import DBManager, Base
from pajbot.models.action import ActionParser, RawFuncAction, FuncAction
from sqlalchemy import orm
from sqlalchemy.orm import relationship, joinedload
from sqlalchemy import Column, Integer, Boolean, DateTime, ForeignKey, String
from sqlalchemy.dialects.mysql import TEXT
log = logging.getLogger('pajbot')
def parse_command_for_web(alias, command, list):
    """Prepare `command` for web display and append it to `list`.

    Parses the command's JSON description (rendering any markdown),
    resolves its main alias, and recurses into multi-command children.
    Commands already in `list` or marked hidden are skipped.

    NOTE(review): parameter name `list` shadows the builtin; kept so
    existing positional/keyword callers are unaffected.
    """
    import markdown
    from flask import Markup
    if command in list:
        return

    command.json_description = None
    command.parsed_description = ''

    try:
        if command.description is not None:
            command.json_description = json.loads(command.description)
            if 'description' in command.json_description:
                command.parsed_description = Markup(markdown.markdown(command.json_description['description']))
            if command.json_description.get('hidden', False) is True:
                return
    except ValueError:
        # Invalid JSON — fall through and treat the description as plain text
        pass
    except Exception:
        # narrowed from a bare `except:`; still logged and skipped
        log.warning(command.json_description)
        log.exception('Unhandled exception BabyRage')
        return

    if command.command is None:
        command.command = alias

    if command.action is not None and command.action.type == 'multi':
        if command.command is not None:
            command.main_alias = command.command.split('|')[0]
        for inner_alias, inner_command in command.action.commands.items():
            parse_command_for_web(alias if command.command is None else command.main_alias + ' ' + inner_alias, inner_command, list)
    else:
        # strip non-word characters to build a stable resolve/anchor string
        # (raw string: '[^\w]' previously relied on '\w' not being an escape)
        test = re.compile(r'[^\w]')
        first_alias = command.command.split('|')[0]
        command.resolve_string = test.sub('', first_alias.replace(' ', '_'))
        command.main_alias = '!' + first_alias
        if len(command.parsed_description) == 0:
            if command.action is not None:
                if command.action.type == 'message':
                    command.parsed_description = command.action.response
                    if len(command.action.response) == 0:
                        return
            if command.description is not None:
                command.parsed_description = command.description
        list.append(command)
class CommandData(Base):
    """Per-command usage statistics, stored 1:1 with tb_command rows."""
    __tablename__ = 'tb_command_data'

    # Primary key doubles as the FK to the owning command, hence no
    # autoincrement.
    command_id = Column(Integer, ForeignKey('tb_command.id'), primary_key=True, autoincrement=False)
    num_uses = Column(Integer, nullable=False, default=0)

    def __init__(self, command_id, **options):
        """Create stats for `command_id`; `options` may preset num_uses."""
        self.command_id = command_id

        self.num_uses = 0

        self.set(**options)

    def set(self, **options):
        """Apply keyword options, keeping current values as defaults."""
        self.num_uses = options.get('num_uses', self.num_uses)
class CommandExample(Base):
    """A usage example for a command: a title, a scripted chat transcript
    (`chat`) and a description, stored in tb_command_example."""
    __tablename__ = 'tb_command_example'

    id = Column(Integer, primary_key=True)
    command_id = Column(Integer, ForeignKey('tb_command.id'), nullable=False)
    title = Column(String(256), nullable=False)
    chat = Column(TEXT, nullable=False)
    description = Column(String(512), nullable=False)

    def __init__(self, command_id, title, chat='', description=''):
        self.id = None
        self.command_id = command_id
        self.title = title
        self.chat = chat
        self.description = description
        self.chat_messages = []

    @orm.reconstructor
    def init_on_load(self):
        # rebuild the parsed transcript whenever SQLAlchemy loads a row
        self.parse()

    def add_chat_message(self, type, message, user_from, user_to=None):
        """Append one message dict to the parsed transcript.

        `type` (shadows the builtin; kept for compatibility) is the message
        kind, e.g. 'say' or 'whisper'; `user_to` only applies to whispers.
        """
        chat_message = {
            'source': {
                'type': type,
                'from': user_from,
                'to': user_to
            },
            'message': message
        }

        self.chat_messages.append(chat_message)

    def parse(self):
        """Parse `self.chat` into chat_messages and return self.

        Each line is 'user:message' (a say) or 'from>to:message' (a
        whisper).  NOTE(review): a line without ':' raises ValueError on
        the unpacking below — confirm inputs are always well-formed.
        """
        self.chat_messages = []
        for line in self.chat.split('\n'):
            users, message = line.split(':', 1)
            if '>' in users:
                user_from, user_to = users.split('>', 1)
                self.add_chat_message('whisper', message, user_from, user_to=user_to)
            else:
                self.add_chat_message('say', message, users)
        return self
class Command(Base):
    """A chat command: an Action (what to do) plus permission levels,
    cooldowns, cost and usage statistics.

    Persisted in tb_command; `data` (usage stats) and `examples` live in
    their own tables.  All permission/cooldown/cost checks happen in
    `run`.
    """
    __tablename__ = 'tb_command'

    id = Column(Integer, primary_key=True)
    level = Column(Integer, nullable=False, default=100)
    action_json = Column('action', TEXT)
    extra_extra_args = Column('extra_args', TEXT)
    command = Column(TEXT, nullable=False)
    description = Column(TEXT, nullable=True)
    delay_all = Column(Integer, nullable=False, default=5)
    delay_user = Column(Integer, nullable=False, default=15)
    enabled = Column(Boolean, nullable=False, default=True)
    cost = Column(Integer, nullable=False, default=0)
    can_execute_with_whisper = Column(Boolean)
    sub_only = Column(Boolean, nullable=False, default=False)
    mod_only = Column(Boolean, nullable=False, default=False)

    data = relationship('CommandData',
                        uselist=False,
                        cascade='',
                        lazy='joined')
    examples = relationship('CommandExample',
                            uselist=True,
                            cascade='',
                            lazy='noload')

    # Users at/above these levels bypass the corresponding restriction.
    MIN_WHISPER_LEVEL = 420
    BYPASS_DELAY_LEVEL = 1000
    BYPASS_SUB_ONLY_LEVEL = 500
    BYPASS_MOD_ONLY_LEVEL = 500

    DEFAULT_CD_ALL = 5
    DEFAULT_CD_USER = 15
    DEFAULT_LEVEL = 100

    def __init__(self, **options):
        """Initialize defaults, then apply `options` via `set`."""
        self.id = options.get('id', None)

        self.level = Command.DEFAULT_LEVEL
        self.action = None
        self.extra_args = {'command': self}
        self.delay_all = Command.DEFAULT_CD_ALL
        self.delay_user = Command.DEFAULT_CD_USER
        self.description = None
        self.enabled = True
        self.type = '?'  # XXX: What is this?
        self.cost = 0
        self.can_execute_with_whisper = False
        self.sub_only = False
        self.mod_only = False
        self.command = None

        # transient cooldown bookkeeping (not persisted)
        self.last_run = 0
        self.last_run_by_user = {}

        self.data = None

        self.set(**options)

    def set(self, **options):
        """Apply keyword options, keeping current values as defaults.

        Negative cooldowns/cost are clamped to 0; 'action' and
        'extra_args' are (re-)serialized to their JSON columns.
        """
        self.level = options.get('level', self.level)
        if 'action' in options:
            self.action_json = json.dumps(options['action'])
            self.action = ActionParser.parse(self.action_json)
        if 'extra_args' in options:
            self.extra_args = {'command': self}
            self.extra_args.update(options['extra_args'])
            self.extra_extra_args = json.dumps(options['extra_args'])
        self.command = options.get('command', self.command)
        self.description = options.get('description', self.description)
        self.delay_all = options.get('delay_all', self.delay_all)
        if self.delay_all < 0:
            self.delay_all = 0
        self.delay_user = options.get('delay_user', self.delay_user)
        if self.delay_user < 0:
            self.delay_user = 0
        self.enabled = options.get('enabled', self.enabled)
        self.cost = options.get('cost', self.cost)
        if self.cost < 0:
            self.cost = 0
        self.can_execute_with_whisper = options.get('can_execute_with_whisper', self.can_execute_with_whisper)
        self.sub_only = options.get('sub_only', self.sub_only)
        self.mod_only = options.get('mod_only', self.mod_only)
        self.examples = options.get('examples', self.examples)

    @orm.reconstructor
    def init_on_load(self):
        """Rebuild transient state after SQLAlchemy loads a row."""
        self.last_run = 0
        self.last_run_by_user = {}
        self.extra_args = {'command': self}
        self.action = ActionParser.parse(self.action_json)
        if self.extra_extra_args:
            try:
                self.extra_args.update(json.loads(self.extra_extra_args))
            except Exception:
                # narrowed from a bare `except:`; still logged and tolerated
                log.exception('Unhandled exception caught while loading Command extra arguments ({0})'.format(self.extra_extra_args))

    @classmethod
    def from_json(cls, json):
        """Build a Command from a dict with 'action' and optional 'level'.

        NOTE(review): the parameter shadows the `json` module inside this
        method; kept for backward compatibility.
        """
        cmd = cls()
        if 'level' in json:
            cmd.level = json['level']
        cmd.action = ActionParser.parse(data=json['action'])
        return cmd

    @classmethod
    def dispatch_command(cls, cb, **options):
        """Create a command dispatching to the named bot callback `cb`."""
        cmd = cls(**options)
        # json.dumps escapes cb properly; the previous string concatenation
        # produced invalid JSON whenever cb contained a quote or backslash.
        cmd.action = ActionParser.parse(json.dumps({'type': 'func', 'cb': cb}))
        return cmd

    @classmethod
    def raw_command(cls, cb, **options):
        """Create a command wrapping a raw python callable `cb`."""
        cmd = cls(**options)
        try:
            cmd.action = RawFuncAction(cb)
        except Exception:
            log.exception('Uncaught exception in Command.raw_command. catch the following exception manually!')
            cmd.enabled = False
        return cmd

    @classmethod
    def pajbot_command(cls, bot, method_name, level=1000, **options):
        """Create a high-level command bound to `bot.<method_name>`."""
        # NOTE(review): this import appears unused; kept in case importing
        # pajbot.bot has required side effects — TODO confirm and remove.
        from pajbot.bot import Bot
        cmd = cls()
        cmd.level = level
        cmd.description = options.get('description', None)
        cmd.can_execute_with_whisper = True
        try:
            cmd.action = RawFuncAction(getattr(bot, method_name))
        except Exception:
            # best effort: leave the action unset if the attribute is missing
            pass
        return cmd

    @classmethod
    def multiaction_command(cls, default=None, fallback=None, **options):
        """Create a command that routes to sub-commands via MultiAction."""
        from pajbot.models.action import MultiAction
        cmd = cls(**options)
        cmd.action = MultiAction.ready_built(options.get('commands'),
                                             default=default,
                                             fallback=fallback)
        return cmd

    def load_args(self, level, action):
        """Directly set the permission level and action."""
        self.level = level
        self.action = action

    def is_enabled(self):
        """True when the command is enabled and has an action to run."""
        return self.enabled == 1 and self.action is not None

    def run(self, bot, source, message, event=None, args=None, whisper=False):
        """Run the command for `source` if all checks pass.

        Returns False when a permission, cooldown or cost check fails (or
        when the action fails after points were due); otherwise None.
        """
        # Bug fix: the old signature used mutable defaults (event={}, args={});
        # args.update(...) below then mutated the shared default dict,
        # leaking extra_args between unrelated calls.
        if event is None:
            event = {}
        if args is None:
            args = {}
        if self.action is None:
            log.warning('This command is not available.')
            return False

        if source.level < self.level:
            # User does not have a high enough power level to run this command
            return False

        if whisper and self.can_execute_with_whisper is False and source.level < Command.MIN_WHISPER_LEVEL and source.moderator is False:
            # This user cannot execute the command through a whisper
            return False

        if self.sub_only and source.subscriber is False and source.level < Command.BYPASS_SUB_ONLY_LEVEL:
            # User is not a sub or a moderator, and cannot use the command.
            return False

        if self.mod_only and source.moderator is False and source.level < Command.BYPASS_MOD_ONLY_LEVEL:
            # User is not a twitch moderator, or a bot moderator
            return False

        # Mods and level>=500 users get a 5x shorter effective cooldown.
        cd_modifier = 0.2 if source.level >= 500 or source.moderator is True else 1.0

        cur_time = time.time()
        time_since_last_run = (cur_time - self.last_run) / cd_modifier

        if time_since_last_run < self.delay_all and source.level < Command.BYPASS_DELAY_LEVEL:
            log.debug('Command was run {0:.2f} seconds ago, waiting...'.format(time_since_last_run))
            return False

        time_since_last_run_user = (cur_time - self.last_run_by_user.get(source.username, 0)) / cd_modifier

        if time_since_last_run_user < self.delay_user and source.level < Command.BYPASS_DELAY_LEVEL:
            log.debug('{0} ran command {1:.2f} seconds ago, waiting...'.format(source.username, time_since_last_run_user))
            return False

        if self.cost > 0 and source.points < self.cost:
            # User does not have enough points to use the command
            return False

        args.update(self.extra_args)
        ret = self.action.run(bot, source, message, event, args)
        if ret is not False:
            if self.data is not None:
                self.data.num_uses += 1
            if self.cost > 0:
                # Only spend points if the action did not fail
                if not source.spend(self.cost):
                    # The user does not have enough points to spend!
                    log.warning('{0} used points he does not have.'.format(source.username))
                    return False
            self.last_run = cur_time
            self.last_run_by_user[source.username] = cur_time

    def autogenerate_examples(self):
        """Generate default usage examples for message commands with none.

        Only applies to persisted message commands without hand-written
        examples; otherwise the stored examples are returned unchanged.
        """
        if len(self.examples) == 0 and self.id is not None and self.action.type == 'message':
            examples = []

            if self.can_execute_with_whisper is True:
                example = CommandExample(self.id, 'Default usage through whisper')
                # Bug fix: use != (value comparison). `is not 'reply'` compared
                # string *identity*, which is unreliable and a SyntaxWarning on
                # CPython 3.8+.
                subtype = self.action.subtype if self.action.subtype != 'reply' else 'say'
                example.add_chat_message('whisper', self.main_alias, 'user', 'bot')
                if subtype == 'say' or subtype == 'me':
                    example.add_chat_message(subtype, self.action.response, 'bot')
                elif subtype == 'whisper':
                    example.add_chat_message(subtype, self.action.response, 'bot', 'user')
                examples.append(example)

            example = CommandExample(self.id, 'Default usage')
            subtype = self.action.subtype if self.action.subtype != 'reply' else 'say'
            example.add_chat_message('say', self.main_alias, 'user')
            if subtype == 'say' or subtype == 'me':
                example.add_chat_message(subtype, self.action.response, 'bot')
            elif subtype == 'whisper':
                example.add_chat_message(subtype, self.action.response, 'bot', 'user')
            examples.append(example)
            return examples
        return self.examples
class CommandManager(UserDict):
""" This class is responsible for compiling commands from multiple sources
into one easily accessible source.
The following sources are used:
- internal_commands = Commands that are added in source
- db_commands = Commands that are loaded from the database
- module_commands = Commands that are loaded from enabled modules
"""
def __init__(self, socket_manager=None, module_manager=None, bot=None):
UserDict.__init__(self)
self.db_session = DBManager.create_session()
self.internal_commands = {}
self.db_commands = {}
self.module_commands = {}
self.bot = bot
self.module_manager = module_manager
if socket_manager:
socket_manager.add_handler('module.update', self.on_module_reload)
socket_manager.add_handler('command.update', self.on_command_update)
socket_manager.add_handler('command.remove', self.on_command_remove)
    def on_module_reload(self, data, conn):
        """Socket handler: the module set changed — rebuild the commands."""
        self.rebuild()
def on_command_update(self, data, conn):
try:
command_id = int(data['command_id'])
except (KeyError, ValueError):
log.warn('No command ID found in on_command_update')
return False
command = find(lambda command: command.id == command_id, self.db_commands.values())
if command is not None:
self.remove_command_aliases(command)
self.load_by_id(command_id)
log.debug('Reloaded command with id {}'.format(command_id))
self.rebuild()
def on_command_remove(self, data, conn):
try:
command_id = int(data['command_id'])
except (KeyError, ValueError):
log.warn('No command ID found in on_command_update')
return False
command = find(lambda command: command.id == command_id, self.db_commands.values())
if command is None:
log.warn('Invalid ID sent to on_command_update')
return | |
classic object container class """
def __init__(self, annos=None, cls=HypothesisAnnotation):
self._index = {a.id:a for a in annos}
if annos is None:
annos = []
self._annos = annos
self._replies = {}
    def add(self, annos):
        """Append more annotations to the pool.

        NOTE(review): `self._index` (and `_replies`) are not updated here,
        so `byId` will not see the newly added annos — see TODO below.
        """
        # TODO update self._index etc.
        self._annos.extend(annos)
def byId(self, id_annotation):
try:
return self._index[id_annotation]
except KeyError as e:
pass
    def getParents(self, anno):
        """Yield ancestors of `anno`, nearest parent first, recording replies.

        NOTE: this is a generator; the early `return None` just ends the
        generator (callers always receive a generator object, never None).
        """
        # TODO consider auto retrieve on missing?
        if not anno.references:
            return None
        else:
            # go backward to get the direct parent first, slower for shareLink but ok
            for parent_id in anno.references[::-1]:
                parent = self.byId(parent_id)
                if parent is not None:
                    if parent.id not in self._replies:
                        self._replies[parent.id] = set()
                    # NOTE(review): this registers the *container* (self) as
                    # the reply rather than `anno`; looks like it should be
                    # self._replies[parent.id].add(anno) — confirm intent.
                    self._replies[parent.id].add(self)
                    yield parent
class iterclass(type):
    """Metaclass making a class iterable over its `objects` registry and
    adding cached, class-level views over the annotations."""
    def __iter__(self):
        yield from self.objects.values()  # don't sort unless required

    @property
    def uri_tags(self):
        """Dict mapping uri -> set of all (processed) tags for that uri."""
        if not hasattr(self, '_uri_tags'):
            uri_tags = defaultdict(set)
            for obj in self.objects.values():  # do not use self here because the
                # sorting in __iter__ above can be extremely slow
                uri_tags[obj.uri].update(obj._tags)
            self._uri_tags = dict(uri_tags)  # FIXME this will go stale
        return self._uri_tags

    @property
    def uris(self):
        """ uris that have been annotated with tags from this workflow """
        # NOTE(review): the guard checks `namespace` but the body uses
        # `prefix_ast` — presumably classes defining one define both;
        # confirm.
        if hasattr(self, 'namespace'):
            return set(uri for uri, tags in self.uri_tags.items()
                       if any(tag.startswith(self.prefix_ast)
                              for tag in tags))
        else:
            return set(self.uri_tags)

    @property
    def orphans(self):
        """Yield annotations whose ids were recorded as orphaned replies."""
        for id in self._orphanedReplies:
            yield self.byId(id)
# HypothesisHelper class customized to deal with replacing
# exact, text, and tags based on its replies
# also for augmenting the annotation with distinct fields
# using annotation-text:exact or something like that... (currently using PROTCUR:annotation-exact which is super akward)
# eg annotation-text:children to say exactly what the fields are when there needs to be more than one
# it is possible to figure most of them out from their content but not always
class HypothesisHelper(metaclass=iterclass): # a better HypothesisAnnotation
    """ A wrapper around sets of hypothes.is annotations
    with referential structure and pretty printing.
    XXX BIG WARNING HERE: you can only use ALL subclasses of HypothesisHelper
    XXX for a single group of annotations at a time otherwise things will go
    XXX completely haywire, transition to use AnnotationPool if at all possible
    """
    # NOTE: all of the below are CLASS-level mutable attributes; subclasses
    # share them unless they shadow them with copies (see warning above).
    objects = {} # TODO updates # NOTE: all child classes need their own copy of objects
    _tagIndex = {} # tag -> set of helper objects; built lazily by populateTags
    _replies = {} # anno id -> set of reply helper objects
    reprReplies = True # include replies when pretty printing
    _embedded = False
    _done_loading = False # flipped externally once the full anno set is loaded
    _annos = {} # anno id -> raw annotation; shared by ALL subclasses (lives here)
    _orphanedReplies = set() # ids of replies whose parents were never seen
    @classmethod
    def addAnno(cls, anno):
        """Wrap a single annotation; the one-element list becomes the annos list."""
        return cls(anno, [anno])
@classmethod
def byId(cls, id_):
try:
return next(v for v in cls.objects.values()).getObjectById(id_)
except StopIteration as e:
msg = (f'{cls.__name__}.objects has not been '
'populated with annotations yet!')
raise Warning(msg) from e
@classmethod
def byTags(cls, *tags):
if cls._done_loading: # TODO maybe better than done loading is 'consistent'?
if not cls._tagIndex:
log.debug('populating tags')
# FIXME extremely inefficient on update
# and we want this to update as replies appear
# not all at once...
# FIXME this does not update if new annos are added on the fly!
[obj.populateTags() for obj in cls.objects.values()]
return sorted(set.intersection(*(cls._tagIndex[tag] for tag in tags)))
else:
log.warning('attempted to search by tags before done loading')
def populateTags(self):
# FIXME need a way to evict old annos on update
for tag in self.tags:
if tag not in self._tagIndex:
self._tagIndex[tag] = {self}
else:
self._tagIndex[tag].add(self)
tset = self._tagIndex[tag]
if tset not in self._remove_self_from:
self._remove_self_from.append((tag, tset))
def depopulateTags(self):
""" remove object from the tag index on delete """
log.debug(f'Removing {self._repr} from {len(self._remove_self_from)} tag sets')
for tag, tset in self._remove_self_from:
tset.remove(self) # this should never error if everything is working correctly
if not tset: # remove unused tags from the index in depopulate
self._tagIndex.pop(tag)
@classmethod
def byIri(cls, iri, prefix=False):
norm_iri = norm(iri)
for obj in cls.objects.values():
norm_ouri = norm(obj.uri)
if norm_ouri == norm_iri:
yield obj
elif prefix and norm_ouri.startswith(norm_iri):
yield obj
    @classmethod
    def reset(cls, reset_annos_dict=False):
        """ explicitly reset the class state removing _annos_list and _annos

        Normally this should be called before the first time a program
        populates annotations so that any persistent state from another
        program is removed. Unfriendly if they need to coexist, but for that
        to actually work this whole thing needs a rewrite to have explicit
        representation of annotation groups.
        XXX WARNING this also resets ALL PARENT CLASSES
        """
        # rebind fresh containers on cls so any parent-class versions are shadowed
        cls.objects = {}
        cls._tagIndex = {}
        cls._replies = {}
        cls.reprReplies = True
        cls._embedded = False
        cls._done_loading = False
        if reset_annos_dict:
            # _annos/_index live on HypothesisHelper itself and are shared by
            # every subclass, hence the explicit class reference here
            HypothesisHelper._annos = {}
            HypothesisHelper._index = {}
        # DO NOT RESET THIS (under normal circumstances)
        # the risk of staleness is worth it since we have
        # already worked through most of the possible issues
        # around things going stale for that
        # FIXME yes, yet another reason to switch to explicit
        # representation of in memory annotation stores
        # NOTE if you are swapping out a set of annos for a
        # subset of those annos, then definitely reset this
        for a in ('_annos_list',):
            if hasattr(cls, a):
                # cannot just use delattr here because
                # the annos list might be on a parent class
                # which is bad ... because this will reset
                # ALL the parent classes as well, which is ...
                # a result of the bad design of hypothesis helper
                # NOTE we still have to delattr here because
                # _annos_list IS set per class, but may also be
                # set on parents >_< (screaming)
                try:
                    delattr(cls, a)
                except AttributeError: # LOL PYTHON
                    pass
        # FIXME WARNING EVIL SIDE EFFECTS ON OTHER CLASSES
        # YOU WERE WARNED ABOVE
        for pcls in cls.mro()[1:]:
            if hasattr(pcls, '_annos_list'):
                try:
                    delattr(pcls, '_annos_list')
                except AttributeError:
                    pass
    def __new__(cls, anno, annos):
        """Singleton-per-annotation-id constructor.

        Returns the existing wrapper for ``anno.id`` when present
        (reinitializing it if the annotation's ``updated`` stamp changed),
        handles annotations flagged ``deleted`` by evicting them from all
        shared stores, and keeps ``_annos_list``/``_annos`` in sync with the
        passed ``annos`` list.
        """
        if not hasattr(cls, '_annos_list'):
            cls._annos_list = annos
        elif cls._annos_list is not annos: # FIXME STOP implement a real annos (SyncList) class FFS
            # hack to fix O(n ** 2) behavior or worse behavior
            # when readding the same set of annos over and over
            # I'm pretty sure that there is pathalogical behavior
            # hiding here because of the expectation that cls._annos_list is annos
            # for sync purposes ... sigh bad design coming back to haunt me again
            # having subclasses of HypothesisHelper act as singletons seems like
            # a good idea but eventually it will bite you
            sal = set(cls._annos_list)
            sa = set(annos)
            added = sa - sal
            # NOTE(review): `removed` is computed but never used here —
            # removals appear to be handled via the `deleted` branch below
            removed = sal - sa
            if added:
                new = [a for a in annos if a in added]
                cls._annos_list.extend(new)
            annos = cls._annos_list
        if hasattr(anno, 'deleted'):
            matches = [a for a in annos if a.id == anno.id] # FIXME ick get rid of the list!
            for m in matches:
                cls._annos_list.remove(m)
            if anno.id in cls._annos: # it is set to True by convention
                cls._annos.pop(anno.id) # insurance
            #else:
                #print("It's ok we already deleted", anno.id)
            if anno.id in cls.objects:
                obj = cls.objects.pop(anno.id) # this is what we were missing
                # keep the tag index consistent with the removed object
                obj.depopulateTags()
                #print('Found the sneek.', anno.id)
            return # our job here is done
        if not cls._annos or len(cls._annos) < len(annos): # much faster (as in O(n**2) -> O(1)) to populate once at the start
            # we should not need `if not a.deleted` because a should not be in annos
            cls._annos.update({a.id:a for a in annos}) # FIXME this fails on deletes...
            if len(cls._annos) != len(annos):
                msg = ('it seems you have duplicate entries for annos: '
                       f'{len(cls._annos)} != {len(annos)}')
                logd.critical(msg)
        try:
            self = cls.objects[anno.id]
            if self._updated == anno.updated:
                #printD(f'{self.id} already exists')
                return self
            else:
                #printD(f'{self.id} already exists but something has changed')
                cls._annos[anno.id] = anno # update to the new anno version
                self.__init__(anno, annos) # just updated the underlying refs no worries
                return self
        except KeyError:
            #printD(f'{anno.id} doesnt exist')
            return super().__new__(cls)
def __init__(self, anno, annos):
self._recursion_blocker = False
self.annos = annos
self.id = anno.id # hardset this to prevent shenanigans
self.objects[self.id] = self
self._remove_self_from = []
if self._tagIndex:
# if tagIndex is not empty and we make it to __init__
# then this anno helper has not been put into the tag index
# FIXME stale annos in the tag index are likely an issue
self.populateTags()
if hasattr(self, '_uri_tags'): # keep uri_tags in sync
| |
import torch
import numpy as np
from torchvision import transforms
# Adapted from zeiss_umbrella.adversarial
# FGSM attack code from https://pytorch.org/tutorials/beginner/fgsm_tutorial.html
def fgsm_attack(image, epsilon, data_grad):
    """Single-step FGSM: move ``image`` by ``epsilon`` along the sign of the
    loss gradient, then clip the result back into the valid [0, 1] range.

    From https://pytorch.org/tutorials/beginner/fgsm_tutorial.html
    """
    step = epsilon * data_grad.sign()
    return torch.clamp(image + step, 0, 1)
def fgsm_k_image(data, target, model, criterion, device,
                 epsilon=1.0 / 255.0, alpha=None, steps=None, return_path=False, rand=False):
    """
    Generate adversarial examples using BIM (rand=False) or Madry-PGD (rand=True).
    :param data: a set of input images from which we generate the adversarial examples
    :param target: the corresponding target labels of the data
    :param model: model to be attacked
    :param criterion: loss for the generation of the adversarial examples
    :param device: cpu or cuda
    :param epsilon: maximum pixelwise amplitude of perturbation
    :param alpha: step size of each step (defaults to 2.5 * epsilon / steps)
    :param steps: number of steps (defaults to the Kurakin et al. schedule)
    :param return_path: when True, also return the list of intermediate images
    :param rand: starting from a random point within the linf box or not. Yes for Madry-PGD, no for BIM
    :return: a set of adversarial examples (and the path when return_path).
    """
    # from https://arxiv.org/pdf/1611.01236.pdf adapted for range 0 1 instead of 0 255
    if steps is None:
        steps = int(np.round(min(epsilon + 4. / 255, 1.25 * epsilon) * 255))
    # Alpha is set to be 2.5 * epsilon / steps as in http://arxiv.org/abs/1706.06083
    if alpha is None:
        alpha = 2.5 * epsilon / steps
    # Send the data and label to the device
    data, target = data.to(device), target.to(device)
    with torch.no_grad():
        if rand:
            # uniform start in [data - epsilon, data + epsilon]
            perturbed_image = data + (-2 * epsilon) * torch.rand_like(data) + epsilon
        else:
            # BUG FIX: clone instead of aliasing `data`; previously
            # requires_grad was flipped on the caller's tensor and the first
            # backward pass accumulated gradients into data.grad.
            perturbed_image = data.clone()
    # Set requires_grad attribute of tensor. Important for Attack
    perturbed_image.requires_grad = True
    path = [perturbed_image]
    for _ in range(steps):
        # Forward pass the data through the model and compute the loss
        output = model(perturbed_image)
        loss = criterion(output, target)
        # Zero all existing gradients, then backprop to get the input gradient
        model.zero_grad()
        loss.backward()
        with torch.no_grad():
            data_grad = perturbed_image.grad.data
            sign_data_grad = data_grad.sign()
            # one signed step ...
            perturbed_image = perturbed_image.detach() + alpha * sign_data_grad
            # ... then project back onto the l_inf ball around data
            perturbed_image = torch.min(torch.max(perturbed_image, data - epsilon), data + epsilon)
            # and clip to maintain the [0,1] range
            perturbed_image = torch.clamp(perturbed_image, 0, 1)
        if return_path:
            path.append(perturbed_image.detach())
        perturbed_image.requires_grad = True
    # Return the perturbed image
    if return_path:
        return perturbed_image.detach(), path
    else:
        return perturbed_image.detach()
def pgd(data, target, model, criterion, device,
        epsilon=1.0 / 255.0, alpha=None, steps=None, return_path=False):
    """Madry-style PGD attack: fgsm_k_image with a random start in the l_inf box."""
    return fgsm_k_image(data, target, model, criterion, device, rand=True,
                        epsilon=epsilon, alpha=alpha, steps=steps,
                        return_path=return_path)
def fgsm_image(data, target, model, criterion, device, epsilon, skip_wrong=False, **kwargs):
    """Single-step FGSM attack on a batch.

    :param data: input images
    :param target: true labels for ``data``
    :param model: model to be attacked
    :param criterion: loss used to compute the attack gradient
    :param device: cpu or cuda
    :param epsilon: pixelwise perturbation amplitude
    :param skip_wrong: when True, return None if the initial prediction is
        already wrong (NOTE(review): uses .item(), so this path only works
        for batch size 1 — confirm with callers)
    :return: the perturbed images; when ``return_path`` is passed truthy in
        kwargs, a (perturbed, None) pair for signature-compatibility with
        fgsm_k_image.
    """
    # Send the data and label to the device
    data, target = data.to(device), target.to(device)
    # Set requires_grad attribute of tensor. Important for Attack
    data.requires_grad = True
    # Forward pass the data through the model
    output = model(data)
    init_pred = torch.max(output, 1)[1]  # get the index of the max log-probability
    # If the initial prediction is wrong, dont bother attacking, just move on
    if skip_wrong and init_pred.item() != target.item():
        return None
    # Calculate the loss, zero stale gradients, backprop to the input
    loss = criterion(output, target)
    model.zero_grad()
    loss.backward()
    # Collect datagrad
    data_grad = data.grad.data
    # Call FGSM Attack
    perturbed_data = fgsm_attack(data, epsilon, data_grad)
    # so we don't collect unnecessary grads if we reuse this data...
    data.requires_grad = False
    # BUG FIX: honor the *value* of return_path; previously the mere presence
    # of the keyword (even return_path=False) triggered the tuple return.
    if kwargs.get("return_path"):
        return perturbed_data, None
    else:
        return perturbed_data
# Boundary attack
def orthogonal_perturbation(deltas, prev_samples, target_samples, device):
    """
    Calculate the orthogonal move for the decision boundary attack.
    :param device: cpu or cuda
    :param deltas: amplitudes of the move of size (batch_size)
    :param prev_samples: previous sample of size (batch_size, c, h, w)
    :param target_samples: target sample of size (batch_size, c, h, w)
    :return: the perturbation of size (batch_size, c, h, w)
    """
    prev_samples, target_samples = prev_samples.to(device), target_samples.to(device)
    # Generate perturbation
    perturb = torch.randn_like(prev_samples) / 255  # (batch_size, c, h, w)
    # Normalize (get_diff yields per-channel norms, broadcast back to h, w)
    # and times delta * d(o, o^{k-1})
    perturb *= 1. / get_diff(perturb, torch.zeros_like(perturb), device).unsqueeze(-1).unsqueeze(-1)
    perturb *= (deltas * torch.mean(get_diff(target_samples, prev_samples, device))).unsqueeze(-1).unsqueeze(
        -1).unsqueeze(-1)
    # Calculate unit vector pointing to target samples.
    diff = (target_samples - prev_samples).type(torch.float32)  # (batch_size, c, h, w)
    diff *= 1. / get_diff(target_samples, prev_samples, device).unsqueeze(-1).unsqueeze(-1)
    # Projection onto the equidistant disc
    # perturb -= torch.matmul(perturb, diff) * diff
    # Per-(batch, channel) Frobenius inner product <perturb, diff> computed as
    # the trace of perturb^T @ diff: tr(sqrt(A.t().matmul(B)))
    inner_prods = torch.einsum('...ii->...i', perturb.transpose(2, 3).matmul(diff)).sum(dim=2)
    # Subtract the projection of perturb onto diff, leaving the orthogonal part
    proj = inner_prods.unsqueeze(-1).unsqueeze(-1) * diff
    perturb -= proj
    # NOTE(review): the bounds below assume 3-channel images normalized with
    # ImageNet statistics — confirm this matches the caller's preprocessing.
    t = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    ones_normalized = t(torch.ones_like(perturb)[0]).repeat(perturb.shape[0], 1, 1, 1)
    zeros_normalized = t(torch.zeros_like(perturb)[0]).repeat(perturb.shape[0], 1, 1, 1)
    # Clip (in normalized space) so prev + perturb stays in the original
    # [0, 1] pixel range
    overflow = (prev_samples + perturb) - ones_normalized
    perturb -= overflow * (overflow > 0).type(torch.float32)
    underflow = (prev_samples + perturb) - zeros_normalized
    perturb -= underflow * (underflow < 0).type(torch.float32)
    return perturb.to(device)
def forward_perturbation(epsilons, prev_samples, target_samples, device):
    """
    Calculate the perturbation needed towards target sample
    :param device: cpu or cuda
    :param epsilons: collection of epsilon of each entry in the batch size = (batch)
    :param prev_samples: previous samples
    :param target_samples: target samples
    :return: the perturbation of size (batch_size, c, h, w)
    """
    prev_samples = prev_samples.to(device)
    target_samples = target_samples.to(device)
    # unit vector from prev toward target, scaled by the per-entry epsilon
    step = (target_samples - prev_samples).type(torch.float32)
    step = step * (1. / get_diff(target_samples, prev_samples, device).unsqueeze(-1).unsqueeze(-1))
    step = step * epsilons.unsqueeze(-1).unsqueeze(-1)
    return step.to(device)
def get_diff(samples_1, samples_2, device):
    """
    Per-channel Frobenius norm of the difference between two image batches.
    :param device: cpu or cuda
    :param samples_1: (batch_size, c, h, w) or (batch_size, h, w, c)
    :param samples_2: (batch_size, c, h, w) or (batch_size, h, w, c)
    :return: (batch_size, c) tensor of per-channel norms
    """
    samples_1 = samples_1.to(device)
    samples_2 = samples_2.to(device)
    # accept channels-last input by permuting to channels-first
    if samples_1.shape[1] != 3:
        samples_1 = samples_1.clone().permute(0, 3, 1, 2).to(device)
    if samples_2.shape[1] != 3:
        samples_2 = samples_2.clone().permute(0, 3, 1, 2).to(device)
    batch_size, num_channel = samples_1.shape[:2]
    delta = samples_1 - samples_2
    return torch.norm(delta.view(batch_size, num_channel, -1), dim=2).to(device)
def generate_target_samples(data, labels, fundus_dataset=None, target_indices=(4, 5, 300, 6), device='cuda'):
    """
    Generate target samples for decision boundary attack from the given data.

    For each input label we pick a sample with a different (non-zero) label
    from the batch as its target. When that is impossible (all labels equal,
    or only label 0 plus one other label), we instead pick a sample with a
    distinct label from ``fundus_dataset`` at ``target_indices`` and tile it
    across the batch.
    :param device: device for the returned labels
    :param data: input images
    :param labels: target labels of data
    :param fundus_dataset: fundus dataset object (required in the degenerate cases)
    :param target_indices: 5 positions in the fundus dataset where the labels are respectively 0 - 4
    :return: target samples along with their labels used for decision boundary attack
    """
    batch_size = data.shape[0]
    # If all the labels are the same, or only {0, x} present
    all_zero = (labels != labels[0]).bitwise_not().all()
    zero_and_the_other = len(torch.unique(labels)) == 2 and 0 in torch.unique(labels)
    if all_zero or zero_and_the_other:
        data_all = torch.Tensor()
        labels_all = []
        for index in target_indices:
            data_all = torch.cat((data_all, fundus_dataset[index][0].unsqueeze(0)))
            labels_all.append(torch.tensor(fundus_dataset[index][1]))
        labels_all = torch.stack(labels_all).to(device)
        # BUG FIX: torch.where(condition) returns a *tuple* of index tensors;
        # the previous code indexed the tuple itself, raising TypeError.
        if all_zero:
            result_indices = torch.where(labels_all != labels[0].to(device))[0]
        elif zero_and_the_other:
            result_indices = torch.where(labels_all != torch.unique(labels)[1].to(device))[0]
        result_indices = result_indices[torch.randperm(len(result_indices))]
        target_labels = labels_all[result_indices][0].repeat(batch_size, 1)
        target_samples = data_all[result_indices][0].repeat(batch_size, 1, 1, 1)
        return target_samples, target_labels.view(batch_size)
    else:
        result_indices = []
        for label in labels:
            # candidates within the batch: different label and not class 0
            distinct_indices = torch.where((labels != label) * (labels != 0))[0]
            result_indices.append(distinct_indices[torch.randperm(len(distinct_indices))][0])
        result_indices = torch.stack(result_indices)
        target_labels = labels[result_indices].clone()
        target_samples = data[result_indices].clone()
        return target_samples, target_labels
def generate_initial_samples(data, labels, model, device, max_iter=100, epsilon=3.0 / 255.0):
    """Draw random images until the model misclassifies them all (or max_iter).

    Returns the random samples restricted to the misclassified entries, plus
    the boolean mask of misclassified positions.
    """
    data, labels = data.to(device), labels.to(device)
    init_samples = data.detach().clone()
    still_correct = model(init_samples).argmax(dim=1) == labels
    n_iter = 0
    while still_correct.any() and n_iter < max_iter:
        # resample the whole batch uniformly at random and re-check
        init_samples = torch.rand_like(init_samples)
        still_correct = model(init_samples).argmax(dim=1) == labels
        n_iter += 1
    adversarial_mask = still_correct.bitwise_not()
    print("generate {} initial samples".format(adversarial_mask.type(torch.int).sum()))
    return init_samples[adversarial_mask], adversarial_mask
def move_to_boundary(model, epsilons, adversarial_samples, target_samples, init_preds, d_step_max, n_calls, device):
"""
Move first step to the boundary: first coincide with the target sample and gradually reduce step size
wrong/correct_indices is used for navigating in the global tensor (tensor with size | |
strategy that determines how
selections' data queries are resolved when applied in a filter transform,
conditional encoding rule, or scale domain.
translate : anyOf(string, boolean)
When truthy, allows a user to interactively move an interval selection
back-and-forth. Can be `true`, `false` (to disable panning), or a [Vega
event stream definition](https://vega.github.io/vega/docs/event-streams/)
which must include a start and end event to trigger continuous panning.
__Default value:__ `true`, which corresponds to `[mousedown, window:mouseup]
> window:mousemove!` which corresponds to clicks and dragging within an
interval selection to reposition it.
zoom : anyOf(string, boolean)
When truthy, allows a user to interactively resize an interval selection.
Can be `true`, `false` (to disable zooming), or a [Vega event stream
definition](https://vega.github.io/vega/docs/event-streams/). Currently,
only `wheel` events are supported. __Default value:__ `true`, which
corresponds to `wheel!`.
"""
_schema = {'$ref': '#/definitions/IntervalSelectionConfig'}
_rootschema = Root._schema
def __init__(self, bind=Undefined, empty=Undefined, encodings=Undefined,
fields=Undefined, mark=Undefined, on=Undefined, resolve=Undefined,
translate=Undefined, zoom=Undefined, **kwds):
super(IntervalSelectionConfig, self).__init__(bind=bind, empty=empty,
encodings=encodings,
fields=fields, mark=mark,
on=on, resolve=resolve,
translate=translate,
zoom=zoom, **kwds)
class JsonDataFormat(SchemaBase):
    """JsonDataFormat schema wrapper

    Attributes
    ----------
    parse : anyOf(string, mapping)
        If set to auto (the default), automatic type inference determines the
        data types. Alternatively, pass a parsing directive object mapping
        each field name to a desired type: one of ``"number"``, ``"boolean"``
        or ``"date"`` (e.g. ``"parse": {"modified_on": "date"}``). ``"date"``
        parses via JavaScript's
        [`Date.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/parse);
        specific date formats can be given with the
        [d3-time-format syntax](https://github.com/d3/d3-time-format#locale_format)
        (e.g. ``{foo: 'date:"%m%d%Y"'}``), and UTC parsing similarly with
        ``{foo: 'utc:"%m%d%Y"'}``. See more about [UTC time](timeunit.html#utc).
    property : string
        The JSON property containing the desired data, for when the loaded
        JSON file has surrounding structure or meta-data. For example
        ``"property": "values.features"`` is equivalent to retrieving
        ``json.values.features`` from the loaded JSON object.
    type : string
        Type of input data: ``"json"``, ``"csv"``, ``"tsv"``. Defaults to the
        extension of the file URL, or ``"json"`` when none is detected.
    """
    _schema = {'$ref': '#/definitions/JsonDataFormat'}
    _rootschema = Root._schema

    def __init__(self, parse=Undefined, property=Undefined, type=Undefined, **kwds):
        super().__init__(parse=parse, property=property, type=type, **kwds)
class Legend(SchemaBase):
    """
    Properties of a legend or boolean flag for determining whether to show it.

    Attributes
    ----------
    entryPadding : float
        Padding (in pixels) between legend entries in a symbol legend.
    format : string
        The label formatting pattern: D3's [number format
        pattern](https://github.com/d3/d3-format#locale_format) for
        quantitative fields, D3's [time format
        pattern](https://github.com/d3/d3-time-format#locale_format) for time
        fields. __Default value:__ derived from the
        [numberFormat](config.html#format) / [timeFormat](config.html#format)
        config.
    offset : float
        The offset, in pixels, displacing the legend from the edge of the
        enclosing group or data rectangle. __Default value:__ `0`
    orient : LegendOrient
        Legend position within the scene: one of "left", "right", "top-left",
        "top-right", "bottom-left", "bottom-right", "none".
        __Default value:__ `"right"`
    padding : float
        The padding, in pixels, between the legend and axis.
    tickCount : float
        The desired number of tick values for quantitative legends.
    title : anyOf(string, None)
        A title for the field; `null` removes it. __Default value:__ the
        field's name decorated with any applied `aggregate`, `bin` or
        `timeUnit` function (e.g., `"Sum of Profit"`, `"Profit (binned)"`,
        `"Transaction Date (year-month)"`). Customizable via the `fieldTitle`
        property in the [config](config.html) or the `fieldTitle` function in
        the `compile` function's options](compile.html#field-title).
    type : string
        `"symbol"` creates a discrete legend, `"gradient"` a continuous color
        gradient. __Default value:__ `"gradient"` for non-binned quantitative
        and temporal fields; `"symbol"` otherwise.
    values : anyOf(list, list, list)
        Explicitly set the visible legend values.
    zindex : float
        A non-positive integer indicating z-index of the legend: `0` draws it
        behind all chart elements, `1` in front.
    """
    _schema = {'$ref': '#/definitions/Legend'}
    _rootschema = Root._schema

    def __init__(self, entryPadding=Undefined, format=Undefined, offset=Undefined,
                 orient=Undefined, padding=Undefined, tickCount=Undefined,
                 title=Undefined, type=Undefined, values=Undefined,
                 zindex=Undefined, **kwds):
        super().__init__(entryPadding=entryPadding, format=format, offset=offset,
                         orient=orient, padding=padding, tickCount=tickCount,
                         title=title, type=type, values=values, zindex=zindex,
                         **kwds)
class LegendConfig(SchemaBase):
"""LegendConfig schema wrapper
Attributes
----------
cornerRadius : float
Corner radius for the full legend.
entryPadding : float
Padding (in pixels) between legend entries in a symbol legend.
fillColor : string
Background fill color for the full legend.
gradientHeight : float
The height of the gradient, in pixels.
gradientLabelBaseline : string
Text baseline for color ramp gradient labels.
gradientLabelLimit : float
The maximum allowed length in pixels of color ramp gradient labels.
gradientLabelOffset : float
Vertical offset in pixels for color ramp gradient labels.
gradientStrokeColor : string
The color of the gradient stroke, can be in hex color code or regular color
name.
gradientStrokeWidth : float
The width of the gradient stroke, in pixels.
gradientWidth : float
The width of the gradient, in pixels.
labelAlign : string
The alignment of the legend label, can be left, middle or right.
labelBaseline : string
The position of the baseline of legend label, can be top, middle or bottom.
labelColor : string
The color of the legend label, can be in hex color code or regular color
name.
labelFont : string
The font of the legend label.
labelFontSize : float
The font size of legend label. __Default value:__ `10`.
labelLimit : float
Maximum allowed pixel width of axis tick labels.
labelOffset : float
The offset of the legend label.
offset : float
The offset, in pixels, by which to displace the legend from the edge of the
enclosing group or data rectangle. __Default value:__ `0`
orient : LegendOrient
The orientation of the legend, which determines how the legend is positioned
within the scene. One of "left", "right", "top-left", "top-right",
"bottom-left", "bottom-right", "none". __Default value:__ `"right"`
padding : float
The padding, in pixels, between the legend and axis.
shortTimeLabels : boolean
Whether month names and weekday names should be abbreviated. __Default
value:__ `false`
strokeColor : string
Border stroke color for the full legend.
strokeDash : list
Border stroke dash pattern for the full legend.
strokeWidth : float
Border stroke width for the full legend.
symbolColor : string
The color of the legend symbol,
symbolSize : float
The size of the legend symbol, in pixels.
symbolStrokeWidth : float
The width of the symbol's stroke.
symbolType : string
Default shape type (such as "circle") for legend symbols.
titleAlign : string
Horizontal text alignment for legend titles.
titleBaseline : string
Vertical text baseline for legend titles.
titleColor : string
The color of the legend title, can be in hex color code or regular color
name.
titleFont : string
The font of the legend title.
titleFontSize : float
The font size of the legend title.
titleFontWeight : anyOf(string, float)
The font weight of the legend title.
titleLimit : float
Maximum allowed pixel width of axis titles.
titlePadding : float
The padding, in pixels, between title and legend.
"""
_schema = {'$ref': '#/definitions/LegendConfig'}
_rootschema = Root._schema
def __init__(self, cornerRadius=Undefined, entryPadding=Undefined,
fillColor=Undefined, gradientHeight=Undefined,
gradientLabelBaseline=Undefined, gradientLabelLimit=Undefined,
gradientLabelOffset=Undefined, gradientStrokeColor=Undefined,
gradientStrokeWidth=Undefined, gradientWidth=Undefined,
labelAlign=Undefined, labelBaseline=Undefined,
labelColor=Undefined, labelFont=Undefined, labelFontSize=Undefined,
labelLimit=Undefined, labelOffset=Undefined, offset=Undefined,
orient=Undefined, padding=Undefined, shortTimeLabels=Undefined,
strokeColor=Undefined, strokeDash=Undefined, strokeWidth=Undefined,
symbolColor=Undefined, symbolSize=Undefined,
symbolStrokeWidth=Undefined, symbolType=Undefined,
titleAlign=Undefined, titleBaseline=Undefined,
titleColor=Undefined, titleFont=Undefined, titleFontSize=Undefined,
titleFontWeight=Undefined, titleLimit=Undefined,
titlePadding=Undefined, **kwds):
super(LegendConfig, self).__init__(cornerRadius=cornerRadius,
entryPadding=entryPadding,
fillColor=fillColor,
gradientHeight=gradientHeight,
gradientLabelBaseline=gradientLabelBaseline,
gradientLabelLimit=gradientLabelLimit,
gradientLabelOffset=gradientLabelOffset,
gradientStrokeColor=gradientStrokeColor,
gradientStrokeWidth=gradientStrokeWidth,
gradientWidth=gradientWidth,
labelAlign=labelAlign,
labelBaseline=labelBaseline,
labelColor=labelColor,
labelFont=labelFont,
labelFontSize=labelFontSize,
labelLimit=labelLimit,
labelOffset=labelOffset, offset=offset,
orient=orient, padding=padding,
shortTimeLabels=shortTimeLabels,
strokeColor=strokeColor,
strokeDash=strokeDash,
strokeWidth=strokeWidth,
| |
13.5289, 0.0811, 17.0997, 0.0641, 1.24e-07, 1.24e-07, 1.24e-07, nan ],
[ 750, 12.8046, 0.0880, 19.4440, 0.0579, 14.2746, 0.0789, 16.3491, 0.0689, 1.22e-07, 1.22e-07, 1.63e-07, nan ],
[ 760, 12.7006, 0.0911, 19.6422, 0.0589, 14.3116, 0.0808, 16.2807, 0.0710, 1.20e-07, 1.20e-07, 1.20e-07, nan ],
[ 770, 13.0368, 0.0911, 19.7622, 0.0601, 14.3105, 0.0830, 16.2747, 0.0730, 1.19e-07, 1.19e-07, 1.59e-07, nan ],
[ 780, 13.1030, 0.0930, 20.2785, 0.0601, 14.6423, 0.0832, 16.0193, 0.0761, 1.57e-07, 1.17e-07, 1.57e-07, nan ],
[ 790, 13.4409, 0.0930, 21.2225, 0.0589, 15.2382, 0.0820, 16.2290, 0.0770, 1.55e-07, 1.55e-07, 1.16e-07, nan ],
[ 800, 13.9260, 0.0920, 21.3310, 0.0601, 15.4466, 0.0830, 16.2399, 0.0789, 1.53e-07, 1.14e-07, 1.53e-07, nan ],
[ 810, 13.7079, 0.0958, 21.1945, 0.0620, 15.2647, 0.0861, 17.0606, 0.0770, 1.13e-07, 1.13e-07, 1.51e-07, nan ],
[ 820, 13.8756, 0.0970, 22.0601, 0.0610, 15.6437, 0.0861, 17.2703, 0.0780, 1.49e-07, 1.12e-07, 1.49e-07, nan ],
[ 830, 14.3570, 0.0961, 22.6011, 0.0610, 16.2525, 0.0849, 16.8194, 0.0820, 1.47e-07, 1.10e-07, 1.10e-07, nan ],
[ 840, 14.1096, 0.1001, 22.4472, 0.0629, 16.0163, 0.0882, 16.9801, 0.0832, 1.45e-07, 1.45e-07, 1.09e-07, nan ],
[ 850, 14.0461, 0.1030, 22.8977, 0.0632, 16.4442, 0.0880, 17.2384, 0.0839, 1.08e-07, 1.44e-07, 1.08e-07, nan ],
[ 860, 14.6842, 0.1009, 23.8901, 0.0620, 16.8331, 0.0880, 17.0176, 0.0870, 1.42e-07, 1.42e-07, 1.06e-07, nan ],
[ 870, 14.5794, 0.1040, 23.6306, 0.0641, 16.9964, 0.0892, 17.4154, 0.0870, 1.40e-07, 1.40e-07, 1.40e-07, nan ],
[ 880, 14.8822, 0.1042, 24.2669, 0.0639, 17.2507, 0.0899, 17.6247, 0.0880, 1.39e-07, 1.39e-07, 1.39e-07, nan ],
[ 890, 15.2571, 0.1040, 24.8212, 0.0639, 17.8340, 0.0889, 17.5981, 0.0901, 1.03e-07, 1.37e-07, 1.03e-07, nan ],
[ 900, 14.7556, 0.1099, 24.5571, 0.0660, 17.6226, 0.0920, 18.0433, 0.0899, 1.36e-07, 1.36e-07, 1.36e-07, nan ],
[ 1000, 17.2778, 0.1159, 28.5612, 0.0701, 19.2592, 0.1040, 18.7016, 0.1070, 1.22e-07, 1.22e-07, 1.53e-07, nan ],
[ 1100, 18.3383, 0.1321, 31.4534, 0.0770, 20.3596, 0.1190, 20.6913, 0.1171, 1.66e-07, 1.39e-07, 1.39e-07, nan ],
[ 1200, 20.7370, 0.1390, 36.5247, 0.0789, 23.7985, 0.1211, 18.5709, 0.1552, 1.78e-07, 1.27e-07, 1.27e-07, nan ],
[ 1300, 22.0991, 0.1531, 39.7413, 0.0851, 26.0324, 0.1299, 24.5037, 0.1380, 1.64e-07, 1.17e-07, 1.64e-07, nan ],
[ 1400, 24.5207, 0.1600, 44.1110, 0.0889, 29.0696, 0.1349, 23.7766, 0.1650, 1.74e-07, 1.74e-07, 1.31e-07, nan ],
[ 1500, 26.6389, 0.1690, 48.9299, 0.0920, 29.8372, 0.1509, 24.5924, 0.1831, 2.03e-07, 1.63e-07, 1.63e-07, nan ],
[ 1600, 28.3113, 0.1810, 53.3207, 0.0961, 31.8345, 0.1609, 26.2693, 0.1950, 1.91e-07, 1.53e-07, 1.53e-07, nan ],
[ 1700, 29.9473, 0.1931, 54.6336, 0.1059, 32.5165, 0.1779, 27.4094, 0.2110, 1.80e-07, 2.15e-07, 1.80e-07, nan ],
[ 1800, 31.7689, 0.2041, 58.4821, 0.1109, 36.0188, 0.1800, 26.1483, 0.2480, 1.70e-07, 1.70e-07, 1.70e-07, nan ],
[ 1900, 33.9292, 0.2129, 64.4656, 0.1121, 38.0161, 0.1900, 24.9784, 0.2892, 2.25e-07, 1.61e-07, 1.61e-07, nan ],
[ 2000, 35.5627, 0.2251, 62.9854, 0.1271, 37.0543, 0.2160, 27.0300, 0.2961, 2.14e-07, 1.53e-07, 1.53e-07, nan ],
[ 2100, 37.9994, 0.2322, 65.8565, 0.1340, 24.3817, 0.3619, 25.7381, 0.3428, 2.03e-07, 1.74e-07, 1.74e-07, nan ],
[ 2200, 38.4289, 0.2520, 68.2678, 0.1419, 25.3554, 0.3819, 27.0435, 0.3581, 2.50e-07, 1.66e-07, 1.39e-07, nan ],
[ 2300, 41.0305, 0.2580, 71.4896, 0.1481, 25.8863, 0.4089, 29.6560, 0.3569, 2.92e-07, 1.86e-07, 1.86e-07, nan ],
[ 2400, 41.7431, 0.2761, 74.2527, 0.1552, 27.1717, 0.4241, 30.6522, 0.3760, 2.29e-07, 1.78e-07, 1.78e-07, nan ],
[ 2500, 43.5629, 0.2871, 79.7109, 0.1569, 27.9732, 0.4470, 4.3862, 2.8510, 2.69e-07, 1.71e-07, 1.46e-07, nan ],
[ 2600, 45.9715, 0.2942, 80.9255, 0.1671, 29.0321, 0.4659, 31.0163, 0.4361, 2.11e-07, 1.88e-07, 1.88e-07, nan ],
[ 2700, 47.2034, 0.3090, 82.8938, 0.1760, 30.1358, 0.4840, 34.0811, 0.4280, 2.26e-07, 1.81e-07, 1.58e-07, nan ],
[ 2800, 49.1705, 0.3190, 88.5467, 0.1771, 31.4334, 0.4990, 32.2027, 0.4871, 2.62e-07, 1.74e-07, 1.74e-07, nan ],
[ 2900, 50.8448, 0.3309, 90.9440, 0.1850, 32.4770, 0.5181, 33.5261, 0.5019, 2.53e-07, 2.10e-07, 1.68e-07, nan ],
[ 3000, 52.5192, 0.3428, 94.2854, 0.1910, 33.5955, 0.5360, 34.7710, 0.5178, 3.26e-07, 2.44e-07, 2.03e-07, nan ],
[ 3100, 54.1575, 0.3550, 97.6278, 0.1969, 34.4470, 0.5581, 35.2758, 0.5450, 2.36e-07, 2.36e-07, 1.97e-07, nan ],
[ 3200, 55.6517, 0.3681, 100.9708, 0.2029, 35.5655, 0.5760, 35.8773, 0.5710, 2.67e-07, 1.91e-07, 1.53e-07, nan ],
[ 3300, 57.4715, 0.3791, 103.7226, 0.2100, 36.1184, 0.6032, 36.9211, 0.5901, 2.96e-07, 2.22e-07, 1.85e-07, nan ],
[ 3400, 59.7664, 0.3870, 104.6395, 0.2210, 37.8466, 0.6111, 31.9924, 0.7229, 2.87e-07, 2.15e-07, 2.15e-07, nan ],
[ 3500, 60.9667, 0.4020, 107.8592, 0.2272, 38.4692, 0.6371, 32.5387, 0.7532, 3.14e-07, 2.09e-07, 1.74e-07, nan ],
[ 3600, 62.1764, 0.4170, 111.3066, 0.2329, 39.4724, 0.6568, 35.7132, 0.7260, 2.71e-07, 2.37e-07, 2.03e-07, nan ],
])
# numactl --interleave=all ./testing_ssymv -N 100 -N 1000 --range 10:90:1 --range 100:900:10 --range 1000:9000:100 --range 10000:20000:2000
ssymv_L = array([
[ 10, 0.0054, 0.0410, 0.0065, 0.0339, 0.0082, 0.0269, 0.1025, 0.0021, 4.77e-08, 4.77e-08, 4.77e-08, nan ],
[ 11, 0.0069, 0.0381, 0.0080, 0.0331, 0.0105, 0.0250, 0.0923, 0.0029, 4.33e-08, 4.33e-08, 4.33e-08, nan ],
[ 12, 0.0082, 0.0379, 0.0097, 0.0322, 0.0120, 0.0260, 0.1091, 0.0029, 3.97e-08, 1.99e-08, 1.99e-08, nan ],
[ 13, 0.0096, 0.0379, 0.0108, 0.0339, 0.0135, 0.0269, 0.1272, 0.0029, 7.34e-08, 7.34e-08, 7.34e-08, nan ],
[ 14, 0.0108, 0.0389, 0.0128, 0.0329, 0.0169, 0.0248, 0.1468, 0.0029, 6.81e-08, 6.81e-08, 6.81e-08, nan ],
[ 15, 0.0120, 0.0401, 0.0141, 0.0341, 0.0192, 0.0250, 0.1678, 0.0029, 3.18e-08, 4.77e-08, 6.36e-08, nan ],
[ 16, 0.0137, 0.0398, 0.0161, 0.0339, 0.0209, 0.0260, 0.2852, 0.0019, 5.96e-08, 5.96e-08, 5.96e-08, nan ],
[ 17, 0.0161, 0.0379, 0.0181, 0.0339, 0.0227, 0.0269, 0.2139, 0.0029, 5.61e-08, 2.80e-08, 5.61e-08, nan ],
[ 18, 0.0185, 0.0370, 0.0202, 0.0339, 0.0254, 0.0269, 0.2207, 0.0031, 7.95e-08, 2.65e-08, 5.30e-08, nan ],
[ 19, 0.0206, 0.0370, 0.0229, 0.0331, 0.0304, 0.0250, 0.2656, 0.0029, 5.02e-08, 5.02e-08, 5.02e-08, nan ],
[ 20, 0.0233, 0.0360, 0.0263, 0.0319, 0.0323, 0.0260, 0.2710, 0.0031, 4.77e-08, 4.77e-08, 4.77e-08, nan ],
[ 21, 0.0248, 0.0372, 0.0273, 0.0339, 0.0356, 0.0260, 0.3230, 0.0029, 9.08e-08, 6.81e-08, 4.54e-08, nan ],
[ 22, 0.0281, 0.0360, 0.0305, 0.0331, 0.0389, 0.0260, 0.3537, 0.0029, 6.50e-08, 4.33e-08, 6.50e-08, nan ],
[ 23, 0.0299, 0.0370, 0.0336, 0.0329, 0.0425, 0.0260, 0.3562, 0.0031, 4.15e-08, 8.29e-08, 4.15e-08, nan ],
[ 24, 0.0333, 0.0360, 0.0333, 0.0360, 0.0445, 0.0269, 0.2961, 0.0041, 7.95e-08, 3.97e-08, 3.97e-08, nan ],
[ 25, 0.0350, 0.0372, 0.0361, 0.0360, 0.0483, 0.0269, 0.2596, 0.0050, 7.63e-08, 7.63e-08, 5.72e-08, nan ],
[ 26, 0.0370, 0.0379, 0.0401, 0.0350, 0.0521, 0.0269, 0.3464, 0.0041, 7.34e-08, 7.34e-08, 7.34e-08, nan ],
[ 27, 0.0369, 0.0410, 0.0420, 0.0360, 0.0542, 0.0279, 0.3020, 0.0050, 7.06e-08, 7.06e-08, 7.06e-08, nan ],
[ 28, 0.0428, 0.0379, 0.0439, 0.0370, 0.0577, 0.0281, 0.4257, 0.0038, 6.81e-08, 6.81e-08, 1.02e-07, nan ],
[ 29, 0.0445, 0.0391, 0.0483, 0.0360, 0.0646, 0.0269, 0.2919, 0.0060, 6.58e-08, 6.58e-08, 9.87e-08, nan ],
[ 30, 0.0464, 0.0401, 0.0531, 0.0350, 0.0661, 0.0281, 0.3715, 0.0050, 9.54e-08, 9.54e-08, 1.27e-07, nan ],
[ 31, 0.0484, 0.0410, 0.0566, 0.0350, 0.0711, 0.0279, 0.3329, 0.0060, 9.23e-08, 6.15e-08, 9.23e-08, nan ],
[ 32, 0.0572, 0.0370, 0.0637, 0.0331, 0.0852, 0.0248, 0.4218, 0.0050, 5.96e-08, 5.96e-08, 5.96e-08, nan ],
[ 33, 0.0607, 0.0370, 0.0640, 0.0350, 0.0771, 0.0291, 0.3765, 0.0060, 8.67e-08, 1.16e-07, 5.78e-08, nan ],
[ 34, 0.0644, 0.0370, 0.0768, 0.0310, 0.1040, 0.0229, 0.5872, 0.0041, 5.61e-08, 8.41e-08, 5.61e-08, nan ],
[ 35, 0.0789, 0.0319, 0.0846, 0.0298, 0.1046, 0.0241, 0.3645, 0.0069, 5.45e-08, 5.45e-08, 5.45e-08, nan ],
[ 36, 0.0721, 0.0370, 0.0834, 0.0319, 0.1106, 0.0241, 0.3725, 0.0072, 7.95e-08, 5.30e-08, 5.30e-08, nan ],
[ 37, 0.0849, 0.0331, 0.0936, 0.0300, 0.1168, 0.0241, 0.6938, 0.0041, 1.03e-07, 1.03e-07, 1.03e-07, nan ],
[ 38, 0.0928, 0.0319, 0.0987, 0.0300, 0.1231, 0.0241, 0.7313, 0.0041, 7.53e-08, 5.02e-08, 7.53e-08, nan ],
[ 39, 0.0977, 0.0319, 0.0948, 0.0329, 0.1296, 0.0241, 0.8179, 0.0038, 4.89e-08, 4.89e-08, 4.89e-08, nan ],
[ 40, 0.1092, 0.0300, 0.1137, 0.0288, 0.1495, 0.0219, 1.0583, 0.0031, 7.15e-08, 9.54e-08, 7.15e-08, nan ],
[ 41, 0.1078, 0.0319, 0.1184, 0.0291, 0.1489, 0.0231, 1.1112, 0.0031, 6.98e-08, 6.98e-08, 6.98e-08, nan ],
[ 42, 0.1156, 0.0312, 0.1252, 0.0288, 0.1578, 0.0229, 1.2625, 0.0029, 9.08e-08, 9.08e-08, 9.08e-08, nan ],
[ 43, 0.1184, 0.0319, 0.1345, 0.0281, 0.1653, 0.0229, 0.9336, 0.0041, 8.87e-08, 8.87e-08, 8.87e-08, nan ],
[ 44, 0.1278, 0.0310, 0.1240, 0.0319, 0.1730, 0.0229, 0.9770, 0.0041, 8.67e-08, 8.67e-08, 8.67e-08, nan ],
[ 45, 0.1336, 0.0310, 0.1378, 0.0300, 0.1887, 0.0219, 1.0214, 0.0041, 8.48e-08, 8.48e-08, 8.48e-08, nan ],
[ 46, 0.1268, 0.0341, 0.1395, 0.0310, 0.1814, 0.0238, 0.8636, 0.0050, 8.29e-08, 1.24e-07, 8.29e-08, nan ],
[ 47, 0.1412, 0.0319, 0.1456, 0.0310, 0.1874, 0.0241, 1.1828, 0.0038, 8.12e-08, 8.12e-08, 8.12e-08, nan ],
[ 48, 0.1380, 0.0341, 0.1518, 0.0310, 0.1973, 0.0238, 1.1606, 0.0041, 7.95e-08, 3.97e-08, 7.95e-08, nan ],
[ 49, 0.1534, 0.0319, 0.1581, 0.0310, 0.1957, 0.0250, 1.2089, 0.0041, 7.79e-08, 7.79e-08, 7.79e-08, nan ],
[ 50, 0.1550, 0.0329, 0.1645, 0.0310, 0.2037, 0.0250, 1.2583, 0.0041, 3.81e-08, 5.72e-08, 5.72e-08, nan ],
[ 51, 0.1660, 0.0319, 0.1711, 0.0310, 0.2119, 0.0250, 1.3086, 0.0041, 7.48e-08, 5.61e-08, 7.48e-08, nan ],
[ 52, 0.1725, 0.0319, 0.1778, 0.0310, 0.2202, 0.0250, 1.4449, 0.0038, 1.10e-07, 1.10e-07, 1.10e-07, nan ],
[ 53, 0.1847, 0.0310, 0.1847, 0.0310, 0.2308, 0.0248, 1.1432, 0.0050, 9.00e-08, 1.08e-07, 7.20e-08, nan ],
[ 54, 0.1755, 0.0339, 0.1977, 0.0300, 0.2373, 0.0250, 1.1864, 0.0050, 1.06e-07, 1.06e-07, 1.06e-07, nan ],
[ 55, 0.1872, 0.0329, 0.2051, 0.0300, 0.2461, 0.0250, 1.5198, 0.0041, 6.94e-08, 6.94e-08, 6.94e-08, nan ],
[ 56, 0.1998, 0.0319, 0.2125, 0.0300, 0.2550, 0.0250, 1.5751, | |
# Copyright (C) 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import stat
from rpython.rlib.rarithmetic import intmask
from rpython.rtyper.lltypesystem.lltype import nullptr
from typhon import log, rsodium, ruv
from typhon.atoms import getAtom
from typhon.autohelp import autohelp, method
from typhon.errors import userError
from typhon.futures import FutureCtx, resolve, Ok, Break, Continue, LOOP_BREAK, LOOP_CONTINUE, OK, smash
from typhon.macros import macros, io
from typhon.objects.constants import NullObject
from typhon.objects.collections.lists import ConstList
from typhon.objects.data import BytesObject, StrObject, unwrapStr
from typhon.objects.refs import makePromise
from typhon.objects.root import Object, runnable
from typhon.vats import currentVat
# Atoms (verb/arity message selectors); presumably used for stream/fount
# message dispatch elsewhere in this module -- none are referenced in the
# code visible here. TODO confirm against the rest of the file.
ABORTFLOW_0 = getAtom(u"abortFlow", 0)
FLOWABORTED_1 = getAtom(u"flowAborted", 1)
FLOWSTOPPED_1 = getAtom(u"flowStopped", 1)
RECEIVE_1 = getAtom(u"receive", 1)
RUN_1 = getAtom(u"run", 1)
@autohelp
class FileUnpauser(Object):
    """
    A one-shot handle that releases a pause placed on a file fount.
    """
    def __init__(self, fount):
        self.fount = fount
    @method("Void")
    def unpause(self):
        if self.fount is None:
            # Already released; unpausing twice is a no-op.
            return
        self.fount.unpause()
        # Drop the reference so the fount can be GC'd if necessary.
        self.fount = None
def readLoopCore(state, data):
    """
    One step of the chunked-read loop.

    A non-empty chunk is accumulated and the loop continues; an empty
    chunk is the end-of-file signal, at which point every piece read so
    far is joined and returned via Break.
    """
    if data != "":
        state.pieces.append(data)
        state.pos += len(data)
        return Continue()
    return Break("".join(state.pieces))
class _State1(FutureCtx):
    """Loop state threaded through the chunked-read future chain."""
    def __init__(self, vat, future, buf, pieces, pos, outerState, k):
        self.vat = vat
        self.future1 = future
        self.buf = buf
        self.pieces = pieces
        self.pos = pos
        self.outerState = outerState
        self.k1 = k
class ReadLoop_K0(ruv.FSReadFutureCallback):
    """
    Continuation invoked after each chunked fsRead completes.
    """
    def do(self, state, result):
        # result is a (status, data, error) triple from the read future.
        (inStatus, data, inErr) = result
        if inStatus != OK:
            # Propagate the failure to the outer continuation unchanged.
            return state.k1.do(state.outerState, result)
        (status, output, err) = readLoopCore(state, data)
        if status == LOOP_CONTINUE:
            # More to read: re-run the read future with this same continuation.
            state.future1.run(state, readLoop_k0)
        elif status == LOOP_BREAK:
            # EOF: hand the accumulated file contents to the outer continuation.
            state.k1.do(state.outerState, Ok(output))
        else:
            raise ValueError(status)
# Shared singleton continuation instance; the callback itself is stateless.
readLoop_k0 = ReadLoop_K0()
class readLoop(object):
    """
    A future that reads the whole of file handle `f` in buffer-sized chunks.

    Each run issues one fsRead; readLoop_k0 either re-runs this future for
    the next chunk or completes with the joined contents.
    """
    callbackType = ruv.FSReadFutureCallback
    def __init__(self, f, buf):
        # f: libuv file handle; buf: reusable read buffer.
        self.f = f
        self.buf = buf
    def run(self, state, k):
        # Start one read; _State1 carries the accumulated pieces and the
        # outer continuation k so the loop can resume or finish.
        ruv.magic_fsRead(state.vat, self.f, self.buf).run(
            _State1(state.vat, self, self.buf, [], 0, state, k),
            readLoop_k0)
class _State2(FutureCtx):
    """Loop state threaded through the chunked-write future chain."""
    def __init__(self, vat, future, outerState, k):
        self.vat = vat
        self.future2 = future
        self.outerState = outerState
        self.k = k
def writeLoopCore(state, size):
    """
    One step of the chunked-write loop: continue while unwritten data
    remains on the future, break (with no payload) once it is drained.
    """
    if not state.future2.data:
        return Break(None)
    return Continue()
class WriteLoop_K0(ruv.FSWriteFutureCallback):
    """
    Continuation invoked after each chunked fsWrite completes.
    """
    def do(self, state, result):
        # result is a (status, bytes-written, error) triple from the write future.
        (inStatus, size, inErr) = result
        if inStatus != OK:
            # Propagate the failure and stop. The original fell through
            # here and kept slicing/writing after reporting the error;
            # ReadLoop_K0 returns on its error path, and so do we.
            state.k.do(state.outerState, result)
            return
        # Drop the bytes that were just written; keep going if any remain.
        state.future2.data = state.future2.data[size:]
        if state.future2.data:
            state.future2.run(state, writeLoop_k0)
        else:
            state.k.do(state.outerState, Ok(0))
# Shared singleton continuation instance; the callback itself is stateless.
writeLoop_k0 = WriteLoop_K0()
class writeLoop(object):
    """
    A future that writes all of `data` to file handle `f`, restarting the
    write until every byte is flushed (driven by writeLoop_k0).
    """
    callbackType = ruv.FSWriteFutureCallback
    def __init__(self, f, data):
        # f: libuv file handle; data: remaining unwritten bytes
        # (writeLoop_k0 trims this in place as chunks complete).
        self.f = f
        self.data = data
    def run(self, state, k):
        # Start one write; _State2 carries the outer continuation k so the
        # loop can resume with the remaining data or finish.
        ruv.magic_fsWrite(state.vat, self.f, self.data).run(
            _State2(state.vat, self, state, k),
            writeLoop_k0)
# Map from stat() file-type bits (the S_IFMT portion of st_mode) to a
# human-readable name; FileStatistics falls back to "unknown file type".
fileTypes = {
    stat.S_IFSOCK: u"socket",
    stat.S_IFLNK: u"symbolic link",
    stat.S_IFREG: u"regular file",
    stat.S_IFBLK: u"block device",
    stat.S_IFDIR: u"directory",
    stat.S_IFCHR: u"character device",
    stat.S_IFIFO: u"named pipe",
}
def packTime(timespec):
    """
    Convert a libuv timespec (seconds + nanoseconds) to fractional seconds.

    Returns a float. The original used `c_tv_nsec / 1000000000`, which under
    Python 2 integer division always truncates to 0, silently discarding the
    sub-second component even though the accessors declare a "Double".
    """
    return timespec.c_tv_sec + timespec.c_tv_nsec / 1e9
@autohelp
class FileStatistics(Object):
    """
    Information about an object on the filesystem.
    """
    _immutable_fields_ = (
        "major", "minor", "st_mode", "type", "hardLinks", "user", "group",
        "inode", "size", "blockSize", "aTime", "mTime", "cTime",
    )
    # NOTE(review): several instance attributes set in __init__ (hardLinks,
    # user, group) share names with the @method accessors defined below;
    # typhon's @method/autohelp dispatch presumably resolves the collision --
    # confirm before renaming anything here.
    def __init__(self, lstat):
        """
        Unpack a libuv lstat result into plain fields.
        """
        # Split st_dev into major/minor device numbers.
        # This is what glibc does.
        st_dev = lstat.c_st_dev
        self.major = intmask(((st_dev >> 8) & 0xfff) |
                             ((st_dev >> 32) & ~0xfff))
        self.minor = intmask(((st_dev >> 0) & 0xff) |
                             ((st_dev >> 12) & ~0xff))
        self.st_mode = intmask(lstat.c_st_mode)
        # Human-readable name for the S_IFMT file-type bits.
        self.type = fileTypes.get(stat.S_IFMT(self.st_mode),
                                  u"unknown file type")
        self.hardLinks = intmask(lstat.c_st_nlink)
        self.user = intmask(lstat.c_st_uid)
        self.group = intmask(lstat.c_st_gid)
        # ...
        self.inode = intmask(lstat.c_st_ino)
        self.size = intmask(lstat.c_st_size)
        self.blockSize = intmask(lstat.c_st_blksize)
        # self.blocks = intmask(lstat.c_st_blocks)
        # Timestamps packed from (sec, nsec) timespecs via packTime.
        self.aTime = packTime(lstat.c_st_atim)
        self.mTime = packTime(lstat.c_st_mtim)
        self.cTime = packTime(lstat.c_st_ctim)
        # ...
    def toString(self):
        return u"<%s %d on device %d:%d>" % (self.type, self.inode,
                                             self.major, self.minor)
    @method("Int")
    def deviceClass(self):
        "The device class, or major ID."
        return self.major
    @method("Int")
    def deviceInstance(self):
        "The device instance, or minor ID."
        return self.minor
    @method("Str")
    def fileType(self):
        """
        The file type.
        Known file types include "socket", "symbolic link", "regular file",
        "block device", "directory", "character device", and "named pipe".
        """
        return self.type
    @method("Bool")
    def runsAsUser(self):
        """
        Whether executing this file would run the resulting process as this
        file's user.
        Note that it is possible for this file to be marked to run as user
        even if it is not actually executable.
        """
        return bool(self.st_mode & stat.S_ISUID)
    @method("Bool")
    def runsAsGroup(self):
        """
        Whether executing this file would run the resulting process as this
        file's group.
        """
        # SGID only means "run as group" when the group-execute bit is also
        # set; SGID without group-execute means mandatory locking (below).
        return bool(self.st_mode & stat.S_ISGID) and bool(self.st_mode & stat.S_IXGRP)
    @method("Bool")
    def mandatesLocking(self):
        """
        Whether this file is locked with a mandatory lock upon access.
        """
        return bool(self.st_mode & stat.S_ISGID) and not bool(self.st_mode & stat.S_IXGRP)
    @method("Bool")
    def isSticky(self):
        """
        Whether this file's permissions are sticky.
        """
        return bool(self.st_mode & stat.S_ISVTX)
    @method("Bool")
    def ownerMayRead(self):
        "Whether the owner has read permission."
        return bool(self.st_mode & stat.S_IRUSR)
    @method("Bool")
    def ownerMayWrite(self):
        "Whether the owner has write permission."
        return bool(self.st_mode & stat.S_IWUSR)
    @method("Bool")
    def ownerMayExecute(self):
        "Whether the owner has execute permission."
        return bool(self.st_mode & stat.S_IXUSR)
    @method("Bool")
    def groupMayRead(self):
        "Whether the group has read permission."
        return bool(self.st_mode & stat.S_IRGRP)
    @method("Bool")
    def groupMayWrite(self):
        "Whether the group has write permission."
        return bool(self.st_mode & stat.S_IWGRP)
    @method("Bool")
    def groupMayExecute(self):
        "Whether the group has execute permission."
        return bool(self.st_mode & stat.S_IXGRP)
    @method("Bool")
    def othersMayRead(self):
        "Whether others have read permission."
        return bool(self.st_mode & stat.S_IROTH)
    @method("Bool")
    def othersMayWrite(self):
        "Whether others have write permission."
        return bool(self.st_mode & stat.S_IWOTH)
    @method("Bool")
    def othersMayExecute(self):
        "Whether others have execute permission."
        return bool(self.st_mode & stat.S_IXOTH)
    @method("Int")
    def hardLinks(self):
        "The number of hard links."
        return self.hardLinks
    @method("Int")
    def user(self):
        "The owning user ID."
        return self.user
    @method("Int")
    def group(self):
        "The owning group ID."
        return self.group
    # uint64_t st_rdev;
    @method("Int")
    def indexNode(self):
        "The index node ('inode') ID."
        return self.inode
    @method("Int", _verb="size")
    def _size(self):
        "The size."
        return self.size
    @method("Int", _verb="blockSize")
    def _blockSize(self):
        "The preferred block size."
        return self.blockSize
    # uint64_t st_blocks;
    # uint64_t st_flags;
    # uint64_t st_gen;
    @method("Double")
    def accessedTime(self):
        "The last time of access."
        return self.aTime
    @method("Double")
    def modifiedTime(self):
        "The last time of modification."
        return self.mTime
    @method("Double")
    def changedTime(self):
        "The last time of metadata change."
        return self.cTime
    # uv_timespec_t st_birthtim;
@autohelp
class FileResource(Object):
"""
A resource which provides access to the file system.
This resource operates at a distance and cannot tell whether it references
a file or a directory, nor whether its referent exists.
"""
# For help understanding this class, consult FilePath, the POSIX
# standards, and a bottle of your finest and strongest liquor. Perhaps not
# in that order, though.
_immutable_fields_ = "segments[*]",
    def __init__(self, segments):
        # segments: list of path components, joined with "/" by asBytes().
        self.segments = segments
    def toString(self):
        # Decode only for display; the underlying path stays as bytes.
        return u"<file resource %s>" % self.asBytes().decode("utf-8")
    def asBytes(self):
        # Join the path segments into a single bytestring path.
        return "/".join(self.segments)
    def sibling(self, segment):
        # Replace the final path segment: same directory, different name.
        return FileResource(self.segments[:-1] + [segment])
    def child(self, segment):
        # Append a path segment: a name inside this directory.
        return FileResource(self.segments + [segment])
    def temporarySibling(self, suffix):
        # Random hex prefix keeps concurrent temporary files from colliding.
        fileName = rsodium.randomHex() + suffix
        return self.sibling(fileName)
@method("Any")
def getListing(self):
"List the potential children of this directory."
p, r = makePromise()
path = self.asBytes()
try:
names = os.listdir(path)
wrapped = ConstList([StrObject(bs.decode("utf-8")) for bs in names])
r.resolve(wrapped)
except OSError as ose:
r.smash(StrObject(u"Couldn't list children for %s: %s" %
(path.decode("utf-8"), ose.strerror.decode("utf-8"))))
return p
@method("Any")
def makeDirectory(self):
"Create this directory."
p, r = makePromise()
path = self.asBytes()
try:
os.mkdir(path)
r.resolve(NullObject)
except OSError as ose:
r.smash(StrObject(u"Couldn't create directory %s: %s" %
(path.decode("utf-8"), ose.strerror.decode("utf-8"))))
return p
@method("Any")
def getContents(self):
p, r = makePromise()
vat = currentVat.get()
buf = ruv.allocBuf(16384)
path = self.asBytes()
log.log(["fs"], u"makeFileResource: Opening file '%s'" % path.decode("utf-8"))
with io:
f = 0
try:
f = ruv.magic_fsOpen(vat, path, os.O_RDONLY, 0000)
except object as err:
smash(r, StrObject(u"Couldn't open file fount for %s: %s" % (path.decode("utf-8"), err)))
else:
try:
contents = readLoop(f, buf)
except object as err:
ruv.magic_fsClose(vat, f)
smash(r, StrObject(u"libuv error: %s" % err))
else:
ruv.magic_fsClose(vat, f)
resolve(r, BytesObject(contents))
return p
@method("Any", "Bytes")
def setContents(self, data):
sibling = self.temporarySibling(".setContents")
p, r = makePromise()
vat = currentVat.get()
path = sibling.asBytes()
# Use CREAT | EXCL to cause a failure if the temporary file
# already exists.
flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
with io:
f = 0
try:
f = ruv.magic_fsOpen(vat, path, flags, 0777)
except object as err:
smash(r, StrObject(u"Couldn't open file fount: %s" % err))
else:
try:
writeLoop(f, data)
except object as err:
ruv.magic_fsClose(vat, f)
smash(r, StrObject(u"libuv error: %s" % err))
else:
ruv.magic_fsClose(vat, f)
ruv.magic_fsRename(vat, path, self.asBytes())
resolve(r, NullObject)
return p
@method("Any")
def getStatistics(self):
p, r = makePromise()
vat = currentVat.get()
# | |
import os
import sys
import psycopg2
import datetime
import yaml
"""Database Manager
Performs the CRUD methods on the database.
"""
class DBConnect:
_instance = None
_path = 'config.yml'
def __new__(cls):
"""
Makes sure there is only a single instance of the connection.
"""
if DBConnect._instance is None:
DBConnect._instance = object.__new__(cls)
return DBConnect._instance
    def __init__(self):
        """
        Initialize all connection state to None; populated later by
        set_credentials_and_connections().

        NOTE(review): __init__ runs on every DBConnect() call and resets
        these fields even though __new__ returns a shared instance --
        confirm this is intended.
        """
        self._host = None      # database host name
        self._username = None  # database user
        self._password = None  # database password
        self._port = None      # database port
        self._dbname = None    # database name
        self._conn = None      # live psycopg2 connection, or None
        self._config = None    # 'postgresql' section of config.yml
        self._curr = None
    def get_credentials(self):
        """
        Load database credentials from the YAML config file into self._config.

        BaseLoader parses every scalar as a string, so all credential values
        (including the port) arrive as str.
        """
        if os.path.exists(self._path):
            with open(self._path, 'r') as config_file:
                yaml_loader = yaml.load(config_file, Loader=yaml.BaseLoader)
                self._config = yaml_loader['postgresql']
        else:
            # NOTE(review): a missing config is only printed, not raised;
            # set_credentials() will then fail on self._config being None.
            print(f"The path {self._path} does not exist.")
def set_credentials(self):
"""
Set the credentials on self.
"""
self._host = self._config.get('host')
self._username = self._config.get('username')
self._password = self._config.get('password')
self._port = self._config.get('part')
self._dbname = self._config.get('database')
def get_connection(self):
"""
Check if there is a connection, and if there is done create one.
"""
try:
if(self._conn is None):
self._conn = psycopg2.connect(dbname=self._dbname,
user=self._username, password=self._password,
host=self._host, port=self._port, sslmode='require')
except psycopg2.DatabaseError as e:
print(f"Error: {e}")
sys.exit()
finally:
print('Connection opened successfully.')
def set_credentials_and_connections(self):
"""
Perform the set up tasks for the database.
"""
if(self._conn is None):
self.get_credentials()
self.set_credentials()
self.get_connection()
    def close_connection(self):
        """
        Close the connection to the database.

        Raises AttributeError if no connection was ever opened
        (self._conn is None).
        """
        self._conn.close()
def write_record(self, post_ts: str, subreddit: str, post_title: str, score_negative: str, score_neutral: str, score_positive: str, score_compound: str) -> None:
"""
Write a single record to the data base
Args:
post_ts (str): Time stamp of when the post was captured from Reddit
subreddit (str): Subreddit name
post_title (str): Post title from the subreddit
score_negative (str): Negative score from the sentiment scorer
score_neutral (str): Neutral score from the sentiment scorer
score_positive (str): Positive score from the sentiment scorer
score_compound (str): Compoud score from the sentiment scorer
"""
cur = self._conn.cursor()
cur.execute("INSERT INTO sentiment (post_ts, subreddit, post_title, score_negative, score_neutral, score_positive, score_compound) VALUES (%s, %s, %s, %s, %s, %s, %s)",
(post_ts,
subreddit,
post_title,
score_negative,
score_neutral,
score_positive,
score_compound))
self._conn.commit()
    def write_bulk(self, data):
        """
        Bulk write records to the database in a single INSERT.

        Args:
            data (iterable): rows of
                (post_ts, subreddit, post_title, score_negative,
                score_neutral, score_positive, score_compound) tuples.
        """
        cur = self._conn.cursor()
        # mogrify quotes/escapes each row the same way execute() would, so
        # joining the pre-rendered VALUES fragments here is injection-safe.
        args_str = ','.join(cur.mogrify(
            "(%s,%s,%s,%s,%s,%s,%s)", row).decode('utf-8') for row in data)
        sql = f"INSERT INTO sentiment (post_ts, subreddit, post_title, score_negative, score_neutral, score_positive, score_compound) VALUES {args_str}"
        cur.execute(sql)
        self._conn.commit()
def did_write_this_hour(self) -> bool:
"""
Check if there has been a database write within the last hour.
Returns:
value_to_return (bool): Returns true if there has been a
write, flase if there has not been a write.
"""
value_to_return = None
cur = self._conn.cursor()
cur.execute("SELECT MAX(post_ts) FROM sentiment")
datetime_list = [i for i in cur.fetchone()]
max_datetime = datetime_list[0]
current_datetime = datetime.datetime.today()
current_hour = current_datetime.hour
current_day = current_datetime.day
is_same_hour = max_datetime.hour == current_hour
is_same_day = max_datetime.day == current_day
if is_same_hour and is_same_day:
value_to_return = True
else:
value_to_return = False
return value_to_return
    def remove_duplicates(self) -> None:
        """
        Remove duplicate entries from the database.

        For each post_title that appears more than once, keeps the row with
        the smallest id and deletes the rest.
        """
        query_string = f"""
            DELETE FROM sentiment a USING (
                SELECT
                    MIN(id) as id,
                    post_title
                FROM sentiment
                GROUP BY
                    post_title HAVING COUNT(*) > 1) b
            WHERE
                a.post_title = b.post_title
                AND a.id <> b.id
        """
        cur = self._conn.cursor()
        cur.execute(query_string)
        self._conn.commit()
        cur.close()
def get_histogram_data(self, subreddit_name: str):
"""
Get the binned and normalized data for the histogram
Args:
subreddit_name (str): the name the subreddit the data
should come from.
Returns:
normalized_data_values (list)
data_labels (list)
subreddit_name (str)
"""
data_values = []
data_labels = []
query_string = f"""
WITH cte_scores AS (
SELECT
width_bucket(score_compound * 100, -110, 100, 21) - 12 AS buckets,
count(*) AS cnt
FROM sentiment
WHERE
subreddit = ('{subreddit_name}')
AND score_compound != 0
GROUP BY
buckets
ORDER BY
buckets
)
SELECT
series AS buckets,
coalesce(cnt, 0) as bucket_count
FROM generate_series(-10, 10) series
LEFT JOIN cte_scores ON cte_scores.buckets = series"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
for row in result_set:
data_labels.append(row[0])
data_values.append(row[1])
data_values_total = sum(data_values)
normalized_data_values = [round(x / data_values_total, 5) for x in data_values]
return normalized_data_values, data_labels, subreddit_name
def get_unique_categories(self) -> list:
"""
Get a list of all the subreddits in the database.
Returns:
data_labels (list)
"""
data_labels = ['all']
query_string = f"""
SELECT DISTINCT subreddit
FROM sentiment
WHERE
subreddit != 'all'
ORDER BY
subreddit
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
for row in result_set:
data_labels.append(row[0])
return data_labels
def get_random_rows(self, subreddit_name: str) -> list:
"""
Get random rows from a sub reddit to populate a
table in the app.
Args:
subreddit_name (str): The subreddit to get the rows from
Return:
random_rows (list): A list of rows to generate
"""
random_rows = []
query_string = f"""
SELECT
subreddit,
post_title,
score_negative,
score_neutral,
score_positive,
score_compound
FROM
sentiment
WHERE
subreddit = ('{subreddit_name}')
ORDER BY
random()
LIMIT 3;
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
for row in result_set:
random_rows.append(
[row[0], row[1], row[2], row[3], row[4], row[5]])
return random_rows
def get_card_counts(self, subreddit_name: str) -> list:
"""
Get counts for the cards at the top of the app.
Args:
subreddit_name (str): The subreddit to get specific counts from
for each of the cards
Return:
data_results (list): A list counts for the cards
"""
data_results = []
# New posts
query_string = f"""
SELECT
COUNT(*) AS cnt
FROM sentiment
WHERE
subreddit = ('{subreddit_name}')
AND post_ts = (SELECT MAX(post_ts) FROM sentiment WHERE subreddit = ('{subreddit_name}') )
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
[data_results.append(row) for row in result_set]
# Total subreddits
query_string = f"""
SELECT
COUNT(*) AS cnt
FROM sentiment
WHERE subreddit = ('{subreddit_name}')
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
[data_results.append(row) for row in result_set]
# Posts per subreddit
query_string = f"""
SELECT
TRUNC(COUNT(*) / COUNT(DISTINCT subreddit)::DECIMAL, 2)::VARCHAR AS cnt
FROM sentiment;
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
[data_results.append(row) for row in result_set]
# Unique subreddits
query_string = f"""
SELECT
COUNT(DISTINCT subreddit) AS cnt
FROM sentiment;
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
[data_results.append(row) for row in result_set]
return data_results
def get_histogram_counts(self, subreddit_name: str = 'all') -> list:
"""
Get the individual counts of positive, negative, and neutral
scores for a specific subreddit.
Args:
subreddit_name (str): The subreddit to get specific counts for
Return:
data_results (list): A list counts for the subreddit
"""
data_results = []
# Posts last hour
query_string = f"""
SELECT
SUM(neg_post) AS neg_post,
SUM(pos_post) AS pos_post,
SUM(neu_post) AS neu_post
FROM (
SELECT
CASE WHEN score_compound < -.1 THEN 1 ELSE 0 END AS neg_post,
CASE WHEN score_compound > .1 THEN 1 ELSE 0 END AS pos_post,
CASE WHEN score_compound BETWEEN -.1 AND .1 THEN 1 ELSE 0 END AS neu_post
FROM sentiment
WHERE subreddit = ('{subreddit_name}') ) AS count_subquery;
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
[data_results.append(row) for row in result_set]
return data_results
def get_total_records(self):
"""
Get the total number of records in the database.
"""
query_string = f"""
SELECT
COUNT(*) AS cnt
FROM sentiment
"""
cur = self._conn.cursor()
cur.execute(query_string)
result_set = cur.fetchall()
return result_set[0][0]
def delete_oldest_two_datetime(self) -> None:
    """
    Delete a batch of rows from the sentiment table.

    NOTE(review): despite the method name and despite what the original
    docstring claimed, this does NOT delete the oldest two datetimes.
    The query deletes every row whose post_title matches one of 1100
    RANDOMLY selected post_titles (ORDER BY RANDOM() LIMIT 1100).
    Confirm whether random trimming is intentional or the name/query
    have drifted apart.
    """
    query_string = f"""
        DELETE FROM
            sentiment
        WHERE
            post_title IN (SELECT
                post_title
            FROM
                sentiment
            ORDER BY RANDOM () LIMIT 1100)
    """
    cur = self._conn.cursor()
    cur.execute(query_string)
    # The delete must be explicitly committed before closing the cursor.
    self._conn.commit()
    cur.close()
def check_if_exists(self, user_name: str) -> bool:
"""
Check if a user exists in the database.
Args:
user_name (str): The user name (email) to check in the database
Returns:
result (bool): True if the user exists in the database, false
if not.
"""
query_string = f"""
SELECT
CASE WHEN user_name IS NULL
THEN False
ELSE True
END AS does_exist
FROM sentiment_users | |
# gh_stars: 1-10  (scraper metadata residue; commented out so the file parses)
#coding:utf-8
# compute peak and drop-peak frequency detail of the tube
# by scipy.optimize.minimize_scalar
#
import sys
import argparse
import numpy as np
from scipy import signal
from scipy import optimize
import matplotlib.pyplot as plt
# Check version
# Python 3.6.4 on win32 (Windows 10)
# numpy 1.14.0
# matplotlib 2.1.1
# scipy 1.0.0
class compute_tube_peak(object):
    """
    Compute peak (resonance) and drop-peak (anti-resonance) frequencies of a
    two- or three-tube acoustic model.

    A coarse frequency-response scan locates candidate extrema, which are then
    refined with scipy.optimize.minimize_scalar (Brent's method).
    """

    def __init__(self, rg0=0.95, rl0=0.9, NUM_TUBE=2, sampling_rate=16000, disp=False):
        """
        Args:
            rg0 (float): reflection coefficient at the glottis (source) end.
            rl0 (float): reflection coefficient at the lip (load) end.
            NUM_TUBE (int): number of peak/drop-peak candidates to return.
            sampling_rate (int): sampling rate in Hz (stored for reference).
            disp (bool): if True, print optimizer details.
        """
        self.rg0 = rg0
        # Bug fix: rl0 was previously hard-coded to 0.9, silently ignoring
        # the caller-supplied argument.
        self.rl0 = rl0
        self.C0 = 35000.0  # speed of sound in air, roughly 35000 cm/second
        self.NUM_TUBE = NUM_TUBE
        self.sampling_rate = sampling_rate
        self.Delta_Freq = 5     # coarse scan step [Hz]
        self.f_min = 200
        self.f_max = 5000
        self.f_out = 100000     # sentinel frequency used when a candidate is missing
        self.f = np.arange(self.f_min, self.f_max, self.Delta_Freq)
        self.xw = 2.0 * np.pi * self.f
        self.sign0 = 1.0        # +1: search peaks, -1: search drop-peaks
        self.disp = disp
        self.counter = 0

    def __call__(self, X):
        """
        Compute refined peak and drop-peak frequency lists for tube model X.

        X encodes the tube geometry; it should match get_ft5:
          len(X) == 6: [L1, L2, L3, A1, A2, A3]  (three tubes, areas)
          len(X) == 5: [L1, L2, L3, r1, r2]      (three tubes, reflections)
          len(X) == 4: [L1, L2, A1, A2]          (two tubes, areas)
          len(X) == 3: [L1, L2, r1]              (two tubes, reflection)

        Returns:
            (peaks_detail, drop_peaks_detail): two arrays of length NUM_TUBE,
            padded with the sentinel f_out when too few candidates exist.
        """
        if (len(X) == 6) or (len(X) == 5):  # three tube model
            tu1 = X[0] / self.C0  # delay time in 1st tube
            tu2 = X[1] / self.C0  # delay time in 2nd tube
            tu3 = X[2] / self.C0  # delay time in 3rd tube
            if len(X) == 6:
                # reflection coefficients derived from cross-section areas
                r1 = (X[4] - X[3]) / (X[4] + X[3])  # between 1st and 2nd tube
                r2 = (X[5] - X[4]) / (X[5] + X[4])  # between 2nd and 3rd tube
            else:
                r1 = X[3]
                r2 = X[4]
            func1 = self.func_yb_t3
            args1 = (tu1, tu2, tu3, r1, r2)
            # abs(yi) = abs(const * (cos wv + j sin wv)) is constant, so
            # max/min of abs(val) depends only on yb.
            self.yi = 0.5 * (1.0 + self.rg0) * (1.0 + r1) * (1.0 + r2) * (1.0 + self.rl0) * \
                np.exp(-1.0j * (tu1 + tu2 + tu3) * self.xw)
            # yb: denominator terms of the three-tube transfer function
            yb1 = 1.0 + r1 * self.rg0 * np.exp(-2.0j * tu1 * self.xw)
            yb1 = yb1 + r2 * r1 * np.exp(-2.0j * tu2 * self.xw)
            yb1 = yb1 + self.rl0 * r2 * np.exp(-2.0j * tu3 * self.xw)
            yb2 = r2 * self.rg0 * np.exp(-2.0j * (tu1 + tu2) * self.xw)
            yb2 = yb2 + self.rl0 * r1 * np.exp(-2.0j * (tu2 + tu3) * self.xw)
            yb3 = self.rl0 * r2 * r1 * self.rg0 * np.exp(-2.0j * (tu1 + tu3) * self.xw)
            yb4 = self.rl0 * self.rg0 * np.exp(-2.0j * (tu1 + tu2 + tu3) * self.xw)
            self.yb = yb1 + yb2 + yb3 + yb4
        elif (len(X) == 4) or (len(X) == 3):  # two tube model
            tu1 = X[0] / self.C0  # delay time in 1st tube
            tu2 = X[1] / self.C0  # delay time in 2nd tube
            if len(X) == 4:
                r1 = (X[3] - X[2]) / (X[3] + X[2])  # between 1st and 2nd tube
            else:
                r1 = X[2]
            func1 = self.func_yb_t2
            args1 = (tu1, tu2, r1)
            # compute frequency response; extrema depend only on yb (see above)
            self.yi = 0.5 * (1.0 + self.rg0) * (1.0 + r1) * (1.0 + self.rl0) * \
                np.exp(-1.0j * (tu1 + tu2) * self.xw)
            self.yb = 1.0 + r1 * self.rg0 * np.exp(-2.0j * tu1 * self.xw) + \
                self.rl0 * r1 * np.exp(-2.0j * tu2 * self.xw) + \
                self.rl0 * self.rg0 * np.exp(-2.0j * (tu1 + tu2) * self.xw)
        else:
            # Previously this only printed a message and then crashed later
            # with an AttributeError; fail fast with a clear error instead.
            raise ValueError('error: len(X) is not expected value. %d' % len(X))
        val = self.yi / self.yb
        self.response = np.sqrt(val.real ** 2 + val.imag ** 2)
        # get coarse peak and drop-peak candidate lists
        self.peaks_list = signal.argrelmax(self.response)[0]  # argrelmax returns a tuple
        peaks = self.f[self.peaks_list]
        self.drop_peaks_list = signal.argrelmin(self.response)[0]
        drop_peaks = self.f[self.drop_peaks_list]
        # pad with the sentinel f_out when fewer than NUM_TUBE candidates exist
        if len(peaks) < self.NUM_TUBE:
            peaks = np.concatenate((peaks, np.ones(self.NUM_TUBE - len(peaks)) * self.f_out))
        elif len(peaks) > self.NUM_TUBE:
            peaks = peaks[0: self.NUM_TUBE]
            self.peaks_list = self.peaks_list[0: self.NUM_TUBE]
        if len(drop_peaks) < self.NUM_TUBE:
            drop_peaks = np.concatenate((drop_peaks, np.ones(self.NUM_TUBE - len(drop_peaks)) * self.f_out))
        elif len(drop_peaks) > self.NUM_TUBE:
            drop_peaks = drop_peaks[0: self.NUM_TUBE]
            self.drop_peaks_list = self.drop_peaks_list[0: self.NUM_TUBE]
        # refine each candidate with a scalar optimizer
        peaks_detail = np.zeros(len(peaks))
        drop_peaks_detail = np.zeros(len(drop_peaks))
        ## peak
        self.sign0 = 1.0  # normal sign: minimize |yb|^2 -> response maximum
        for l, xinit in enumerate(peaks):
            if xinit >= self.f_max:
                peaks_detail[l] = xinit  # sentinel value: nothing to refine
            else:
                # Brent's method: bracketing combined with parabolic interpolation
                b_xinit = [xinit - self.Delta_Freq, xinit + self.Delta_Freq]
                res = optimize.minimize_scalar(func1, bracket=b_xinit, args=args1)
                peaks_detail[l] = res.x
                if self.disp:
                    print('b_xinit', b_xinit)
                    print('result x', res.x)
        ## drop-peak
        self.sign0 = -1.0  # turn upside down: maximize |yb|^2 -> response minimum
        for l, xinit in enumerate(drop_peaks):
            if xinit >= self.f_max:
                drop_peaks_detail[l] = xinit
            else:
                b_xinit = [xinit - self.Delta_Freq, xinit + self.Delta_Freq]
                res = optimize.minimize_scalar(func1, bracket=b_xinit, args=args1)
                drop_peaks_detail[l] = res.x
                if self.disp:
                    print('b_xinit', b_xinit)
                    print('result x', res.x)
        return peaks_detail, drop_peaks_detail

    def func_yb_t2(self, x, *args):
        """Two-tube objective: |yb|^2 at frequency x, scaled by sign0 so the
        same minimizer finds both maxima and minima of the response."""
        tu1, tu2, r1 = args
        xw = x * 2.0 * np.pi
        yb = 1.0 + r1 * self.rg0 * np.exp(-2.0j * tu1 * xw) + self.rl0 * r1 * np.exp(-2.0j * tu2 * xw) + \
            self.rl0 * self.rg0 * np.exp(-2.0j * (tu1 + tu2) * xw)
        return (yb.real ** 2 + yb.imag ** 2) * self.sign0

    def func_yb_t3(self, x, *args):
        """Three-tube objective: |yb|^2 at frequency x, scaled by sign0."""
        tu1, tu2, tu3, r1, r2 = args
        xw = x * 2.0 * np.pi
        yb1 = 1.0 + r1 * self.rg0 * np.exp(-2.0j * tu1 * xw)
        yb1 = yb1 + r2 * r1 * np.exp(-2.0j * tu2 * xw)
        yb1 = yb1 + self.rl0 * r2 * np.exp(-2.0j * tu3 * xw)
        yb2 = r2 * self.rg0 * np.exp(-2.0j * (tu1 + tu2) * xw)
        yb2 = yb2 + self.rl0 * r1 * np.exp(-2.0j * (tu2 + tu3) * xw)
        yb3 = self.rl0 * r2 * r1 * self.rg0 * np.exp(-2.0j * (tu1 + tu3) * xw)
        yb4 = self.rl0 * self.rg0 * np.exp(-2.0j * (tu1 + tu2 + tu3) * xw)
        yb = yb1 + yb2 + yb3 + yb4
        return (yb.real ** 2 + yb.imag ** 2) * self.sign0

    def show_freq(self,):
        """Plot the coarse (accuracy = Delta_Freq) frequency response with the
        detected peaks/drop-peaks, plus |yi| and |yb| for reference."""
        fig = plt.figure()
        ax1 = fig.add_subplot(211)
        plt.title('frequency response')
        plt.xlabel('Frequency [Hz]')
        plt.ylabel('Amplitude [dB]')
        if 1:  # show peak and drop peak
            ax1.semilogy(self.f, self.response, 'b', ms=2)
            ax1.semilogy(self.f[self.peaks_list], self.response[self.peaks_list], 'ro', ms=3)
            ax1.semilogy(self.f[self.drop_peaks_list], self.response[self.drop_peaks_list], 'co', ms=3)
        if 1:  # show yi and yb
            ax1.plot(self.f, np.abs(self.yi), 'g')
            ax1.plot(self.f, np.abs(self.yb), 'y')
        plt.grid()
        plt.axis('tight')
        plt.show()

    def reset_counter(self,):
        """Reset the cost-evaluation counter used for progress display."""
        self.counter = 0

    def cost_0(self, peaks2, drop_peaks2, peaks, drop_peaks):
        """Base cost: mean absolute difference of peaks plus drop-peaks."""
        return (abs(peaks - peaks2).mean() + abs(drop_peaks - drop_peaks2).mean()) / 2.0

    def calc_cost(self, X, peaks, drop_peaks, display_count=100, disp=False):
        """Mean difference between target and newly computed extrema for tube
        model X, plus a 1000.0 penalty for nonphysical reflection coefficients
        (|r| > 0.9).  display_count: show a progress counter every N calls
        (negative disables it)."""
        peaks2, drop_peaks2 = self.__call__(X)
        cost0 = self.cost_0(peaks2, drop_peaks2, peaks, drop_peaks)
        # add penalty if any reflection coefficient magnitude is over 0.9
        if len(X) == 3 and abs(X[2]) > 0.9:
            cost0 += 1000.0
        elif len(X) == 5 and (abs(X[3]) > 0.9 or abs(X[4]) > 0.9):
            # Bug fix: this was abs(X[4] > 0.9) -- abs() of a boolean -- so a
            # negative r2 below -0.9 was never penalized.
            cost0 += 1000.0
        if disp:
            print(X, cost0, peaks2, drop_peaks2)
        self.counter += 1
        # show present counter value; don't show if display_count is negative
        if display_count > 0 and self.counter % display_count == 0:
            sys.stdout.write("\r%d" % self.counter)
            sys.stdout.flush()
        return cost0
# helper functions
def get_r1(X):
    """Reflection coefficient between the 1st and 2nd tube.

    X holds the two cross-section areas (A1, A2); the coefficient is
    (A2 - A1) / (A2 + A1).
    """
    area1, area2 = X[0], X[1]
    return (area2 - area1) / (area2 + area1)
def get_A2(r1, A1):
    """Return the cross-section area of the 2nd tube from r1 and A1.

    Raises:
        ValueError: if abs(r1) >= 1.0.  (Previously this only printed an
        error and then returned a meaningless -- possibly divide-by-zero --
        result.)
    """
    if abs(r1) >= 1.0:
        raise ValueError('abs(r1) must be < 1.0, got %r' % (r1,))
    return ((1.0 + r1) / (1 - r1)) * A1
def get_A1(r1, A2):
    """Return the cross-section area of the 1st tube from r1 and A2.

    Raises:
        ValueError: if abs(r1) >= 1.0.  (Previously this only printed an
        error and then returned a meaningless -- possibly divide-by-zero --
        result.)
    """
    if abs(r1) >= 1.0:
        raise ValueError('abs(r1) must be < 1.0, got %r' % (r1,))
    return ((1.0 - r1) / (1 + r1)) * A2
def get_A1A2( r1, A_min=1.0):
# return cross-section area A1 and A2 under the condition of
# minimum cross-section is fixed as A_min
if r1 >= 0.0:
return | |
# Source: naviocean/imgclsmob -- gluon/gluoncv2/models/diaresnet.py (scraper metadata residue, commented out)
"""
DIA-ResNet for ImageNet-1K, implemented in Gluon.
Original paper: 'DIANet: Dense-and-Implicit Attention Network,' https://arxiv.org/abs/1905.10671.
"""
__all__ = ['DIAResNet', 'diaresnet10', 'diaresnet12', 'diaresnet14', 'diaresnetbc14b', 'diaresnet16', 'diaresnet18',
'diaresnet26', 'diaresnetbc26b', 'diaresnet34', 'diaresnetbc38b', 'diaresnet50', 'diaresnet50b',
'diaresnet101', 'diaresnet101b', 'diaresnet152', 'diaresnet152b', 'diaresnet200', 'diaresnet200b',
'DIAAttention', 'DIAResUnit']
import os
from mxnet import cpu
from mxnet.gluon import nn, HybridBlock
from .common import conv1x1_block, DualPathSequential
from .resnet import ResBlock, ResBottleneck, ResInitBlock
class FirstLSTMAmp(HybridBlock):
    """
    First LSTM amplifier branch.

    A two-layer bottleneck MLP (dense -> ReLU -> dense) whose hidden width
    is a quarter of the input width.

    Parameters:
    ----------
    in_units : int
        Number of input channels.
    units : int
        Number of output channels.
    """
    def __init__(self,
                 in_units,
                 units,
                 **kwargs):
        super(FirstLSTMAmp, self).__init__(**kwargs)
        bottleneck_units = in_units // 4
        with self.name_scope():
            self.fc1 = nn.Dense(
                units=bottleneck_units,
                in_units=in_units)
            self.activ = nn.Activation("relu")
            self.fc2 = nn.Dense(
                units=units,
                in_units=bottleneck_units)

    def hybrid_forward(self, F, x):
        return self.fc2(self.activ(self.fc1(x)))
class DIALSTMCell(HybridBlock):
    """
    DIA-LSTM cell.

    Stacks `num_layers` amplifier pairs; layer 0 uses FirstLSTMAmp, the rest
    plain Dense layers.  The gate math follows the usual LSTM equations,
    except that the hidden state applies a sigmoid (not tanh) to the cell
    state, matching the reference implementation.

    Parameters:
    ----------
    in_x_features : int
        Number of x input channels.
    in_h_features : int
        Number of h input channels.
    num_layers : int
        Number of amplifiers.
    dropout_rate : float, default 0.1
        Parameter of Dropout layer. Fraction of the input units to drop.
    """
    def __init__(self,
                 in_x_features,
                 in_h_features,
                 num_layers,
                 dropout_rate=0.1,
                 **kwargs):
        super(DIALSTMCell, self).__init__(**kwargs)
        self.num_layers = num_layers
        gate_features = 4 * in_h_features
        with self.name_scope():
            self.x_amps = nn.HybridSequential(prefix="")
            self.h_amps = nn.HybridSequential(prefix="")
            for layer_idx in range(num_layers):
                amp_class = FirstLSTMAmp if layer_idx == 0 else nn.Dense
                self.x_amps.add(amp_class(
                    in_units=in_x_features,
                    units=gate_features))
                self.h_amps.add(amp_class(
                    in_units=in_h_features,
                    units=gate_features))
                # deeper layers receive the previous hidden state as input
                in_x_features = in_h_features
            self.dropout = nn.Dropout(rate=dropout_rate)

    def hybrid_forward(self, F, x, h, c):
        h_out = []
        c_out = []
        for layer_idx in range(self.num_layers):
            gates = self.x_amps[layer_idx](x) + self.h_amps[layer_idx](h[layer_idx])
            in_gate, forget_gate, cell_gate, out_gate = F.split(gates, axis=1, num_outputs=4)
            in_gate = F.sigmoid(in_gate)
            forget_gate = F.sigmoid(forget_gate)
            cell_gate = F.tanh(cell_gate)
            out_gate = F.sigmoid(out_gate)
            c_i = (forget_gate * c[layer_idx]) + (in_gate * cell_gate)
            # NOTE: sigmoid (not the conventional tanh) on the cell state,
            # exactly as in the original code.
            h_i = out_gate * F.sigmoid(c_i)
            c_out.append(c_i)
            h_out.append(h_i)
            x = self.dropout(h_i)
        return h_out, c_out
class DIAAttention(HybridBlock):
    """
    DIA-Net attention module.

    Global-average-pools the input, feeds the pooled vector through a
    DIA-LSTM cell, and rescales the input channel-wise by the resulting
    hidden state.

    Parameters:
    ----------
    in_x_features : int
        Number of x input channels.
    in_h_features : int
        Number of h input channels.
    num_layers : int, default 1
        Number of amplifiers.
    """
    def __init__(self,
                 in_x_features,
                 in_h_features,
                 num_layers=1,
                 **kwargs):
        super(DIAAttention, self).__init__(**kwargs)
        self.num_layers = num_layers
        with self.name_scope():
            self.lstm = DIALSTMCell(
                in_x_features=in_x_features,
                in_h_features=in_h_features,
                num_layers=num_layers)

    def hybrid_forward(self, F, x, hc=None):
        pooled = F.contrib.AdaptiveAvgPooling2D(x, output_size=1).flatten()
        if hc is None:
            # first unit in the chain: start from zero hidden/cell state
            h = [F.zeros_like(pooled)] * self.num_layers
            c = [F.zeros_like(pooled)] * self.num_layers
        else:
            h, c = hc
        h, c = self.lstm(pooled, h, c)
        scale = h[self.num_layers - 1].expand_dims(axis=-1).expand_dims(axis=-1)
        return F.broadcast_mul(x, scale), (h, c)
class DIAResUnit(HybridBlock):
    """
    DIA-ResNet unit with residual connection.

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    strides : int or tuple/list of 2 int
        Strides of the convolution.
    padding : int or tuple/list of 2 int, default 1
        Padding value for the second convolution layer in bottleneck.
    dilation : int or tuple/list of 2 int, default 1
        Dilation value for the second convolution layer in bottleneck.
    bn_use_global_stats : bool, default False
        Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
    bottleneck : bool, default True
        Whether to use a bottleneck or simple block in units.
    conv1_stride : bool, default False
        Whether to use stride in the first or the second convolution layer of the block.
    attention : HybridBlock, default None
        Attention module; when None, the unit behaves as a plain residual unit.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 strides,
                 padding=1,
                 dilation=1,
                 bn_use_global_stats=False,
                 bottleneck=True,
                 conv1_stride=False,
                 attention=None,
                 **kwargs):
        super(DIAResUnit, self).__init__(**kwargs)
        # A 1x1 projection is needed whenever the residual shapes differ.
        self.resize_identity = (in_channels != out_channels) or (strides != 1)
        with self.name_scope():
            if bottleneck:
                self.body = ResBottleneck(
                    in_channels=in_channels,
                    out_channels=out_channels,
                    strides=strides,
                    padding=padding,
                    dilation=dilation,
                    bn_use_global_stats=bn_use_global_stats,
                    conv1_stride=conv1_stride)
            else:
                self.body = ResBlock(
                    in_channels=in_channels,
                    out_channels=out_channels,
                    strides=strides,
                    bn_use_global_stats=bn_use_global_stats)
            if self.resize_identity:
                self.identity_conv = conv1x1_block(
                    in_channels=in_channels,
                    out_channels=out_channels,
                    strides=strides,
                    bn_use_global_stats=bn_use_global_stats,
                    activation=None)
            self.activ = nn.Activation("relu")
            self.attention = attention

    def hybrid_forward(self, F, x, hc=None):
        if self.resize_identity:
            identity = self.identity_conv(x)
        else:
            identity = x
        x = self.body(x)
        # Bug fix: the documented default attention=None previously crashed
        # here with a TypeError; skip attention when none was supplied.
        if self.attention is not None:
            x, hc = self.attention(x, hc)
        x = x + identity
        x = self.activ(x)
        return x, hc
class DIAResNet(HybridBlock):
    """
    DIA-ResNet model from 'DIANet: Dense-and-Implicit Attention Network,' https://arxiv.org/abs/1905.10671.

    Parameters:
    ----------
    channels : list of list of int
        Number of output channels for each unit.
    init_block_channels : int
        Number of output channels for the initial unit.
    bottleneck : bool
        Whether to use a bottleneck or simple block in units.
    conv1_stride : bool
        Whether to use stride in the first or the second convolution layer in units.
    bn_use_global_stats : bool, default False
        Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
        Useful for fine-tuning.
    in_channels : int, default 3
        Number of input channels.
    in_size : tuple of two ints, default (224, 224)
        Spatial size of the expected input image.
    classes : int, default 1000
        Number of classification classes.
    """
    def __init__(self,
                 channels,
                 init_block_channels,
                 bottleneck,
                 conv1_stride,
                 bn_use_global_stats=False,
                 in_channels=3,
                 in_size=(224, 224),
                 classes=1000,
                 **kwargs):
        super(DIAResNet, self).__init__(**kwargs)
        self.in_size = in_size
        self.classes = classes
        with self.name_scope():
            self.features = nn.HybridSequential(prefix="")
            self.features.add(ResInitBlock(
                in_channels=in_channels,
                out_channels=init_block_channels,
                bn_use_global_stats=bn_use_global_stats))
            in_channels = init_block_channels
            for stage_idx, stage_channels in enumerate(channels):
                # return_two=False: only the data path (not the attention
                # state) is propagated between units of a stage.
                stage = DualPathSequential(
                    return_two=False,
                    prefix="stage{}_".format(stage_idx + 1))
                # a single attention module is shared by all units in a stage
                shared_attention = DIAAttention(
                    in_x_features=stage_channels[0],
                    in_h_features=stage_channels[0])
                with stage.name_scope():
                    for unit_idx, out_channels in enumerate(stage_channels):
                        unit_strides = 2 if (unit_idx == 0) and (stage_idx != 0) else 1
                        stage.add(DIAResUnit(
                            in_channels=in_channels,
                            out_channels=out_channels,
                            strides=unit_strides,
                            bn_use_global_stats=bn_use_global_stats,
                            bottleneck=bottleneck,
                            conv1_stride=conv1_stride,
                            attention=shared_attention))
                        in_channels = out_channels
                self.features.add(stage)
            self.features.add(nn.AvgPool2D(
                pool_size=7,
                strides=1))
            self.output = nn.HybridSequential(prefix="")
            self.output.add(nn.Flatten())
            self.output.add(nn.Dense(
                units=classes,
                in_units=in_channels))

    def hybrid_forward(self, F, x):
        return self.output(self.features(x))
def get_diaresnet(blocks,
                  bottleneck=None,
                  conv1_stride=True,
                  width_scale=1.0,
                  model_name=None,
                  pretrained=False,
                  ctx=cpu(),
                  root=os.path.join("~", ".mxnet", "models"),
                  **kwargs):
    """
    Create DIA-ResNet model with specific parameters.

    Parameters:
    ----------
    blocks : int
        Number of blocks.
    bottleneck : bool, default None
        Whether to use a bottleneck or simple block in units.
    conv1_stride : bool, default True
        Whether to use stride in the first or the second convolution layer in units.
    width_scale : float, default 1.0
        Scale factor for width of layers.
    model_name : str or None, default None
        Model name for loading pretrained model.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    if bottleneck is None:
        bottleneck = (blocks >= 50)

    # Per-stage unit counts.  Depths 14, 26 and 38 depend on whether the
    # bottleneck block is used; all other depths are the same either way.
    depth_to_layers = {
        10: [1, 1, 1, 1],
        12: [2, 1, 1, 1],
        16: [2, 2, 2, 1],
        18: [2, 2, 2, 2],
        34: [3, 4, 6, 3],
        50: [3, 4, 6, 3],
        101: [3, 4, 23, 3],
        152: [3, 8, 36, 3],
        200: [3, 24, 36, 3],
    }
    if bottleneck:
        depth_to_layers.update({14: [1, 1, 1, 1], 26: [2, 2, 2, 2], 38: [3, 3, 3, 3]})
    else:
        depth_to_layers.update({14: [2, 2, 1, 1], 26: [3, 3, 3, 3]})
    try:
        layers = depth_to_layers[blocks]
    except KeyError:
        raise ValueError("Unsupported DIA-ResNet with number of blocks: {}".format(blocks))

    # sanity check: stage layout must reproduce the requested depth
    units_per_block = 3 if bottleneck else 2
    assert (sum(layers) * units_per_block + 2 == blocks)

    init_block_channels = 64
    channels_per_layers = [64, 128, 256, 512]
    if bottleneck:
        channels_per_layers = [ci * 4 for ci in channels_per_layers]

    channels = [[ci] * li for (ci, li) in zip(channels_per_layers, layers)]

    if width_scale != 1.0:
        # Scale every unit's width except the very last unit of the last
        # stage, which keeps its original channel count.
        last_stage = len(channels) - 1
        scaled = []
        for stage_idx, stage_channels in enumerate(channels):
            last_unit = len(stage_channels) - 1
            scaled.append([
                cij if (stage_idx == last_stage) and (unit_idx == last_unit)
                else int(cij * width_scale)
                for unit_idx, cij in enumerate(stage_channels)])
        channels = scaled
        init_block_channels = int(init_block_channels * width_scale)

    net = DIAResNet(
        channels=channels,
        init_block_channels=init_block_channels,
        bottleneck=bottleneck,
        conv1_stride=conv1_stride,
        **kwargs)

    if pretrained:
        if (model_name is None) or (not model_name):
            raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
        from .model_store import get_model_file
        net.load_parameters(
            filename=get_model_file(
                model_name=model_name,
                local_model_store_dir_path=root),
            ctx=ctx)

    return net
def diaresnet10(**kwargs):
"""
DIA-ResNet-10 model from 'DIANet: Dense-and-Implicit Attention Network,' https://arxiv.org/abs/1905.10671.
It's an experimental model.
Parameters:
----------
pretrained : bool, default | |
# Source: ChaoPang/curation (scraper metadata residue, commented out)
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.4.2
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# BigQuery client used for all queries in this notebook.
from google.cloud import bigquery
# %reload_ext google.cloud.bigquery
client = bigquery.Client()
# %load_ext google.cloud.bigquery
# +
# Dataset and lookup-table locations come from the shared notebook parameters.
from notebooks import parameters
DATASET = parameters.LATEST_DATASET
LOOKUP_TABLES = parameters.LOOKUP_TABLES
print(f"Dataset to use: {DATASET}")
print(f"Lookup tables: {LOOKUP_TABLES}")
# +
import warnings
warnings.filterwarnings('ignore')
import pandas as pd
import matplotlib.pyplot as plt
import os
plt.style.use('ggplot')
# Display settings: show full dataframes in the notebook output.
pd.options.display.max_rows = 999
pd.options.display.max_columns = 999
pd.options.display.max_colwidth = 999
def cstr(s, color='black'):
    """Wrap *s* in an HTML <text> tag with the given CSS color (for rich notebook display)."""
    return f"<text style=color:{color}>{s}</text>"
# -
cwd = os.getcwd()
cwd = str(cwd)
print("Current working directory is: {cwd}".format(cwd=cwd))
# ### Get the list of HPO IDs
#
# ### NOTE: This assumes that all of the relevant HPOs have a person table.
hpo_id_query = f"""
SELECT REPLACE(table_id, '_person', '') AS src_hpo_id
FROM
`{DATASET}.__TABLES__`
WHERE table_id LIKE '%person'
AND table_id
NOT LIKE '%unioned_ehr_%'
AND table_id NOT LIKE '\\\_%'
"""
site_df = pd.io.gbq.read_gbq(hpo_id_query, dialect='standard')
get_full_names = f"""
select * from {LOOKUP_TABLES}.hpo_site_id_mappings
"""
full_names_df = pd.io.gbq.read_gbq(get_full_names, dialect='standard')
# +
full_names_df.columns = ['org_id', 'src_hpo_id', 'site_name', 'display_order']
columns_to_use = ['src_hpo_id', 'site_name']
full_names_df = full_names_df[columns_to_use]
full_names_df['src_hpo_id'] = full_names_df['src_hpo_id'].str.lower()
# +
cols_to_join = ['src_hpo_id']
site_df = pd.merge(site_df, full_names_df, on=['src_hpo_id'], how='left')
# -
# ### The below query is used to generate a 'procedure/visit dataframe'. This dataframe shows the difference between the start/end times for the same visit_occurrence_id with respect to the procedure table.
#
# ### Each row shows information for:
# - The difference between the visit start date and the procedure date
# - The difference between the visit end date and the procedure date
# - The difference between the visit start datetime (as a date) and the procedure date
# - The difference between the visit end datetime (as a date) and the procedure date
# - The difference between the visit start datetime (as a date) and the procedure datetime (as a date)
# - The difference between the visit end datetime (as a date) and the procedure datetime (as a date)
# - The sum of all the values listed above
#
# ### While we will only be using the 'total number of bad rows' at this time, the other columns may be useful for subsequent analyses down the line
p_v_query = """
SELECT
DISTINCT
a.*,
(a.procedure_vis_start_diff + a.procedure_vis_end_diff + a.procedure_vis_start_dt_diff + a.procedure_vis_end_dt_diff +
a.procedure_dt_vis_start_diff + a.procedure_dt_vis_end_diff + a.procedure_dt_vis_start_dt_diff + a.procedure_dt_vis_end_dt_diff) as total_diff
FROM
( SELECT
mpo.src_hpo_id, COUNT(mpo.src_hpo_id) as num_bad_records,
IFNULL(ABS(DATE_DIFF(po.procedure_date, vo.visit_start_date, DAY)), 0) as procedure_vis_start_diff,
IFNULL(ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY)), 0) as procedure_vis_end_diff,
IFNULL(ABS(DATE_DIFF(po.procedure_date, CAST(vo.visit_start_datetime AS DATE), DAY)), 0) as procedure_vis_start_dt_diff,
IFNULL(ABS(DATE_DIFF(po.procedure_date, CAST(vo.visit_end_datetime AS DATE), DAY)), 0) as procedure_vis_end_dt_diff,
IFNULL(ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), vo.visit_start_date, DAY)), 0) as procedure_dt_vis_start_diff,
IFNULL(ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), vo.visit_end_date, DAY)), 0) as procedure_dt_vis_end_diff,
IFNULL(ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), CAST(vo.visit_start_datetime AS DATE), DAY)), 0) as procedure_dt_vis_start_dt_diff,
IFNULL(ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), CAST(vo.visit_end_datetime AS DATE), DAY)), 0) as procedure_dt_vis_end_dt_diff,
(
ABS(DATE_DIFF(po.procedure_date, vo.visit_start_date, DAY)) =
ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY))
AND
ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY)) =
ABS(DATE_DIFF(po.procedure_date, CAST(vo.visit_start_datetime AS DATE), DAY))
AND
ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY)) =
ABS(DATE_DIFF(po.procedure_date, CAST(vo.visit_end_datetime AS DATE), DAY))
AND
ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY)) =
ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), vo.visit_start_date, DAY))
AND
ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY)) =
ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), vo.visit_end_date, DAY))
AND
ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY)) =
ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), CAST(vo.visit_start_datetime AS DATE), DAY))
AND
ABS(DATE_DIFF(po.procedure_date, vo.visit_end_date, DAY)) =
ABS(DATE_DIFF(CAST(po.procedure_datetime AS DATE), CAST(vo.visit_end_datetime AS DATE), DAY))
) as all_discrepancies_equal
FROM
`{DATASET}.unioned_ehr_procedure_occurrence` po
LEFT JOIN
`{DATASET}._mapping_procedure_occurrence` mpo
ON
po.procedure_occurrence_id = mpo.procedure_occurrence_id
LEFT JOIN
`{DATASET}.unioned_ehr_visit_occurrence` vo
ON
po.visit_occurrence_id = vo.visit_occurrence_id
WHERE
-- must have populated visit occurrence id
(
po.visit_occurrence_id IS NOT NULL
AND
po.visit_occurrence_id <> 0
AND
vo.visit_occurrence_id IS NOT NULL
AND
vo.visit_occurrence_id <> 0
)
AND
(
-- problem with procedure date
(po.procedure_date < vo.visit_start_date
OR
po.procedure_date > vo.visit_end_date)
OR
-- problem with datetime
(CAST(po.procedure_datetime AS DATE) < CAST(vo.visit_start_datetime AS DATE)
OR
CAST(po.procedure_datetime AS DATE) > CAST(vo.visit_end_datetime AS DATE))
OR
-- problem with the datetime (extracting date for comparison)
(po.procedure_date < CAST(vo.visit_start_datetime AS DATE)
OR
po.procedure_date > CAST(vo.visit_end_datetime AS DATE))
OR
--problem with the datetime
(CAST(po.procedure_datetime AS DATE) < vo.visit_start_date
OR
CAST(po.procedure_datetime AS DATE) > vo.visit_end_date
)
)
GROUP BY mpo.src_hpo_id, po.procedure_date, vo.visit_start_date, vo.visit_end_date, vo.visit_start_datetime, vo.visit_end_datetime, po.procedure_datetime
ORDER BY all_discrepancies_equal ASC, num_bad_records DESC
) a
WHERE
-- cannot compare date/datetime date accurately because of problem with UTC dates not converting properly. give 'wiggle room ' of 1
(
a.procedure_vis_start_dt_diff > 1
OR
a.procedure_vis_end_dt_diff > 1
OR
a.procedure_dt_vis_start_diff > 1
OR
a.procedure_dt_vis_end_diff > 1
OR
a.procedure_vis_start_diff > 0
OR
a.procedure_vis_end_diff > 0
OR
a.procedure_dt_vis_start_dt_diff > 0
OR
a.procedure_dt_vis_end_dt_diff > 0
)
ORDER BY src_hpo_id ASC, num_bad_records DESC, total_diff DESC, all_discrepancies_equal ASC
""".format(DATASET = DATASET)
print(p_v_query)
procedure_visit_df = pd.io.gbq.read_gbq(p_v_query, dialect='standard')
procedure_visit_df
# ### Now let's make the dataframe a little more condensed - only show the total number of 'bad records' for each site
bad_procedure_records_df = procedure_visit_df.groupby('src_hpo_id')['num_bad_records'].sum().to_frame()
bad_procedure_records_df
# Total procedure_occurrence rows per HPO (denominator for the rate below).
num_total_procedure_records_query = """
SELECT
DISTINCT
mp.src_hpo_id, count(p.procedure_occurrence_id) as num_total_records
FROM
`{DATASET}.unioned_ehr_procedure_occurrence`p
JOIN
`{DATASET}._mapping_procedure_occurrence` mp
ON
p.procedure_occurrence_id = mp.procedure_occurrence_id
GROUP BY 1
ORDER BY num_total_records DESC
""".format(DATASET = DATASET)
total_procedure_df = pd.io.gbq.read_gbq(num_total_procedure_records_query, dialect='standard')
# outer merge against site_df keeps HPOs that have no procedure rows
total_procedure_df = pd.merge(total_procedure_df, site_df, how='outer', on='src_hpo_id')
total_procedure_df = total_procedure_df[['src_hpo_id', 'num_total_records']]
final_procedure_df = pd.merge(total_procedure_df, bad_procedure_records_df, how='outer', on='src_hpo_id')
final_procedure_df = final_procedure_df.fillna(0)
# ### Now we can actually calculate the 'tangible success rate'
# NOTE(review): despite the 'success rate' wording, this is bad/total * 100,
# i.e. the percentage of FAILING records -- confirm the intended naming.
final_procedure_df['procedure_occurrence'] = \
round((final_procedure_df['num_bad_records']) / final_procedure_df['num_total_records'] * 100, 2)
# +
final_procedure_df = final_procedure_df.fillna(0)
final_procedure_df = final_procedure_df.sort_values(by=['procedure_occurrence'], ascending = False)
# -
final_procedure_df
# ### to ensure all the dataframes are easy to ultimately merge, let's create a dataframe that only has the success rates and HPOs
short_procedure_df = final_procedure_df.drop(columns=['num_total_records', 'num_bad_records'])
# # Now let's move to the observation table
# Same bad-record query as for procedures, applied to the observation table.
# Consistency fixes vs the original: the `observation_dt_vis_end_diff`
# predicate in the outer WHERE now carries the `a.` alias prefix like every
# sibling predicate, and the copy-pasted '-- problem with procedure date'
# comment now says 'observation date'.
observation_visit_query = """
SELECT
DISTINCT
a.*,
(a.observation_vis_start_diff + a.observation_vis_end_diff + a.observation_vis_start_dt_diff + a.observation_vis_end_dt_diff +
a.observation_dt_vis_start_diff + a.observation_dt_vis_end_diff + a.observation_dt_vis_start_dt_diff + a.observation_dt_vis_end_dt_diff) as total_diff
FROM
  ( SELECT
  mo.src_hpo_id, COUNT(mo.src_hpo_id) as num_bad_records,
  IFNULL(ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)), 0) as observation_vis_start_diff,
  IFNULL(ABS(DATE_DIFF(o.observation_date, vo.visit_end_date, DAY)), 0) as observation_vis_end_diff,
  IFNULL(ABS(DATE_DIFF(o.observation_date, CAST(vo.visit_start_datetime AS DATE), DAY)), 0) as observation_vis_start_dt_diff,
  IFNULL(ABS(DATE_DIFF(o.observation_date, CAST(vo.visit_end_datetime AS DATE), DAY)), 0) as observation_vis_end_dt_diff,
  IFNULL(ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), vo.visit_start_date, DAY)), 0) as observation_dt_vis_start_diff,
  IFNULL(ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), vo.visit_end_date, DAY)), 0) as observation_dt_vis_end_diff,
  IFNULL(ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), CAST(vo.visit_start_datetime AS DATE), DAY)), 0) as observation_dt_vis_start_dt_diff,
  IFNULL(ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), CAST(vo.visit_end_datetime AS DATE), DAY)), 0) as observation_dt_vis_end_dt_diff,
  (
  ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)) =
  ABS(DATE_DIFF(o.observation_date, vo.visit_end_date, DAY))
  AND
  ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)) =
  ABS(DATE_DIFF(o.observation_date, CAST(vo.visit_start_datetime AS DATE), DAY))
  AND
  ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)) =
  ABS(DATE_DIFF(o.observation_date, CAST(vo.visit_end_datetime AS DATE), DAY))
  AND
  ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)) =
  ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), vo.visit_start_date, DAY))
  AND
  ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)) =
  ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), vo.visit_end_date, DAY))
  AND
  ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)) =
  ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), CAST(vo.visit_start_datetime AS DATE), DAY))
  AND
  ABS(DATE_DIFF(o.observation_date, vo.visit_start_date, DAY)) =
  ABS(DATE_DIFF(CAST(o.observation_datetime AS DATE), CAST(vo.visit_end_datetime AS DATE), DAY))
  ) as all_discrepancies_equal
  FROM
  `{DATASET}.unioned_ehr_observation` o
  LEFT JOIN
  `{DATASET}._mapping_observation` mo
  ON
  o.observation_id = mo.observation_id
  LEFT JOIN
  `{DATASET}.unioned_ehr_visit_occurrence` vo
  ON
  o.visit_occurrence_id = vo.visit_occurrence_id
  WHERE
  -- must have populated visit occurrence id
  (
  o.visit_occurrence_id IS NOT NULL
  AND
  o.visit_occurrence_id <> 0
  AND
  vo.visit_occurrence_id IS NOT NULL
  AND
  vo.visit_occurrence_id <> 0
  )
  AND
  (
    -- problem with observation date
    (o.observation_date < vo.visit_start_date
    OR
    o.observation_date > vo.visit_end_date)
    OR
    -- problem with datetime
    (CAST(o.observation_datetime AS DATE) < CAST(vo.visit_start_datetime AS DATE)
    OR
    CAST(o.observation_datetime AS DATE) > CAST(vo.visit_end_datetime AS DATE))
    OR
    -- problem with the datetime (extracting date for comparison)
    (o.observation_date < CAST(vo.visit_start_datetime AS DATE)
    OR
    o.observation_date > CAST(vo.visit_end_datetime AS DATE))
    OR
    --problem with the datetime
    (CAST(o.observation_datetime AS DATE) < vo.visit_start_date
    OR
    CAST(o.observation_datetime AS DATE) > vo.visit_end_date
    )
  )
  GROUP BY mo.src_hpo_id, o.observation_date, vo.visit_start_date, vo.visit_end_date, vo.visit_start_datetime, vo.visit_end_datetime, o.observation_datetime
  ORDER BY all_discrepancies_equal ASC, num_bad_records DESC
  ) a
WHERE
-- cannot compare date/datetime date accurately because of problem with UTC dates not converting properly. give 'wiggle room ' of 1
(
a.observation_vis_start_dt_diff > 1
OR
a.observation_vis_end_dt_diff > 1
OR
a.observation_dt_vis_start_diff > 1
OR
a.observation_dt_vis_end_diff > 1
OR
a.observation_vis_start_diff > 0
OR
a.observation_vis_end_diff > 0
OR
a.observation_dt_vis_start_dt_diff > 0
OR
a.observation_dt_vis_end_dt_diff > 0
)
ORDER BY src_hpo_id ASC, num_bad_records DESC, total_diff DESC, all_discrepancies_equal ASC
""".format(DATASET = DATASET)
observation_visit_df = pd.io.gbq.read_gbq(observation_visit_query, dialect='standard')
# ### Now let's make the dataframe a little more condensed - only show the total number of 'bad records' for each site
bad_observation_records_df = observation_visit_df.groupby('src_hpo_id')['num_bad_records'].sum().to_frame()
# Total observation rows per HPO (denominator for the rate below).
num_total_observation_records_query = """
SELECT
DISTINCT
mo.src_hpo_id, count(o.observation_id) as num_total_records
FROM
`{DATASET}.unioned_ehr_observation`o
JOIN
`{DATASET}._mapping_observation` mo
ON
o.observation_id = mo.observation_id
GROUP BY 1
ORDER BY num_total_records DESC
""".format(DATASET = DATASET)
total_observation_df = pd.io.gbq.read_gbq(num_total_observation_records_query, dialect='standard')
# +
# outer merge against site_df keeps HPOs that have no observation rows
total_observation_df = pd.merge(total_observation_df, site_df, how='outer', on='src_hpo_id')
total_observation_df = total_observation_df[['src_hpo_id', 'num_total_records']]
# -
final_observation_df = pd.merge(total_observation_df, bad_observation_records_df, how='outer', on='src_hpo_id')
final_observation_df = final_observation_df.fillna(0)
# ### Now we can actually calculate the 'tangible success rate'
# NOTE(review): as with procedures, this is bad/total * 100 -- a failure
# percentage, despite the 'success rate' wording.
final_observation_df['observation'] = \
round((final_observation_df['num_bad_records']) / final_observation_df['num_total_records'] * 100, 2)
# +
final_observation_df = final_observation_df.fillna(0)
final_observation_df = final_observation_df.sort_values(by=['observation'], ascending = False)
# -
# ### Creating a shorter df
short_observation_df = final_observation_df.drop(columns=['num_total_records', 'num_bad_records'])
short_observation_df
# # Next up: the measurement table
measurement_visit_query = """
SELECT
DISTINCT
a.*,
(a.measurement_vis_start_diff + a.measurement_vis_end_diff + a.measurement_vis_start_dt_diff + a.measurement_vis_end_dt_diff +
a.measurement_dt_vis_start_diff + a.measurement_dt_vis_end_diff + a.measurement_dt_vis_start_dt_diff + a.measurement_dt_vis_end_dt_diff) as | |
import asyncio
from collections import OrderedDict
from functools import wraps
from itertools import zip_longest
import hashlib
import logging
import pickle
import random
import socket
logger = logging.getLogger(__name__)
def remote(func):
    '''
    Marks an instance method as defining a remote procedure call (RPC). All
    RPCs must be instance methods on a DatagramRPCProtocol subclass, and must
    include at least one positional argument to accept the connecting peer, a
    tuple of (ip, port).

    The decorated method becomes the client-side stub that issues the RPC
    request over the network, while the original implementation is stashed on
    the stub as the function used to answer incoming calls.
    '''
    @asyncio.coroutine
    @wraps(func)
    def rpc_stub(*call_args, **call_kwargs):
        # The first two positionals are the calling node and the target peer;
        # everything else is forwarded as the RPC payload.
        node, target_peer, *payload = call_args
        response = yield from node.request(target_peer, rpc_stub.remote_name,
                                           *payload, **call_kwargs)
        return response

    rpc_stub.remote_name = func.__name__
    rpc_stub.reply_function = func
    return rpc_stub
class DatagramRPCProtocol(asyncio.DatagramProtocol):
    '''Implements an RPC mechanism over UDP. Create a subclass of DatagramRPCProtocol, and
    decorate some of its methods with @remote to designate them as part of the
    RPC interface.'''

    def __init__(self, reply_timeout=5):
        '''Initializes a DatagramRPCProtocol, optionally specifying an acceptable
        reply_timeout (in seconds) while waiting for a response from a remote
        server.'''
        self.outstanding_requests = {}  # message_identifier -> Future resolved by the reply
        self.reply_functions = self.find_reply_functions()
        self.reply_timeout = reply_timeout
        super(DatagramRPCProtocol, self).__init__()

    def find_reply_functions(self):
        '''Locates the reply functions (decorated by @remote) for all RPC methods,
        returning a dictionary mapping {RPC method name: reply function}.'''
        # @remote tags each stub with remote_name/reply_function attributes.
        return {func.remote_name: func.reply_function
                for func in self.__class__.__dict__.values()
                if hasattr(func, 'remote_name')}

    def connection_made(self, transport):
        '''A callback from asyncio.DatagramProtocol indicating that the system
        has established a connection. The transport should be saved for later.'''
        logger.info('connection_made: %r', transport)
        self.transport = transport

    def datagram_received(self, data, peer):
        '''The callback from asyncio.DatagramProtocol upon receipt of a datagram
        packet. The data are the bytes of the packet's payload, and the peer
        is the IP and port of the peer who sent the packet.'''
        logger.info('data_received: %r, %r', peer, data)
        # SECURITY NOTE(review): pickle.loads on bytes received straight off the
        # network can execute arbitrary code if a malicious peer crafts the
        # payload; confirm a safe serialization (e.g. JSON/msgpack) before
        # exposing this protocol to untrusted networks.
        direction, message_identifier, *details = pickle.loads(data)
        if direction == 'request':
            procedure_name, args, kwargs = details
            self.request_received(peer, message_identifier, procedure_name, args, kwargs)
        elif direction == 'reply':
            answer, = details
            self.reply_received(peer, message_identifier, answer)

    def request_received(self, peer, message_identifier, procedure_name, args, kwargs):
        '''Handles replying to an incoming RPC. May be overridden to inspect/modify
        the incoming arguments or procedure_name, or to implement authorization
        checks.'''
        logger.info('request from %r: %r(*%r, **%r) as message %r',
                    peer, procedure_name, args, kwargs, message_identifier)
        # Dispatch to the original (undecorated) implementation and send back its answer.
        reply_function = self.reply_functions[procedure_name]
        answer = reply_function(self, peer, *args, **kwargs)
        self.reply(peer, message_identifier, answer)

    def reply_received(self, peer, message_identifier, answer):
        '''Handles a reply to an RPC. May be overridden to pre-process a reply, or
        otherwise verify its authenticity.'''
        logger.info('reply to message %r, answer %r', message_identifier, answer)
        # Late replies (arriving after reply_timed_out already fired) are dropped silently.
        if message_identifier in self.outstanding_requests:
            reply = self.outstanding_requests.pop(message_identifier)
            reply.set_result(answer)

    def reply_timed_out(self, message_identifier):
        '''Scheduled after each outbound request to enforce the wait timeout on RPCs.'''
        # A no-op when the reply already arrived and removed the entry.
        if message_identifier in self.outstanding_requests:
            reply = self.outstanding_requests.pop(message_identifier)
            reply.set_exception(socket.timeout)

    def request(self, peer, procedure_name, *args, **kwargs):
        '''Issues an RPC to a remote peer, returning a future that may either yield
        the reply to the RPC, or a socket.timeout if the peer does not reply.'''
        message_identifier = get_random_identifier()
        reply = asyncio.Future()
        self.outstanding_requests[message_identifier] = reply
        loop = asyncio.get_event_loop()
        # Arm the timeout; reply_timed_out does nothing if the reply beats it.
        loop.call_later(self.reply_timeout, self.reply_timed_out, message_identifier)
        message = pickle.dumps(('request', message_identifier, procedure_name, args, kwargs))
        self.transport.sendto(message, peer)
        return reply

    def reply(self, peer, message_identifier, answer):
        '''Sends a reply to an earlier RPC call.'''
        message = pickle.dumps(('reply', message_identifier, answer))
        self.transport.sendto(message, peer)
class KademliaNode(DatagramRPCProtocol):
    '''Implements the Kademlia protocol with the four primitive RPCs (ping, store, find_node, find_value),
    and the three iterative procedures (lookup_node, get, put).'''

    def __init__(self, alpha=3, k=20, identifier=None):
        '''Initializes a Kademlia node, with the optional configuration parameters alpha and k (see the
        Kademlia paper for details on these constants).'''
        if identifier is None:
            identifier = get_random_identifier()
        self.identifier = identifier
        self.routing_table = RoutingTable(self.identifier, k=k)
        self.k = k          # bucket size / result-set size
        self.alpha = alpha  # lookup concurrency factor
        self.storage = Storage()
        super(KademliaNode, self).__init__()

    def request_received(self, peer, message_identifier, procedure_name, args, kwargs):
        '''Overridden to place all peers this node receives requests from in the routing_table.'''
        # Every @remote RPC passes the sender's identifier as its first argument.
        peer_identifier = args[0]
        self.routing_table.update_peer(peer_identifier, peer)
        super(KademliaNode, self).request_received(peer, message_identifier, procedure_name, args, kwargs)

    def reply_received(self, peer, message_identifier, answer):
        '''Overridden to place all peers whose replies this node receives in the routing_table.'''
        # Replies are (responder_identifier, payload) pairs; unwrap before delegating
        # so callers of request() only ever see the payload.
        peer_identifier, answer = answer
        self.routing_table.update_peer(peer_identifier, peer)
        super(KademliaNode, self).reply_received(peer, message_identifier, answer)

    @remote
    def ping(self, peer, peer_identifier):
        '''The primitive PING RPC. Returns the node's identifier to the requesting node.'''
        logger.info('ping(%r, %r)', peer, peer_identifier)
        # First element identifies this node (consumed by reply_received); the
        # second is the actual answer, which for ping is the identifier again.
        return (self.identifier, self.identifier)

    @remote
    def store(self, peer, peer_identifier, key, value):
        '''The primitive STORE RPC. Stores the given value, returning True if it was successful.'''
        logger.info('store(%r, %r, %r, %r)', peer, peer_identifier, key, value)
        self.storage[key] = value
        return (self.identifier, True)

    @remote
    def find_node(self, peer, peer_identifier, key):
        '''The primitive FIND_NODE RPC. Returns the k-closest peers to a key that this node is aware of.'''
        logger.info('find_node(%r, %r, %r)', peer, peer_identifier, key)
        # Exclude the requester itself from the returned contacts.
        return (self.identifier, self.routing_table.find_closest_peers(key, excluding=peer_identifier))

    @remote
    def find_value(self, peer, peer_identifier, key):
        '''The primitive FIND_VALUE RPC. Returns either the value of a key, or the k-closest peers to it.'''
        logger.info('find_value(%r, %r, %r)', peer, peer_identifier, key)
        if key in self.storage:
            return (self.identifier, ('found', self.storage[key]))
        return (self.identifier, ('notfound', self.routing_table.find_closest_peers(key, excluding=peer_identifier)))

    @asyncio.coroutine
    def lookup_node(self, hashed_key, find_value=False):
        '''The iterative node lookup procedure to find either the nearest peers to or the value of a key.

        Raises KeyError when no peers are known, or (if find_value is True) when
        the key is not found on any reachable peer.'''
        distance = lambda peer: peer[0] ^ hashed_key  # XOR metric over (identifier, address) pairs
        contacted, dead = set(), set()
        peers = {(peer_identifier, peer)
                 for peer_identifier, peer in
                 self.routing_table.find_closest_peers(hashed_key)}
        if not peers:
            raise KeyError(hashed_key, 'No peers available.')
        while True:
            uncontacted = peers - contacted
            if not uncontacted:
                break  # every known peer has been queried
            # Query the alpha closest peers we have not yet contacted.
            closest = sorted(uncontacted, key=distance)[:self.alpha]
            for peer_identifier, peer in closest:
                contacted.add((peer_identifier, peer))
                try:
                    if find_value:
                        result, contacts = yield from self.find_value(peer, self.identifier, hashed_key)
                        if result == 'found':
                            return contacts  # 'contacts' is the stored value in this branch
                    else:
                        contacts = yield from self.find_node(peer, self.identifier, hashed_key)
                except socket.timeout:
                    # Unresponsive peers are evicted and excluded from the final result.
                    self.routing_table.forget_peer(peer_identifier)
                    dead.add((peer_identifier, peer))
                    continue
                # Merge newly learned contacts, never adding ourselves.
                for new_peer_identifier, new_peer in contacts:
                    if new_peer_identifier == self.identifier:
                        continue
                    peers.add((new_peer_identifier, new_peer))
        if find_value:
            raise KeyError(hashed_key, 'Not found among any available peers.')
        else:
            return sorted(peers - dead, key=distance)[:self.k]

    @asyncio.coroutine
    def put(self, raw_key, value):
        '''Given a plain key (usually a unicode) and a value, store it on the Kademlia network and
        return the number of nodes who successfully accepted the value.'''
        hashed_key = get_identifier(raw_key)
        peers = yield from self.lookup_node(hashed_key, find_value=False)
        store_tasks = [self.store(peer, self.identifier, hashed_key, value) for _, peer in peers]
        # return_exceptions=True so one timed-out peer does not abort the rest;
        # timeouts appear in results as exceptions and fail the r == True test below.
        results = yield from asyncio.gather(*store_tasks, return_exceptions=True)
        return len([r for r in results if r == True])

    @asyncio.coroutine
    def get(self, raw_key):
        '''Given a plain key (usually a unicode), find the value from the Kademlia network.'''
        hashed_key = get_identifier(raw_key)
        # Answer locally when possible, avoiding a network lookup.
        if hashed_key in self.storage:
            return self.storage[hashed_key]
        answer = yield from self.lookup_node(hashed_key, find_value=True)
        return answer
class RoutingTable(object):
'''Implements the routing table described in the Kademlia paper. Peers are organized
by their XOR distance from the given node, and the most recently contacted peers
are kept easily at hand.'''
def __init__(self, node_identifier, k=20):
    '''Builds a RoutingTable for the node whose identifier is node_identifier,
    with the desired k value (defaults to 20, as indicated in the Kademlia paper).'''
    super(RoutingTable, self).__init__()
    self.node_identifier = node_identifier
    self.k = k
    # One k-bucket, plus a standby replacement cache, per bit of the 160-bit keyspace.
    self.buckets = [OrderedDict() for _bit in range(160)]
    self.replacement_caches = [OrderedDict() for _bit in range(160)]
def distance(self, peer_identifier):
    '''Returns the Kademlia XOR metric between this node's identifier and the given one.'''
    return peer_identifier ^ self.node_identifier
def bucket_index(self, peer_identifier):
    '''Returns the index of the k-bucket covering the provided identifier.

    Buckets are numbered by shared-prefix length: index 0 holds the farthest
    peers and index 159 the closest, computed as 160 minus the bit length of
    the XOR distance.

    Raises ValueError for identifiers outside the 160-bit keyspace.
    '''
    if not (0 <= peer_identifier < 2**160):
        # Fixed message: the bound is 2**160 - 1, not "2*160-1" as previously stated.
        raise ValueError('peer_identifier should be a number between 0 and 2**160 - 1.')
    return 160 - self.distance(peer_identifier).bit_length()
def update_peer(self, peer_identifier, peer):
    '''Records a peer this node has just communicated with, refreshing its
    position in the appropriate k-bucket, or parking it in that bucket's
    replacement cache when the bucket is already full.'''
    if peer_identifier == self.node_identifier:
        return  # never track ourselves
    index = self.bucket_index(peer_identifier)
    bucket = self.buckets[index]
    if peer_identifier in bucket:
        # Delete and re-insert so the peer moves to the most-recently-seen end.
        del bucket[peer_identifier]
        bucket[peer_identifier] = peer
        return
    if len(bucket) < self.k:
        bucket[peer_identifier] = peer
        return
    # Bucket is full: remember the peer as a standby replacement instead,
    # again keeping most-recently-seen ordering.
    cache = self.replacement_caches[index]
    if peer_identifier in cache:
        del cache[peer_identifier]
    cache[peer_identifier] = peer
def forget_peer(self, peer_identifier):
    '''Drops a peer from the routing table; when the peer occupied a k-bucket,
    promotes the most recently seen standby from that bucket's replacement
    cache (if any) to take its place.'''
    if peer_identifier == self.node_identifier:
        return  # our own identifier is never stored
    index = self.bucket_index(peer_identifier)
    bucket = self.buckets[index]
    cache = self.replacement_caches[index]
    if peer_identifier not in bucket:
        return  # unknown (or cache-only) peers leave the bucket untouched
    del bucket[peer_identifier]
    if cache:
        # popitem() removes the last (most recently seen) cached standby.
        standby_identifier, standby_peer = cache.popitem()
        bucket[standby_identifier] = standby_peer
def find_closest_peers(self, key, excluding=None, k=None):
'''Returns the k-closest peers this node is aware of, excluding the optional
identifier given as the excluding keyword argument. If k peers aren't known,
will return all nodes | |
1.00 25.87 C
ATOM 136 N1 DT B 19 -1.122 4.839 -8.816 1.00 24.60 N
ATOM 137 C2 DT B 19 -0.906 3.556 -9.283 1.00 22.21 C
ATOM 138 O2 DT B 19 0.197 3.084 -9.451 1.00 22.06 O
ATOM 139 N3 DT B 19 -2.038 2.833 -9.519 1.00 22.04 N
ATOM 140 C4 DT B 19 -3.339 3.262 -9.380 1.00 21.81 C
ATOM 141 O4 DT B 19 -4.247 2.495 -9.615 1.00 24.16 O
ATOM 142 C5 DT B 19 -3.499 4.613 -8.891 1.00 22.25 C
ATOM 143 C7 DT B 19 -4.879 5.143 -8.663 1.00 23.26 C
ATOM 144 C6 DT B 19 -2.396 5.327 -8.640 1.00 22.85 C
ATOM 145 P DT B 20 3.005 8.456 -9.725 1.00 32.03 P
ATOM 146 OP1 DT B 20 4.339 8.958 -9.284 1.00 35.31 O
ATOM 147 OP2 DT B 20 2.027 9.351 -10.442 1.00 33.99 O
ATOM 148 O5' DT B 20 3.144 7.102 -10.543 1.00 31.33 O
ATOM 149 C5' DT B 20 3.894 5.979 -10.032 1.00 28.60 C
ATOM 150 C4' DT B 20 3.851 4.840 -11.020 1.00 28.63 C
ATOM 151 O4' DT B 20 2.494 4.361 -11.145 1.00 26.47 O
ATOM 152 C3' DT B 20 4.300 5.211 -12.437 1.00 31.59 C
ATOM 153 O3' DT B 20 5.260 4.256 -12.875 1.00 39.07 O
ATOM 154 C2' DT B 20 3.027 5.147 -13.257 1.00 26.06 C
ATOM 155 C1' DT B 20 2.211 4.120 -12.529 1.00 24.42 C
ATOM 156 N1 DT B 20 0.757 4.123 -12.660 1.00 23.79 N
ATOM 157 C2 DT B 20 0.138 2.932 -12.972 1.00 25.04 C
ATOM 158 O2 DT B 20 0.741 1.921 -13.262 1.00 24.66 O
ATOM 159 N3 DT B 20 -1.229 2.977 -12.959 1.00 25.84 N
ATOM 160 C4 DT B 20 -2.022 4.071 -12.671 1.00 25.98 C
ATOM 161 O4 DT B 20 -3.234 3.948 -12.646 1.00 28.14 O
ATOM 162 C5 DT B 20 -1.311 5.298 -12.387 1.00 22.81 C
ATOM 163 C7 DT B 20 -2.094 6.540 -12.092 1.00 27.47 C
ATOM 164 C6 DT B 20 0.028 5.263 -12.401 1.00 26.29 C
TER
ATOM 165 P DA C 5 -8.062 -5.965 -15.755 1.00 42.17 P
ATOM 166 OP1 DA C 5 -8.426 -7.228 -16.405 1.00 50.61 O
ATOM 167 OP2 DA C 5 -8.689 -5.557 -14.457 1.00 51.75 O
ATOM 168 O5' DA C 5 -6.496 -5.961 -15.638 1.00 34.89 O
ATOM 169 C5' DA C 5 -5.791 -6.321 -16.790 1.00 30.71 C
ATOM 170 C4' DA C 5 -4.355 -5.917 -16.600 1.00 34.43 C
ATOM 171 O4' DA C 5 -4.303 -4.509 -16.239 1.00 33.96 O
ATOM 172 C3' DA C 5 -3.630 -6.687 -15.491 1.00 35.56 C
ATOM 173 O3' DA C 5 -2.407 -7.257 -16.020 1.00 33.08 O
ATOM 174 C2' DA C 5 -3.531 -5.654 -14.384 1.00 32.41 C
ATOM 175 C1' DA C 5 -3.435 -4.334 -15.130 1.00 28.44 C
ATOM 176 N9 DA C 5 -3.904 -3.143 -14.449 1.00 28.37 N
ATOM 177 C8 DA C 5 -5.187 -2.933 -14.022 1.00 27.53 C
ATOM 178 N7 DA C 5 -5.401 -1.724 -13.565 1.00 29.33 N
ATOM 179 C5 DA C 5 -4.187 -1.082 -13.747 1.00 23.78 C
ATOM 180 C6 DA C 5 -3.761 0.226 -13.474 1.00 25.22 C
ATOM 181 N6 DA C 5 -4.519 1.150 -12.896 1.00 25.69 N
ATOM 182 N1 DA C 5 -2.485 0.535 -13.749 1.00 24.39 N
ATOM 183 C2 DA C 5 -1.712 -0.389 -14.320 1.00 24.89 C
ATOM 184 N3 DA C 5 -2.001 -1.641 -14.653 1.00 28.33 N
ATOM 185 C4 DA C 5 -3.268 -1.935 -14.326 1.00 27.45 C
ATOM 186 P DA C 6 -1.382 -8.057 -15.083 1.00 33.49 P
ATOM 187 OP1 DA C 6 -0.596 -8.971 -15.989 1.00 35.26 O
ATOM 188 OP2 DA C 6 -2.097 -8.481 -13.890 1.00 34.48 O
ATOM 189 O5' DA C 6 -0.480 -6.949 -14.401 1.00 31.72 O
ATOM 190 C5' DA C 6 0.398 -6.138 -15.188 1.00 28.12 C
ATOM 191 C4' DA C 6 1.219 -5.272 -14.269 1.00 22.57 C
ATOM 192 O4' DA C 6 0.380 -4.203 -13.784 1.00 23.34 O
ATOM 193 C3' DA C 6 1.783 -5.982 -13.049 1.00 23.61 C
ATOM 194 O3' DA C 6 3.202 -5.785 -13.150 1.00 22.60 O
ATOM 195 C2' DA C 6 1.110 -5.289 -11.881 1.00 22.21 C
ATOM 196 C1' DA C 6 0.653 -3.958 -12.418 1.00 20.89 C
ATOM 197 N9 DA C 6 -0.561 -3.398 -11.831 1.00 21.71 N
ATOM 198 C8 DA C 6 -1.777 -4.017 -11.666 1.00 23.62 C
ATOM 199 N7 DA C 6 -2.693 -3.249 -11.139 1.00 23.57 N
ATOM 200 C5 DA C 6 -2.071 -2.016 -11.029 1.00 20.29 C
ATOM 201 C6 DA C 6 -2.506 -0.774 -10.519 1.00 20.33 C
ATOM 202 N6 DA C 6 -3.763 -0.525 -10.122 1.00 20.36 N
ATOM 203 N1 DA C 6 -1.604 0.233 -10.486 1.00 20.84 N
ATOM 204 C2 DA C 6 -0.341 -0.023 -10.868 1.00 21.15 C
ATOM 205 N3 DA C 6 0.174 -1.126 -11.378 1.00 22.91 N
ATOM 206 C4 DA C 6 -0.746 -2.101 -11.433 1.00 20.00 C
ATOM 207 P DT C 7 4.283 -6.215 -12.051 1.00 23.53 P
ATOM 208 OP1 DT C 7 5.598 -6.398 -12.780 1.00 27.73 O
ATOM 209 OP2 DT C 7 3.774 -7.297 -11.205 1.00 24.18 O
ATOM 210 O5' DT C 7 4.350 -4.948 -11.106 1.00 22.94 O
ATOM 211 C5' DT C 7 4.668 -3.709 -11.633 1.00 21.30 C
ATOM 212 C4' DT C 7 4.525 -2.656 -10.580 1.00 20.84 C
ATOM 213 O4' DT C 7 3.138 -2.512 -10.296 1.00 19.94 O
ATOM 214 C3' DT C 7 5.205 -2.966 -9.250 1.00 20.02 C
ATOM 215 O3' DT C 7 6.280 -2.035 -9.099 1.00 23.74 O
ATOM 216 C2' DT C 7 4.144 -2.717 -8.200 1.00 19.47 C
ATOM 217 C1' DT C 7 3.048 -2.015 -8.962 1.00 20.12 C
ATOM 218 N1 DT C 7 1.641 -2.197 -8.524 1.00 20.27 N
ATOM 219 C2 DT C 7 0.957 -1.108 -8.030 1.00 18.61 C
ATOM 220 O2 DT C 7 1.430 0.017 -7.926 1.00 19.56 O
ATOM 221 N3 DT C 7 -0.344 -1.365 -7.721 1.00 18.89 N
ATOM 222 C4 DT C 7 -1.018 -2.563 -7.836 1.00 21.94 C
ATOM 223 O4 DT C 7 -2.200 -2.640 -7.497 1.00 23.57 O
ATOM 224 C5 DT C 7 -0.226 -3.674 -8.271 1.00 18.09 C
ATOM 225 C7 DT C 7 -0.860 -5.022 -8.351 1.00 19.35 C
ATOM 226 C6 DT C 7 1.065 -3.446 -8.562 1.00 17.66 C
ATOM 227 P DT C 8 7.284 -1.980 -7.857 1.00 26.43 P
ATOM 228 OP1 DT C 8 8.611 -1.444 -8.278 1.00 28.45 O
ATOM 229 OP2 DT C 8 7.248 -3.298 -7.198 1.00 27.17 O
ATOM 230 O5' DT C 8 6.613 -0.927 -6.882 1.00 25.09 O
ATOM 231 C5' DT C 8 6.357 0.403 -7.340 1.00 24.67 C
ATOM 232 C4' DT C 8 5.543 1.125 -6.301 1.00 23.10 C
ATOM 233 O4' DT C 8 4.228 0.541 -6.229 1.00 23.60 O
ATOM 234 C3' DT C 8 6.127 1.057 -4.884 1.00 25.21 C
ATOM 235 O3' DT C 8 6.507 2.380 -4.493 1.00 28.93 O
ATOM 236 C2' DT C 8 5.018 0.434 -4.050 1.00 23.32 C
ATOM 237 C1' DT C 8 3.795 0.667 -4.883 1.00 22.06 C
ATOM 238 N1 DT C 8 2.713 -0.291 -4.689 1.00 19.79 N
ATOM 239 C2 DT C 8 1.466 0.223 -4.414 1.00 18.40 C
ATOM 240 O2 DT C 8 1.263 1.399 -4.157 1.00 20.56 O
ATOM 241 N3 DT C 8 0.484 -0.716 -4.337 1.00 19.20 N
ATOM 242 C4 DT C 8 0.588 -2.075 -4.597 1.00 18.45 C
ATOM 243 O4 DT C 8 -0.397 -2.789 -4.538 1.00 21.38 O
ATOM 244 C5 DT C 8 1.920 -2.549 -4.859 1.00 17.02 C
ATOM 245 C7 DT C 8 2.126 -4.006 -5.116 1.00 20.50 C
ATOM 246 C6 DT C 8 2.895 -1.634 -4.959 1.00 19.29 C
TER
ATOM 247 P DA D 17 -10.220 1.260 -1.207 1.00 27.94 P
ATOM 248 OP1 DA D 17 -11.370 2.143 -0.856 1.00 34.83 O
ATOM 249 OP2 DA D 17 -10.221 0.599 -2.553 1.00 31.17 O
ATOM 250 O5' DA D 17 -8.842 2.020 -1.098 1.00 26.12 O
ATOM 251 C5' DA D 17 -8.558 2.683 0.094 1.00 25.41 C
ATOM 252 | |
QtGui.QApplication.UnicodeUTF8))
self.comboCtfProgram.setItemText(4, QtGui.QApplication.translate("Automator_ui", "GCTF, sum", None, QtGui.QApplication.UnicodeUTF8))
self.label_46.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to perform CTF estimation. Working on the \'sum\' is much faster than operating over all frames.", None, QtGui.QApplication.UnicodeUTF8))
self.label_46.setText(QtGui.QApplication.translate("Automator_ui", "CTF Tool ", None, QtGui.QApplication.UnicodeUTF8))
self.cbSaveMovie.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to save the aligned movie (in float-32 format) to disk, this can greatly increase storage consumption.", None, QtGui.QApplication.UnicodeUTF8))
self.cbSaveMovie.setText(QtGui.QApplication.translate("Automator_ui", "Save aligned stack", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setToolTip(QtGui.QApplication.translate("Automator_ui", "Which filters to apply. See the wiki for detailed info. All raw files saved as integers shot have \'hot\' to suppress outlier pixels.", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(0, QtGui.QApplication.translate("Automator_ui", "None", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(1, QtGui.QApplication.translate("Automator_ui", "Hot", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(2, QtGui.QApplication.translate("Automator_ui", "Dose", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(3, QtGui.QApplication.translate("Automator_ui", "Hot, Dose", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(4, QtGui.QApplication.translate("Automator_ui", "Dose, Background", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(5, QtGui.QApplication.translate("Automator_ui", "Hot, Dose, Background", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(6, QtGui.QApplication.translate("Automator_ui", "DoseNorm", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(7, QtGui.QApplication.translate("Automator_ui", "Hot, DoseNorm", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(8, QtGui.QApplication.translate("Automator_ui", "DoseNorm, Background", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(9, QtGui.QApplication.translate("Automator_ui", "Hot, DoseNorm, Background", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(10, QtGui.QApplication.translate("Automator_ui", "GaussLP", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(11, QtGui.QApplication.translate("Automator_ui", "Hot, GaussLP", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(12, QtGui.QApplication.translate("Automator_ui", "GaussLP, Background", None, QtGui.QApplication.UnicodeUTF8))
self.comboFilterMode.setItemText(13, QtGui.QApplication.translate("Automator_ui", "Hot, GaussLP, Background", None, QtGui.QApplication.UnicodeUTF8))
self.label_49.setToolTip(QtGui.QApplication.translate("Automator_ui", "Which filters to apply. See the wiki for detailed info. All raw files saved as integers shot have \'hot\' to suppress outlier pixels.", None, QtGui.QApplication.UnicodeUTF8))
self.label_49.setText(QtGui.QApplication.translate("Automator_ui", "Filter mode", None, QtGui.QApplication.UnicodeUTF8))
self.comboAlignProgram.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to perform CTF estimation. Working on the \'sum\' is much faster than operating over all frames.", None, QtGui.QApplication.UnicodeUTF8))
self.comboAlignProgram.setItemText(0, QtGui.QApplication.translate("Automator_ui", "Zorro", None, QtGui.QApplication.UnicodeUTF8))
self.comboAlignProgram.setItemText(1, QtGui.QApplication.translate("Automator_ui", "UnBlur v1.02", None, QtGui.QApplication.UnicodeUTF8))
self.comboAlignProgram.setItemText(2, QtGui.QApplication.translate("Automator_ui", "MotionCorr v2.1", None, QtGui.QApplication.UnicodeUTF8))
self.comboAlignProgram.setItemText(3, QtGui.QApplication.translate("Automator_ui", "None", None, QtGui.QApplication.UnicodeUTF8))
self.label_54.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to perform CTF estimation. Working on the \'sum\' is much faster than operating over all frames.", None, QtGui.QApplication.UnicodeUTF8))
self.label_54.setText(QtGui.QApplication.translate("Automator_ui", "Alignment Tool", None, QtGui.QApplication.UnicodeUTF8))
self.toolboxConfiguration.setItemText(self.toolboxConfiguration.indexOf(self.pageCommonConfig), QtGui.QApplication.translate("Automator_ui", "Common Configuration", None, QtGui.QApplication.UnicodeUTF8))
self.sbShapePadX.setToolTip(QtGui.QApplication.translate("Automator_ui", "How much zero-padding to apply in realspace to images. Should be about 10-15 % bigger than the base image size, and a \'nice\' FFT dimension (i.e. decomposable by of 2,3,5)", None, QtGui.QApplication.UnicodeUTF8))
self.sbShapePadY.setToolTip(QtGui.QApplication.translate("Automator_ui", "How much zero-padding to apply in realspace to images. Should be about 10-15 % bigger than the base image size, and a \'nice\' FFT dimension (i.e. decomposable by of 2,3,5)", None, QtGui.QApplication.UnicodeUTF8))
self.sbFouCropX.setToolTip(QtGui.QApplication.translate("Automator_ui", "Distance to crop in Fourier space for cross-correlation. Does not affect output file shape. Smaller is faster, but throwing out atomic crystal contrast can negatively affect performance.", None, QtGui.QApplication.UnicodeUTF8))
self.sbFouCropY.setToolTip(QtGui.QApplication.translate("Automator_ui", "Distance to crop in Fourier space for cross-correlation. Does not affect output file shape. Smaller is faster, but throwing out atomic crystal contrast can negatively affect performance.", None, QtGui.QApplication.UnicodeUTF8))
self.label_19.setToolTip(QtGui.QApplication.translate("Automator_ui", "Pixelsize is _only_ applied if the input image does not store it in the file.", None, QtGui.QApplication.UnicodeUTF8))
self.label_19.setText(QtGui.QApplication.translate("Automator_ui", "Pixelsize", None, QtGui.QApplication.UnicodeUTF8))
self.sbPixelsize.setToolTip(QtGui.QApplication.translate("Automator_ui", "Pixelsize is _only_ applied if the input image does not store it in the file.", None, QtGui.QApplication.UnicodeUTF8))
self.label_20.setText(QtGui.QApplication.translate("Automator_ui", "Voltage (kV)", None, QtGui.QApplication.UnicodeUTF8))
self.label_21.setText(QtGui.QApplication.translate("Automator_ui", "C3 (mm)", None, QtGui.QApplication.UnicodeUTF8))
self.label_22.setText(QtGui.QApplication.translate("Automator_ui", "Max shift (pix)", None, QtGui.QApplication.UnicodeUTF8))
self.sbMaxShift.setToolTip(QtGui.QApplication.translate("Automator_ui", "The maximum shift within diagWidth frames (not over the entire stack). For high-resolution work should be ~half lattice spacing and Preshift-True", None, QtGui.QApplication.UnicodeUTF8))
self.label_12.setToolTip(QtGui.QApplication.translate("Automator_ui", "How much zero-padding to apply in realspace to images. Should be about 10-15 % bigger than the base image size, and a \'nice\' FFT dimension (i.e. decomposable by of 2,3,5)", None, QtGui.QApplication.UnicodeUTF8))
self.label_12.setText(QtGui.QApplication.translate("Automator_ui", "Padded image shape", None, QtGui.QApplication.UnicodeUTF8))
self.label_15.setToolTip(QtGui.QApplication.translate("Automator_ui", "Distance to crop in Fourier space for cross-correlation. Does not affect output file shape. Smaller is faster, but throwing out atomic crystal contrast can negatively affect performance.", None, QtGui.QApplication.UnicodeUTF8))
self.label_15.setText(QtGui.QApplication.translate("Automator_ui", "Fourier crop shape", None, QtGui.QApplication.UnicodeUTF8))
self.label_14.setText(QtGui.QApplication.translate("Automator_ui", "Y", None, QtGui.QApplication.UnicodeUTF8))
self.label_13.setText(QtGui.QApplication.translate("Automator_ui", "X", None, QtGui.QApplication.UnicodeUTF8))
self.label_17.setText(QtGui.QApplication.translate("Automator_ui", "Y", None, QtGui.QApplication.UnicodeUTF8))
self.label_16.setText(QtGui.QApplication.translate("Automator_ui", "X", None, QtGui.QApplication.UnicodeUTF8))
self.label_23.setToolTip(QtGui.QApplication.translate("Automator_ui", "How to filter the cross-correlations. \'opti\' is per-correlation optimized to improve the peak significance, \'conv\' and \'fourier\' are fixed radius real-space and Fourier-space filters.", None, QtGui.QApplication.UnicodeUTF8))
self.label_23.setText(QtGui.QApplication.translate("Automator_ui", "Filter mode", None, QtGui.QApplication.UnicodeUTF8))
self.comboBmode.setToolTip(QtGui.QApplication.translate("Automator_ui", "How to filter the cross-correlations. \'opti\' is per-correlation optimized to improve the peak significance, \'conv\' and \'fourier\' are fixed radius real-space and Fourier-space filters.", None, QtGui.QApplication.UnicodeUTF8))
self.comboBmode.setItemText(0, QtGui.QApplication.translate("Automator_ui", "opti", None, QtGui.QApplication.UnicodeUTF8))
self.comboBmode.setItemText(1, QtGui.QApplication.translate("Automator_ui", "conv", None, QtGui.QApplication.UnicodeUTF8))
self.comboBmode.setItemText(2, QtGui.QApplication.translate("Automator_ui", "fourier", None, QtGui.QApplication.UnicodeUTF8))
self.comboBmode.setItemText(3, QtGui.QApplication.translate("Automator_ui", "none", None, QtGui.QApplication.UnicodeUTF8))
self.label_24.setToolTip(QtGui.QApplication.translate("Automator_ui", "For fixed radius filter modes (\'conv\' and \'fourier\') the diameter in Fourier space in pixels to filter to.", None, QtGui.QApplication.UnicodeUTF8))
self.label_24.setText(QtGui.QApplication.translate("Automator_ui", "Filter rad (pix)", None, QtGui.QApplication.UnicodeUTF8))
self.sbBrad.setToolTip(QtGui.QApplication.translate("Automator_ui", "For fixed radius filter modes (\'conv\' and \'fourier\') the diameter in Fourier space in pixels to filter to.", None, QtGui.QApplication.UnicodeUTF8))
self.comboWeightMode.setToolTip(QtGui.QApplication.translate("Automator_ui", "Weighting scheme to apply to cross-correlations in solution solver for shifts. \'autologistic\' is normal for cryoTEM, \'corr\' is usually sufficient for high-resolution work. See the wiki for more info.", None, QtGui.QApplication.UnicodeUTF8))
self.comboWeightMode.setItemText(0, QtGui.QApplication.translate("Automator_ui", "autologistic", None, QtGui.QApplication.UnicodeUTF8))
self.comboWeightMode.setItemText(1, QtGui.QApplication.translate("Automator_ui", "logistic", None, QtGui.QApplication.UnicodeUTF8))
self.comboWeightMode.setItemText(2, QtGui.QApplication.translate("Automator_ui", "normalized", None, QtGui.QApplication.UnicodeUTF8))
self.comboWeightMode.setItemText(3, QtGui.QApplication.translate("Automator_ui", "corr", None, QtGui.QApplication.UnicodeUTF8))
self.comboWeightMode.setItemText(4, QtGui.QApplication.translate("Automator_ui", "unweighted", None, QtGui.QApplication.UnicodeUTF8))
self.comboPeakLocMethod.setItemText(0, QtGui.QApplication.translate("Automator_ui", "interpolated", None, QtGui.QApplication.UnicodeUTF8))
self.cbPreshift.setToolTip(QtGui.QApplication.translate("Automator_ui", "Used for high-resolution alignment with a small maxshift.", None, QtGui.QApplication.UnicodeUTF8))
self.cbPreshift.setText(QtGui.QApplication.translate("Automator_ui", "Preshift", None, QtGui.QApplication.UnicodeUTF8))
self.label_25.setToolTip(QtGui.QApplication.translate("Automator_ui", "Weighting scheme to apply to cross-correlations in solution solver for shifts. \'autologistic\' is normal for cryoTEM, \'corr\' is usually sufficient for high-resolution work. See the wiki for more info.", None, QtGui.QApplication.UnicodeUTF8))
self.label_25.setText(QtGui.QApplication.translate("Automator_ui", "Weight mode", None, QtGui.QApplication.UnicodeUTF8))
self.label_26.setText(QtGui.QApplication.translate("Automator_ui", "Peak location", None, QtGui.QApplication.UnicodeUTF8))
self.label_27.setToolTip(QtGui.QApplication.translate("Automator_ui", "Fraction of a pixel to estimate shifts to.", None, QtGui.QApplication.UnicodeUTF8))
self.label_27.setText(QtGui.QApplication.translate("Automator_ui", "Subpix precision", None, QtGui.QApplication.UnicodeUTF8))
self.sbSubpixReg.setToolTip(QtGui.QApplication.translate("Automator_ui", "Fraction of a pixel to estimate shifts to.", None, QtGui.QApplication.UnicodeUTF8))
self.label_28.setToolTip(QtGui.QApplication.translate("Automator_ui", "Subpixel shifting method, \'lanczos\' is fastest and has fewer edge artifacts. \'None\' will do integer shifts.", None, QtGui.QApplication.UnicodeUTF8))
self.label_28.setText(QtGui.QApplication.translate("Automator_ui", "Shift method", None, QtGui.QApplication.UnicodeUTF8))
self.comboShiftMethod.setToolTip(QtGui.QApplication.translate("Automator_ui", "Subpixel shifting method, \'lanczos\' is fastest and has fewer edge artifacts. \'None\' will do integer shifts.", None, QtGui.QApplication.UnicodeUTF8))
self.comboShiftMethod.setItemText(0, QtGui.QApplication.translate("Automator_ui", "lanczos", None, QtGui.QApplication.UnicodeUTF8))
self.comboShiftMethod.setItemText(1, QtGui.QApplication.translate("Automator_ui", "fourier", None, QtGui.QApplication.UnicodeUTF8))
self.comboShiftMethod.setItemText(2, QtGui.QApplication.translate("Automator_ui", "integer", None, QtGui.QApplication.UnicodeUTF8))
self.label_31.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to center the drift by the centroid, or leave the translation of frame #0 at (0,0)", None, QtGui.QApplication.UnicodeUTF8))
self.label_31.setText(QtGui.QApplication.translate("Automator_ui", "Origin", None, QtGui.QApplication.UnicodeUTF8))
self.comboOriginMode.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to center the drift by the centroid, or leave the translation of frame #0 at (0,0)", None, QtGui.QApplication.UnicodeUTF8))
self.comboOriginMode.setItemText(0, QtGui.QApplication.translate("Automator_ui", "centroid", None, QtGui.QApplication.UnicodeUTF8))
self.comboOriginMode.setItemText(1, QtGui.QApplication.translate("Automator_ui", "none", None, QtGui.QApplication.UnicodeUTF8))
self.cbSaveC.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to save the cross-correlations in a *_xc.mrcs stack. Useful for diagnosing problems.", None, QtGui.QApplication.UnicodeUTF8))
self.cbSaveC.setText(QtGui.QApplication.translate("Automator_ui", "Save cross-correlations", None, QtGui.QApplication.UnicodeUTF8))
self.sbAutomax.setToolTip(QtGui.QApplication.translate("Automator_ui", "For align mode \'auto\', the maximum \'diagWidth\' to use. Saves on computation.", None, QtGui.QApplication.UnicodeUTF8))
self.label_29.setToolTip(QtGui.QApplication.translate("Automator_ui", "For align mode \'auto\', the maximum \'diagWidth\' to use. Saves on computation.", None, QtGui.QApplication.UnicodeUTF8))
self.label_29.setText(QtGui.QApplication.translate("Automator_ui", "Auto max", None, QtGui.QApplication.UnicodeUTF8))
self.label_18.setToolTip(QtGui.QApplication.translate("Automator_ui", "The upper triangular matrix scheme to employ. \'diag\' is used for most cases, correlates each frame to \'diagWidth\' neighbours. See the wiki for detailed notes. ", None, QtGui.QApplication.UnicodeUTF8))
self.label_18.setText(QtGui.QApplication.translate("Automator_ui", "Align mode", None, QtGui.QApplication.UnicodeUTF8))
self.label_8.setToolTip(QtGui.QApplication.translate("Automator_ui", "Only applicable for weightMode=\'logistic\', sets the threshold level.", None, QtGui.QApplication.UnicodeUTF8))
self.label_8.setText(QtGui.QApplication.translate("Automator_ui", "Peak significance", None, QtGui.QApplication.UnicodeUTF8))
self.comboTriMode.setToolTip(QtGui.QApplication.translate("Automator_ui", "The upper triangular matrix scheme to employ. \'diag\' is used for most cases, correlates each frame to \'diagWidth\' neighbours. See the wiki for detailed notes. ", None, QtGui.QApplication.UnicodeUTF8))
self.comboTriMode.setItemText(0, QtGui.QApplication.translate("Automator_ui", "diag", None, QtGui.QApplication.UnicodeUTF8))
self.comboTriMode.setItemText(1, QtGui.QApplication.translate("Automator_ui", "auto", None, QtGui.QApplication.UnicodeUTF8))
self.comboTriMode.setItemText(2, QtGui.QApplication.translate("Automator_ui", "refine", None, QtGui.QApplication.UnicodeUTF8))
self.comboTriMode.setItemText(3, QtGui.QApplication.translate("Automator_ui", "tri", None, QtGui.QApplication.UnicodeUTF8))
self.comboTriMode.setItemText(4, QtGui.QApplication.translate("Automator_ui", "autocorr", None, QtGui.QApplication.UnicodeUTF8))
self.sbPeaksigThres.setToolTip(QtGui.QApplication.translate("Automator_ui", "Only applicable for weightMode=\'logistic\', sets the threshold level.", None, QtGui.QApplication.UnicodeUTF8))
self.cbSuppressOrigin.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to estimate the value of the origin in the cross-correlation by its neighbours. Helps if gain reference is not perfect.", None, QtGui.QApplication.UnicodeUTF8))
self.cbSuppressOrigin.setText(QtGui.QApplication.translate("Automator_ui", "Suppress origin", None, QtGui.QApplication.UnicodeUTF8))
self.sbBinCropY.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to apply binning prior to alignment, generally used for binning superresolution images.", None, QtGui.QApplication.UnicodeUTF8))
self.label_51.setText(QtGui.QApplication.translate("Automator_ui", "X", None, QtGui.QApplication.UnicodeUTF8))
self.sbBinCropX.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to apply binning prior to alignment, generally used for binning superresolution images.", None, QtGui.QApplication.UnicodeUTF8))
self.label_52.setText(QtGui.QApplication.translate("Automator_ui", "Y", None, QtGui.QApplication.UnicodeUTF8))
self.cbDoBinning.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to apply binning prior to alignment, generally used for binning superresolution images.", None, QtGui.QApplication.UnicodeUTF8))
self.cbDoBinning.setText(QtGui.QApplication.translate("Automator_ui", "Bin image", None, QtGui.QApplication.UnicodeUTF8))
self.cbDebuggingOutput.setToolTip(QtGui.QApplication.translate("Automator_ui", "Whether to display debugging information, useful for giving more feedback to the developers.", None, QtGui.QApplication.UnicodeUTF8))
self.cbDebuggingOutput.setText(QtGui.QApplication.translate("Automator_ui", "Debugging output", None, QtGui.QApplication.UnicodeUTF8))
self.comboPixelunits.setToolTip(QtGui.QApplication.translate("Automator_ui", "Zorro uses \'nm\' internally, but MRC files are saved in Angstroms.", None, QtGui.QApplication.UnicodeUTF8))
self.comboPixelunits.setItemText(0, QtGui.QApplication.translate("Automator_ui", "nm", None, QtGui.QApplication.UnicodeUTF8))
self.comboPixelunits.setItemText(1, QtGui.QApplication.translate("Automator_ui", "Å", None, QtGui.QApplication.UnicodeUTF8))
self.comboPixelunits.setItemText(2, QtGui.QApplication.translate("Automator_ui", "pm", None, QtGui.QApplication.UnicodeUTF8))
self.comboPixelunits.setItemText(3, QtGui.QApplication.translate("Automator_ui", "μm", None, QtGui.QApplication.UnicodeUTF8))
self.sbGain.setToolTip(QtGui.QApplication.translate("Automator_ui", "Scaling factor applied to data after loading. So if the | |
records going back 1000 years.
[E] - First student records going back 1000 years. Then, a nearly perfected cure for dragon
pox. Finally, a mysterious handwritten book full of strange runes.
[F] - First, a mysterious handwritten book full of strange runes. Then student records going
back 1000 years. Finally, a nearly perfected cure for dragon pox.
""")
choice = question("ABCDEF")
if choice in 'A':
return 1, 0, 1, 0
elif choice in 'B':
return 0, 0, 0, 1
elif choice in 'C':
return 0, 1, 0, 0
elif choice in 'D':
return 1, 0, 0, 0
elif choice in 'E':
return 0, 0, 1, 1
elif choice in 'F':
return 0, 1, 0, 1
elif num == 3:
print("""
Question 04: Which would you rather be:
[A] - Envied?
[B] - Imitated?
[C] - Trusted?
[D] - Praised?
[E] - Liked?
[F] - Feared?
""")
choice = question("ABCDEF")
if choice in 'A':
return 0, 1, 0, 1
elif choice in 'B':
return 0, 1, 0, 0
elif choice in 'C':
return 1, 0, 1, 0
elif choice in 'D':
return 1, 0, 0, 0
elif choice in 'E':
return 0, 0, 1, 0
elif choice in 'F':
return 0, 0, 0, 1
def fifth_question():
    """Ask a randomly selected variant of sorting question five.

    Prints one of three question texts, reads an answer via question(),
    and returns a 4-tuple of 0/1 house scores (same ordering as the
    other question functions), or None if no option matches.
    """
    variant = randint(1, 3)
    if variant == 1:
        print("""
Question 05: If you could have any power, which would you choose?
[A] - The power to read minds
[B] - The power of invisibility
[C] - The power of superhuman strength
[D] - The power to speak to animals
[E] - The power to change the past
[F] - The power to change your appearance at will
    """)
        letters = "ABCDEF"
        scoring = (
            ('A', (0, 1, 0, 1)),
            ('B', (1, 0, 0, 0)),
            ('C', (0, 0, 1, 1)),
            ('D', (0, 0, 1, 0)),
            ('E', (1, 0, 0, 1)),
            ('F', (0, 1, 0, 0)),
        )
    elif variant == 2:
        print("""
Question 05: What are you most looking forward to learning at Hogwarts?
[A] - Apparition and Disapparition (being able to materialize and dematerialize at will)
[B] - Transfiguration (turning one object into another object)
[C] - Flying on a broomstick
[D] - Hexes and jinxes
[E] - All about magical creatures, and how to befriend/care for them
[F] - Secrets about the castle
[G] - Every area of magic I can
    """)
        letters = "ABCDEFG"
        scoring = (
            ('A', (1, 0, 0, 1)),
            ('B', (0, 1, 0, 0)),
            ('C', (1, 0, 1, 0)),
            ('D', (0, 0, 0, 1)),
            ('E', (0, 0, 1, 0)),
            ('F', (1, 0, 0, 0)),
            ('G', (0, 1, 0, 0)),
        )
    else:  # variant == 3
        print("""
Question 05: Which of the following would you most like to study?
[A] - Centaurs
[B] - Goblins
[C] - Merpeople
[D] - Ghosts
[E] - Vampires
[F] - Werewolves
[G] - Trolls
    """)
        letters = "ABCDEFG"
        scoring = (
            ('A', (1, 1, 0, 0)),
            ('B', (0, 1, 0, 0)),
            ('C', (0, 0, 1, 1)),
            ('D', (1, 1, 0, 0)),
            ('E', (0, 0, 0, 1)),
            ('F', (1, 0, 1, 0)),
            ('G', (0, 0, 1, 1)),
        )
    answer = question(letters)
    # `answer in letter` mirrors the original `choice in 'A'` membership
    # (substring) test rather than strict equality.
    for letter, scores in scoring:
        if answer in letter:
            return scores
def sixth_question():
num = randint(1, 6)
if num == 1:
print("""
Question 06: You and two friends need to cross a bridge guarded by a river troll who insists
on fighting one of you before he will let all of you pass. Do you:
[A] - Attempt to confuse the troll into letting all three of you pass without fighting?
[B] - Suggest drawing lots to decide which of you will fight?
[C] - Suggest that all three of you should fight (without telling the troll)?
[D] - Volunteer to fight?
""")
choice = question("ABCD")
if choice in 'A':
return 0, 1, 0, 0
elif choice in 'B':
return 0, 0, 1, 0
elif choice in 'C':
return 0, 0, 0, 1
elif choice in 'D':
return 1, 0, 0, 0
elif num == 2:
print("""
Question 06: One of your house mates has cheated in a Hogwarts exam by using a Self-Spelling
Quill. Now he has come top of the class in Charms, beating you into second place. Professor
Flitwick is suspicious of what happened. He draws you to one side after his lesson and asks
you whether or not your classmate used a forbidden quill. What do you do?
[A] - Lie and say you don’t know (but hope that somebody else tells Professor Flitwick the
truth).
[B] - Tell <NAME> that he ought to ask your classmate (and resolve to tell your
classmate that if he doesn’t tell the truth, you will).
[C] - Tell <NAME> the truth. If your classmate is prepared to win by cheating, he
deserves to be found out. Also, as you are both in the same house, any points he loses will be
regained by you, for coming first in his place.
[D] - You would not wait to be asked to tell <NAME> the truth. If you knew that
somebody was using a forbidden quill, you would tell the teacher before the exam started.
""")
choice = question("ABCD")
if choice in 'A':
return 0, 0, 1, 0
elif choice in 'B':
return 1, 0, 0, 0
elif choice in 'C':
return 0, 1, 0, 0
elif choice in 'D':
return 0, 0, 0, 1
elif num == 3:
print("""
Question 06: A Muggle confronts you and says that they are sure you are a witch or wizard. Do you:
[A] - Ask what makes them think so?
[B] - Agree, and ask whether they’d like a free sample of a jinx?
[C] - Agree, and walk away, leaving them to wonder whether you are bluffing?
[D] - Tell them that you are worried about their mental health, and offer to call a doctor.
""")
choice = question("ABCD")
if choice in 'A':
return 0, 1, 0, 0
elif choice in 'B':
return 0, 0, 0, 1
elif choice in 'C':
return 1, 0, 0, 0
elif choice in 'D':
return 0, 0, 1, 0
elif num == 4:
print("""
Question 06: Which nightmare would frighten you most?
[A] - Standing on top of something very high and realizing suddenly that there are no hand-
or footholds, nor any barrier to stop you falling.
[B] - An eye at the keyhole of the dark, windowless room in which you are locked.
[C] - Waking up to find that neither your friends nor your family have any idea who you are.
[D] - Being forced to speak in such a silly voice that hardly anyone can understand you, and
everyone laughs at you.
""")
choice = question("ABCD")
if choice in 'A':
return 0, 1, 0, 0
elif choice in 'B':
return 1, 0, 0, 0
elif choice in 'C':
return 0, 0, 1, 0
elif choice in 'D':
return 0, 0, 0, 1
elif num == 5:
print("""
Question 06: Which road tempts you most?
[A] - The wide, sunny, grassy lane
[B] - The narrow, dark, lantern-lit alley
[C] - The twisting, leaf-strewn path through woods
[D] - The cobbled street lined with ancient buildings
""")
choice = question("ABCD")
if choice in 'A':
return 0, 0, 1, 0
elif choice in 'B':
return 0, 0, 0, 1
elif choice in 'C':
return 1, 0, | |
<reponame>cfpb/regulations-parser<filename>regparser/api_writer.py
# -*- coding: utf-8 -*-
import os
import os.path
import shutil
import re
from git import Repo
from git.exc import InvalidGitRepositoryError
from lxml.etree import Element, SubElement
from lxml.etree import tostring, fromstring, strip_tags
from lxml.etree import XMLSyntaxError
import requests
from regparser.tree.struct import Node, NodeEncoder, find
from regparser.notice.encoder import AmendmentEncoder
from utils import interpolate_string
import settings
import logging
logger = logging.getLogger()
class AmendmentNodeEncoder(AmendmentEncoder, NodeEncoder):
    """JSON encoder that combines AmendmentEncoder and NodeEncoder so a
    single encoder instance can serialize both amendment structures and
    regulation Node trees."""
    pass
class FSWriteContent:
    """Writer that serializes python objects as JSON files on disk."""

    def __init__(self, path, doc_number, layers=None, notices=None):
        # Only the relative output path is needed; the remaining arguments
        # keep the constructor signature consistent with the other writers.
        self.path = path

    def write(self, python_obj, **kwargs):
        """Pretty-print python_obj as JSON beneath settings.OUTPUT_DIR."""
        parts = self.path.split('/')
        target_dir = settings.OUTPUT_DIR + os.path.join(*parts[:-1])
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        encoder = AmendmentNodeEncoder(
            sort_keys=True, indent=4, separators=(', ', ': '))
        target_file = settings.OUTPUT_DIR + os.path.join(*parts)
        with open(target_file, 'w') as out:
            out.write(encoder.encode(python_obj))
class APIWriteContent:
    """Writer that POSTs JSON-encoded content to the configured API."""

    def __init__(self, path, doc_number, layers=None, notices=None):
        # Only the relative path is used; the remaining arguments keep the
        # signature consistent with the other writer classes.
        self.path = path

    def write(self, python_obj, **kwargs):
        """POST the object, serialized as JSON, to API_BASE + path."""
        body = AmendmentNodeEncoder().encode(python_obj)
        requests.post(settings.API_BASE + self.path,
                      data=body,
                      headers={'content-type': 'application/json'})
class GitWriteContent:
    """This writer places the content in a git repo on the file system"""
    def __init__(self, path, doc_number, layers=None, notices=None):
        # Only the destination path is used here; the extra arguments keep
        # the constructor signature consistent with the other writers.
        self.path = path
    def folder_name(self, node):
        """Directories are generally just the last element a node's label,
        but subparts and interpretations are a little special."""
        if node.node_type == Node.SUBPART:
            return '-'.join(node.label[-2:])
        elif len(node.label) > 2 and node.label[-1] == Node.INTERP_MARK:
            return '-'.join(node.label[-2:])
        else:
            return node.label[-1]
    def write_tree(self, root_path, node):
        """Given a file system path and a node, write the node's contents and
        recursively write its children to the provided location."""
        if not os.path.exists(root_path):
            os.makedirs(root_path)
        # Each node becomes an index.md with YAML-style front matter
        # (title, node type, ordered child folder names) followed by the
        # node's text as the body.
        node_text = u"---\n"
        if node.title:
            node_text += 'title: "' + node.title + '"\n'
        node_text += 'node_type: ' + node.node_type + '\n'
        child_folders = [self.folder_name(child) for child in node.children]
        node_text += 'children: ['
        node_text += ', '.join('"' + f + '"' for f in child_folders)
        node_text += ']\n'
        node_text += '---\n' + node.text
        with open(root_path + os.sep + 'index.md', 'w') as f:
            f.write(node_text.encode('utf8'))
        for idx, child in enumerate(node.children):
            child_path = root_path + os.sep + child_folders[idx]
            # Wipe any stale copy of the child directory first, so nodes
            # removed from the tree do not linger on disk.
            shutil.rmtree(child_path, ignore_errors=True)
            self.write_tree(child_path, child)
    def write(self, python_object, **kwargs):
        """Write a regulation tree into a git repository under
        GIT_OUTPUT_DIR, committing the result with the last path component
        (the notice id) as the commit message. Paths that do not contain
        "regulation" are ignored."""
        if "regulation" in self.path:
            path_parts = self.path.split('/')
            dir_path = settings.GIT_OUTPUT_DIR + os.path.join(*path_parts[:-1])
            if not os.path.exists(dir_path):
                os.makedirs(dir_path)
            try:
                repo = Repo(dir_path)
            except InvalidGitRepositoryError:
                # First write at this location: create the repository and
                # an initial commit named after the regulation part.
                repo = Repo.init(dir_path)
                repo.index.commit("Initial commit for " + path_parts[-2])
            # Write all files (and delete any old ones)
            self.write_tree(dir_path, python_object)
            # Add any new (untracked) files to git
            repo.index.add(repo.untracked_files)
            # Delete and modify files as needed
            deleted, modified = [], []
            for diff in repo.index.diff(None):
                if diff.deleted_file:
                    deleted.append(diff.a_blob.path)
                else:
                    modified.append(diff.a_blob.path)
            if modified:
                repo.index.add(modified)
            if deleted:
                repo.index.remove(deleted)
            # Commit with the notice id as the commit message
            repo.index.commit(path_parts[-1])
class XMLWriteContent:
def __init__(self, path, doc_number, layers=None, notices=[]):
self.path = path
if not self.path.endswith('.xml'):
self.path = path + '.xml'
self.doc_number = doc_number
self.layers = layers
self.notices = notices
self.notice = next((n for n in notices
if n['document_number'] == doc_number), None)
self.appendix_sections = 1 # need to track these manually
self.caps = [chr(i) for i in range(65, 65 + 26)]
    def write(self, python_object, **kwargs):
        """ Write the given python object based on its type. Node
        objects are handled as regulation trees, dicts as notices. """
        if isinstance(python_object, Node):
            self.write_regulation(python_object)
        # Extra keyword arguments (changes, reg_tree, left_doc_number) are
        # only meaningful for notices, so they are forwarded only here.
        if isinstance(python_object, dict):
            self.write_notice(python_object, **kwargs)
def write_regulation(self, reg_tree):
""" Write a regulation tree. """
self.layers['definitions'] = self.extract_definitions()
full_path = os.path.join(settings.OUTPUT_DIR, self.path)
dir_path = os.path.dirname(full_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
xml_tree = self.to_xml(reg_tree)
xml_string = tostring(xml_tree, pretty_print=True,
xml_declaration=True, encoding='UTF-8')
with open(full_path, 'w') as f:
logger.info("Writing regulation to {}".format(full_path))
f.write(xml_string)
def write_notice(self, notice, changes={}, reg_tree=None,
left_doc_number=''):
""" Write a notice. """
if reg_tree is None:
raise RuntimeError("to write notices to XML, both a "
"changeset and a reg tree are required.")
full_path = os.path.join(settings.OUTPUT_DIR, self.path)
dir_path = os.path.dirname(full_path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# Create a notice root element
notice_string = '<notice xmlns="eregs" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="eregs http://cfpb.github.io/regulations-schema/src/eregs.xsd"></notice>' # noqa
notice_elm = fromstring(notice_string)
# Get the fdsys and preamble
fdsys_elm = self.fdsys(reg_tree.label_id())
notice_elm.append(fdsys_elm)
preamble_elm = self.preamble(reg_tree.label_id())
notice_elm.append(preamble_elm)
# Because analysis kept in-line in RegML, and because the
# diffing functionality that generated our `changes` doesn't
# take analysis into account, we need to do so here. Analyzed
# labels are included as "modified" in the changes dict.
for label in self.layers['analyses']:
if label not in changes:
changes[label] = {'op': 'modified'}
# Get the changeset
changeset_elm = Element('changeset')
changeset_elm.set('leftDocumentNumber', left_doc_number)
changeset_elm.set('rightDocumentNumber', self.doc_number)
for label, change in changes.items():
# For each change, generate a change element with the label
# and operation as attributes.
change_elm = SubElement(changeset_elm, 'change')
change_elm.set('operation', change['op'])
change_elm.set('label', label)
# If the change is added/modified, we also need to include
# the added/modified node.
if change['op'] in ('added', 'modified'):
# Lookup the new label in the regulation tree
changed_node = find(reg_tree, label)
# Append it to as XML to the change element
content_elm = self.to_xml(changed_node)
change_elm.append(content_elm)
self.add_analyses(notice_elm)
notice_elm.append(changeset_elm)
xml_string = tostring(notice_elm, pretty_print=True,
xml_declaration=True, encoding='UTF-8')
# Write the file
with open(full_path, 'w') as f:
logger.info("Writing notice to {}".format(full_path))
print("Writing notice to {}".format(full_path))
f.write(xml_string)
def extract_definitions(self):
defs = self.layers['terms']['referenced']
references = {}
for _, defn in defs.items():
ref_node_label = defn['reference']
ref_offsets = defn['position']
term = defn['term']
defn_dict = {'offset': ref_offsets,
'term': term}
references[ref_node_label] = defn_dict
return references
@staticmethod
def apply_terms(text, replacements):
replacement_texts = []
replacement_offsets = []
for repl in replacements:
repl_text = repl['ref']
offsets = repl['offsets']
replacement_offsets.extend(offsets)
for offset in offsets:
replacement = text[offset[0]:offset[1]]
repl_target = repl_text.split(':')[1]
a = repl_target.encode('utf-8')
b = replacement.encode('utf-8')
replacement = ('<ref target="{}" reftype="term">'.format(a)
+ b + '</ref>')
replacement_texts.append(replacement)
return replacement_offsets, replacement_texts
@staticmethod
def apply_paragraph_markers(text, replacements):
replacement_texts = []
replacement_offsets = []
for i, repl in enumerate(replacements):
marker_text = repl['text']
marker_length = len(marker_text)
marker_locations = repl['locations']
for loc in marker_locations:
offset = [loc, loc + marker_length]
replacement_offsets.append(offset)
replacement_texts.append('')
return replacement_offsets, replacement_texts
@staticmethod
def apply_internal_citations(text, replacements):
replacement_texts = []
replacement_offsets = []
for repl in replacements:
citation = repl['citation']
offsets = repl['offsets']
citation_target = '-'.join(citation)
for offset in offsets:
ref_text = text[offset[0]:offset[1]]
replacement_text = '<ref target="{}" reftype="internal">'.format(citation_target) + \
ref_text.encode('utf-8') + '</ref>'
replacement_offsets.append(offset)
replacement_texts.append(replacement_text)
return replacement_offsets, replacement_texts
@staticmethod
def apply_external_citations(text, replacements):
replacement_texts = []
replacement_offsets = []
for repl in replacements:
citation = map(str, repl['citation'])
citation_type = repl['citation_type']
offsets = repl['offsets']
for offset in offsets:
ref_text = text[offset[0]:offset[1]]
# we need to form a URL for the external citation based
# on the citation type I don't know how to do that yet
# so the target is just a placeholder
target_url = '{}:{}'.format(citation_type,
'-'.join(citation))
replacement_text = '<ref target="{}" reftype="external">'.format(target_url) + \
ref_text.encode('utf-8') + '</ref>'
replacement_texts.append(replacement_text)
replacement_offsets.append(offset)
return replacement_offsets, replacement_texts
@staticmethod
def apply_definitions(text, replacement):
offset = replacement['offset']
term = replacement['term']
replacement_text = text[offset[0]:offset[1]]
replacement_text = '<def term="{}">'.format(term) + \
replacement_text.encode('utf-8') + '</def>'
return [offset], [replacement_text]
@staticmethod
def apply_graphics(graphics_layer):
graphics_elements = []
# graphics is a special layer because it's not inlined
for graphic in graphics_layer:
graphic_elem = Element('graphic')
alt_text_elem = SubElement(graphic_elem, 'altText')
text_elem = SubElement(graphic_elem, 'text')
url_elem = SubElement(graphic_elem, 'url')
alt_text_elem.text = graphic['alt']
text_elem.text = graphic['text']
url_elem.text = graphic['url']
if 'thumb_url' in graphic:
thumb_url_elem = SubElement(graphic_elem, 'thumbUrl')
thumb_url_elem.text = graphic['thumb_url']
graphics_elements.append(graphic_elem)
return graphics_elements
@staticmethod
def apply_keyterms(text, replacements):
""" Remove keyterm text from the text. It will need to be put in
the title at some other point in processing."""
# The keyterm belongs in the title of the element not the body.
# Remove it.
keyterm = replacements[0]['key_term']
if keyterm in text:
offset = (text.index(keyterm), text.index(keyterm) + len(keyterm))
return [offset], ['']
return [], []
@staticmethod
def apply_formatting(replacements):
replacement_texts = []
replacement_offsets = []
for repl in replacements:
if 'dash_data' in repl:
text = '<dash>' + repl['dash_data']['text'] + '</dash>'
elif 'table_data' in repl:
text = '<table><header>'
table_data = repl['table_data']
header = table_data['header']
if len(header) > 0:
for header_row in header:
text += '<columnHeaderRow>'
for col in header_row:
text += (
'<column colspan="{}" rowspan="{}">'.format(
col['colspan'], col['rowspan'])
+ col['text'] + '</column></columnHeaderRow>')
text += '</header>'
rows = table_data['rows']
for row in rows:
text += '<row>'
for item in row:
text += '<cell>' + item + '</cell>'
text += '</row>'
text += '</table>'
elif 'subscript_data' in repl:
text = ('<variable>'
'{variable}<subscript>{subscript}</subscript>'
'</variable>'.format(
| |
= data[:, self.bip_parameters[request.interaction_id]["dof_indices"]]
self.bip_instances[request.interaction_id].add_demonstration(data.T)
return intprim_framework_ros.srv.AddDemonstrationResponse(True)
##
# Callback for computing scaling parameters.
# @todo Need to update this call to use new scaling.
def compute_standardization_callback(self, request):
if(request.interaction_id == -1):
request.interaction_id = self.primary_instance
# Request is rows = time steps, columns = features.
# But BIP wants the transpose of that, so act accordingly.
data = np.array(request.observed_trajectory.data, dtype = np.float64)
data = np.reshape(data, (len(data) / request.observed_trajectory.stride, request.observed_trajectory.stride))
data = data[:, self.bip_parameters[request.interaction_id]["dof_indices"]]
self.bip_instances[request.interaction_id].compute_standardization(data.T)
return intprim_framework_ros.srv.ComputeStandardizationResponse(True)
##
# Evaluates the given trajectory.
# Functionally, behaves the same as generate_trajectory_callback except the values corresponding to generate_indices need to be valid for the given trajectory.
# These values are used to compute the MSE between the estimated trajectory and the actual one.
# As in generate, this service can be called multiple times in order to calculate the error after a subset of observations have been made.
#
# @param request Request containing a (possibly partial) N x M observed trajectory, where N is the number of time steps and M is the number of features.
# Must also contain a M x M noise covariance matrix.
#
# @returns float value which represents the mean squared error between the generated response trajectory and the actual response trajectory.
def evaluate_trajectory_callback(self, request):
if(request.interaction_id == -1):
request.interaction_id = self.primary_instance
data = np.array(request.observed_trajectory.data, dtype = np.float64)
data = np.reshape(data, (len(data) / request.observed_trajectory.stride, request.observed_trajectory.stride))
data = data[:, self.bip_parameters[request.interaction_id]["dof_indices"]]
covariance = self.get_covariance(request.covariance, data.shape[1], request.interaction_id)
active_dofs = self.get_active_dofs(request.interaction_id)
num_samples = int((1.0 - self.bip_state[request.interaction_id]["current_phase"]) * self.bip_parameters[request.interaction_id]["num_samples"])
if(num_samples > 0):
generated_trajectory, self.bip_state[request.interaction_id]["current_phase"], self.last_generated_mean, _ = self.bip_instances[request.interaction_id].generate_probable_trajectory_recursive(
data.T,
covariance,
active_dofs,
num_samples = num_samples,
starting_phase = None,
return_variance = False,
phase_lookahead = self.bip_parameters[request.interaction_id]["phase_lookahead"])
# Calculate MSE from the last observation compared to the first first generated trajectory sample.
mse = sklearn.metrics.mean_squared_error(data[-1:, self.bip_parameters[request.interaction_id]["generate_indices"]], generated_trajectory[self.bip_parameters[request.interaction_id]["generate_indices"], :1].T)
return intprim_framework_ros.srv.EvaluateTrajectoryResponse(
mse
)
else:
return intprim_framework_ros.srv.EvaluateTrajectoryResponse(
0.0
)
##
# Gets the covariance bucket index for the given BIP instance.
# A covariance "bucket" is created for each set of active phases, such that every phase value from [0, 1] belongs to a single active bucket.
# At run-time, we need to choose the appropriate covariance noise (some DoFs may be active and some inactive according to the parameter file), and so we need to find the proper bucket.
#
# @param interaction_id The ID of the BIP instance to use.
#
# @returns The index of the covariance bucket corresponding to the phase stored in current state of the given BIP instance.
def get_bucket_index(self, interaction_id):
bucket_idx = 0
for bucket_limits, cov_noise in self.bip_parameters[interaction_id]["cov_noise"]:
# print("Bucket limits: " + str(bucket_limits))
if(self.bip_state[interaction_id]["current_phase"] >= bucket_limits[0] and self.bip_state[interaction_id]["current_phase"] < bucket_limits[1]):
return bucket_idx
bucket_idx += 1
##
# Gets the covariance matrix for the given BIP instance and measurement noise.
# This covariance matrix accounts for both the given measurement noise as well as any active/inactive DoFs corresponding to the currently estimated phase.
# Active DoFs will have a noise value equivalent to the "noise_bias" parameter, inactive DoFs to "inactive_noise_bias".
#
# @param covariance 2M x 1 measurement noise vector, where M is the number of features.
# @param stride The stride of the measurement noise vector: M
# @param interaction_id The ID of the BIP instance to use.
#
# @returns The M x M covariance noise matrix corresponding to the currently estimated phase for the given BIP instance.
def get_covariance(self, covariance, stride, interaction_id):
covariance = np.array(covariance, dtype = np.float64)
covariance = np.reshape(covariance, (stride, stride))
bucket_idx = self.get_bucket_index(interaction_id)
if(bucket_idx is not None):
covariance[np.diag_indices(covariance.shape[0])] += self.bip_parameters[interaction_id]["cov_noise"][bucket_idx][1]
return covariance
##
# Gets the list of DoFs that are active according to the current state of the given BIP instance.
#
# @param interaction_id The ID of the BIP instance to use.
#
# @returns N dimensional vector containing indices into the full state dimension, where N is the number of currently active DoFs.
def get_active_dofs(self, interaction_id):
bucket_idx = self.get_bucket_index(interaction_id)
if(bucket_idx is not None):
return self.bip_parameters[interaction_id]["active_dofs"][bucket_idx][1]
##
# Performs inference on the given partially observed trajectory using the given BIP instance.
# This generation is recursive, and so the current state will be used (and updated) in the integration of any new sensor measurements.
#
# @param request Request containing a (possibly partial) N x M observed trajectory, where N is the number of time steps and M is the number of features.
# Must also contain a M x M noise covariance matrix.
#
# @returns P x Q response trajectory, where P is the number of time steps until the end of the demonstration and Q is the number of generated features.
def generate_trajectory_callback(self, request):
self.num_predictions += 1
if(request.interaction_id == -1):
request.interaction_id = self.primary_instance
data = np.array(request.observed_trajectory.data, dtype = np.float64)
data = np.reshape(data, (len(data) / request.observed_trajectory.stride, request.observed_trajectory.stride))
data = data[:, self.bip_parameters[request.interaction_id]["dof_indices"]]
covariance = self.get_covariance(request.covariance, data.shape[1], request.interaction_id)
active_dofs = self.get_active_dofs(request.interaction_id)
if(self.use_spt is True):
num_samples = 1
if(self.spt_phase == "current"):
starting_phase = None
else:
starting_phase = self.spt_phase
else:
num_samples = int((1.0 - self.bip_state[request.interaction_id]["current_phase"]) * self.bip_parameters[request.interaction_id]["num_samples"])
starting_phase = None
if(num_samples == 0):
num_samples = 1
if(active_dofs is not None):
generated_trajectory, self.bip_state[request.interaction_id]["current_phase"], self.last_generated_mean, _ = self.bip_instances[request.interaction_id].generate_probable_trajectory_recursive(
data.T,
covariance,
active_dofs,
num_samples = num_samples,
starting_phase = starting_phase,
return_variance = False,
phase_lookahead = self.bip_parameters[request.interaction_id]["phase_lookahead"])
print("Phase: " + str(self.bip_state[request.interaction_id]["current_phase"]))
#print("Num samples: " + str(num_samples))
print("Observed trajectory size: " + str(data.shape[0]))
#print("Stride: " + str(data.shape[1]))
if(self.bip_parameters[request.interaction_id]["debug"]):
self.stat_collector.collect(self.bip_instances[request.interaction_id], data, generated_trajectory.T, rospy.get_time())
if(self.bip_state[request.interaction_id]["current_phase"] > self.stop_generation_phase or self.bip_state[request.interaction_id]["current_phase"] < self.start_generation_phase):
print("Sending empty trajectory...")
return intprim_framework_ros.srv.GenerateTrajectoryResponse(
intprim_framework_ros.msg.Trajectory(len(self.bip_parameters[request.interaction_id]["generate_indices"]), [])
)
else:
return intprim_framework_ros.srv.GenerateTrajectoryResponse(
intprim_framework_ros.msg.Trajectory(len(self.bip_parameters[request.interaction_id]["generate_indices"]), generated_trajectory[self.bip_parameters[request.interaction_id]["generate_indices"]].T.flatten().tolist())
)
else:
print("Sending empty trajectory...")
return intprim_framework_ros.srv.GenerateTrajectoryResponse(
intprim_framework_ros.msg.Trajectory(len(self.bip_parameters[request.interaction_id]["generate_indices"]), [])
)
##
# Gets the basis approximation for the given trajectory and BIP instance.
#
# @param request Request containing an N x M observed trajectory, where N is the number of time steps and M is the number of features.
#
# @returns P x M approximated trajectory, where P is the number of time steps specified in the request and M is the number of features.
def get_approximate_trajectory_callback(self, request):
if(request.interaction_id == -1):
request.interaction_id = self.primary_instance
data = np.array(request.observed_trajectory.data, dtype = np.float64)
data = np.reshape(data, (len(data) / request.observed_trajectory.stride, request.observed_trajectory.stride))
data = data[:, self.bip_parameters[request.interaction_id]["dof_indices"]]
approx_traj = self.bip_instances[request.interaction_id].get_approximate_trajectory(data.T, num_samples = request.num_samples)
return intprim_framework_ros.srv.GetApproximateTrajectoryResponse(
intprim_framework_ros.msg.Trajectory(approx_traj.shape[0], approx_traj.T.flatten().tolist())
)
##
# Gets the probability distribution of the prior for the given BIP instance.
# The distribution is assumed to be Gaussian from which the parameters are found empirically.
#
# @param request Request containing the ID of the BIP instance to use.
#
# @returns three N x M matrices representing the mean, upper bound, and lower bound (within one standard deviation) where N is the number of time steps and M is the number of features.
def get_probability_distribution_callback(self, request):
if(request.interaction_id == -1):
request.interaction_id = self.primary_instance
mean, upper_bound, lower_bound = self.bip_instances[request.interaction_id].get_probability_distribution()
return intprim_framework_ros.srv.GetDistributionResponse(
intprim_framework_ros.msg.Trajectory(mean.shape[0], mean.T.flatten().tolist()),
intprim_framework_ros.msg.Trajectory(upper_bound.shape[0], upper_bound.T.flatten().tolist()),
intprim_framework_ros.msg.Trajectory(lower_bound.shape[0], lower_bound.T.flatten().tolist())
)
##
# Gets the parameters associated with the probability distribution of the prior for the given BIP instance.
# The distribution is assumed to be Gaussian from which the parameters are found empirically.
#
# @param request Request containing the ID of the BIP instance to use.
#
# @returns M x 1 vector containing the mean and a M x M matrix containing the covariance.
def get_distribution_parameters_callback(self, request):
if(request.interaction_id == -1):
request.interaction_id = self.primary_instance
mean, var = self.bip_instances[request.interaction_id].get_basis_weight_parameters()
var = np.array(self.bip_instances[request.interaction_id].basis_weights, dtype = np.float64)
var = np.corrcoef(var.T)
return intprim_framework_ros.srv.GetDistributionParametersResponse(
var.flatten().tolist(),
mean.tolist()
)
##
# Gets the mean trajectory of the prior for the given BIP instance.
#
# @param request Request containing the ID of the BIP instance to use.
#
# @returns N x M trajectory containing the mean of the trained prior, where N is the number of time steps and M is the number of features.
def get_mean_trajectory_callback(self, request):
if(request.interaction_id == -1):
request.interaction_id = self.primary_instance
generated_trajectory = self.bip_instances[request.interaction_id].get_mean_trajectory(request.num_samples)
if(request.return_full_trajectory):
return intprim_framework_ros.srv.GetMeanTrajectoryResponse(
intprim_framework_ros.msg.Trajectory(generated_trajectory.shape[0], generated_trajectory.T.flatten().tolist())
)
else:
return intprim_framework_ros.srv.GetMeanTrajectoryResponse(
intprim_framework_ros.msg.Trajectory(len(self.bip_parameters[request.interaction_id]["generate_indices"]), generated_trajectory[self.bip_parameters[request.interaction_id]["generate_indices"]].T.flatten().tolist())
)
##
# Gets | |
# Source repository: HumbleSmarts/Cafemanagementsystem
from datetime import date
from tkinter import*
import tkinter
import tkinter.ttk
import tkinter.messagebox as messagebox
import random
import time
import datetime
from tkinter import *
from random import triangular
"""def register_user():
username_info = username.get()
password_info = password.get()
file=open(username_info+".txt", "w")
file.write(username_info)
file.write(password_info)
file.close()
username_entry.delete(0, END)
password_entry.delete(0, END)
Label(screen1, text = "Registration Successful", fg = "green",
font = ("calibri", 11)).pack()
def register():
global screen1
screen1 = Toplevel(screen)
screen1.title("Register")
screen1.geometry("300x250")
global username
global password
global username_entry
global password_entry
username = StringVar()
password = StringVar()
Label(screen1, text = "Please enter detail below").pack()
Label(screen1, text = "").pack()
Label(screen1, text = "Username * ").pack()
username_entry = Entry(screen1, textvariable = username)
username_entry.pack()
Label(screen1, text = "Password * ").pack()
password_entry = Entry(screen1, textvariable = password)
password_entry.pack()
Label(screen1, text = "").pack()
Button(screen1, text = "Register", width = 10, height = 1, command = register_user).pack()
Button(screen1, text = "Already have am account?", command = login).pack()
def login_user():
user = input("Username: ")
passw = input("Password: ")
f = open("users.txt", "r")
for line in f.readlines():
us, pw = line.strip().split("|")
if (user in us) and (passw in pw):
print ("login successful!")
return True
print ("Wrong username / password")
return False
def menu():
HumbleCafeManagementSystem.py
#here's a menu that the user can access if he logged in.
def main():
login_user()
log = login
if log == True:
menu()"""
"""def login_user():
username_info = username.get()
password_info = password.get()
file=open(username_info+".txt", "r")
file.read(None)
file.read(None)
file.close()
username_entry.delete(0, END)
password_entry.delete(0, END)
Label(screen1, text = "Login Successful", fg = "green", font = ("calibri", 11)).pack()
print("Login session started")
file=open (HumbleCafeManagementSystem.py , "r")
file.read(None)
file.read(None)
file.close()
def HumbleCafeManagementSystem():
HumbleCafeManagementSystem.py = HumbleCafeManagementSystem.get()
file=open (HumbleCafeManagementSystem+".py", "r", "w")
file.read(None)
file.read(None)
file.close()
def login():
global screen1
screen1 = Toplevel(screen)
screen1.title("Login")
screen1.geometry("300x250")
global username
global password
global username_entry
global password_entry
username = StringVar()
password = StringVar()
Label(screen1, text = "Please enter detail below...").pack()
Label(screen1, text = "").pack()
Label(screen1, text = "Username * ").pack()
username_entry = Entry(screen1, textvariable = username)
username_entry.pack()
Label(screen1, text = "Password * ").pack()
password_entry = Entry(screen1, textvariable = password)
password_entry.pack()
Label(screen1, text = "").pack()
Button(screen1, text = "Login", width = 10, height = 1, command = login_user).pack()
Label(screen1, text = "Forget you password?").pack()
Button(screen1, text = "Don't have am account?", command = register).pack()
def main_screen():
global screen
screen = Tk()
screen.geometry("300x250")
screen.title("Registration form")
Label(text = "Login Details", bg = "grey", width = "300", height = "2", font = ("Calibri", 13)).pack()
Label(text = "").pack()
Button(text = "Login", height = "2", width = "30", command = login).pack()
Label(text = "").pack()
Button(text = "Register", height = "2", width = "30", command = register).pack()
screen.mainloop()
main_screen()"""
"""start_date = datetime.date.today()
print(start_date)"""
# Main application window.
root = tkinter.Tk()
root.geometry("1280x800+0+0")
root.title("Humble Cafe Management System")
root.configure(background='black')
# Top banner frame holding the heading label.
Tops = Frame(root, width= 1280, height=100, bd=14, relief="raise")
Tops.pack(side=TOP)
# Left panel (f1) and right panel (f2) below the banner.
f1 = Frame(root, width= 600, height=550, bd=8, relief="raise")
f1.pack(side=LEFT)
f2 = Frame(root, width= 340, height=450, bd=8, relief="raise")
f2.pack(side=RIGHT)
# Left panel split into a top (f1a) and a bottom (f2a) area.
f1a= Frame(f1, width= 800, height=330, bd=8, relief="raise")
f1a.pack(side=TOP)
f2a= Frame(f1, width= 800, height=320, bd=6, relief="raise")
f2a.pack(side=BOTTOM)
# Right panel split into a top (ft2) and a bottom (fb2) area.
ft2= Frame(f2, width= 340, height=350, bd=12, relief="raise")
ft2.pack(side=TOP)
fb2= Frame(f2, width= 340, height=200, bd=16, relief="raise")
fb2.pack(side=BOTTOM)
# f1aa hosts the drink checkboxes, f1ab the cake checkboxes (see below).
f1aa= Frame(f1a, width= 300, height=230, bd=16, relief="raise")
f1aa.pack(side=LEFT)
f1ab= Frame(f1a, width= 300, height=230, bd=16, relief="raise")
f1ab.pack(side=RIGHT)
# Sub-frames of the lower-left area.
f2aa= Frame(f2a, width= 350, height=230, bd=14, relief="raise")
f2aa.pack(side=LEFT)
f2ab= Frame(f2a, width= 350, height=230, bd=14, relief="raise")
f2ab.pack(side=RIGHT)
Tops.configure(background='black')
f1.configure(background='black')
f2.configure(background='black')
#===========================CostofItem=================================================
def CostofItem():
    """Compute the bill from the ordered quantities and fill the display fields.

    Reads every item quantity entry, prices drinks and cakes separately, then
    sets CostofDrinks, CostofCakes, SubTotal, PaidTax (15%), and TotalCost.

    Bug fix: the display values were previously built as tuples such as
    ``("#", "1.20")`` and handed to ``StringVar.set``; they are now proper
    "# <amount>" strings.
    """
    # (quantity entry, unit price) for each drink.
    drink_orders = (
        (E_Latta, 1.2), (E_Espresso, 1.99),
        (E_Iced_Latta, 2.05), (E_Vale_Coffee, 1.89),
        (E_Cappuccino, 1.99), (E_African_Coffee, 2.99),
        (E_American_Coffee, 2.39), (E_Iced_Cappuccino, 1.29),
    )
    # (quantity entry, unit price) for each cake.
    cake_orders = (
        (E_Coffee_Cake, 1.35), (E_Red_Velvet_Cake, 2.2),
        (E_Black_Forest_Cake, 1.99), (E_Boston_Cream_Cake, 1.49),
        (E_Lagos_Chocolate_Cake, 1.8), (E_Kilburn_Chocolate_Cake, 1.67),
        (E_Carlton_Hill_Chocolate_Cake, 1.6), (E_Queen_Park_Chocolate_Cake, 1.99),
    )

    PriceofDrinks = sum(float(entry.get()) * price for entry, price in drink_orders)
    PriceofCakes = sum(float(entry.get()) * price for entry, price in cake_orders)

    CostofDrinks.set("# %.2f" % PriceofDrinks)
    CostofCakes.set("# %.2f" % PriceofCakes)

    subtotal = PriceofDrinks + PriceofCakes
    SubTotal.set("# %.2f" % subtotal)

    tax = subtotal * 0.15
    PaidTax.set("# %.2f" % tax)
    TotalCost.set("# %.2f" % (subtotal + tax))

    # NOTE(review): the original bound a local ServiceCharge = 1.59 that was
    # never displayed nor added to the total. It is now shown in the receipt
    # field but deliberately still excluded from TotalCost to preserve the
    # original billing behavior -- confirm the intended pricing.
    ServiceCharge.set("# %.2f" % 1.59)
def dExit():
    """Ask the user for confirmation, then destroy the main window."""
    if messagebox.askyesno("Quit System", "Do you want to quit"):
        root.destroy()
def ePrint():
    """Ask for confirmation, then write the current receipt text to stdout.

    Bug fix: the original referenced the undefined name ``massagebox`` and
    called ``root.exit()`` (Tk windows have no ``exit`` method), so this
    handler always raised when invoked.
    NOTE(review): real printer output is out of scope here; dumping the receipt
    text to standard output is the closest working interpretation of "print" --
    confirm the intended behavior.
    """
    if messagebox.askyesno(" Print System ", " Do you want to print "):
        print(txtReceipt.get("1.0", END))
def Reset():
    """Clear the receipt, all computed totals, all quantities and checkboxes,
    and disable every quantity entry again."""
    # Clear the computed bill fields.
    for total_field in (PaidTax, SubTotal, TotalCost,
                        CostofDrinks, CostofCakes, ServiceCharge):
        total_field.set("")

    txtReceipt.delete("1.0", END)

    # Zero every item quantity entry.
    for quantity in (E_Latta, E_Espresso, E_Iced_Latta, E_Vale_Coffee,
                     E_Cappuccino, E_African_Coffee, E_American_Coffee,
                     E_Iced_Cappuccino, E_Coffee_Cake, E_Red_Velvet_Cake,
                     E_Black_Forest_Cake, E_Boston_Cream_Cake,
                     E_Lagos_Chocolate_Cake, E_Kilburn_Chocolate_Cake,
                     E_Carlton_Hill_Chocolate_Cake, E_Queen_Park_Chocolate_Cake):
        quantity.set("0")

    # Untick every item checkbox.
    for check in (var1, var2, var3, var4, var5, var6, var7, var8,
                  var9, var10, var11, var12, var13, var14, var15, var16):
        check.set("0")

    # Disable the quantity entries until their checkbox is ticked again.
    for entry in (txtLatta, txtEspresso, txtIced_Latta, txtVale_Coffee,
                  txtCappuccino, txtAfrican_Coffee, txtAmerican_Coffee,
                  txtIced_Cappuccino, txtCoffee_Cake, txtRed_Velvet_Cake,
                  txtBlack_Forest_Cake, txtBoston_Cream_Cake,
                  txtLagos_Chocolate_Cake, txtKilburn_Chocolate_Cake,
                  txtCarlton_Hill_Chocolate_Cake, txtQueen_Park_Chocolate_Cake):
        entry.configure(state= DISABLED)
def Receipt():
    """Build the receipt text: a random bill reference, one line per item
    quantity, then the cost/tax/subtotal/total summary rows."""
    txtReceipt.delete("1.0",END)
    # Random reference number so each bill gets a (probabilistically) unique id.
    x = random.randint(10987, 599879)
    randomRef = str(x)
    Receipt_Ref.set("BILL"+ randomRef)
    txtReceipt.insert(END,'Receipt Ref:\t\t\t'+Receipt_Ref.get() + '\t\t' + DateOfOrder.get() +"\n")
    txtReceipt.insert(END,'Items\t\t\t\t\t' + "Cost of Items \n\n")
    # One line per menu item showing the ordered quantity.
    txtReceipt.insert(END,'Latta: \t\t\t\t\t' + E_Latta.get()+ "\n")
    txtReceipt.insert(END,'Espresso: \t\t\t\t\t' + E_Espresso.get()+"\n")
    txtReceipt.insert(END, 'Iced Latta: \t\t\t\t\t' + E_Iced_Latta.get()+"\n")
    txtReceipt.insert(END, 'Vale Coffee: \t\t\t\t\t' + E_Vale_Coffee.get()+"\n")
    txtReceipt.insert(END, 'Cappuccino: \t\t\t\t\t' + E_Cappuccino.get() +"\n")
    txtReceipt.insert(END, 'African Coffee: \t\t\t\t\t' + E_African_Coffee.get()+ "\n")
    txtReceipt.insert(END, 'American Coffee: \t\t\t\t\t' + E_American_Coffee.get()+ "\n")
    txtReceipt.insert(END, 'Iced Cappuccino: \t\t\t\t\t' + E_Iced_Cappuccino.get() + "\n")
    txtReceipt.insert(END, 'Coffee Cake: \t\t\t\t\t' + E_Coffee_Cake.get() +"\n")
    txtReceipt.insert(END, 'Red Velvet Cake: \t\t\t\t\t' + E_Red_Velvet_Cake.get() +"\n")
    txtReceipt.insert(END, 'Black Forest Cake: \t\t\t\t\t' + E_Black_Forest_Cake.get() + "\n")
    txtReceipt.insert(END, 'Boston Cream Cake: \t\t\t\t\t' + E_Boston_Cream_Cake.get() + "\n")
    txtReceipt.insert(END, 'Lagos Chocolate Cake: \t\t\t\t\t' + E_Lagos_Chocolate_Cake.get() +"\n")
    txtReceipt.insert(END, 'Kilburn Chocolate Cake: \t\t\t\t\t' + E_Kilburn_Chocolate_Cake.get() + "\n")
    txtReceipt.insert(END, 'Carlton Hill Chocolate Cake: \t\t\t\t\t' + E_Carlton_Hill_Chocolate_Cake.get() + "\n")
    txtReceipt.insert(END, 'Queens Park Chocolate Cake: \t\t\t\t\t' + E_Queen_Park_Chocolate_Cake.get() + "\n")
    # Summary rows: costs on the left, tax/subtotal/total on the right.
    txtReceipt.insert(END, 'Cost of Drinks: \t\t' + CostofDrinks.get() + '\tTax Paid:\t\t' +PaidTax.get()+"\n")
    txtReceipt.insert(END, 'Cost of Cakes: \t\t' + CostofCakes.get() + '\tSubTotal:\t\t' +SubTotal.get()+"\n")
    txtReceipt.insert(END, 'Service Charge: \t\t' + ServiceCharge.get() + '\tTotal Cost:\t\t' +TotalCost.get()+"\n")
#=============================Heading======================================
# Application heading shown in the top banner frame.
lblInfo = Label(Tops, font=('arial', 75, 'bold'), text="Humble Cafe Management System",
                bd=8, anchor='w')
lblInfo.grid(row=0,column=0)
#================================Calculator===============================
def chkbutton_value():
    """Checkbox callback: enable the quantity entry of each ticked item;
    disable and zero the entry of each unticked item."""
    # (checkbox state, quantity entry widget, quantity StringVar) per item.
    item_controls = (
        (var1, txtLatta, E_Latta),
        (var2, txtEspresso, E_Espresso),
        (var3, txtIced_Latta, E_Iced_Latta),
        (var4, txtVale_Coffee, E_Vale_Coffee),
        (var5, txtCappuccino, E_Cappuccino),
        (var6, txtAfrican_Coffee, E_African_Coffee),
        (var7, txtAmerican_Coffee, E_American_Coffee),
        (var8, txtIced_Cappuccino, E_Iced_Cappuccino),
        (var9, txtCoffee_Cake, E_Coffee_Cake),
        (var10, txtRed_Velvet_Cake, E_Red_Velvet_Cake),
        (var11, txtBlack_Forest_Cake, E_Black_Forest_Cake),
        (var12, txtBoston_Cream_Cake, E_Boston_Cream_Cake),
        (var13, txtLagos_Chocolate_Cake, E_Lagos_Chocolate_Cake),
        (var14, txtKilburn_Chocolate_Cake, E_Kilburn_Chocolate_Cake),
        (var15, txtCarlton_Hill_Chocolate_Cake, E_Carlton_Hill_Chocolate_Cake),
        (var16, txtQueen_Park_Chocolate_Cake, E_Queen_Park_Chocolate_Cake),
    )
    for checked, entry_widget, quantity in item_controls:
        if checked.get() == 1:
            entry_widget.configure(state= NORMAL)
        elif checked.get() == 0:
            entry_widget.configure(state= DISABLED)
            quantity.set("0")
#===========================variables============================================
# Checkbox state (1 = item selected, 0 = not) for each of the 16 menu items.
var1= IntVar()
var2= IntVar()
var3= IntVar()
var4= IntVar()
var5= IntVar()
var6= IntVar()
var7= IntVar()
var8= IntVar()
var9= IntVar()
var10= IntVar()
var11= IntVar()
var12= IntVar()
var13= IntVar()
var14= IntVar()
var15= IntVar()
var16= IntVar()
# Receipt header fields.
DateOfOrder=StringVar()
Receipt_Ref=StringVar()
# Computed bill fields shown in the cost panel and on the receipt.
PaidTax=StringVar()
SubTotal=StringVar()
TotalCost=StringVar()
CostofCakes=StringVar()
CostofDrinks=StringVar()
ServiceCharge=StringVar()
# Quantity ordered for each menu item (strings, backing the Entry widgets).
E_Latta=StringVar()
E_Espresso=StringVar()
E_Iced_Latta=StringVar()
E_Vale_Coffee=StringVar()
E_Cappuccino=StringVar()
E_African_Coffee=StringVar()
E_American_Coffee=StringVar()
E_Iced_Cappuccino=StringVar()
E_Coffee_Cake=StringVar()
E_Red_Velvet_Cake=StringVar()
E_Black_Forest_Cake=StringVar()
E_Boston_Cream_Cake=StringVar()
E_Lagos_Chocolate_Cake=StringVar()
E_Kilburn_Chocolate_Cake=StringVar()
E_Carlton_Hill_Chocolate_Cake=StringVar()
E_Queen_Park_Chocolate_Cake=StringVar()
# Every quantity starts at "0".
E_Latta.set("0")
E_Espresso.set("0")
E_Iced_Latta.set("0")
E_Vale_Coffee.set("0")
E_Cappuccino.set("0")
E_African_Coffee.set("0")
E_American_Coffee.set("0")
E_Iced_Cappuccino.set("0")
E_Coffee_Cake.set("0")
E_Red_Velvet_Cake.set("0")
E_Black_Forest_Cake.set("0")
E_Boston_Cream_Cake.set("0")
E_Lagos_Chocolate_Cake.set("0")
E_Kilburn_Chocolate_Cake.set("0")
E_Carlton_Hill_Chocolate_Cake.set("0")
E_Queen_Park_Chocolate_Cake.set("0")
# Order timestamp shown in the receipt header.
DateOfOrder.set(time.strftime("%d /%m /%y :: %H: %M: %S"))
#============================Drinks======================================================
# One checkbox per drink; ticking one enables its quantity entry through
# chkbutton_value. NOTE: .grid() returns None, so the names below are bound to
# None -- the widgets themselves stay alive through their parent frame.
Latta = Checkbutton(f1aa, text="Latta \t", variable = var1, onvalue = 1, offvalue=0,
                    font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 0, sticky=W)
Espresso = Checkbutton(f1aa, text="Espresso \t", variable = var2, onvalue = 1, offvalue=0,
                       font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 1, sticky=W)
Iced_Latta = Checkbutton(f1aa, text="Iced_Latta \t", variable = var3, onvalue = 1, offvalue=0,
                         font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 2, sticky=W)
Vale_Coffee = Checkbutton(f1aa, text="Vale_Coffee \t", variable = var4, onvalue = 1, offvalue=0,
                          font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 3, sticky=W)
Cappuccino = Checkbutton(f1aa, text="Cappuccino \t", variable = var5, onvalue = 1, offvalue=0,
                         font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 4, sticky=W)
African_Coffee = Checkbutton(f1aa, text="African_Coffee \t", variable = var6, onvalue = 1, offvalue=0,
                             font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 5, sticky=W)
American_Coffee = Checkbutton(f1aa, text="American_Coffee \t", variable = var7, onvalue = 1, offvalue=0,
                              font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 6, sticky=W)
Iced_Cappuccino = Checkbutton(f1aa, text="Iced_Cappuccino \t", variable = var8, onvalue = 1, offvalue=0,
                              font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 7, sticky=W)
#============================Cakes======================================================
# Cake checkboxes live in the right-hand frame (f1ab).
CoffeeCake = Checkbutton(f1ab, text="Coffee Cake \t", variable = var9, onvalue = 1, offvalue=0,
                         font=('arial', 17, 'bold'), command=chkbutton_value).grid(row = 0, sticky=W)
Red_Velvet_Cake = Checkbutton(f1ab, text="Red Velvet | |
= {} , ## fit-parameters to reset/use
more_vars = {} , ## additional results to be calculated
gen_fun = None , ## generator function ( pdf , varset , **gen_config )
fit_fun = None , ## fit function ( pdf , dataset , **fit_config )
accept_fun = None , ## accept function ( fit-result, pdf, dataset )
silent = True ,
progress = True ,
logger = logger ,
frequency = 1000 ) :
"""Make `ntoys` pseudoexperiments
- Schematically:
>>> for toy in range ( nToys ) :
>>> ... dataset = gen_fun ( gen_pdf , ... , **gen_config )
>>> ... result = fit_fun ( fit_pdf , dataset , **fit_config )
>>> ... if not accept_fun ( result , fit_pdf , dataset ) : continue
>>> .... < collect statistics here >
For each experiment:
1. generate dataset using `pdf` with variables specified
in `data` and configuration specified via `gen_config`
for each generation the parameters of `pdf` are reset
for their initial values and valeus from `init_pars`
2. fit generated dataset with `pdf` using configuration
specified via `fit_config`
- `pdf` : PDF to be used for generation and fitting
- `nToys` : number of pseudoexperiments to generate
- `data` : variable list of variables to be used for dataset generation
- `gen_config` : configuration of <code>pdf.generate</code>
- `fit_config` : configuration of <code>pdf.fitTo</code>
- `gen_pars` : redefine these parameters for generation of each pseudoexperiment
- `fit_pars` : redefine these parameters for fit of each pseudoexperiment
- `silent` : silent toys?
- `progress` : show progress bar?
- `logger` : use this logger
- `frequency` : how often to dump the intermediate results ?
It returns a dictionary with fit results for the toys and a dictionary of statistics
>>> pdf = ...
... results, stats = make_toys ( pdf , ## PDF to use
... 1000 , ## number of toys
... [ 'mass' ] , ## varibales in dataset
... { 'nEvents' : 5000 } , ## configuration of `pdf.generate`
... { 'ncpus' : 2 } , ## configuration of `pdf.fitTo`
... { 'mean' : 0.0 , 'sigma' : 1.0 } ## parameters to use for generation
... )
"""
from ostap.core.ostap_types import string_types, integer_types
assert isinstance ( nToys , integer_types ) and 0 < nToys,\
'Invalid "nToys" argument %s/%s' % ( nToys , type ( nToys ) )
assert gen_config and 'nEvents' in gen_config,\
'Number of events per toy must be specified via "gen_config" %s' % gen_config
## 1. generator function?
if gen_fun is None :
if not silent : logger.info ( "make_toys2: use default ``generate_data'' function!")
gen_fun = generate_data
assert gen_fun and callable ( gen_fun ) , 'Invalid generator function!'
## 2. fitting function?
if fit_fun is None :
if not silent : logger.info ( "make_toys2: use default ``make_fit'' function!")
fit_fun = make_fit
assert fit_fun and callable ( fit_fun ) , 'Invalid fit function!'
## 3. accept function?
if accept_fun is None :
if not silent : logger.info ( "make_toys2: use default ``accept_fit'' function!")
accept_fun = accept_fit
assert accept_fun and callable ( accept_fun ) , 'Invalid accept function!'
if progress and not silent :
assert isinstance ( frequency , integer_types ) and 0 < frequency,\
"make_toys2: invalid ``frequency'' parameter %s" % frequency
import ostap.fitting.roofit
import ostap.fitting.dataset
import ostap.fitting.variables
import ostap.fitting.roofitresult
import ostap.fitting.basic
gparams = gen_pdf.params ()
varset = ROOT.RooArgSet ()
if isinstance ( data , ROOT.RooAbsData ) : varset = data.varset()
else :
for v in data :
if isinstance ( v , ROOT.RooAbsArg ) :
varset.add ( v )
elif isinstance ( v , string_types ) and v in gparams :
varset.add ( gparams [ v ] )
else :
raise TypeError('Invalid variable %s/%s' % ( v , type ( v ) ) )
## parameters for generation
fix_gen_init = vars_transform ( gparams )
fix_gen_pars = vars_transform ( gen_pars )
## parameters for fitting
fparams = fit_pdf.params ()
fix_fit_init = vars_transform ( fparams )
fix_fit_pars = vars_transform ( fit_pars )
fitcnf = {}
fitcnf.update ( fit_config )
if not 'silent' in fitcnf : fitcnf [ 'silent' ] = silent
from collections import defaultdict
results = defaultdict(list)
from ostap.core.core import SE
fits = defaultdict ( SE ) ## fit statuses
covs = defaultdict ( SE ) ## covarinace matrix quality
## run pseudoexperiments
from ostap.utils.progress_bar import progress_bar
for i in progress_bar ( range ( nToys ) , silent = not progress ) :
## 1. reset PDF parameters
gen_pdf.load_params ( params = fix_gen_init , silent = silent )
gen_pdf.load_params ( params = fix_gen_pars , silent = silent )
## 2. generate dataset!
dataset = gen_fun ( gen_pdf , varset = varset , **gen_config )
if not silent : logger.info ( 'Generated dataset #%d\n%s' % ( i , dataset ) )
## 3. reset parameters of fit_pdf
fit_pdf.load_params ( params = fix_fit_init , silent = silent )
fit_pdf.load_params ( params = fix_fit_pars , silent = silent )
## 4. fit it!
r = fit_fun ( fit_pdf , dataset , **fitcnf )
## fit status
fits [ r.status () ] += 1
## covariance matrix quality
covs [ r.covQual () ] += 1
## ok ?
if accept_fun ( r , fit_pdf , dataset ) :
## 5. save results
rpf = r.params ( float_only = True )
for j in rpf :
results [ j ].append ( rpf [ j ] [ 0 ] )
for v in more_vars :
func = more_vars[v]
results [ v ] .append ( func ( r , fit_pdf ) )
results [ '#' ] .append ( len ( dataset ) )
results [ '#sumw' ] .append ( dataset.sumVar ( '1' ) )
dataset.clear()
del dataset
if progress or not silent :
if 0 < frequency and 1 <= i and 0 == ( i + 1 ) % frequency :
stats = make_stats ( results , fits , covs )
print_stats ( stats , i + 1 , logger = logger )
## make a final statistics
stats = make_stats ( results , fits , covs )
if progress or not silent :
print_stats ( stats , nToys , logger = logger )
return results, stats
# =============================================================================
## run Jackknife analysis, useful for evaluaton of fit biases and uncertainty estimates
#
# For each <code>i</code> remove event with index <code>i</code> from the dataset,
# and refit it.
# @code
# dataset = ...
# model = ...
# r , f = model.fitTo ( dataset , .... ) ## fit the whole dataset
# results, stats = make_jackknife ( model , data ) ## run Jackknife
# print_jackknife ( r , stats ) ## print summary table
# @endcode
# @see printJackknife
#
# Derived parameters can be also retrived via <code>more_vars</code> argument:
# @code
# ratio = lambda res,pdf : res.ratio('x','y')
# more_vars = { 'Ratio' : ratio }
# r, s = make_jackknife ( .... , more_vars = more_vars , ... )
# @endcode
#
# @see https://en.wikipedia.org/wiki/Jackknife_resampling
# @param pdf fit model
# @param data original dataset
# @param fit_config configuration of <code>pdf.FitTo( data , ... )</code>
# @param fit_pars redefine these parameters before each fit
# @param more_vars calculate more variables from the fit-results
# @param fit_fun fitting function
# @param accept_fun accept function
# @param event_range event range to use for jackknife
# @param silent silent processing
# @param progress show progress bar?
# @param logger use this logger
# @param frequency how often to dump the intermediate results ?
# @return statistics of jackknife experiments
def make_jackknife ( pdf ,
| |
# Source repository: osisoft/sample-ocs-sample_libraries-python
# File: ocs_sample_library_preview/Streams.py
import json
from jsonpatch import JsonPatch
from typing import Any
from .BaseClient import BaseClient
from .SDS.SdsBoundaryType import SdsBoundaryType
from .SDS.SdsResultPage import SdsResultPage
from .SDS.SdsStream import SdsStream
from .SDS.SdsType import SdsType
from .PatchableSecurable import PatchableSecurable
class Streams(PatchableSecurable, object):
"""
Client for interacting with Streams
"""
    def __init__(self, client: BaseClient):
        """
        :param client: base client that handles auth and base routing
        """
        super().__init__(client=client, collection='Streams')
        # Cache tenant id and API root from the shared client for URL building.
        self.__tenant = client.tenant
        self.__uri_api = client.uri_API
        self.__base_client = client
        # Initialize the URL templates (e.g. self.__stream_path) used below.
        self.__setPathAndQueryTemplates()
def getStream(self, namespace_id: str, stream_id: str) -> SdsStream:
"""
Retrieves a stream specified by 'stream_id' from the Sds Service
:param namespace_id: namespace to work against
:param stream_id: id of the stream
:return:the Stream as SdsStream
"""
if namespace_id is None:
raise TypeError
if stream_id is None:
raise TypeError
response = self.__base_client.request(
'get',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)))
self.__base_client.checkResponse(
response, f'Failed to get SdsStream, {stream_id}.')
result = SdsStream.fromJson(response.json())
return result
def getStreamType(self, namespace_id: str, stream_id: str) -> SdsType:
"""
Retrieves a stream specified by 'stream_id' from the Sds Service
:param namespace_id: namespace to work against
:param stream_id: id of the stream
:return: the stream type as an SdsType
"""
if namespace_id is None:
raise TypeError
if stream_id is None:
raise TypeError
response = self.__base_client.request(
'get',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)) + '/Type')
self.__base_client.checkResponse(
response, f'Failed to get SdsStream type, {stream_id}.')
result = SdsType.fromJson(response.json())
return result
def getStreams(self, namespace_id: str, query: str = '', skip: int = 0,
count: int = 100) -> list[SdsStream]:
"""
Retrieves a list of streams associated with 'namespace_id' under the current tenant
:param namespace_id: namespace to work against
:param query: filtering query
:param skip: number of streams to skip for paging
:param count: number of streams to limit to
:return: array of SdsStreams
"""
if namespace_id is None:
raise TypeError
if query is None:
raise TypeError
response = self.__base_client.request(
'get',
self.__streams_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id),
params={'query': query, 'skip': skip, 'count': count})
self.__base_client.checkResponse(
response, 'Failed to get all SdsStreams.')
content = response.json()
results: list[SdsStream] = []
for item in content:
results.append(SdsStream.fromJson(item))
return results
def getOrCreateStream(self, namespace_id: str, stream: SdsStream) -> SdsStream:
"""
Tells Sds Service to create a stream based on the local 'stream' SdsStream object
:param namespace_id: namespace to work against
:param stream: the stream to Create or retrieve, as a SDsStream
:return: the created Stream as an SdsStream
"""
if namespace_id is None:
raise TypeError
if stream is None or not isinstance(stream, SdsStream):
raise TypeError
response = self.__base_client.request(
'post',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream.Id)),
data=stream.toJson())
self.__base_client.checkResponse(
response, f'Failed to create SdsStream, {stream.Id}.')
result = SdsStream.fromJson(response.json())
return result
def createOrUpdateStream(self, namespace_id: str, stream: SdsStream):
"""
Tells Sds Service to create a stream based on the local 'stream' SdsStream object
:param namespace_id: namespace to work against
:param stream: the stream to Create or update, as a SDsStream
:return: the created or updated Stream as an SdsStream
"""
if namespace_id is None:
raise TypeError
if stream is None or not isinstance(stream, SdsStream):
raise TypeError
response = self.__base_client.request(
'put',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream.Id)),
data=stream.toJson())
self.__base_client.checkResponse(
response, f'Failed to create SdsStream, {stream.Id}.')
def updateStreamType(self, namespace_id: str, stream_id: str, stream_view_id: str):
"""
Tells Sds Service to update a stream based on the local 'stream' SdsStream object
:param namespace_id: namespace to work against
:param stream_id: id of the stream to change the type of
:param stream_view_id: if of the streamview to change the type to
:return:
"""
if namespace_id is None:
raise TypeError
if stream_id is None:
raise TypeError
if stream_view_id is None:
raise TypeError
response = self.__base_client.request(
'put',
self.__stream_type_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)),
params={'streamViewId': stream_view_id})
self.__base_client.checkResponse(
response, f'Failed to update SdsStream type, {stream_id}.')
def deleteStream(self, namespace_id: str, stream_id: str):
"""
Tells Sds Service to delete the stream speficied by 'stream_id'
:param namespace_id: id of namespace to work against
:param stream_id: id of the stream to delete
:return:
"""
if namespace_id is None:
raise TypeError
if stream_id is None:
raise TypeError
response = self.__base_client.request(
'delete',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)))
self.__base_client.checkResponse(
response, f'Failed to delete SdsStream, {stream_id}.')
def createOrUpdateTags(self, namespace_id: str, stream_id: str, tags: list[str]):
"""
Tells Sds Service to create tags and associate them with the given stream_id
:param namespace_id: id of namespace to work against
:param stream_id: id of the stream to update with tags
:param tags: tags to create or update. expected for is an array of strings
:return:
"""
if namespace_id is None:
raise TypeError
response = self.__base_client.request(
'put',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)) + '/Tags',
data=json.dumps(tags))
self.__base_client.checkResponse(
response, f'Failed to create tags for Stream: {stream_id}.')
def createOrUpdateMetadata(self, namespace_id: str, stream_id: str, metadata: dict[str, str]):
"""
Tells Sds Service to create metadata and associate them with the given stream_id
:param namespace_id: id of namespace to work against
:param stream_id: id of the stream to update with metadata
:param metadata: metadata to create or update. expected for is an dict(string,string)
:return:
"""
if namespace_id is None:
raise TypeError
response = self.__base_client.request(
'put',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)) + '/Metadata',
data=json.dumps(metadata))
self.__base_client.checkResponse(
response, f'Failed to create metadata for Stream: {stream_id}.')
def patchMetadata(self, namespace_id: str, stream_id: str, patch: list[dict, Any]):
"""
Tells Sds Service to update metadata on the given streamId
:param namespace_id: id of namespace to work against
:param stream_id: id of the stream to update with metadata
:param patch: a JSON patch document
:return:
"""
if namespace_id is None:
raise TypeError
response = self.__base_client.request(
'patch',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)) + '/Metadata',
data=json.dumps(patch))
self.__base_client.checkResponse(
response, f'Failed to update metadata for Stream: {stream_id}.')
def getTags(self, namespace_id: str, stream_id: str) -> list[str]:
"""
Tells Sds Service to get tags associated with the given stream_id
:param namespace_id: id of namespace to work against
:param stream_id: id of the stream to get the tags of
:return: stream's tags
"""
if namespace_id is None:
raise TypeError
response = self.__base_client.request(
'get',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)) + '/Tags')
self.__base_client.checkResponse(
response, f'Failed to get tags for Stream: {stream_id}.')
result = response.json()
return result
def getMetadata(self, namespace_id: str, stream_id: str, key: str) -> Any:
"""
Tells Sds Service to get metadata associated with the given stream_id and key
:param namespace_id: id of namespace to work against
:param stream_id: id of the stream to get the metadata value of
:param key: specific metadata field to retrieve
:return: value at the key
"""
if namespace_id is None:
raise TypeError
response = self.__base_client.request(
'get',
self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)) + '/Metadata/' + key)
self.__base_client.checkResponse(
response, f'Failed to get metadata for Stream: {stream_id}.')
result = response.json()
return result
# The following section provides functionality to interact with Data
# We assume the value(s) passed follow the Sds object patterns
# supporting fromJson and toJson method
def getValue(self, namespace_id: str, stream_id: str, index: int,
value_class: type = None) -> Any:
"""
Retrieves JSON object from Sds Service for value specified by 'index' from Sds Service
:param namespace_id: id of namespace to work against
:param stream_id: id of the stream to get the data of
:param index: index at which to get a value
:param value_class: use this to cast the value into a given type.
Type must support .fromJson() Default is None.
If None returns a dynamic Python object from the data.
:return: the value. If value_class is defined it is in this type.
Otherwise it is a dynamic Python object
"""
if namespace_id is None:
raise TypeError
if stream_id is None:
raise TypeError
if index is None:
raise TypeError
return self.getValueUrl(self.__stream_path.format(
tenant_id=self.__tenant,
namespace_id=namespace_id,
stream_id=self.__base_client.encode(stream_id)), index, value_class)
def getValueUrl(self, url: str, index: int, value_class: type = None) -> Any:
"""
Retrieves JSON object from Sds Service for value specified by 'index' from Sds Service
:param url: The URL path to the stream
:param index: index at which to get a value
:param value_class: use this to cast the value into a given type.
Type must support .fromJson() Default is None.
If None returns a dynamic Python object from the data.
:return: the value. If value_class is defined it is in this type.
Otherwise it is a dynamic Python object
"""
if url | |
1 2 0 594 591
1 1 2 0 594 588
1 1 2 0 594 582
1 1 2 0 588 582
1 1 2 0 588 579
1 1 2 0 585 579
1 1 2 0 573 543
1 1 2 0 573 540
1 1 2 0 573 531
1 1 2 0 573 510
1 1 2 0 573 504
1 1 2 0 573 489
1 1 2 0 573 474
1 1 2 0 573 465
1 1 2 0 570 561
1 1 2 0 570 522
1 1 2 0 570 516
1 1 2 0 570 504
1 1 2 0 570 489
1 1 2 0 570 477
1 1 2 0 570 462
1 1 2 0 567 564
1 1 2 0 567 546
1 1 2 0 567 522
1 1 2 0 567 507
1 1 2 0 567 504
1 1 2 0 567 474
1 1 2 0 567 462
1 1 2 0 564 558
1 1 2 0 564 495
1 1 2 0 564 477
1 1 2 0 564 474
1 1 2 0 564 462
1 1 2 0 564 456
1 1 2 0 561 534
1 1 2 0 561 522
1 1 2 0 561 507
1 1 2 0 561 498
1 1 2 0 561 471
1 1 2 0 558 555
1 1 2 0 558 543
1 1 2 0 558 540
1 1 2 0 558 492
1 1 2 0 558 474
1 1 2 0 558 456
1 1 2 0 555 546
1 1 2 0 555 543
1 1 2 0 555 540
1 1 2 0 555 534
1 1 2 0 555 531
1 1 2 0 555 519
1 1 2 0 555 513
1 1 2 0 555 471
1 1 2 0 552 546
1 1 2 0 552 528
1 1 2 0 552 525
1 1 2 0 552 513
1 1 2 0 552 507
1 1 2 0 552 501
1 1 2 0 552 459
1 1 2 0 549 543
1 1 2 0 549 540
1 1 2 0 549 513
1 1 2 0 546 513
1 1 2 0 546 495
1 1 2 0 546 471
1 1 2 0 543 540
1 1 2 0 543 534
1 1 2 0 543 510
1 1 2 0 543 483
1 1 2 0 543 474
1 1 2 0 540 531
1 1 2 0 540 528
1 1 2 0 540 519
1 1 2 0 540 513
1 1 2 0 540 510
1 1 2 0 540 486
1 1 2 0 540 456
1 1 2 0 537 510
1 1 2 0 537 498
1 1 2 0 537 477
1 1 2 0 537 465
1 1 2 0 534 519
1 1 2 0 534 510
1 1 2 0 534 507
1 1 2 0 534 504
1 1 2 0 534 471
1 1 2 0 534 465
1 1 2 0 534 456
1 1 2 0 531 510
1 1 2 0 531 507
1 1 2 0 531 492
1 1 2 0 531 468
1 1 2 0 531 462
1 1 2 0 528 513
1 1 2 0 528 495
1 1 2 0 528 456
1 1 2 0 525 522
1 1 2 0 525 513
1 1 2 0 525 486
1 1 2 0 525 483
1 1 2 0 525 465
1 1 2 0 522 519
1 1 2 0 522 495
1 1 2 0 522 489
1 1 2 0 522 468
1 1 2 0 519 495
1 1 2 0 519 489
1 1 2 0 519 480
1 1 2 0 519 468
1 1 2 0 516 501
1 1 2 0 516 489
1 1 2 0 516 483
1 1 2 0 516 480
1 1 2 0 516 459
1 1 2 0 513 495
1 1 2 0 513 462
1 1 2 0 513 459
1 1 2 0 510 486
1 1 2 0 510 474
1 1 2 0 510 456
1 1 2 0 507 486
1 1 2 0 507 483
1 1 2 0 507 456
1 1 2 0 504 501
1 1 2 0 504 486
1 1 2 0 504 483
1 1 2 0 504 462
1 1 2 0 504 456
1 1 2 0 501 483
1 1 2 0 501 468
1 1 2 0 501 459
1 1 2 0 501 456
1 1 2 0 498 489
1 1 2 0 498 471
1 1 2 0 495 492
1 1 2 0 495 465
1 1 2 0 492 468
1 1 2 0 492 465
1 1 2 0 492 456
1 1 2 0 489 471
1 1 2 0 489 456
1 1 2 0 486 474
1 1 2 0 486 456
1 1 2 0 483 465
1 1 2 0 480 462
1 1 2 0 480 456
1 1 2 0 477 471
1 1 2 0 477 459
1 1 2 0 474 471
1 1 2 0 474 468
1 1 2 0 474 462
1 1 2 0 468 462
1 1 2 0 468 459
1 1 2 0 465 459
1 1 2 0 453 423
1 1 2 0 453 420
1 1 2 0 453 411
1 1 2 0 453 390
1 1 2 0 453 384
1 1 2 0 453 369
1 1 2 0 453 354
1 1 2 0 453 345
1 1 2 0 450 441
1 1 2 0 450 402
1 1 2 0 450 396
1 1 2 0 450 384
1 1 2 0 450 369
1 1 2 0 450 357
1 1 2 0 450 342
1 1 2 0 447 444
1 1 2 0 447 426
1 1 2 0 447 402
1 1 2 0 447 387
1 1 2 0 447 384
1 1 2 0 447 354
1 1 2 0 447 342
1 1 2 0 444 438
1 1 2 0 444 375
1 1 2 0 444 357
1 1 2 0 444 354
1 1 2 0 444 342
1 1 2 0 444 336
1 1 2 0 441 414
1 1 2 0 441 402
1 1 2 0 441 387
1 1 2 0 441 378
1 1 2 0 441 351
1 1 2 0 438 435
1 1 2 0 438 423
1 1 2 0 438 420
1 1 2 0 438 372
1 1 2 0 438 354
1 1 2 0 438 336
1 1 2 0 435 426
1 1 2 0 435 423
1 1 2 0 435 420
1 1 2 0 435 414
1 1 2 0 435 411
1 1 2 0 435 399
1 1 2 0 435 393
1 1 2 0 435 351
1 1 2 0 432 426
1 1 2 0 432 408
1 1 2 0 432 405
1 1 2 0 432 393
1 1 2 0 432 387
1 1 2 0 432 381
1 1 2 0 432 339
1 1 2 0 429 423
1 1 2 0 429 420
1 1 2 0 429 393
1 1 2 0 426 393
1 1 2 0 426 375
1 1 2 0 426 351
1 1 2 0 423 420
1 1 2 0 423 414
1 1 2 0 423 390
1 1 2 0 423 363
1 1 2 0 423 354
1 1 2 0 420 411
1 1 2 0 420 408
1 1 2 0 420 399
1 1 2 0 420 393
1 1 2 0 420 390
1 1 2 0 420 366
1 1 2 0 420 336
1 1 2 0 417 390
1 1 2 0 417 378
1 1 2 0 417 357
1 1 2 0 417 345
1 1 2 0 414 399
1 1 2 0 414 390
1 1 2 0 414 387
1 1 2 0 414 384
1 1 2 0 414 351
1 1 2 0 414 345
1 1 2 0 414 336
1 1 2 0 411 390
1 1 2 0 411 387
1 1 2 0 411 372
1 1 2 0 411 348
1 1 2 0 411 342
1 1 2 0 408 393
1 1 2 0 408 375
1 1 2 0 408 336
1 1 2 0 405 402
1 1 2 0 405 393
1 1 2 0 405 366
1 1 2 0 405 363
1 1 2 0 405 345
1 1 2 0 402 399
1 1 2 0 402 375
1 1 2 0 402 369
1 1 2 0 402 348
1 1 2 0 399 375
1 | |
import copy
import itertools
import multiprocessing
import string
import traceback
import warnings
from multiprocessing import Pool
from operator import itemgetter
import jellyfish as jf
import numpy as np
import pandas as pd
from scipy.optimize import linear_sum_assignment
from scipy.stats import wasserstein_distance
from simod.configuration import Configuration, Metric
from . import alpha_oracle as ao
from .alpha_oracle import Rel
from ..support_utils import progress_bar_async
class SimilarityEvaluator:
"""Evaluates the similarity of two event-logs."""
def __init__(self, log_data: pd.DataFrame, simulation_data: pd.DataFrame, settings: Configuration, max_cases=500,
dtype='log'):
self.dtype = dtype
self.log_data = copy.deepcopy(log_data)
self.simulation_data = copy.deepcopy(simulation_data)
self.max_cases = max_cases
self.one_timestamp = settings.read_options.one_timestamp
self._preprocess_data(dtype)
def _preprocess_data(self, dtype):
preprocessor = self._get_preprocessor(dtype)
return preprocessor()
def _get_preprocessor(self, dtype):
if dtype == 'log':
return self._preprocess_log
elif dtype == 'serie':
return self._preprocess_serie
else:
raise ValueError(dtype)
    def _preprocess_log(self):
        """Prepare both logs for trace-level comparison.

        Concatenates real and simulated events, derives/scales timing columns
        where missing, builds the task alias table and the alpha-concurrency
        oracle, then reformats events into per-trace records.  Simulated
        traces lose a warm-up/cool-down fraction (``ramp_io_perc``) at each
        end, and the real log is randomly resampled (with replacement) down
        to the same number of traces.
        """
        self.ramp_io_perc = 0.2
        # Tag provenance so rows can be split apart again after the shared
        # time-derivation/scaling steps.
        self.log_data['source'] = 'log'
        self.simulation_data['source'] = 'simulation'
        data = pd.concat([self.log_data, self.simulation_data], axis=0, ignore_index=True)
        # Only derive times when either derived column is absent.
        if (('processing_time' not in data.columns) or ('waiting_time' not in data.columns)):
            data = self.calculate_times(data)
        data = self.scaling_data(data)
        # save data
        self.log_data = data[data.source == 'log']
        self.simulation_data = data[data.source == 'simulation']
        self.alias = self.create_task_alias(data, 'task')
        self.alpha_concurrency = ao.AlphaOracle(self.log_data, self.alias, self.one_timestamp, True)
        # reformat and sampling data
        self.log_data = self.reformat_events(self.log_data.to_dict('records'), 'task')
        self.simulation_data = self.reformat_events(self.simulation_data.to_dict('records'), 'task')
        # Drop the first and last ramp_io_perc share of simulated traces
        # (simulation warm-up / cool-down).
        num_traces = int(len(self.simulation_data) * self.ramp_io_perc)
        self.simulation_data = self.simulation_data[num_traces:-num_traces]
        # Resample the real log with replacement to the simulated size.
        self.log_data = list(map(lambda i: self.log_data[i],
                                 np.random.randint(0, len(self.log_data), len(self.simulation_data))))
def _preprocess_serie(self):
# load data
self.log_data['source'] = 'log'
self.simulation_data['source'] = 'simulation'
def measure_distance(self, metric: Metric, verbose=False):
"""
Measures the distance of two event-logs
with with tsd or dl and mae distance
Returns
-------
distance : float
"""
self.verbose = verbose
# similarity measurement and matching
evaluator = self._get_evaluator(metric)
if metric in [Metric.DAY_EMD, Metric.DAY_HOUR_EMD, Metric.CAL_EMD]:
distance = evaluator(self.log_data, self.simulation_data, criteria=metric)
else:
distance = evaluator(self.log_data, self.simulation_data, metric)
self.similarity = {'metric': metric, 'sim_val': np.mean([x['sim_score'] for x in distance])}
def _get_evaluator(self, metric: Metric):
if self.dtype == 'log':
if metric in [Metric.TSD, Metric.DL, Metric.MAE, Metric.DL_MAE]:
return self._evaluate_seq_distance
elif metric is Metric.LOG_MAE:
return self.log_mae_metric
elif metric in [Metric.HOUR_EMD, Metric.DAY_EMD, Metric.DAY_HOUR_EMD, Metric.CAL_EMD]:
return self.log_emd_metric
else:
raise ValueError(metric)
elif self.dtype == 'serie':
if metric in [Metric.HOUR_EMD, Metric.DAY_EMD, Metric.DAY_HOUR_EMD, Metric.CAL_EMD]:
return self.serie_emd_metric
else:
raise ValueError(metric)
else:
raise ValueError(self.dtype)
# =============================================================================
# Timed string distance
# =============================================================================
    def _evaluate_seq_distance(self, log_data, simulation_data, metric: Metric):
        """
        Timed string distance calculation.

        Scores every (simulated, real) trace pair, then matches traces 1:1
        with the Hungarian algorithm on the resulting cost matrix.  Runs in
        a single process for small logs and fans out over a process pool
        when the real log has more than ``self.max_cases`` cases.

        Parameters
        ----------
        log_data : Ground truth list
        simulation_data : List

        Returns
        -------
        similarity : list of dicts, one per matched pair, each carrying a
            'sim_score' (similarity in [0, 1], or the raw cost for Metric.MAE)
        """
        similarity = list()
        # define the type of processing sequencial or parallel
        cases = len(set([x['caseid'] for x in log_data]))
        if cases <= self.max_cases:
            # Single worker scores the full cross product in-process.
            args = (metric, simulation_data, log_data,
                    self.alpha_concurrency.oracle,
                    ({'min': 0, 'max': len(simulation_data)},
                     {'min': 0, 'max': len(log_data)}))
            df_matrix = self._compare_traces(args)
        else:
            cpu_count = multiprocessing.cpu_count()
            mx_len = len(log_data)
            # Tile the (i, j) index space; each worker scores one tile.
            ranges = self.define_ranges(mx_len, int(np.ceil(cpu_count / 2)))
            ranges = list(itertools.product(*[ranges, ranges]))
            reps = len(ranges)
            pool = Pool(processes=cpu_count)
            # Generate
            args = [(metric, simulation_data[r[0]['min']:r[0]['max']],
                     log_data[r[1]['min']:r[1]['max']],
                     self.alpha_concurrency.oracle,
                     r) for r in ranges]
            p = pool.map_async(self._compare_traces, args)
            if self.verbose:
                progress_bar_async(p, f'evaluating {metric}:', reps)
            pool.close()
            # Save results
            df_matrix = pd.concat(list(p.get()), axis=0, ignore_index=True)
        # Index by absolute pair position so unstack() yields the cost matrix.
        df_matrix.sort_values(by=['i', 'j'], inplace=True)
        df_matrix = df_matrix.reset_index().set_index(['i', 'j'])
        if metric == Metric.DL_MAE:
            # Blend structural (DL) and cycle-time (MAE) costs 50/50,
            # normalizing MAE into [0, 1] first.
            dl_matrix = df_matrix[['dl_distance']].unstack().to_numpy()
            mae_matrix = df_matrix[['mae_distance']].unstack().to_numpy()
            # MAE normalized
            max_mae = mae_matrix.max()
            mae_matrix = np.divide(mae_matrix, max_mae)
            # multiple both matrixes by Beta equal to 0.5
            dl_matrix = np.multiply(dl_matrix, 0.5)
            mae_matrix = np.multiply(mae_matrix, 0.5)
            # add each point in between
            cost_matrix = np.add(dl_matrix, mae_matrix)
        else:
            cost_matrix = df_matrix[['distance']].unstack().to_numpy()
        # Optimal 1:1 assignment between simulated and real traces.
        row_ind, col_ind = linear_sum_assignment(np.array(cost_matrix))
        # Create response
        for idx, idy in zip(row_ind, col_ind):
            similarity.append(dict(caseid=simulation_data[idx]['caseid'],
                                   sim_order=simulation_data[idx]['profile'],
                                   log_order=log_data[idy]['profile'],
                                   sim_score=(cost_matrix[idx][idy]
                                              if metric == Metric.MAE else
                                              (1 - (cost_matrix[idx][idy])))
                                   )
                            )
        return similarity
    @staticmethod
    def _compare_traces(args):
        """Worker: score every (simulated, real) trace pair in a slice.

        *args* is ``(metric, serie1, serie2, oracle, r)`` where ``r`` carries
        the absolute offsets of both slices so the resulting frame's ``i``/``j``
        columns stay globally consistent across pool workers.  Returns a
        DataFrame with one row per trace pair.
        """

        def ae_distance(et_1, et_2, st_1, st_2):
            # Absolute difference of the two traces' cycle times, in seconds.
            cicle_time_s1 = (et_1 - st_1).total_seconds()
            cicle_time_s2 = (et_2 - st_2).total_seconds()
            ae = np.abs(cicle_time_s1 - cicle_time_s2)
            return ae

        def tsd_alpha(s_1, s_2, p_1, p_2, w_1, w_2, alpha_concurrency):
            """
            Compute the Damerau-Levenshtein distance between two given
            strings (s_1 and s_2), with time-weighted substitution costs and
            transpositions allowed only for concurrent activities.

            Parameters
            ----------
            s_1, s_2 : activity sequences
            p_1, p_2, w_1, w_2 : normalized processing / waiting times per activity
            alpha_concurrency : dict mapping activity pairs to Rel

            Returns
            -------
            Float
            """

            def calculate_cost(s1_idx, s2_idx):
                # Substitution cost: mix of processing-time and waiting-time
                # differences, weighted by the processing share b_1 of the
                # first trace's activity.
                t_1 = p_1[s1_idx] + w_1[s1_idx]
                if t_1 > 0:
                    b_1 = (p_1[s1_idx] / t_1)
                    cost = ((b_1 * np.abs(p_2[s2_idx] - p_1[s1_idx])) +
                            ((1 - b_1) * np.abs(w_2[s2_idx] - w_1[s1_idx])))
                else:
                    cost = 0
                return cost

            # Classic DP table, keyed by (i, j) with a -1 sentinel border.
            dist = {}
            lenstr1 = len(s_1)
            lenstr2 = len(s_2)
            for i in range(-1, lenstr1 + 1):
                dist[(i, -1)] = i + 1
            for j in range(-1, lenstr2 + 1):
                dist[(-1, j)] = j + 1
            for i in range(0, lenstr1):
                for j in range(0, lenstr2):
                    if s_1[i] == s_2[j]:
                        cost = calculate_cost(i, j)
                    else:
                        cost = 1
                    dist[(i, j)] = min(
                        dist[(i - 1, j)] + 1,  # deletion
                        dist[(i, j - 1)] + 1,  # insertion
                        dist[(i - 1, j - 1)] + cost  # substitution
                    )
                    if i and j and s_1[i] == s_2[j - 1] and s_1[i - 1] == s_2[j]:
                        # Transposition only when the oracle marks the two
                        # activities as parallel.
                        if alpha_concurrency[(s_1[i], s_2[j])] == Rel.PARALLEL:
                            cost = calculate_cost(i, j - 1)
                            dist[(i, j)] = min(dist[(i, j)], dist[i - 2, j - 2] + cost)  # transposition
            return dist[lenstr1 - 1, lenstr2 - 1]

        def gen(metric: Metric, serie1, serie2, oracle, r):
            """Reads the simulation results stats"""
            try:
                # Build one row per (simulated, real) trace pair with only
                # the fields the requested metric needs.
                df_matrix = list()
                for i, s1_ele in enumerate(serie1):
                    for j, s2_ele in enumerate(serie2):
                        element = {'i': r[0]['min'] + i, 'j': r[1]['min'] + j}
                        if metric in [Metric.TSD, Metric.DL, Metric.DL_MAE]:
                            element['s_1'] = s1_ele['profile']
                            element['s_2'] = s2_ele['profile']
                            element['length'] = max(len(s1_ele['profile']), len(s2_ele['profile']))
                        if metric is Metric.TSD:
                            element['p_1'] = s1_ele['proc_act_norm']
                            element['p_2'] = s2_ele['proc_act_norm']
                            element['w_1'] = s1_ele['wait_act_norm']
                            element['w_2'] = s2_ele['wait_act_norm']
                        if metric in [Metric.MAE, Metric.DL_MAE]:
                            element['et_1'] = s1_ele['end_time']
                            element['et_2'] = s2_ele['end_time']
                            element['st_1'] = s1_ele['start_time']
                            element['st_2'] = s2_ele['start_time']
                        df_matrix.append(element)
                df_matrix = pd.DataFrame(df_matrix)
                # Distances are normalized by the longer profile length.
                if metric is Metric.TSD:
                    df_matrix['distance'] = df_matrix.apply(
                        lambda x: tsd_alpha(x.s_1, x.s_2, x.p_1, x.p_2, x.w_1, x.w_2, oracle) / x.length, axis=1)
                elif metric is Metric.DL:
                    df_matrix['distance'] = df_matrix.apply(
                        lambda x: jf.damerau_levenshtein_distance(''.join(x.s_1), ''.join(x.s_2)) / x.length, axis=1)
                elif metric is Metric.MAE:
                    df_matrix['distance'] = df_matrix.apply(
                        lambda x: ae_distance(x.et_1, x.et_2, x.st_1, x.st_2), axis=1)
                elif metric is Metric.DL_MAE:
                    df_matrix['dl_distance'] = df_matrix.apply(
                        lambda x: jf.damerau_levenshtein_distance(''.join(x.s_1), ''.join(x.s_2)) / x.length, axis=1)
                    df_matrix['mae_distance'] = df_matrix.apply(
                        lambda x: ae_distance(x.et_1, x.et_2, x.st_1, x.st_2), axis=1)
                else:
                    raise ValueError(metric)
                return df_matrix
            except Exception:
                # NOTE(review): swallowing the exception makes gen() return
                # None, which the caller later crashes on (e.g. pd.concat) —
                # consider re-raising after printing the traceback.
                traceback.print_exc()

        return gen(*args)
# =============================================================================
# whole log MAE
# =============================================================================
def log_mae_metric(self, log_data: list, simulation_data: list, metric: Metric) -> list:
"""
Measures the MAE distance between two whole logs
Parameters
----------
log_data : list
simulation_data : list
Returns
-------
list
"""
similarity = list()
log_data = pd.DataFrame(log_data)
simulation_data = pd.DataFrame(simulation_data)
log_timelapse = (log_data.end_time.max() - log_data.start_time.min()).total_seconds()
sim_timelapse = (simulation_data.end_time.max() - simulation_data.start_time.min()).total_seconds()
similarity.append({'sim_score': np.abs(sim_timelapse - log_timelapse)})
return similarity
# =============================================================================
# Log emd distance
# =============================================================================
    def log_emd_metric(self, log_data: list, simulation_data: list, criteria: Metric = Metric.HOUR_EMD) -> list:
        """
        Measures the EMD distance between two logs on different aggregation
        levels specified by user, by default per hour.

        Parameters
        ----------
        log_data : list
        simulation_data : list
        criteria : Metric, optional
            Aggregation level (hour window, weekday, weekday+window, or
            calendar date). The default is Metric.HOUR_EMD.

        Returns
        -------
        list
            One dict per time bucket with the Wasserstein (EMD) score.
        """
        similarity = list()
        window = 1
        # hist_range = [0, int((window * 3600))]
        log_data = pd.DataFrame(log_data)
        simulation_data = pd.DataFrame(simulation_data)

        def split_date_time(dataframe, feature, source):
            # Expand one timestamp column into (timestamp, date, window)
            # rows, where 'window' buckets the hour of day into window-hour
            # bins, and tag every row with its source.
            day_hour = lambda x: x[feature].hour
            dataframe['hour'] = dataframe.apply(day_hour, axis=1)
            date = lambda x: x[feature].date()
            dataframe['date'] = dataframe.apply(date, axis=1)
            # create time windows
            i = 0
            daily_windows = dict()
            for hour in range(24):
                if hour % window == 0:
                    i += 1
                daily_windows[hour] = i
            dataframe = dataframe.merge(
                pd.DataFrame.from_dict(daily_windows, orient='index').rename_axis('hour'),
                on='hour',
                how='left').rename(columns={0: 'window'})
            dataframe = dataframe[[feature, 'date', 'window']]
            dataframe.rename(columns={feature: 'timestamp'}, inplace=True)
            dataframe['timestamp'] = pd.to_datetime(dataframe['timestamp'], utc=True)
            dataframe['source'] = source
            return dataframe

        # Both start and end events of each source contribute rows.
        data = split_date_time(log_data, 'start_time', 'log')
        data = pd.concat([data, split_date_time(log_data, 'end_time', 'log')], ignore_index=True)
        data = pd.concat([data, split_date_time(simulation_data, 'start_time', 'sim')], ignore_index=True)
        data = pd.concat([data, split_date_time(simulation_data, 'end_time', 'sim')], ignore_index=True)
        data['weekday'] = data.apply(lambda x: x.date.weekday(), axis=1)
        # Grouping key(s) per aggregation criteria.
        g_criteria = {Metric.HOUR_EMD: 'window', Metric.DAY_EMD: 'weekday', Metric.DAY_HOUR_EMD: ['weekday', 'window'],
                      Metric.CAL_EMD: 'date'}
        similarity = list()
        for key, group in data.groupby(g_criteria[criteria]):
            w_df = group.copy()
            w_df = w_df.reset_index()
            # Times relative to the bucket's first whole hour.
            basetime = w_df.timestamp.min().floor(freq='H')
            diftime = lambda x: (x['timestamp'] - basetime).total_seconds()
            w_df['rel_time'] = w_df.apply(diftime, axis=1)
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                log_hist = np.histogram(w_df[w_df.source == 'log'].rel_time, density=True)
                sim_hist = np.histogram(w_df[w_df.source == 'sim'].rel_time, density=True)
            if np.isnan(np.sum(log_hist[0])) or np.isnan(np.sum(sim_hist[0])):
                # One side has no events in this bucket: the density
                # histogram is NaN, so score 0 instead of propagating NaN.
                similarity.append({'window': key, 'sim_score': 0})
            else:
                similarity.append({'window': key, 'sim_score': wasserstein_distance(log_hist[0], sim_hist[0])})
        return similarity
# =============================================================================
# serie emd distance
# =============================================================================
def serie_emd_metric(self, log_data, simulation_data, criteria: Metric = Metric.HOUR_EMD):
similarity = list()
window = 1
log_data = pd.DataFrame(log_data)
simulation_data | |
inferred.
Returns
-------
%(klass)s
See Also
--------
interval_range : Function to create a fixed frequency IntervalIndex.
%(klass)s.from_arrays : Construct an %(klass)s from a left and
right array.
%(klass)s.from_breaks : Construct an %(klass)s from an array of
splits.
%(examples)s\
"""
)
    @classmethod
    @Appender(
        _interval_shared_docs["from_tuples"]
        % {
            "klass": "IntervalArray",
            "examples": textwrap.dedent(
                """\
        Examples
        --------
        >>> pd.arrays.IntervalArray.from_tuples([(0, 1), (1, 2)])
        <IntervalArray>
        [(0, 1], (1, 2]]
        Length: 2, dtype: interval[int64, right]
        """
            ),
        }
    )
    def from_tuples(
        cls, data, closed="right", copy=False, dtype: Optional[Dtype] = None
    ):
        # Split the (left, right) tuples into two parallel endpoint lists,
        # then delegate construction and validation to from_arrays.
        if len(data):
            left, right = [], []
        else:
            # ensure that empty data keeps input dtype
            left = right = data

        for d in data:
            if isna(d):
                # NA entries become NaN on both endpoints.
                lhs = rhs = np.nan
            else:
                name = cls.__name__
                try:
                    # need list of length 2 tuples, e.g. [(0, 1), (1, 2), ...]
                    lhs, rhs = d
                except ValueError as err:
                    msg = f"{name}.from_tuples requires tuples of length 2, got {d}"
                    raise ValueError(msg) from err
                except TypeError as err:
                    msg = f"{name}.from_tuples received an invalid item, {d}"
                    raise TypeError(msg) from err
            left.append(lhs)
            right.append(rhs)

        return cls.from_arrays(left, right, closed, copy=False, dtype=dtype)
def _validate(self):
"""
Verify that the IntervalArray is valid.
Checks that
* closed is valid
* left and right match lengths
* left and right have the same missing values
* left is always below right
"""
if self.closed not in VALID_CLOSED:
msg = f"invalid option for 'closed': {self.closed}"
raise ValueError(msg)
if len(self._left) != len(self._right):
msg = "left and right must have the same length"
raise ValueError(msg)
left_mask = notna(self._left)
right_mask = notna(self._right)
if not (left_mask == right_mask).all():
msg = (
"missing values must be missing in the same "
"location both left and right sides"
)
raise ValueError(msg)
if not (self._left[left_mask] <= self._right[left_mask]).all():
msg = "left side of interval must be <= right side"
raise ValueError(msg)
    def _shallow_copy(self, left, right):
        """
        Return a new IntervalArray with the replacement attributes

        Parameters
        ----------
        left : Index
            Values to be used for the left-side of the intervals.
        right : Index
            Values to be used for the right-side of the intervals.

        Returns
        -------
        IntervalArray
            New array sharing this array's ``closed`` side.  Integrity
            checks are skipped: callers are expected to pass pre-validated
            endpoints.
        """
        return self._simple_new(left, right, closed=self.closed, verify_integrity=False)
# ---------------------------------------------------------------------
# Descriptive
    @property
    def dtype(self):
        """Return the dtype object cached on this array."""
        return self._dtype

    @property
    def nbytes(self) -> int:
        """Total bytes consumed by the left and right endpoint arrays."""
        return self.left.nbytes + self.right.nbytes

    @property
    def size(self) -> int:
        """Number of elements, taken from the left endpoint array."""
        # Avoid materializing self.values
        return self.left.size
# ---------------------------------------------------------------------
# EA Interface
    def __iter__(self):
        # Delegate to the materialized ndarray form of the array.
        return iter(np.asarray(self))

    def __len__(self) -> int:
        # Length is the number of left endpoints (right always matches).
        return len(self._left)
    def __getitem__(self, key):
        """Scalar index -> Interval (or the fill value for NA); array-like
        index -> a new IntervalArray view of the selected endpoints."""
        key = check_array_indexer(self, key)
        left = self._left[key]
        right = self._right[key]

        if not isinstance(left, (np.ndarray, ExtensionArray)):
            # scalar
            if is_scalar(left) and isna(left):
                return self._fill_value
            return Interval(left, right, self.closed)
        if np.ndim(left) > 1:
            # GH#30588 multi-dimensional indexer disallowed
            raise ValueError("multi-dimensional indexing not allowed")
        return self._shallow_copy(left, right)
    def __setitem__(self, key, value):
        # Split/validate the value into endpoint components first, then
        # write both endpoint arrays at the indexed positions.
        value_left, value_right = self._validate_setitem_value(value)
        key = check_array_indexer(self, key)
        self._left[key] = value_left
        self._right[key] = value_right
    def _cmp_method(self, other, op):
        """Shared implementation behind the rich-comparison dunders."""
        # ensure pandas array for list-like and eliminate non-interval scalars
        if is_list_like(other):
            if len(self) != len(other):
                raise ValueError("Lengths must match to compare")
            other = array(other)
        elif not isinstance(other, Interval):
            # non-interval scalar -> no matches
            return invalid_comparison(self, other, op)

        # determine the dtype of the elements we want to compare
        if isinstance(other, Interval):
            other_dtype = pandas_dtype("interval")
        elif not is_categorical_dtype(other.dtype):
            other_dtype = other.dtype
        else:
            # for categorical defer to categories for dtype
            other_dtype = other.categories.dtype

            # extract intervals if we have interval categories with matching closed
            if is_interval_dtype(other_dtype):
                if self.closed != other.categories.closed:
                    return invalid_comparison(self, other, op)

                other = other.categories.take(
                    other.codes, allow_fill=True, fill_value=other.categories._na_value
                )

        # interval-like -> need same closed and matching endpoints
        if is_interval_dtype(other_dtype):
            if self.closed != other.closed:
                return invalid_comparison(self, other, op)
            elif not isinstance(other, Interval):
                other = type(self)(other)

            # Intervals order lexicographically: by left endpoint, then right.
            if op is operator.eq:
                return (self._left == other.left) & (self._right == other.right)
            elif op is operator.ne:
                return (self._left != other.left) | (self._right != other.right)
            elif op is operator.gt:
                return (self._left > other.left) | (
                    (self._left == other.left) & (self._right > other.right)
                )
            elif op is operator.ge:
                return (self == other) | (self > other)
            elif op is operator.lt:
                return (self._left < other.left) | (
                    (self._left == other.left) & (self._right < other.right)
                )
            else:
                # operator.le
                return (self == other) | (self < other)

        # non-interval/non-object dtype -> no matches
        if not is_object_dtype(other_dtype):
            return invalid_comparison(self, other, op)

        # object dtype -> iteratively check for intervals
        result = np.zeros(len(self), dtype=bool)
        for i, obj in enumerate(other):
            try:
                result[i] = op(self[i], obj)
            except TypeError:
                if obj is NA:
                    # comparison with np.nan returns NA
                    # github.com/pandas-dev/pandas/pull/37124#discussion_r509095092
                    result[i] = op is operator.ne
                else:
                    raise
        return result
    # All rich comparisons funnel through _cmp_method with the matching
    # operator; the decorator (defined elsewhere) presumably unwraps 0-d
    # arrays and defers to the other operand where appropriate.
    @unpack_zerodim_and_defer("__eq__")
    def __eq__(self, other):
        return self._cmp_method(other, operator.eq)

    @unpack_zerodim_and_defer("__ne__")
    def __ne__(self, other):
        return self._cmp_method(other, operator.ne)

    @unpack_zerodim_and_defer("__gt__")
    def __gt__(self, other):
        return self._cmp_method(other, operator.gt)

    @unpack_zerodim_and_defer("__ge__")
    def __ge__(self, other):
        return self._cmp_method(other, operator.ge)

    @unpack_zerodim_and_defer("__lt__")
    def __lt__(self, other):
        return self._cmp_method(other, operator.lt)

    @unpack_zerodim_and_defer("__le__")
    def __le__(self, other):
        return self._cmp_method(other, operator.le)
    def argsort(
        self,
        ascending: bool = True,
        kind: str = "quicksort",
        na_position: str = "last",
        *args,
        **kwargs,
    ) -> np.ndarray:
        """Return the indices that would sort the array by left endpoint,
        breaking ties on the right endpoint."""
        ascending = nv.validate_argsort_with_ascending(ascending, args, kwargs)

        if ascending and kind == "quicksort" and na_position == "last":
            # Fast path: lexsort treats its last key as primary, so this
            # sorts by left and breaks ties with right.
            return np.lexsort((self.right, self.left))

        # TODO: other cases we can use lexsort for? much more performant.
        return super().argsort(
            ascending=ascending, kind=kind, na_position=na_position, **kwargs
        )
def fillna(self, value=None, method=None, limit=None):
"""
Fill NA/NaN values using the specified method.
Parameters
----------
value : scalar, dict, Series
If a scalar value is passed it is used to fill all missing values.
Alternatively, a Series or dict can be used to fill in different
values for each index. The value should not be a list. The
value(s) passed should be either Interval objects or NA/NaN.
method : {'backfill', 'bfill', 'pad', 'ffill', None}, default None
(Not implemented yet for IntervalArray)
Method to use for filling holes in reindexed Series
limit : int, default None
(Not implemented yet for IntervalArray)
If method is specified, this is the maximum number of consecutive
NaN values to forward/backward fill. In other words, if there is
a gap with more than this number of consecutive NaNs, it will only
be partially filled. If method is not specified, this is the
maximum number of entries along the entire axis where NaNs will be
filled.
Returns
-------
filled : IntervalArray with NA/NaN filled
"""
if method is not None:
raise TypeError("Filling by method is not supported for IntervalArray.")
if limit is not None:
raise TypeError("limit is not supported for IntervalArray.")
value_left, value_right = self._validate_fill_value(value)
left = self.left.fillna(value=value_left)
right = self.right.fillna(value=value_right)
return self._shallow_copy(left, right)
def astype(self, dtype, copy=True):
"""
Cast to an ExtensionArray or NumPy array with dtype 'dtype'.
Parameters
----------
dtype : str or dtype
Typecode or data-type to which the array is cast.
copy : bool, default True
Whether to copy the data, even if not necessary. If False,
a copy is made only if the old dtype does not match the
new dtype.
Returns
-------
array : ExtensionArray or ndarray
ExtensionArray or NumPy ndarray with 'dtype' for its dtype.
"""
from pandas import Index
from pandas.core.arrays.string_ import StringDtype
if dtype is not None:
dtype = pandas_dtype(dtype)
if is_interval_dtype(dtype):
if dtype == self.dtype:
return self.copy() if copy else self
# need to cast to different subtype
try:
# We need to use Index rules for astype to prevent casting
# np.nan entries to int subtypes
new_left = Index(self._left, copy=False).astype(dtype.subtype)
new_right = Index(self._right, copy=False).astype(dtype.subtype)
except TypeError as err:
msg = (
f"Cannot convert {self.dtype} to {dtype}; subtypes are incompatible"
)
raise TypeError(msg) from err
return self._shallow_copy(new_left, new_right)
elif is_categorical_dtype(dtype):
return Categorical(np.asarray(self), dtype=dtype)
elif isinstance(dtype, StringDtype):
return dtype.construct_array_type()._from_sequence(self, copy=False)
# TODO: This try/except will be repeated.
try:
return np.asarray(self).astype(dtype, copy=copy)
except (TypeError, ValueError) as err:
msg = f"Cannot cast {type(self).__name__} to dtype {dtype}"
raise TypeError(msg) from err
def equals(self, other) -> bool:
if type(self) != type(other):
return False
return bool(
self.closed == other.closed
and self.left.equals(other.left)
and self.right.equals(other.right)
)
@classmethod
def _concat_same_type(
cls: Type[IntervalArrayT], to_concat: Sequence[IntervalArrayT]
) -> IntervalArrayT:
"""
Concatenate multiple IntervalArray
Parameters
----------
to_concat : sequence of IntervalArray
Returns
-------
IntervalArray
"""
closed | |
element[1]["y"]
w = element[1]["w"]
h = element[1]["h"]
num = element[1]["num"]
big = element[1]["big"]
direction = element[1]["direction"]
die = dice[0]
dice = dice[1:]
if num != -1 and num <= 0:
return (True,)
if die <= 10:
return exit_door(engine, x, y, w, h, direction, num, big, dice)
elif die <= 20:
return exit_table_11_20(engine, x, y, w, h, direction, num, big, dice)
def describe_chamber(engine, d):
    """
    Render an HTML description of a chamber: a bolded header with its
    dimensions and type, followed by its contents.

    engine -- die-roll provider with roll(list_of_die_sizes) -> results
    d      -- chamber dict; the 'w' and 'h' keys (dimensions in feet) are read
    """
    type_die, contents_die = engine.roll([100, 100])
    chamber_type = general_chamber(type_die)
    header = f"<p><b>A {d['w']} foot by {d['h']} foot {chamber_type}.</b></p>"
    return header + chamber_contents(engine, contents_die)
# Build new chart for types of monsters (primary inhabitant etc.)?
def chamber_contents(engine, die=0):
    """
    Describe what a chamber contains (monsters, treasure, traps, ...).

    Parameters:
        engine: die-roll provider; only consulted when no usable pre-rolled
            value is supplied.
        die: optional pre-rolled d100 result. A value <= 0 (the default)
            means "roll a fresh d100".

    Returns:
        An HTML fragment describing the contents (None only for an
        out-of-range roll > 100).

    Raises:
        RuntimeError: wrapping any error raised while generating the
            sub-descriptions, annotated with the die roll.
    """
    # Bug fix: `die` was accepted but unconditionally re-rolled, silently
    # discarding the d100 that describe_chamber() pre-rolls and passes in.
    # Honor a positive pre-roll; only roll when none was provided.
    if die <= 0:
        die = engine.roll([100])[0]
    a, b = "", ""
    try:
        if die <= 8:
            return "<p>Monsters: " + chamber_monster(engine) + "</p>"
        elif die <= 15:
            return "<p>Monsters: {}</p> <p> With treasure: {}</p>".format(
                chamber_monster(engine), which_treasure(engine))
        elif die <= 27:
            return "<p>Monsters: {}</p>".format(
                chamber_monster(engine))
        elif die <= 33:
            return "<p>Monsters: {}</p><p>Guarding treasure: {}</p>".format(
                chamber_monster(engine), which_treasure(engine))
        elif die <= 42:
            return "<p>Monsters: {}</p>".format(chamber_monster(engine))
        elif die <= 50:
            return "<p>Monsters: {}</p><p>With treasure: {}.</p>".format(
                chamber_monster(engine), which_treasure(engine))
        elif die <= 58:
            # NOTE(review): this branch calls treasure() directly while the
            # others go through which_treasure() -- possibly intentional.
            return "<p>" + chamber_hazard(engine) + "</p> <p> And treasure " + treasure(engine) + "</p>"
        elif die <= 63:
            return "<p>" + chamber_obstacle(engine) + "</p>"
        elif die <= 73:
            return "<p>" + chamber_trap(engine) + "</p>"
        elif die <= 76:
            a = chamber_trap(engine)
            if a is None:
                raise RuntimeError("None trap!")
            b = which_treasure(engine)
            return "<p>" + a + "</p> <p>Protecting treasure: " + b + "</p>"
        elif die <= 80:
            return "<p>" + chamber_trick(engine) + "</p>"
        elif die <= 88:
            return "<p> This is an otherwise empty room </p>"
        elif die <= 94:
            return "<p>" + chamber_hazard(engine) + "</p>"
        elif die <= 100:
            return "<p> Treasure: " + which_treasure(engine) + "</p>"
    except Exception as e:
        raise RuntimeError("describe chamber ({}, {}) with die roll {} raised {}".format(a, b, die, e))
def chamber_trick(engine):
    """
    Generate the description of a trick: an object (d20) combined with the
    magical effect it produces (d100).

    Parameters:
        engine: die-roll provider; engine.roll([s1, s2, ...]) returns one
            result per requested die size.

    Returns:
        A sentence describing the trick object and its effect.

    Raises:
        RuntimeError: wrapping any underlying error, annotated with the die
            rolls for debugging.
    """
    # Bug fix: initialize before the try block so the except clause can
    # always format them. Previously, a failure inside engine.roll() caused
    # an UnboundLocalError while building the RuntimeError message, masking
    # the real error.
    die1 = die2 = None
    try:
        die1, die2 = engine.roll([20, 100])
        ret = "There is a trick. "
        # d20: which object embodies the trick.
        if die1 <= 1:
            ret += "A book"
        elif die1 <= 2:
            ret += "A preserved brain in a jar"
        elif die1 <= 3:
            ret += "A burning fire"
        elif die1 <= 4:
            ret += "A cracked gem"
        elif die1 <= 5:
            ret += "A door"
        elif die1 <= 6:
            ret += "A fresco"
        elif die1 <= 7:
            ret += "A piece of furniture"
        elif die1 <= 8:
            ret += "A glass sculpture"
        elif die1 <= 9:
            ret += "A mushroom field"
        elif die1 <= 10:
            ret += "A painting"
        elif die1 <= 11:
            ret += "A plant or tree"
        elif die1 <= 12:
            ret += "A pool of water"
        elif die1 <= 13:
            ret += "Runes engraved on the wall"
        elif die1 <= 14:
            ret += "A skull"
        elif die1 <= 15:
            ret += "A sphere of magical energy"
        elif die1 <= 16:
            ret += "A statue"
        elif die1 <= 17:
            ret += "A stone obelisk"
        elif die1 <= 18:
            ret += "A suit of armor"
        elif die1 <= 19:
            # Coin flip between two soft furnishings.
            if engine.roll([2])[0] <= 1:
                ret += "A tapestry"
            else:
                ret += "A rug"
        elif die1 <= 20:
            ret += "A target dummy"
        else:
            ret += "A bad die roll"
        # d100: what the trick object does.
        if die2 <= 3:
            return ret + " ages the first person to touch it."
        elif die2 <= 6:
            return ret + " when touched animates, or animates a nearby object."
        elif die2 <= 10:
            return ret + " asks three skill testing questions. If answered,\
 a reward appears."  # TODO: what kind of reward?
        elif die2 <= 13:
            return ret + " bestows a resistance or vulnerability."
        elif die2 <= 16:
            return ret + " changes a character's alignment, personality, size,\
 or appearance when touched."
        elif die2 <= 19:
            return ret + " transmutes one substance into another, such as\
 gold to lead or metal to brittle crystal."
        elif die2 <= 22:
            return ret + " creates a force field."
        elif die2 <= 26:
            return ret + " creates an illusion."
        elif die2 <= 29:
            die3, die4 = engine.roll([4, 4])
            return ret + " suppresses magic items for {} hours.".format(
                die3 + die4)
        elif die2 <= 32:
            if engine.roll([2])[0] <= 1:
                return ret + " enlarges characters."
            else:
                return ret + " reduces characters."
        elif die2 <= 35:
            return ret + " a 'Magic Mouth' speaks a riddle."
        elif die2 <= 38:
            return ret + " 'Confusion (save DC 15) targets all characters in 10 ft."
        elif die2 <= 41:
            if engine.roll([2])[0] <= 1:
                return ret + " gives true directions."
            else:
                return ret + " gives false directions."
        elif die2 <= 44:
            return ret + " grants a 'Wish' (holy shit)."
        elif die2 <= 47:
            return ret + " flies around to avoid being touched."
        elif die2 <= 50:
            return ret + " casts 'Gaeas' on the characters."
        elif die2 <= 53:  # TODO: does more
            return ret + " reverses gravity."
        elif die2 <= 56:
            return ret + " induces greed."
        elif die2 <= 59:
            return ret + " contains an imprisoned creature."
        elif die2 <= 62:
            return ret + " locks the exits. (DC 17)"
        elif die2 <= 65:
            return ret + " offers a game of chance for a reward of piece of\
 information."
        elif die2 <= 68:
            return ret + " helps or harms certain types of creatures."
        elif die2 <= 71:
            return ret + " casts polymorph on the characters (lasts 1 hour)"
        elif die2 <= 75:
            return ret + " presents a puzzle or a riddle"
        elif die2 <= 78:
            return ret + " prevents movement"
        elif die2 <= 81:  # TODO: does more
            return ret + " releases coins & gems"
        elif die2 <= 84:
            return ret + " turns into or summons a monster"
        elif die2 <= 87:
            return ret + " casts 'Suggestion' on the characters."
        elif die2 <= 90:
            return ret + " wails loudly when touched."
        elif die2 <= 93:
            return ret + " talks"
        elif die2 <= 97:
            return ret + " teleports the characters to another place."
        elif die2 <= 100:
            return ret + " swaps two of the characters' minds."
        else:
            return " dice problem."
    except Exception as e:
        raise RuntimeError("chamber_trick with dice {} {} threw {}.".format(die1, die2, e))
def which_treasure(engine):
    """
    Pick a treasure description: usually a single treasure, but a full
    horde on a d20 roll of 18-20.
    """
    dice = engine.roll([20])
    try:
        roll = dice[0]
        if roll <= 17:
            return treasure(engine)
        if roll <= 20:
            return horde(engine)
    except Exception as e:
        raise RuntimeError("which_treasure with dice {} threw {}".format(dice, e))
def chamber_obstacle(engine):
die = engine.roll([20])[0]
ret = "There is an obstacle: "
try:
if die <= 1:
die2 = engine.roll([10])[0]
ret += "There is an antilife aura with radius {} ft. While in the\
aura, creatures cannot gain hit points.".format(die2 * 10)
elif die <= 2:
ret += "There are strong winds that reduce speed by half and\
impose disadvantage on ranged attacks."
elif die <= 3:
ret += "A 'Blade Barrier' spell blocks a passage."
elif die <= 8:
if engine.roll([2])[0] <= 1:
ret += "There has been a cave-in here. The room is difficult\
terrain."
else:
ret += "The ceiling caves in when the players enter the room\
make a DC 15 Dex save or take 2d10 damage, half as much with a successful\
save."
elif die <= 12: # TODO: Implement chasms
ndice = engine.roll([4, 6, 6])
width = ndice[0] * 10
depth = ndice[1] * 10 + ndice[2] * 10
ret += "There is a chasm {} feet wide and {} feet deep.".format(
width, depth)
elif die <= 14: # TODO: Implement water
ndice = engine.roll([10, 10])
depth = ndice[0] + ndice[1]
ret += "The floor is sunken in and below {} feet of water.".format(
depth)
elif die <= 15: # TODO: Implement lava flows
ret += "Lava flows through this area!"
if engine.roll([2])[0] >= 1:
ret += " There is a stone bridge over it."
elif die <= 16:
ret += "Giant mushrooms must be hacked at | |
function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str name: name of the Pod (required)
:param str path: path to the resource (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'name', 'path']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method proxy_head_namespaced_pod_8" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `proxy_head_namespaced_pod_8`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `proxy_head_namespaced_pod_8`")
# verify the required parameter 'path' is set
if ('path' not in params) or (params['path'] is None):
raise ValueError("Missing the required parameter `path` when calling `proxy_head_namespaced_pod_8`")
resource_path = '/api/v1/proxy/namespaces/{namespace}/pods/{name}/{path}'.replace('{format}', 'json')
method = 'HEAD'
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'name' in params:
path_params['name'] = params['name']
if 'path' in params:
path_params['path'] = params['path']
query_params = {}
header_params = {}
form_params = {}
files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['*/*'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def proxy_put_namespaced_pod_9(self, namespace, name, path, **kwargs):
"""
proxy PUT requests to Pod
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.proxy_put_namespaced_pod_9(namespace, name, path, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str name: name of the Pod (required)
:param str path: path to the resource (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'name', 'path']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method proxy_put_namespaced_pod_9" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `proxy_put_namespaced_pod_9`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `proxy_put_namespaced_pod_9`")
# verify the required parameter 'path' is set
if ('path' not in params) or (params['path'] is None):
raise ValueError("Missing the required parameter `path` when calling `proxy_put_namespaced_pod_9`")
resource_path = '/api/v1/proxy/namespaces/{namespace}/pods/{name}/{path}'.replace('{format}', 'json')
method = 'PUT'
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'name' in params:
path_params['name'] = params['name']
if 'path' in params:
path_params['path'] = params['path']
query_params = {}
header_params = {}
form_params = {}
files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['*/*'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def proxy_post_namespaced_pod_10(self, namespace, name, path, **kwargs):
"""
proxy POST requests to Pod
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.proxy_post_namespaced_pod_10(namespace, name, path, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str name: name of the Pod (required)
:param str path: path to the resource (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'name', 'path']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method proxy_post_namespaced_pod_10" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `proxy_post_namespaced_pod_10`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `proxy_post_namespaced_pod_10`")
# verify the required parameter 'path' is set
if ('path' not in params) or (params['path'] is None):
raise ValueError("Missing the required parameter `path` when calling `proxy_post_namespaced_pod_10`")
resource_path = '/api/v1/proxy/namespaces/{namespace}/pods/{name}/{path}'.replace('{format}', 'json')
method = 'POST'
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'name' in params:
path_params['name'] = params['name']
if 'path' in params:
path_params['path'] = params['path']
query_params = {}
header_params = {}
form_params = {}
files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['*/*'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def proxy_delete_namespaced_pod_11(self, namespace, name, path, **kwargs):
"""
proxy DELETE requests to Pod
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.proxy_delete_namespaced_pod_11(namespace, name, path, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str name: name of the Pod (required)
:param str path: path to the resource (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'name', 'path']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method proxy_delete_namespaced_pod_11" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `proxy_delete_namespaced_pod_11`")
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `proxy_delete_namespaced_pod_11`")
# verify the required parameter 'path' is set
if ('path' not in params) or (params['path'] is None):
raise ValueError("Missing the required parameter `path` when calling `proxy_delete_namespaced_pod_11`")
resource_path = '/api/v1/proxy/namespaces/{namespace}/pods/{name}/{path}'.replace('{format}', 'json')
method = 'DELETE'
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
if 'name' in params:
path_params['name'] = params['name']
if 'path' in params:
path_params['path'] = params['path']
query_params = {}
header_params = {}
form_params = {}
files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['*/*'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
response = self.api_client.call_api(resource_path, method,
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=files,
response_type='str',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def proxy_options_namespaced_pod_12(self, namespace, name, path, **kwargs):
"""
proxy OPTIONS requests to Pod
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.proxy_options_namespaced_pod_12(namespace, name, path, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str name: name of the Pod (required)
:param str path: path | |
# adapt/tests/test_guide_search.py
"""Tests for guide_search module.
"""
import logging
import random
import unittest
import numpy as np
from adapt import alignment
from adapt import guide_search
from adapt.utils import guide
from adapt.utils import index_compress
__author__ = '<NAME> <<EMAIL>>'
class TestGuideSearcherMinimizeGuides(unittest.TestCase):
"""Tests methods in the GuideSearcherMinimizeGuides class.
"""
    def setUp(self):
        """Build the small alignments and guide searchers used by the tests.

        Each fixture is a list of short sequences, its Alignment, a window
        size, and a GuideSearcherMinimizeGuides. The positional searcher
        arguments appear to be (alignment, guide length, allowed mismatches,
        cover fraction, missing-data thresholds) -- TODO confirm against the
        GuideSearcherMinimizeGuides signature.
        """
        # Disable logging
        logging.disable(logging.WARNING)
        # Set a random seed so hash functions are always the same
        random.seed(0)
        # a: degenerate bases (Y), 4-mer guides, full coverage required
        self.a_seqs = ['ATCGAA', 'ATCGAT', 'AYCGAA', 'AYCGAT', 'AGCGAA']
        self.a_aln = alignment.Alignment.from_list_of_seqs(self.a_seqs)
        self.a_window_size = 5
        self.a = guide_search.GuideSearcherMinimizeGuides(self.a_aln, 4, 0, 1.0, (1, 1, 100))
        # b: second sequence contains a gap ('-')
        self.b_seqs = ['ATCGAA', 'ATC-AA']
        self.b_aln = alignment.Alignment.from_list_of_seqs(self.b_seqs)
        self.b_window_size = 5
        self.b = guide_search.GuideSearcherMinimizeGuides(self.b_aln, 4, 0, 1.0, (1, 1, 100))
        # c: longer sequences with N's; c_partial only needs 60% coverage
        self.c_seqs = ['GTATCATCGGCCATGNAC',
                       'GTNNCATCGG-CATGNAC',
                       'GTNNGATAGGCCATGNAC',
                       'GAAAAAAAAAAAAAAAAC',
                       'GTATCATCGGCCATGNAC',
                       'GTATCAGCGGCCATGNAC']
        self.c_aln = alignment.Alignment.from_list_of_seqs(self.c_seqs)
        self.c_window_size = 14
        self.c = guide_search.GuideSearcherMinimizeGuides(self.c_aln, 5, 1, 1.0, (1, 1, 100))
        self.c_partial = guide_search.GuideSearcherMinimizeGuides(self.c_aln, 5, 1, 0.6,
            (1, 1, 100))
        # d/e: the only shared guide sits flush at the end of the window
        self.d_seqs = ['GTATACGG',
                       'ACGTACGG',
                       'TACTACGG']
        self.d_aln = alignment.Alignment.from_list_of_seqs(self.d_seqs)
        self.d_window_size = 8
        self.d = guide_search.GuideSearcherMinimizeGuides(self.d_aln, 5, 0, 1.0, (1, 1, 100))
        self.e_seqs = ['GTAGACGG',
                       'ACGTACGG',
                       'TACTTCGG']
        self.e_aln = alignment.Alignment.from_list_of_seqs(self.e_seqs)
        self.e_window_size = 8
        self.e = guide_search.GuideSearcherMinimizeGuides(self.e_aln, 5, 1, 1.0, (1, 1, 100))
        # f: so many N's that no guide should be constructible
        self.f_seqs = ['GTNNACGN',
                       'ANNTACGN',
                       'TANTTNNN']
        self.f_aln = alignment.Alignment.from_list_of_seqs(self.f_seqs)
        self.f_window_size = 8
        self.f = guide_search.GuideSearcherMinimizeGuides(self.f_aln, 5, 1, 1.0, (1, 1, 100))
        # g: used for group-based coverage tests; g_partial needs 50%
        self.g_seqs = ['GTATCATCGGCCATCNAC',
                       'CTATCACCTGCTACGNAC',
                       'ATAGCACCGGCCATGNAC',
                       'TTAGGACCGACCATGNAC']
        self.g_aln = alignment.Alignment.from_list_of_seqs(self.g_seqs)
        self.g_window_size = 18
        self.g = guide_search.GuideSearcherMinimizeGuides(self.g_aln, 5, 0, 1.0, (1, 1, 100))
        self.g_partial = guide_search.GuideSearcherMinimizeGuides(self.g_aln, 5, 0, 0.5,
            (1, 1, 100))
        # h: stricter missing-data thresholds (0.5, 0, 1) than the others
        self.h_seqs = ['GTATCAGCGGCCATCNACAA',
                       'GTANCACCTGCTACGNACTT',
                       'GTATCAATGNCCATGNACCC',
                       'GTATCATCCACNATGNACGG']
        self.h_aln = alignment.Alignment.from_list_of_seqs(self.h_seqs)
        self.h_window_size = 18
        self.h = guide_search.GuideSearcherMinimizeGuides(self.h_aln, 5, 1, 1.0, (0.5, 0, 1))
        # i: gaps near the start of two sequences
        self.i_seqs = ['GTATCAGCGGCCATCAACAA',
                       'GT-TCACCTGCTACGAACTT',
                       'GT-TCAATGCCCATGAACCC',
                       'GTATCATCCACCATGAACGG']
        self.i_aln = alignment.Alignment.from_list_of_seqs(self.i_seqs)
        self.i_window_size = 5
        self.i = guide_search.GuideSearcherMinimizeGuides(self.i_aln, 5, 1, 1.0, (0.5, 0, 1))
        # Skip guide clustering, which may not work well when the guides
        # are so short in these tests
        self.a.guide_clusterer = None
        self.b.guide_clusterer = None
        self.c.guide_clusterer = None
        self.d.guide_clusterer = None
        self.e.guide_clusterer = None
        self.f.guide_clusterer = None
        self.g.guide_clusterer = None
        self.h.guide_clusterer = None
        self.i.guide_clusterer = None
    def test_construct_guide_memoized_a(self):
        """Memoized guide construction returns stable results and caches them.

        Repeated identical queries must return the same guide, memo entries
        must appear under the expected keys, and _cleanup_memoized_guides()
        must evict entries by position. The repeated calls are deliberate:
        they exercise the memoized path, so call order matters here.
        """
        # Same query three times: the memoizer should keep answering 'ATCG'.
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}}),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}}),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}}),
                         ('ATCG', {0,1,2,3}))
        self.assertIn(self.a._construct_guide_memoized(0, {0: {2,3,4}}),
                      [('ATCG', {2,3}), ('ACCG', {2,3}), ('AGCG', {4})])
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {4}}),
                         ('AGCG', {4}))
        # Memo keys are (frozen set of (group, compressed-sequence-set), None)
        # pairs; verify entries exist for each query at position 0.
        def ic(idx):
            return index_compress.compress_mostly_contiguous(idx)
        key = (frozenset({(0, frozenset(ic({0,1,2,3,4})))}), None)
        self.assertIn(key, self.a._memoized_guides)
        self.assertIn(0, self.a._memoized_guides[key])
        key = (frozenset({(0, frozenset(ic({2,3,4})))}), None)
        self.assertIn(key, self.a._memoized_guides)
        self.assertIn(0, self.a._memoized_guides[key])
        key = (frozenset({(0, frozenset(ic({4})))}), None)
        self.assertIn(key, self.a._memoized_guides)
        self.assertIn(0, self.a._memoized_guides[key])
        # Queries at a different start position (2) get their own entries.
        self.assertEqual(self.a._construct_guide_memoized(2, {0: {0,1,2,3,4}}),
                         ('CGAA', {0,2,4}))
        self.assertEqual(self.a._construct_guide_memoized(2, {0: {3}}),
                         ('CGAT', {3}))
        key = (frozenset({(0, frozenset(ic({0,1,2,3,4})))}), None)
        self.assertIn(key, self.a._memoized_guides)
        self.assertIn(2, self.a._memoized_guides[key])
        key = (frozenset({(0, frozenset(ic({3})))}), None)
        self.assertIn(key, self.a._memoized_guides)
        self.assertIn(2, self.a._memoized_guides[key])
        # Re-query everything; answers must be unchanged by memoization.
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}}),
                         ('ATCG', {0,1,2,3}))
        self.assertIn(self.a._construct_guide_memoized(0, {0: {2,3,4}}),
                      [('ATCG', {2,3}), ('ACCG', {2,3}), ('AGCG', {4})])
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {4}}),
                         ('AGCG', {4}))
        self.assertEqual(self.a._construct_guide_memoized(2, {0: {0,1,2,3,4}}),
                         ('CGAA', {0,2,4}))
        self.assertEqual(self.a._construct_guide_memoized(2, {0: {3}}),
                         ('CGAT', {3}))
        # Cleanup at position 2 removes its entries; cleaning an unused
        # position (100) is a harmless no-op.
        self.a._cleanup_memoized_guides(2)
        for key in self.a._memoized_guides.keys():
            self.assertNotIn(2, self.a._memoized_guides[key])
        self.a._cleanup_memoized_guides(100)
        for key in self.a._memoized_guides.keys():
            self.assertNotIn(100, self.a._memoized_guides[key])
    def test_construct_guide_memoized_b(self):
        """Gapped sequences yield no guide, and results stay memoized.

        Sequence 1 of alignment b contains a gap under the guide window, so
        no guide exists when only it must be covered. Each query is repeated
        to exercise the memoized path, so call order matters here.
        """
        self.assertIsNone(self.b._construct_guide_memoized(0, {0: {1}}))
        self.assertEqual(self.b._construct_guide_memoized(0, {0: {0,1}}),
                         ('ATCG', {0}))
        self.assertIsNone(self.b._construct_guide_memoized(0, {0: {1}}))
        self.assertEqual(self.b._construct_guide_memoized(0, {0: {0,1}}),
                         ('ATCG', {0}))
    def test_construct_guide_memoized_a_with_needed(self):
        """Supplying num_needed does not change the constructed guide.

        Alternates num_needed between 5 and 3 for the same query; the
        repeated calls exercise the memoized path, so call order matters.
        """
        # Use the num_needed argument
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 5}),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 3}),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 5}),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 3}),
                         ('ATCG', {0,1,2,3}))
        # Cleanup at position 0 removes its memo entries; cleaning an unused
        # position (100) is a harmless no-op.
        self.a._cleanup_memoized_guides(0)
        for key in self.a._memoized_guides.keys():
            self.assertNotIn(0, self.a._memoized_guides[key])
        self.a._cleanup_memoized_guides(100)
        for key in self.a._memoized_guides.keys():
            self.assertNotIn(100, self.a._memoized_guides[key])
    def test_construct_guide_memoized_a_use_last(self):
        """use_last=True reuses the previous memoized answer.

        Interleaves use_last=False and use_last=True calls; the answer must
        be identical either way. Call order is the behavior under test.
        """
        # Use the use_last argument
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 5}, use_last=False),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 3}, use_last=False),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 3}, use_last=True),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 3}, use_last=False),
                         ('ATCG', {0,1,2,3}))
        self.assertEqual(self.a._construct_guide_memoized(0, {0: {0,1,2,3,4}},
                                                          {0: 3}, use_last=True),
                         ('ATCG', {0,1,2,3}))
        # Cleanup at position 0 removes its memo entries.
        self.a._cleanup_memoized_guides(0)
        for key in self.a._memoized_guides.keys():
            self.assertNotIn(0, self.a._memoized_guides[key])
def test_find_optimal_guide_in_window(self):
self.assertEqual(self.c._find_optimal_guide_in_window(
1, 1 + self.c_window_size,
{0: set([0,1,2,3,4,5])}, {0: 6}),
('ATCGG', set([0,1,2,4,5]), 5, 5))
def test_find_optimal_guide_in_window_at_end_boundary(self):
self.assertEqual(self.d._find_optimal_guide_in_window(
0, 0 + self.d_window_size,
{0: set([0,1,2])}, {0: 3}),
('TACGG', set([0,1,2]), 3, 3))
self.assertEqual(self.e._find_optimal_guide_in_window(
0, 0 + self.e_window_size,
{0: set([0,1,2])}, {0: 3}),
('TACGG', set([0,1,2]), 3, 3))
def test_find_optimal_guide_in_window_none(self):
self.assertEqual(self.f._find_optimal_guide_in_window(
0, 0 + self.f_window_size,
{0: set([0,1,2])}, {0: 3}),
(None, set(), None, 0))
def test_find_optimal_guide_in_window_with_groups_1(self):
g_opt = self.g._find_optimal_guide_in_window(0, 0 + self.g_window_size,
{2017: {0, 2}, 2018: {1, 3}}, {2017: 0, 2018: 1})
gd, gd_covered, gd_start, gd_score = g_opt
# We only need to cover 1 sequence from the 2018 group ({1, 3});
# check that at least one of these is covered
self.assertTrue(1 in gd_covered or 3 in gd_covered)
# Since we only need to cover 1 sequence in total, the score
# should only be 1
self.assertEqual(gd_score, 1)
def test_find_guides_in_window(self):
self.assertEqual(self.c._find_guides_in_window(
1, 1 + self.c_window_size),
set(['ATCGG', 'AAAAA']))
self.assertIn(self.c_partial._find_guides_in_window(
1, 1 + self.c_window_size),
{frozenset(['ATCGG']), frozenset(['TCATC'])})
self.assertIn(self.g._find_guides_in_window(
0, 0 + self.g_window_size),
[set(['TATCA', 'CCATG']), set(['CGGCC', 'TTAGG', 'CTATC'])])
self.assertIn(self.g_partial._find_guides_in_window(
0, 0 + self.g_window_size),
[set(['TATCA']), set(['CCATG']), set(['CGGCC'])])
def test_find_guides_with_missing_data(self):
# The best guides are in regions with missing data, but the
# alignment and thresholds on missing data are setup to avoid
# guides in these regions
self.assertEqual(self.h._find_guides_in_window(
0, 0 + self.h_window_size),
set(['CAACG', 'CACCC']))
def test_find_guides_with_gap(self):
# It should not be able to find a guide in a window where the only
# possible guides overlap sequences with a gap
with self.assertRaises(guide_search.CannotAchieveDesiredCoverageError):
self.i._find_guides_in_window(1, 1 + self.i_window_size)
# It should be able to find a guide in a window without a gap
self.i._find_guides_in_window(10, 10 + self.i_window_size)
def test_guide_is_suitable_fn(self):
seqs = ['GTATCAAAAAATCGGCTACCCCCTCTAC',
'CTACCAAAAAACCTGCTAGGGGGCGTAC',
'ATAGCAAAAAAACGTCCTCCCCCTGTAC',
'TTAGGAAAAAAGCGACCGGGGGGTCTAC']
aln = alignment.Alignment.from_list_of_seqs(seqs)
gs = guide_search.GuideSearcherMinimizeGuides(aln, 5, 0, 1.0, (1, 1, 100))
# The best guide is 'AAAAA'
self.assertEqual(gs._find_guides_in_window(0, 28),
set(['AAAAA']))
# Do not allow guides with 'AAA' in them
def f(guide):
if 'AAA' in guide:
return False
else:
return True
gs = guide_search.GuideSearcherMinimizeGuides(aln, 5, 0, 1.0, (1, 1, 100),
guide_is_suitable_fn=f)
self.assertEqual(gs._find_guides_in_window(0, 28),
set(['CCCCC', 'GGGGG']))
def test_with_groups(self):
seqs = ['ATCAAATCGATGCCCTAGTCAGTCAACT',
'ATCTTTACGATGCTCTGGTTAGCCATCT',
'ATCTTATCGTTGGACTCGTAAGGCACCT',
'ATCAGATCGCTGAGCTTGTGAGACAGCT',
'TAGATCTAATCCCAGTATGGTACTTATC',
'TAGAACTAATGGCAGTTTGGTCCTTGTC']
aln = alignment.Alignment.from_list_of_seqs(seqs)
gs = guide_search.GuideSearcherMinimizeGuides(aln, 5, 0, 1.0, (1, 1, 100))
# 4 guides are needed (3 for the first 4 sequences, 1 for the last
# 2 sequences)
self.assertEqual(len(gs._find_guides_in_window(0, 28)), 4)
# Divide into groups, wanting to cover more of group 2018; now
# we only need 1 guide from group 2010 and 1 from group 2018, so just
# 2 guides are needed
seq_groups = {2010: {0, 1, 2, 3}, 2018: {4, 5}}
cover_frac = {2010: 0.1, 2018: 1.0}
gs = guide_search.GuideSearcherMinimizeGuides(aln, 5, 0, cover_frac, (1, 1, 100),
seq_groups=seq_groups)
self.assertEqual(len(gs._find_guides_in_window(0, 28)), 2)
def test_score_collection_of_guides_without_groups(self):
    seqs = ['ATCAAATCGATGCCCTAGTCAGTCAACT',
            'ATCTTTACGATGCTCTGGTTAGCCATCT',
            'ATCTTATCGTTGGACTCGTAAGGCACCT',
            'ATCAGATCGCTGAGCTTGTGAGACAGCT',
            'TAGATCTAATCCCAGTATGGTACTTATC',
            'TAGAACTAATGGCAGTTTGGTACTTGTC']
    aln = alignment.Alignment.from_list_of_seqs(seqs)
    searcher = guide_search.GuideSearcherMinimizeGuides(
        aln, 5, 0, 1.0, (1, 1, 100))
    # _score_collection_of_guides() reads guide positions out of
    # _selected_guide_positions, so seed that attribute directly
    searcher._selected_guide_positions = {'TCGAT': {6}, 'GGTAC': {18}}
    # 'TCGAT' covers 1/6 sequences and 'GGTAC' covers 2/6;
    # the mean of those fractions is 0.25
    score = searcher._score_collection_of_guides(['TCGAT', 'GGTAC'])
    self.assertEqual(score, 0.25)
def test_score_collection_of_guides_with_groups(self):
    seqs = ['ATCAAATCGATGCCCTAGTCAGTCAACT',
            'ATCTTTTCGATGCTCTGGTTAGCCATCT',
            'ATCTTATCGTTGGACTCGTAAGGCACCT',
            'ATCAGATCGCTGAGCTTGTGAGACAGCT',
            'TAGATCTAATCCCAGTATGGTACTTATC',
            'TAGAACTAATGGCAGTTTGGTTCTTGTC']
    aln = alignment.Alignment.from_list_of_seqs(seqs)
    groups = {2010: {0, 1, 2, 3}, 2018: {4, 5}}
    needed_frac = {2010: 0.1, 2018: 1.0}
    searcher = guide_search.GuideSearcherMinimizeGuides(
        aln, 5, 0, needed_frac, (1, 1, 100), seq_groups=groups)
    # _score_collection_of_guides() reads guide positions out of
    # _selected_guide_positions, so seed that attribute directly
    searcher._selected_guide_positions = {'TCGAT': {6}, 'GGTAC': {18}}
    # 3 sequences are needed overall (1 from 2010, 2 from 2018).
    # 'TCGAT' covers 1 needed sequence (from 2010) -> 1/3
    # 'GGTAC' covers 1 needed sequence (from 2018) -> 1/3
    # The mean of those fractions (the score) is 1/3
    score = searcher._score_collection_of_guides(['TCGAT', 'GGTAC'])
    self.assertEqual(score, 1/3.0)
def test_find_optimal_guide_with_gu_pairing(self):
    seqs = ['GTATTAACACTTCGGCTACCCCCTCTAC',
            'CTACCAACACACCTGCTAGGGGGCGTAC',
            'ATAGCAACACAACGTCCTCCCCCTGTAC',
            'TTAGGGGTGTGGCGACCGGGGGGTCTAC']
    aln = alignment.Alignment.from_list_of_seqs(seqs)
    # With G-U pairing disallowed, two guides are required for coverage
    searcher = guide_search.GuideSearcherMinimizeGuides(
        aln, 5, 0, 1.0, (1, 1, 100), allow_gu_pairs=False)
    self.assertEqual(len(searcher._find_guides_in_window(0, 28)), 2)
    # With G-U pairing allowed, the single guide 'AACAC' suffices
    searcher = guide_search.GuideSearcherMinimizeGuides(
        aln, 5, 0, 1.0, (1, 1, 100), allow_gu_pairs=True)
    self.assertEqual(searcher._find_guides_in_window(0, 28), {'AACAC'})
def test_with_required_guides_full_coverage(self):
seqs = ['ATCAAATCGATGCCCTAGTCAGTCAACT',
'ATCTAATCGATGCTCTGGTTAGCCATCT',
'ATCCAATCGCAGTACTCGTAAGGCACCT',
'ATCAAATCGGTGAGCTTGTGAGACAGCT',
'TAGAAATCGAACTAGTATGGTACTTATC',
'TAGAAATCGTGGCAGTTTGGTTCTTGTC']
aln = alignment.Alignment.from_list_of_seqs(seqs)
# First 5 listed guides are from the alignment and the
# positions given are correct; last guide ('AAAAA') is made up
# but should still be in | |
#! /usr/bin/env python
# Mathematica nb from Alex & Laurent
# <EMAIL> major reorg as LG++ 2018 01
# python3 required (int( (len(coeffs) -1)/2 )) because of float int/int result change from python2
import numpy as np
import scipy.special
import numpy.linalg as linalg
import sys
from scipy.special import comb
import os, pickle
from uncertainties import unumpy # pip install if you need
# Unit-of-length constants: lengths in this module are expressed in meters.
m = 1.0  # meter (base unit)
mm = 1.0e-3 * m  # millimeter
um = 1.0e-6 * m  # micrometer
def scaling(img, photons):  # RENAME this function
    """
    Return the multiplicative factor that rescales the total flux of
    the perfect-PSF image `img` to the requested number of `photons`
    (the total flux measured in the data).
    """
    measured_flux = np.sum(img)
    print("total", measured_flux)
    return photons / measured_flux
def matrix_operations(img, model, flux = None, verbose=False, linfit=False, dqm=None):
# meta-question: why & when do we use linfit?
# least squares matrix operations to solve A x = b, where A is the model,
# b is the data (image), and x is the coefficient vector we are solving for.
# In 2-D data x = inv(At.A).(At.b)
#
# img 2d array of image data
# dqm 2d bool array of bad pixel locations (same shape as 2d img), or None (for all-good data)
print("leastsqnrm.matrix_operations() - equally-weighted")
flatimg = img.reshape(np.shape(img)[0] * np.shape(img)[1])
flatdqm = dqm.reshape(np.shape(img)[0] * np.shape(img)[1])
if verbose:
print(f'fringefitting.leastsqnrm.matrix_operations(): ', end='')
print(f'\n\timg {img.shape:} \n\tdqm {dqm.shape:}', end='')
print(f'\n\tL x W = {img.shape[0]:d} x {img.shape[1]:d} = {img.shape[0] * img.shape[1]:d}', end='')
print(f'\n\tflatimg {flatimg.shape:}', end='')
print(f'\n\tflatdqm {flatdqm.shape:}', end='')
# Originally Alex had nans coding bad pixels in the image.
# Anand: re-use the nan terminology code but driven by bad pixel frame
# nanlist shoud get renamed eg donotuselist
if verbose: print('\n\ttype(dqm)', type(dqm), end='')
if dqm is not None: nanlist = np.where(flatdqm==True) # where DO_NOT_USE up.
else: nanlist = (np.array(()), ) # shouldn't occur w/MAST JWST data
if verbose:
print(f'\n\ttype(nanlist) {type(nanlist):}, len={len(nanlist):}', end='')
print(f'\n\tnumber of nanlist pixels: {len(nanlist[0]):d} items', end='')
print(f'\n\t{len(nanlist[0]):d} DO_NOT_USE pixels found in data slice',
end='')
else:
print(f'\t{len(nanlist[0]):d} DO_NOT_USE pixels found in data slice')
flatimg = np.delete(flatimg, nanlist)
if verbose: print(f'\n\tflatimg {flatimg.shape:} after deleting {len(nanlist[0]):d}',
end='')
if flux is not None:
flatimg = flux * flatimg / flatimg.sum()
# A
flatmodel_nan = model.reshape(np.shape(model)[0] * np.shape(model)[1],
np.shape(model)[2])
flatmodel = np.zeros((len(flatimg), np.shape(model)[2]))
if verbose:
print(f'\n\tflatmodel_nan {flatmodel_nan.shape:}', end='')
print(f'\n\tflatmodel {flatmodel.shape:}', end='')
print(f'\n\tdifference {flatmodel_nan.shape[0] - flatmodel.shape[0]:}', end='')
print()
print("flat model dimensions ", np.shape(flatmodel))
print("flat image dimensions ", np.shape(flatimg))
for fringe in range(np.shape(model)[2]):
flatmodel[:,fringe] = np.delete(flatmodel_nan[:,fringe], nanlist)
# At (A transpose)
flatmodeltransp = flatmodel.transpose()
# At.A (makes square matrix)
modelproduct = np.dot(flatmodeltransp, flatmodel)
# At.b
data_vector = np.dot(flatmodeltransp, flatimg)
# inv(At.A)
inverse = linalg.inv(modelproduct)
cond = np.linalg.cond(inverse)
x = np.dot(inverse, data_vector)
res = np.dot(flatmodel, x) - flatimg
# put bad pixels back
naninsert = nanlist[0] - np.arange(len(nanlist[0]))
# calculate residuals with fixed but unused bad pixels as nans
res = np.insert(res, naninsert, np.nan)
res = res.reshape(img.shape[0], img.shape[1])
if verbose:
print('model flux', flux)
print('data flux', flatimg.sum())
print("flat model dimensions ", np.shape(flatmodel))
print("model transpose dimensions ", np.shape(flatmodeltransp))
print("flat image dimensions ", np.shape(flatimg))
print("transpose * image data dimensions", np.shape(data_vector))
print("flat img * transpose dimensions", np.shape(inverse))
if linfit:
try:
from linearfit import linearfit
# dependent variables
M = np.mat(flatimg)
# photon noise
noise = np.sqrt(np.abs(flatimg))
# this sets the weights of pixels fulfilling condition to zero
weights = np.where(np.abs(flatimg)<=1.0, 0.0, 1.0/(noise**2))
# uniform weight
wy = weights
S = np.mat(np.diag(wy));
# matrix of independent variables
C = np.mat(flatmodeltransp)
# initialize object
result = linearfit.LinearFit(M,S,C)
# do the fit
result.fit()
# delete inverse_covariance_matrix to reduce size of pickled file
result.inverse_covariance_matrix = []
linfit_result = result
print("Returned linearfit result")
except ImportError:
linfit_result = None
# if verbose:
print("linearfit module not imported, no covariances saved.")
else:
linfit_result = None
print("linearfit module not imported, no covariances saved.")
return x, res, cond, linfit_result
#######################################################################
def weighted_operations(img, model, verbose=False, dqm=None):
# return x, res, condition_number (None=>no condition number yet), singvals
# x: solution vector
# res: residuals array, nan-flagged for bad dq values?
# cond: condition number not calculateds (no inversion done here, so not available)
# singvals: singular values returned by the SVD solution for the parameters
#
# meta-question: why & when do we use linfit? I removed it here - anand 2022 Jan
# least squares matrix operations to solve A x = b, where
# A is the model,
# b is the data (image),
# x is the coefficient vector we are solving for.
#
# Solution 1: equal weighting of data (matrix_operations()).
# x = inv(At.A).(At.b)
#
# Solution 2: weighting data by Poisson variance (weighted_operations())
# x = inv(At.W.A).(At.W.b)
# where W is a diagonal matrix of weights w_i,
# weighting each data point i by the inverse of its variance:
# w_i = 1 / sigma_i^2
# For photon noise, the data, i.e. the image values b_i have variance
# proportional to b_i with an e.g. ADU to electrons coonversion factor.
# If this factor is the same for all pixels, we do not need to include
# it here (is that really true? Yes I think so because we're not
# normalizing wts here, just ascribing rel wts.).
#
# Possibly replace or campare with a MAD minimization using fast simplex
# https://theoryl1.wordpress.com/2016/08/03/solve-weighted-least-squares-with-numpy/
# Solve for x in Ax = b
#
# np.set_printoptions(formatter={'float': lambda x: '{:+.1e}'.format(x)}, linewidth=80)
#
# Ax = b
# b: data vector nd long; nd=5
# A: model matrix; np x nd matrix 4 x 5: np=4 parameters, nd=5 data points.
# x: parameter, vector np=4 long, unknown
#
# A=np.array([[3,1,4,2],[2,7,1,2],[1,6,1,8],[6,1,8,2],[1,4,1,4]])
# print("A:", A.shape)
# b = np.array([1.2,1.3,1.4,1.5,1.6])
# print("b:", b.shape)
# w = np.array([1,2,3,4,5])
# print("w:", w.shape)
# Aw = A * np.sqrt(w[:,np.newaxis])
# print("Aw:", Aw.shape)
# bw = w * np.sqrt(w)
# x, r, rank, s = np.linalg.lstsq(Aw, bw, rcond=None)
# print("x.shape:", x.shape)
# print("x:", x)
# print("r:", r)
# print("rank:", rank)
# print("s:", s)
# Also a good summary at:
# https://math.stackexchange.com/questions/3094925/weighted-least-squares
# Remove not-to-be-fit data from the flattened "img" data vector
flatimg = img.reshape(np.shape(img)[0] * np.shape(img)[1])
flatdqm = dqm.reshape(np.shape(img)[0] * np.shape(img)[1])
if dqm is not None: nanlist = np.where(flatdqm==True) # where DO_NOT_USE up.
else: nanlist = (np.array(()), ) # shouldn't occur w/MAST JWST data
# see original linearfit https://github.com/agreenbaum/ImPlaneIA:
# agreenbaum committed on May 21, 2017 1 parent 3e0fb8b
# commit bf02eb52c5813cb5d77036174a7caba703f9d366
#
flatimg = np.delete(flatimg, nanlist) # DATA values
# photon noise variance - proportional to ADU
# (for roughly uniform adu2electron factor)
variance = np.abs(flatimg)
# this resets the weights of pixels with negative or unity values to zero
# we ignore data with unity or lower values - weight it not-at-all..
weights = np.where(flatimg <= 1.0, 0.0, 1.0/np.sqrt(variance)) # anand 2022 Jan
print("fringefitting.leastsqnrm.weighted_operations:", len(nanlist[0]),
"bad pixels skipped in weighted fringefitter")
# A - but delete all pixels flagged by dq array
flatmodel_nan = model.reshape(np.shape(model)[0] * np.shape(model)[1],
np.shape(model)[2])
flatmodel = np.zeros((len(flatimg), np.shape(model)[2]))
for fringe in range(np.shape(model)[2]):
flatmodel[:,fringe] = np.delete(flatmodel_nan[:,fringe], nanlist)
print(flatmodel.shape)
# A.w
# Aw = A * np.sqrt(w[:,np.newaxis]) # w as a column vector
Aw = flatmodel * weights[:,np.newaxis]
# bw = b * np.sqrt(w)
bw = flatimg * weights
# x = np.linalg.lstsq(Aw, bw)[0]
# resids are pixel value residuals, flattened to 1d vector
x, rss, rank, singvals = np.linalg.lstsq(Aw, bw)
#inverse = linalg.inv(Atww)
#cond = np.linalg.cond(inverse)
# actual residuals in image: is this sign convention odd?
# res = np.dot(flatmodel, x) - flatimg
# changed here to data - model
res = flatimg - np.dot(flatmodel, x)
# put bad pixels back
naninsert = nanlist[0] - np.arange(len(nanlist[0]))
# calculate residuals with fixed but unused bad pixels as nans
res = np.insert(res, naninsert, np.nan)
res = res.reshape(img.shape[0], img.shape[1])
cond = None
return x, res, cond, singvals # no condition number yet...
def deltapistons(pistons):
# This function is used for comparison to calculate relative pistons from given | |
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Created on 2017
Author : <NAME>
Affiliation : Université catholique de Louvain - ICTEAM - UCL Crypto Group
Address : Place du Levant 3, 1348 Louvain-la-Neuve, BELGIUM
LELEC2770 : Privacy Enhancing Technologies
Exercice Session : ORAM
Path ORAM
"""
from Crypto.Random.random import randint
from random import sample
import time
# Module-wide flag meant to silence debug printing (not read in this chunk).
NO_PRINT = False
class PathORAMTree:
    """Tree of buckets backing a Path ORAM construction."""

    def __init__(self, root=None, bucketList=None, Z=4, nbChildren=2,
                 depth=10, treeHash='', treeID=''):
        '''
        - root, the root of the tree
        - bucketList is the list of all the nodes of the tree
        - Z is the exact size of the bucket
        - nbChildren is the exact number of children a node must have except
          for the leaf nodes which have none and their parents which have
          only one
        - depth is the number of level of the tree
        - treeHash is the Merkle-Damgard hash of the tree (currently unused)
        - treeID is a string used to identify the tree
        '''
        self.root = root
        # BUG FIX: the original used the mutable default argument
        # bucketList=[], which is created once and shared by every
        # instance constructed without an explicit list. Use a None
        # sentinel and build a fresh list per instance instead.
        self.bucketList = [] if bucketList is None else bucketList
        self.Z = Z  # exact number of blocks in each bucket
        self.nbChildren = nbChildren  # exact number of children a bucket has
        self.depth = depth  # of the tree
        self.treeID = treeID
        # total load: Z blocks at the root plus Z blocks per node on
        # each of the `depth` lower levels
        tLoad = Z
        st = 1
        for i in range(depth):
            st = st * nbChildren
            tLoad = tLoad + Z * st
        self.tLoad = tLoad

    def __str__(self):
        return 'Path ORAM Tree '+str(self.treeID)+' with root \n\t'+str(self.root)+'\n\t Z = '+str(self.Z)+'\n\t number of children = '+str(self.nbChildren)+'\n\t depth = '+str(self.depth)+'\n\t and bucket list : \n\t\t'+str(self.bucketList)

    def __repr__(self):
        return self.__str__()

    def setup(self, fillingBlockMethod):
        '''
        Build the PO tree by filling each node of the tree by buckets and by
        filling each bucket with self.Z blocks where a block is constructed
        using the fillingBlockMethod argument
        '''
        L = []
        for i in range(self.Z):
            L.append(fillingBlockMethod())
        root = PathORAMBucket(self, None, [], L, (0, 0), isRoot=True)
        self.root = root
        self.bucketList.append(self.root)

        def createChildren(bucket, depth):
            # recursively attach nbChildren buckets per node; each leaf
            # parent gets a single empty leaf child
            if depth == 0:
                leaf = PathORAMBucket(self, bucket, [], [],
                                      (bucket.position[0] + 1, 0), isLeaf=True)
                bucket.children = [leaf]
                self.bucketList.append(leaf)
            else:
                childrenList = []
                for i in range(self.nbChildren):
                    L = []
                    for j in range(self.Z):
                        L.append(fillingBlockMethod())
                    childrenList.append(
                        PathORAMBucket(self, bucket, [], L,
                                       (bucket.position[0] + 1, i)))
                bucket.children = childrenList
                for child in childrenList:
                    self.bucketList.append(child)
                    createChildren(child, depth - 1)

        createChildren(self.root, self.depth)

    def isEmpty(self):
        # an empty tree has no buckets and therefore no root
        if self.bucketList == []:
            assert self.root == None
            return True
        else:
            return False
class PathORAMBucket:
    """One node of a Path ORAM tree, holding exactly POTree.Z blocks."""

    def __init__(self, POTree, parent, children, blockList, position,
                 subTreeHash=None, isRoot=False, isLeaf=False):
        '''
        - POTree is the Path ORAM tree in which the bucket is
        - parent is the parent node of the bucket
        - children is a list containing the children nodes of bucket
        - blockList is a list containing the blocks stored in the bucket,
          its size is exactly POTree.Z
        - position is a pair of int (x, y) where x is the level of the
          bucket and y is the (unique) order among the other siblings
        - subTreeHash is the hash of the sub tree of which bucket is the root
        - isRoot / isLeaf flag the bucket's role in the tree
        '''
        self.POTree = POTree
        self.parent = parent
        self.children = children
        self.blockList = blockList
        self.position = position
        self.subTreeHash = subTreeHash  # MD hash of the subtree rooted here
        self.isRoot = isRoot
        self.isLeaf = isLeaf
        # idNumber encodes the root-to-bucket path: '0' for the root,
        # then one digit per level giving the sibling index
        if self.isRoot:
            assert self.parent == None
            assert self.isLeaf is False
            self.idNumber = '0'
        else:
            self.idNumber = self.parent.idNumber + str(self.position[1])
        if self.isLeaf:
            assert self.children == []
            assert self.blockList == []
            assert self.isRoot is False
            assert self.parent != None

    def __str__(self):
        if self.isRoot:
            return 'Root Bucket of the PO tree ' + self.POTree.treeID
        if self.isLeaf:
            return 'Leaf Bucket ' + str(self.idNumber) + ' of the PO tree ' + self.POTree.treeID
        return 'PO Bucket ' + str(self.idNumber) + ' of the PO tree ' + self.POTree.treeID

    def __repr__(self):
        return self.__str__()

    def merkleDamgardHash(self):
        # hashing is not implemented in this exercise skeleton
        return None
class PathORAM :
def __init__(self, POTree, creatDummyBlock=None, rerandomizeBlock=None):
    '''
    - POTree is the Path ORAM tree in which the data will be stored.

    The method initializes the following variables:
    - positionDic: {bucketID: [(blockID, path), ...] of size Z}; the
      special key 'stash' lists the blocks kept in the client stash
      (with their assigned paths)
    - positionMap: {blockID: (bucketID, path)}
    - clientStash: {blockID: block}
    - pathList: every root-to-leaf path string of the tree
    '''
    self.POTree = POTree
    self.positionDic = {'stash': []}
    self.positionMap = {}
    self.clientStash = {}
    self.pathList = self.buildPathList()
    # every non-leaf bucket starts with Z empty slots; leaves are the
    # buckets whose idNumber has length depth + 2, and are skipped
    for bucket in self.POTree.bucketList:
        bucketID = bucket.idNumber
        if len(bucketID) != self.POTree.depth + 2:
            self.positionDic[bucketID] = [('', '')] * self.POTree.Z
    # fall back to trivial callbacks when none are supplied
    if creatDummyBlock is None:
        self.createDummyBlock = lambda: 'dummy block'
    else:
        self.createDummyBlock = creatDummyBlock
    if rerandomizeBlock is None:
        self.rerandomizeBlock = lambda block: ('rerand', block)
    else:
        self.rerandomizeBlock = rerandomizeBlock
def buildPathList(self):
    '''
    Return an iterable of all the paths of self.POTree.
    A path is a string such as '025103...40' where the letter at index i
    names the child (of the level-(i-1) node) that lies on the path. The
    first letter is always 0 (the root) and the last is always 0 (each
    leaf parent has a single leaf child).
    '''
    alphabet = [str(i) for i in range(self.POTree.nbChildren)]
    # Iteratively grow the words one level at a time; prepending each
    # letter to every existing word reproduces the recursive builder's
    # ordering exactly.
    words = list(alphabet)
    for _ in range(self.POTree.depth - 1):
        words = [letter + word for word in words for letter in alphabet]
    return ['0' + word for word in words]
def fillupStash(self, blockList):
    '''
    Given a blockList of (blockID, block) pairs, fill the client stash
    and assign each block a uniformly random path. Also records the
    assignments in self.positionDic and self.positionMap.
    '''
    n = len(self.pathList)
    # the stash must be empty before bulk-loading it
    assert self.positionDic['stash'] == []
    for blockID, block in blockList:
        path = self.pathList[randint(0, n - 1)]
        self.positionDic['stash'].append((blockID, path))
        self.positionMap[blockID] = ('stash', path)
        self.clientStash[blockID] = block
def randomlyAssignStash(self):
    '''
    Randomly assign each block of the stash into the PO Tree.
    For this method to work, the PO tree must contain enough empty
    slots; after 1000 failed placement attempts for a block the method
    gives up and returns.
    '''
    stash_copy = self.clientStash.copy()
    # PY3 FIX: dict.keys() is a view with no .remove() in Python 3;
    # materialize it as a real list first (also valid in Python 2).
    bucketList = list(self.positionDic.keys())
    bucketList.remove('stash')
    for blockID in stash_copy:
        bucketID, path = self.positionMap[blockID]
        assert bucketID == 'stash'
        assert (blockID, path) in self.positionDic['stash']
        # reassign block: retry random buckets until one has a free slot
        cond = True
        nbTries = 0
        while cond:
            if nbTries > 1000:
                # PY3 FIX: print statement -> print() function (the
                # single-argument form behaves identically in Python 2)
                print('the number of tries exceed 1000, the method stops')
                return
            r = randint(0, len(bucketList) - 1)
            randomBucketID = bucketList[r]
            if ('', '') in self.positionDic[randomBucketID]:
                # there is one empty slot in the bucket: move the block
                i = self.positionDic[randomBucketID].index(('', ''))
                self.clientStash.pop(blockID)
                self.positionDic[randomBucketID][i] = (blockID, path)
                self.positionMap[blockID] = (randomBucketID, path)
                cond = False
            nbTries += 1
    # NOTE(review): the (blockID, path) entries are never removed from
    # self.positionDic['stash'] here -- presumably handled elsewhere;
    # verify against callers before changing.
def queryBlock(self,blockID):
'''
This method returns a block whose Id is blockID. Doing so, the method
changes all the buckets (and blocks) that are on the path of the block.
Also the self.clientStash, the self.positionDic and the self.positionMap
are modified at the end of the execution.
'''
Z = self.POTree.Z
lstash0 = len(self.clientStash)
bucketID,path = self.positionMap[blockID]
n = len(self.pathList)
r = randint(0,n-1)
new_path = self.pathList[r] # Chose a new location for the querried block
print 'Querrying block ', blockID, ' stored on path ', path, ', the block is reassigned to ',new_path
if bucketID == 'stash':
# the block is stored in the stash
queriedBlock = self.clientStash[blockID]
for i in range(len(self.positionDic['stash'])):
if self.positionDic['stash'][i][0] == blockID :
blockOrder = i
break
self.positionDic['stash'][blockOrder] = (blockID,new_path) # update positionDic accordingly
self.positionMap[blockID] = ('stash',new_path) # update positionMap accordingly
node = self.POTree.root
path_copy = path[1:]+'0'
| |
'wǒ',
0x27D96: 'xī',
0x27D99: 'bèi',
0x27D9C: 'shāng,shǎng',
0x27DA0: 'yù',
0x27DA1: 'mì',
0x27DB2: 'duǎn,zhuàn',
0x27DB5: 'chà',
0x27DB7: 'zé',
0x27DB8: 'chèng',
0x27DBA: 'tíng',
0x27DC5: 'yí',
0x27DCB: 'yāo',
0x27DCE: 'kū',
0x27DD0: 'fén',
0x27DD1: 'xié',
0x27DD2: 'chèng',
0x27DDB: 'kuì',
0x27DDF: 'bīn',
0x27DE1: 'lóu,lòu',
0x27DE5: 'yì',
0x27DE6: 'mì',
0x27DE7: 'xiè',
0x27DF1: 'guī',
0x27DF3: 'luó',
0x27DF4: 'huò',
0x27DF6: 'shàn',
0x27DFE: 'jú',
0x27DFF: 'dū',
0x27E02: 'xiān',
0x27E05: 'zhǐ',
0x27E08: 'bìn',
0x27E15: 'zhǐ',
0x27E16: 'zhuàn,lián',
0x27E17: 'xué',
0x27E18: 'liàn,biǎn,jiǎn',
0x27E19: 'suì',
0x27E26: 'làn',
0x27E27: 'jù',
0x27E28: 'mián',
0x27E29: 'xùn',
0x27E2A: 'zhàn',
0x27E2B: 'gùn',
0x27E2E: 'wéi',
0x27E32: 'zhì',
0x27E3D: 'wèi',
0x27E3E: 'quǎn,xuàn',
0x27E3F: 'chài',
0x27E48: 'réng',
0x27E4A: 'yuè',
0x27E4C: 'zī',
0x27E50: 'luò',
0x27E5B: 'zhī',
0x27E5E: 'nǎn,niǎn',
0x27E63: 'hān',
0x27E68: 'xī',
0x27E69: 'lín',
0x27E6C: 'yān',
0x27E6D: 'xù',
0x27E72: 'hù',
0x27E73: 'gàn',
0x27E74: 'xù,huò',
0x27E76: 'xì',
0x27E7A: 'cuì',
0x27E7D: 'xì',
0x27E7E: 'hú',
0x27E85: 'yān',
0x27E86: 'zǒu',
0x27E8D: 'péi',
0x27E8E: 'yì',
0x27E8F: 'chí',
0x27E90: 'jué',
0x27E92: 'zú',
0x27E9C: 'jiào',
0x27E9D: 'yì',
0x27E9F: 'tǎn',
0x27EA0: 'chì',
0x27EA1: 'bá',
0x27EA2: 'tòu,yì',
0x27EA3: 'zōng',
0x27EA4: 'qiú,jū',
0x27EA7: 'chì',
0x27EA8: 'xǐ',
0x27EAB: 'qǐ',
0x27EB0: 'nì',
0x27EB2: 'cū',
0x27EB4: 'wǔ',
0x27EB6: 'chù',
0x27EB7: 'sū',
0x27EB8: 'yóng',
0x27EB9: 'jǔ',
0x27EBA: 'bá',
0x27EBC: 'cǐ',
0x27EBD: 'dì',
0x27EBE: 'pǎn',
0x27EBF: 'chì,yì',
0x27EC1: 'qiǔ',
0x27EC3: 'yán,qù',
0x27ECD: 'zhǎi',
0x27ED2: 'xiàn',
0x27ED3: 'bèng',
0x27ED4: 'kuāng',
0x27ED5: 'qì',
0x27ED6: 'zhōu',
0x27ED7: 'jú',
0x27ED8: 'qiè',
0x27ED9: 'mò,pò',
0x27EDA: 'yuán',
0x27EDC: 'guì,kuǐ',
0x27EDD: 'zuī',
0x27EE7: 'qiè',
0x27EF0: 'hú,zào',
0x27EF1: 'qiú',
0x27EF2: 'hái,kuī',
0x27EF3: 'fù',
0x27EF4: 'làng',
0x27EF5: 'shà',
0x27EF6: 'xī',
0x27EF7: 'bū',
0x27EF8: 'shì',
0x27EF9: 'yǒng',
0x27EFA: 'guāng,kuāng',
0x27EFC: 'niè',
0x27EFF: 'hǒu',
0x27F0A: 'mì',
0x27F0E: 'è',
0x27F0F: 'xián',
0x27F10: 'yǔn,qūn',
0x27F11: 'xù',
0x27F12: 'qǐn',
0x27F13: 'dōng',
0x27F14: 'léng',
0x27F15: 'qì',
0x27F16: 'lán',
0x27F17: 'fú',
0x27F18: 'qǐ',
0x27F19: 'chǒng',
0x27F1C: 'cù',
0x27F1F: 'mò',
0x27F20: 'bēi',
0x27F24: 'dào',
0x27F28: 'jié,jué',
0x27F29: 'chòng,dòng',
0x27F2A: 'chì',
0x27F2B: 'yù',
0x27F2C: 'cuī',
0x27F2D: 'sù,sōu,sǒu,qiù',
0x27F2E: 'tì',
0x27F2F: 'shù,yú',
0x27F30: 'zhá',
0x27F31: 'fú,bí',
0x27F33: 'chè',
0x27F34: 'fó,zhì',
0x27F35: 'hóu',
0x27F36: 'zhá',
0x27F44: 'jié',
0x27F45: 'zhá',
0x27F46: 'zhān',
0x27F49: 'yǎn',
0x27F4A: 'hái',
0x27F4B: 'wǔ',
0x27F4C: 'huá',
0x27F4D: 'diān,diàn',
0x27F4E: 'yáo',
0x27F4F: 'sōu',
0x27F50: 'qiān',
0x27F51: 'jí',
0x27F52: 'xiòng',
0x27F53: 'qì',
0x27F54: 'jūn',
0x27F56: 'hái',
0x27F5E: 'yǎn',
0x27F5F: 'jié',
0x27F60: 'cuī',
0x27F62: 'tuán',
0x27F63: 'zhāng',
0x27F64: 'piāo',
0x27F65: 'lù',
0x27F66: 'zhī',
0x27F67: 'chù',
0x27F68: 'mì',
0x27F69: 'qiāng',
0x27F6B: 'liàn',
0x27F6F: 'jiàn',
0x27F72: 'lì',
0x27F76: 'é',
0x27F77: 'sù',
0x27F78: 'jué,guì',
0x27F7B: 'jú',
0x27F7C: 'tán',
0x27F7D: 'liáo',
0x27F7E: 'sān,cún',
0x27F7F: 'dòng',
0x27F81: 'zá',
0x27F82: 'zhí',
0x27F86: 'xuàn',
0x27F87: 'líng',
0x27F8A: 'dēng',
0x27F8D: 'zhān,zhàn,chán',
0x27F8E: 'xuān',
0x27F8F: 'qǐn',
0x27F90: 'jiào',
0x27F91: 'pì',
0x27F94: 'hǎn',
0x27F9A: 'yú',
0x27F9B: 'guó',
0x27F9D: 'xún',
0x27FA0: 'xún',
0x27FA1: 'chán',
0x27FA2: 'jié,jí',
0x27FA3: 'jú',
0x27FA4: 'yǎn',
0x27FA5: 'dú',
0x27FA7: 'hòng',
0x27FA8: 'xiàn,xiǎn',
0x27FA9: 'xún,xuàn',
0x27FAB: 'biān',
0x27FAE: 'líng',
0x27FAF: 'jié',
0x27FB0: 'yì',
0x27FB1: 'qú',
0x27FB2: 'gān',
0x27FB3: 'fēng',
0x27FB5: 'jué',
0x27FB6: 'qū',
0x27FBB: 'jiù',
0x27FBD: 'jì',
0x27FBE: 'jǐ',
0x27FC5: 'xí',
0x27FC6: 'pāng',
0x27FC9: 'kù,wù',
0x27FCB: 'kù',
0x27FCC: 'zhà',
0x27FCF: 'bà',
0x27FD2: 'chěn',
0x27FD3: 'hù',
0x27FD4: 'nù',
0x27FD5: 'é',
0x27FD6: 'xiōng',
0x27FD7: 'dǔn',
0x27FD8: 'shēng',
0x27FD9: 'wán',
0x27FDA: 'fēn',
0x27FDD: 'xī',
0x27FDE: 'zī',
0x27FE0: 'hù,dì',
0x27FE5: 'bié',
0x27FE7: 'tuò',
0x27FE8: 'bǎn',
0x27FE9: 'gé',
0x27FEB: 'kē',
0x27FF2: 'zhuì,bó',
0x27FF3: 'fú,fèi',
0x27FF4: 'mò',
0x27FF5: 'jiá',
0x27FF6: 'tuó',
0x27FF7: 'yù',
0x27FF9: 'mǔ',
0x27FFA: 'jué',
0x27FFB: 'jú',
0x27FFC: 'guā',
0x27FFD: 'pǒ',
0x28000: 'nǐ,niǎn',
0x28004: 'wǎ',
0x28005: 'yǎn',
0x28014: 'chǒu',
0x28015: 'kuāng',
0x28016: 'hài',
0x28018: 'xiáng',
0x28019: 'xī',
0x2801B: 'cún',
0x2801C: 'tōng',
0x2801D: 'ruò',
0x2801F: 'duó',
0x28020: 'chè',
0x28024: 'lèi',
0x28025: 'zī',
0x28027: 'zhěng',
0x28028: 'zuǒ',
0x2802B: 'kāng',
0x2802C: 'zài',
0x2802E: 'yuān,xuān',
0x2802F: 'qióng',
0x28033: 'fá',
0x28034: 'xún',
0x28035: 'zhuàng',
0x28036: 'jì',
0x28038: 'chā',
0x28040: 'shū,chōu',
0x28041: 'xuàn',
0x28042: 'xié',
0x28043: 'tī',
0x28044: 'hàn',
0x28045: 'xiān',
0x28046: 'shān',
0x28047: 'tùn',
0x28048: 'háng,gēng',
0x28049: 'kǔn',
0x2804A: 'cén',
0x2804B: 'dōu',
0x2804C: 'nuó',
0x2804D: 'yàn',
0x2804E: 'chéng,jìng',
0x2804F: 'pū',
0x28050: 'qì',
0x28051: 'yuè',
0x28052: 'fū',
0x28057: 'tǐng',
0x2805F: 'wǒ',
0x28060: 'shēng',
0x28061: 'tuǒ',
0x28076: 'yǎ,yā',
0x28077: 'zhì',
0x28078: 'lù,lì',
0x28079: 'yǎn',
0x2807A: 'jū',
0x2807D: 'dé',
0x2807F: 'chù,zhuó',
0x28080: 'zǔ',
0x28081: 'è',
0x28082: 'zhí,xuě',
0x28083: 'péng',
0x28085: 'biē',
0x28087: 'dǐ',
0x2808A: 'zhōu',
0x2808B: 'jī',
0x28090: 'lái',
0x28092: 'yè',
0x2809C: 'háo',
0x2809D: 'pán',
0x2809E: 'tàn',
0x2809F: 'kāng',
0x280A0: 'xū,lǚ',
0x280A1: 'zòu',
0x280A2: 'jì',
0x280A3: 'wù',
0x280A6: 'chuàn',
0x280A9: 'pò',
0x280AA: 'yǎn',
0x280AB: 'tuò',
0x280AD: 'dú',
0x280AF: 'pián',
0x280B0: 'chì',
0x280B1: 'hùn',
0x280B2: 'pīng',
0x280B4: 'cōng',
0x280B5: 'zhǎ',
0x280BB: 'dǎo',
0x280BD: 'dàn',
0x280BE: 'nàn',
0x280C3: 'è',
0x280C4: 'wèi',
0x280C5: 'bāi',
0x280C7: 'jiāng',
0x280C9: 'zhì',
0x280D3: 'chá',
0x280D5: 'chù',
0x280D6: 'kuà',
0x280D7: 'téng',
0x280D8: 'zōu,qū',
0x280D9: 'lì',
0x280DA: 'tà',
0x280DB: 'sà',
0x280DE: 'pán',
0x280DF: 'pán',
0x280E3: 'sào',
0x280E4: 'qiāo,kào',
0x280E9: 'xiàn',
0x280ED: 'zú',
0x280EF: 'zhì',
0x280F0: 'yǎn',
0x280F2: 'jié',
0x280F3: 'néng',
0x28105: 'qū',
0x28107: 'dèng,téng',
0x28108: 'liáng',
0x28109: 'chǎn',
0x2810A: 'qiè',
0x2810B: 'lòu',
0x2810C: 'dié,xiè',
0x2810D: 'cuī',
0x28110: 'jǐ',
0x28113: 'cháo',
0x28114: 'shuàn',
0x28115: 'zú',
0x28117: 'kāng',
0x2811A: 'qiāng',
0x2811B: 'lí',
0x2812E: 'shuāi',
0x2812F: 'yù',
0x28130: 'zhāng',
0x28131: 'lěi',
0x28145: 'pó',
0x2814A: 'zhé,chè',
0x2814B: 'xiào',
0x2814D: 'tǎn',
0x2814E: 'cuì',
0x2814F: 'lán',
0x28151: 'xū',
0x28152: 'shù,chú',
0x28153: 'zhǎ,dá',
0x28154: 'cán',
0x28157: 'bǐ',
0x28158: 'pèng',
0x2815D: 'chéng',
0x28163: 'qiáo',
0x28164: 'jī',
0x2816A: 'zhāi',
0x2816D: 'tán',
0x28181: 'tiǎn,yǎn',
0x28182: 'sà',
0x28183: 'jīn',
0x28184: 'zhù',
0x28185: 'duò',
0x28187: 'chà',
0x28188: 'juàn',
0x28189: 'táng',
0x2818A: 'bèng',
0x2818B: 'zhī',
0x2818C: 'fán',
0x2818D: 'liè',
0x2818E: 'zéi',
0x2818F: 'suì',
0x28193: 'qín',
0x28199: 'sè',
0x281A7: 'zhì',
0x281A8: 'tuí',
0x281AA: 'qīng',
0x281AC: 'chuò',
0x281B0: 'tà,dà',
0x281B1: 'bìng',
0x281B2: 'wěn',
0x281B5: 'pǒ',
0x281BD: 'mó',
0x281BE: 'cā',
0x281C1: 'kuàng',
0x281C3: 'cuó,zuān',
0x281C4: 'rǎo',
0x281C5: 'bào',
0x281C6: 'lài',
0x281CD: 'niǎn',
0x281CE: 'lí',
0x281D5: 'jiǎo',
0x281D6: 'lú',
0x281D7: 'lì',
0x281D8: 'lóng',
0x281D9: 'guì',
0x281DD: 'chǎn',
0x281E4: 'xiān',
0x281E6: 'chàn',
0x281E8: 'xiè',
0x281E9: 'zhàn',
0x281EF: 'shuāng',
0x281FB: 'mǐ',
0x281FC: 'luán',
0x281FD: 'luò',
0x28200: 'diān',
0x28206: 'lán',
0x28207: 'làn',
0x28208: 'dié',
0x2820A: 'wān',
0x2820B: 'yuè',
0x2820C: 'luán',
0x2820E: 'luán',
0x28213: 'léng',
0x28215: 'wǎi',
0x28216: 'dìn',
0x28217: 'nèn',
0x28218: 'shǎo',
0x28219: 'xiè,zhī',
0x2821A: 'pí',
0x28225: 'máo',
0x28227: 'yǐn',
0x28229: 'bó',
0x2822B: 'zhù',
0x2822E: 'chōng',
0x28236: 'mǔ',
0x28237: 'tuó',
0x28239: 'tǒng',
0x2823A: 'yé',
0x28241: 'huàng',
0x28243: 'rèn',
0x28245: 'yè',
0x2824B: 'tuó',
0x28256: 'zuān',
0x2825A: 'ā',
0x2825C: 'zhōu',
0x2825D: 'wān',
0x28261: 'duǒ',
0x28262: 'zhòng',
0x28263: 'hā',
0x28264: 'huáng',
0x28265: 'miàn,tǐ',
0x28269: 'chūn',
0x2826A: 'qiè',
0x2826B: 'gōng,qiōng',
0x2826C: 'tíng',
0x2826D: 'méi',
0x28271: 'tàng',
0x28274: 'róng',
0x28277: 'róng',
0x28278: 'qí',
0x28279: 'guó',
0x2827D: 'xiàng',
0x2827E: 'tián',
0x28285: 'xiāo',
0x28288: 'zhān',
0x28289: 'cuì',
0x28294: 'lán',
0x28298: 'shēn,qū',
0x2829A: 'lěi',
0x2829B: 'lì',
0x2829D: 'chān',
0x2829E: 'niè',
0x2829F: 'luán',
0x282A1: 'tīng',
0x282A2: 'huì,sháo',
0x282A7: 'gōng',
0x282B0: 'qì',
0x282B1: 'yú',
0x282B3: 'xīn',
0x282B8: 'yuè',
0x282B9: 'bā',
0x282BA: 'dài',
0x282BB: 'jī',
0x282BC: 'xuàn',
0x282BF: 'jué',
0x282C0: 'niǔ',
0x282C8: 'dù',
0x282C9: 'jí',
0x282D0: 'pā',
0x282D1: 'gǒng',
0x282D2: 'bèn',
0x282D4: 'kēng,jú',
0x282D5: 'yàng,ǎng',
0x282D6: 'liǔ',
0x282D7: 'ní',
0x282D8: 'zhà',
0x282D9: 'yìn',
0x282DA: 'niǎn,ruǎn',
0x282DB: 'pào',
0x282DD: 'gōng',
0x282DE: 'bù',
0x282DF: 'hé',
0x282E0: 'rǒng',
0x282E1: 'guì',
0x282E5: 'bì',
0x282E6: 'xī',
0x282E7: 'jú',
0x282E8: 'hún',
0x282E9: 'bì,fú',
0x282EB: 'tiāo',
0x282EC: 'zhěng,chèng',
0x282EF: 'yì',
0x282F0: 'cì',
0x282F2: 'bìng',
0x282F7: 'gōng',
0x282FA: 'fá',
0x282FD: 'yáng',
0x282FE: 'xǔ',
0x28301: 'hōng,chūn',
0x28304: 'zàng',
0x28305: 'chái',
0x28306: 'hóng',
0x28308: 'tián',
0x2830C: 'zhī',
0x2830D: 'xīng',
0x2830E: 'xú',
0x28311: 'zhèn',
0x28314: 'wǎn,wàn',
0x28318: 'jùn',
0x2831D: 'wò,huò',
0x28320: 'lù',
0x28322: 'zhēng',
0x28323: 'rǒng',
0x28324: 'chéng,chèng',
0x28325: 'fú',
0x28327: 'è',
0x28328: 'tāo',
0x28329: 'táng',
0x2832B: 'juān',
0x2832C: 'chào',
0x2832D: 'tà',
0x2832E: 'dǐ',
0x28330: 'zōng',
0x28332: 'mǐn',
0x28333: 'kēng',
0x28334: 'tuī',
0x28336: 'kēng',
0x28345: 'rǒng',
0x28346: 'yūn',
0x28347: 'hé',
0x28348: 'zǒng',
0x28349: 'cōng,zǒng',
0x2834A: 'qiū',
0x2834E: 'mù',
0x28351: 'kēng',
0x28352: 'xiàn,jiàn',
0x2835B: 'dú',
0x2835C: 'kǎn',
0x2835E: 'yīng',
0x28362: 'zī',
0x28367: 'huáng',
0x28369: 'péng',
0x2836B: 'lì',
0x2836D: 'bó,pò',
0x2836E: 'gé,lì',
0x2836F: 'jú',
0x28370: 'kē',
0x28372: 'hú,gǔn',
0x28373: 'diāo,yáo',
0x28374: 'táng',
0x28376: 'qióng',
0x28377: 'rǒng',
0x28378: 'liǔ',
0x28379: 'huì',
0x2837A: 'jī',
0x2837D: 'gǔn',
0x28389: 'zhì',
0x2838B: 'táng,chēng',
0x2838C: 'zhǐ',
0x2838D: 'kāng,liáng',
0x28394: 'yàng',
0x28396: 'tǎng,chǎng',
0x28397: 'hōng',
0x2839B: 'liáng',
0x2839D: 'cáo',
0x283A1: 'nǎi',
0x283A2: 'zǒng',
0x283A4: 'dèng',
0x283A6: 'jiāo',
0x283A7: 'péng',
0x283A9: 'guāng',
0x283AB: 'jiàn',
0x283AC: 'jiào',
0x283AD: 'nuó',
0x283AE: 'zǎo',
0x283B3: 'péng',
0x283B4: 'dāng',
0x283B6: 'qú',
0x283B7: | |
import numpy as np
from random import random
from numba import njit
import random as rand
import matplotlib.pyplot as plt
class RotSurCode():
    """Rotated surface code of linear size `size`.

    `qubit_matrix[i, j]` holds the Pauli error on data qubit (i, j) in the
    encoding 0 = I, 1 = X, 2 = Y, 3 = Z.  `plaquette_defects` holds the
    syndrome bit (0/1) of every stabilizer plaquette: interior entries are
    full (weight-4) plaquettes, border entries are the boundary (weight-2)
    half-plaquettes.
    """
    # Number of logical equivalence classes (I, X, Y, Z on the logical qubit).
    nbr_eq_classes = 4
    def __init__(self, size):
        # size: number of data qubits per row/column.
        self.system_size = size
        # One uint8 Pauli code (0..3) per data qubit.
        self.qubit_matrix = np.zeros((self.system_size, self.system_size), dtype=np.uint8)
        # (size+1) x (size+1) syndrome grid; filled by self.syndrome().
        self.plaquette_defects = np.zeros((size + 1, size + 1))
    def generate_random_error(self, p_x, p_y, p_z):
        """Apply i.i.d. Pauli noise with per-qubit probabilities p_x, p_y, p_z,
        then recompute the syndrome."""
        size = self.system_size
        for i in range(size):
            for j in range(size):
                q = 0
                r = rand.random()
                # Partition [0, 1): [0, p_z) -> Z, [p_z, p_z+p_x) -> X,
                # [p_z+p_x, p_z+p_x+p_y) -> Y, anything else -> identity.
                # (The bands are disjoint, so plain `if`s are equivalent to elif.)
                if r < p_z:
                    q = 3
                if p_z < r < (p_z + p_x):
                    q = 1
                if (p_z + p_x) < r < (p_z + p_x + p_y):
                    q = 2
                self.qubit_matrix[i, j] = q
        self.syndrome()
    def generate_zbiased_error(self, p_error, eta):  # Z-biased noise
        """Apply Z-biased i.i.d. noise with total error probability `p_error`
        and bias eta = p_z / (p_x + p_y), then recompute the syndrome."""
        eta = eta
        p = p_error
        # Split the total rate so that p_z : (p_x + p_y) = eta : 1.
        p_z = p * eta / (eta + 1)
        p_x = p / (2 * (eta + 1))
        p_y = p_x
        size = self.system_size
        for i in range(size):
            for j in range(size):
                q = 0
                r = rand.random()
                if r < p_z:
                    q = 3
                elif p_z < r < (p_z + p_x):
                    q = 1
                elif (p_z + p_x) < r < (p_z + p_x + p_y):
                    q = 2
                self.qubit_matrix[i, j] = q
        self.syndrome()
    # def generate_random_error(self, p_error, eta):  # Y-biased noise
    #     eta = eta
    #     p = p_error
    #     p_y = p * eta / (eta + 1)
    #     p_x = p / (2 * (eta + 1))
    #     p_z = p_x
    #     size = self.system_size
    #     for i in range(size):
    #         for j in range(size):
    #             q = 0
    #             r = rand.random()
    #             if r < p_y:
    #                 q = 2
    #             elif p_y < r < (p_y + p_x):
    #                 q = 1
    #             elif (p_y + p_x) < r < (p_y + p_x + p_z):
    #                 q = 3
    #             self.qubit_matrix[i, j] = q
    def chain_lengths(self):
        """Return the counts (nx, ny, nz) of X, Y and Z errors on the lattice."""
        nx = np.count_nonzero(self.qubit_matrix[:, :] == 1)
        ny = np.count_nonzero(self.qubit_matrix[:, :] == 2)
        nz = np.count_nonzero(self.qubit_matrix[:, :] == 3)
        return nx, ny, nz
    def count_errors(self):
        """Return the total number of non-identity Paulis on the lattice."""
        return _count_errors(self.qubit_matrix)
    def apply_logical(self, operator: int, X_pos=0, Z_pos=0):
        """Apply logical operator (0=I, 1=X, 2=Y, 3=Z) to a copy; see _apply_logical."""
        return _apply_logical(self.qubit_matrix, operator, X_pos, Z_pos)
    def apply_stabilizer(self, row: int, col: int, operator: int):
        """Apply one stabilizer to a copy of the qubit matrix; see _apply_stabilizer."""
        return _apply_stabilizer(self.qubit_matrix, row, col, operator)
    def apply_random_logical(self):
        """Apply a uniformly random logical operator to a copy of the qubit matrix."""
        return _apply_random_logical(self.qubit_matrix)
    def apply_random_stabilizer(self):
        """Apply a uniformly random stabilizer to a copy of the qubit matrix."""
        return _apply_random_stabilizer(self.qubit_matrix)
    def apply_stabilizers_uniform(self, p=0.5):
        """Apply each stabilizer independently with probability p to a copy."""
        return _apply_stabilizers_uniform(self.qubit_matrix, p)
    def define_equivalence_class(self):
        """Return the logical equivalence class (0..3) of the current error."""
        return _define_equivalence_class(self.qubit_matrix)
    def to_class(self, eq):
        """Return a qubit matrix equivalent to the current one but lying in
        equivalence class `eq` (XOR gives the logical that maps between them)."""
        eq_class = self.define_equivalence_class()
        op = eq_class ^ eq
        return self.apply_logical(op)[0]
    def syndrome(self):
        """Recompute `plaquette_defects` from `qubit_matrix`."""
        size = self.qubit_matrix.shape[1]
        qubit_matrix = self.qubit_matrix
        # Interior (full, weight-4) plaquettes.
        for i in range(size-1):
            for j in range(size-1):
                self.plaquette_defects[i+1, j+1] = _find_syndrome(qubit_matrix, i, j, 1)
        # Boundary half-plaquettes; j selects the edge (0=top, 1=right,
        # 2=bottom, 3=left, matching _find_syndrome's operator==3 branch)
        # and i indexes the plaquette along that edge.
        for i in range(int((size - 1)/2)):
            for j in range(4):
                row = 0
                col = 0
                if j == 0:
                    row = 0
                    col = 2 * i + 2
                elif j == 1:
                    row = 2 * i + 2
                    col = size
                elif j == 2:
                    row = size
                    col = 2 * i + 1
                elif j == 3:
                    row = 2 * i + 1
                    col = 0
                self.plaquette_defects[row, col] = _find_syndrome(qubit_matrix, i, j, 3)
    def plot(self, title):
        """Render the lattice (qubits, errors, grid, boundary semicircles and
        syndrome defects) and save it to plots/graph_<title>.png."""
        system_size = self.system_size
        xLine = np.linspace(0, system_size - 1, system_size)
        a = range(system_size)
        X, Y = np.meshgrid(a, a)
        XLine, YLine = np.meshgrid(a, xLine)
        # np.where gives (rows, cols) index arrays for each marker category.
        plaquette_defect_coordinates = np.where(self.plaquette_defects)
        x_error = np.where(self.qubit_matrix[:, :] == 1)
        y_error = np.where(self.qubit_matrix[:, :] == 2)
        z_error = np.where(self.qubit_matrix[:, :] == 3)
        def generate_semicircle(center_x, center_y, radius, stepsize=0.1):
            # Sample a semicircle used to draw the rounded boundary plaquettes.
            x = np.arange(center_x, center_x + radius + stepsize, stepsize)
            y = np.sqrt(radius ** 2 - x ** 2)
            x = np.concatenate([x, x[::-1]])
            y = np.concatenate([y, -y[::-1]])
            return x, y + center_y
        markersize_qubit = 15
        markersize_excitation = 7
        markersize_symbols = 7
        linewidth = 2
        # Plot grid lines
        ax = plt.subplot(111)
        x, y = generate_semicircle(0, 1, 0.5, 0.01)
        # Semicircular half-plaquettes on all four boundaries.
        for i in range(int((system_size - 1) / 2)):
            ax.plot(y + 0.5 + i * 2, x + system_size - 1, color='black', linewidth=linewidth)
            ax.plot(-y + 1.5 + 2 * i, -x, color='black', linewidth=linewidth)
            ax.plot(x + system_size - 1, y - 0.5 + i * 2, color='black', linewidth=linewidth)
            ax.plot(-x, -y + 0.5 + system_size - 1 - 2 * i, color='black', linewidth=linewidth)
        ax.plot(XLine, YLine, 'black', linewidth=linewidth)
        ax.plot(YLine, XLine, 'black', linewidth=linewidth)
        # Data qubits as open circles, errors as X/Y/Z glyph markers.
        ax.plot(X, Y, 'o', color='black', markerfacecolor='white', markersize=markersize_qubit + 1)
        ax.plot(x_error[1], system_size - 1 - x_error[0], 'o', color='blue', markersize=markersize_symbols, marker=r'$X$')
        ax.plot(y_error[1], system_size - 1 - y_error[0], 'o', color='blue', markersize=markersize_symbols, marker=r'$Y$')
        ax.plot(z_error[1], system_size - 1 - z_error[0], 'o', color='blue', markersize=markersize_symbols, marker=r'$Z$')
        # Defects on the border are nudged towards their half-plaquette.
        for i in range(len(plaquette_defect_coordinates[1])):
            if plaquette_defect_coordinates[1][i] == 0:
                ax.plot(plaquette_defect_coordinates[1][i] - 0.5 + 0.25, system_size - plaquette_defect_coordinates[0][i] - 0.5, 'o', color='red', label="flux", markersize=markersize_excitation)
            elif plaquette_defect_coordinates[0][i] == 0:
                ax.plot(plaquette_defect_coordinates[1][i] - 0.5, system_size - plaquette_defect_coordinates[0][i] - 0.5 - 0.25, 'o', color='red', label="flux", markersize=markersize_excitation)
            elif plaquette_defect_coordinates[1][i] == system_size:
                ax.plot(plaquette_defect_coordinates[1][i] - 0.5 - 0.25, system_size - plaquette_defect_coordinates[0][i] - 0.5, 'o', color='red', label="flux", markersize=markersize_excitation)
            elif plaquette_defect_coordinates[0][i] == system_size:
                ax.plot(plaquette_defect_coordinates[1][i] - 0.5, system_size - plaquette_defect_coordinates[0][i] - 0.5 + 0.25, 'o', color='red', label="flux", markersize=markersize_excitation)
            else:
                ax.plot(plaquette_defect_coordinates[1][i] - 0.5, system_size - plaquette_defect_coordinates[0][i] - 0.5, 'o', color='red', label="flux", markersize=markersize_excitation)
        # ax.plot(plaquette_defect_coordinates[1] - 0.5, system_size - plaquette_defect_coordinates[0] - 0.5, 'o', color='red', label="flux", markersize=markersize_excitation)
        ax.axis('off')
        plt.axis('equal')
        #plt.show()
        plt.savefig('plots/graph_'+str(title)+'.png')
        # plt.close()
@njit('(uint8[:,:],)')
def _count_errors(qubit_matrix):
    # Total number of qubits carrying a non-identity Pauli (code != 0).
    return np.count_nonzero(qubit_matrix)
@njit('(uint8[:,:], int64, int64, int64)')
def _find_syndrome(qubit_matrix, row: int, col: int, operator: int):
    """Return the syndrome bit (0 or 1) of one plaquette.

    operator == 1: full interior plaquette anchored at (row, col); the
    stabilizer type (op = 1 for X, 3 for Z) alternates in a checkerboard.
    operator == 3: boundary half-plaquette; here `col` selects the edge
    (0=top, 1=right, 2=bottom, 3=left) and `row` indexes the plaquette
    along that edge.

    A qubit contributes a parity flip when its error anticommutes with the
    stabilizer, i.e. it is neither identity nor equal to `op`.
    """
    def flip(a):
        # Toggle a parity bit; only ever called with 0 or 1.
        if a == 0:
            return 1
        elif a == 1:
            return 0
    size = qubit_matrix.shape[1]
    # NOTE(review): the copy is only read below; indexing qubit_matrix
    # directly would avoid the allocation.
    result_qubit_matrix = np.copy(qubit_matrix)
    defect = 0
    op = 0
    if operator == 1:  # full
        # The four data qubits of the weight-4 plaquette.
        qarray = [[0 + row, 0 + col], [0 + row, 1 + col], [1 + row, 0 + col], [1 + row, 1 + col]]
        if row % 2 == 0:
            if col % 2 == 0:
                op = 1
            else:
                op = 3
        else:
            if col % 2 == 0:
                op = 3
            else:
                op = 1
    elif operator == 3:  # half
        # The two data qubits of the boundary weight-2 plaquette.
        if col == 0:
            op = 1
            qarray = [[0, row*2 + 1], [0, row*2 + 2]]
        elif col == 1:
            op = 3
            qarray = [[row*2 + 1, size - 1], [row*2 + 2, size - 1]]
        elif col == 2:
            op = 1
            qarray = [[size - 1, row*2], [size - 1, row*2 + 1]]
        elif col == 3:
            op = 3
            qarray = [[row*2, 0], [row*2 + 1, 0]]
    for i in qarray:
        old_qubit = result_qubit_matrix[i[0], i[1]]
        # Anticommuting error (not identity, not the stabilizer's own Pauli).
        if old_qubit != 0 and old_qubit != op:
            defect = flip(defect)
    return defect
@njit('(uint8[:,:], int64, int64, int64)')
def _apply_logical(qubit_matrix, operator: int, X_pos=0, Z_pos=0):
    """Apply a logical operator to a copy of qubit_matrix.

    operator: 0 = identity, 1 = logical X (applied down column X_pos),
    3 = logical Z (applied along row Z_pos), 2 = logical Y (both).
    Returns the new matrix and the net change (dx, dy, dz) in the number
    of X/Y/Z errors.
    """
    result_qubit_matrix = np.copy(qubit_matrix)
    # List to store how errors redistribute when the logical is applied
    n_eq = [0, 0, 0, 0]
    if operator == 0:
        return result_qubit_matrix, (0, 0, 0)
    size = qubit_matrix.shape[0]
    # Y = X followed by Z, so operator 2 triggers both branches.
    do_X = (operator == 1 or operator == 2)
    do_Z = (operator == 3 or operator == 2)
    if do_X:
        for i in range(size):
            old_qubit = result_qubit_matrix[i, X_pos]
            # XOR with 1 composes the existing Pauli with X under the
            # 0=I, 1=X, 2=Y, 3=Z encoding.
            new_qubit = 1 ^ old_qubit
            result_qubit_matrix[i, X_pos] = new_qubit
            n_eq[old_qubit] -= 1
            n_eq[new_qubit] += 1
    if do_Z:
        for i in range(size):
            old_qubit = result_qubit_matrix[Z_pos, i]
            # XOR with 3 composes the existing Pauli with Z.
            new_qubit = 3 ^ old_qubit
            result_qubit_matrix[Z_pos, i] = new_qubit
            n_eq[old_qubit] -= 1
            n_eq[new_qubit] += 1
    return result_qubit_matrix, (n_eq[1], n_eq[2], n_eq[3])
@njit('(uint8[:,:],)')
def _apply_random_logical(qubit_matrix):
    """Apply a uniformly random logical operator (I/X/Y/Z) at a uniformly
    random column/row position; returns (new_matrix, (dx, dy, dz))."""
    size = qubit_matrix.shape[0]
    # op in {0, 1, 2, 3}: 0=I, 1=X, 2=Y, 3=Z.
    op = int(random() * 4)
    if op == 1 or op == 2:
        X_pos = int(random() * size)
    else:
        X_pos = 0
    if op == 3 or op == 2:
        Z_pos = int(random() * size)
    else:
        Z_pos = 0
    return _apply_logical(qubit_matrix, op, X_pos, Z_pos)
@njit('(uint8[:,:], int64, int64, int64)')
def _apply_stabilizer(qubit_matrix, row: int, col: int, operator: int):
size = qubit_matrix.shape[0]
result_qubit_matrix = np.copy(qubit_matrix)
    # List to store how errors redistribute when the stabilizer is applied
n_eq = [0, 0, 0, 0]
op = 0
if operator == 1: # full
qarray = [[0 + row, 0 + col], [0 + row, 1 + col], [1 + row, 0 + col], [1 + row, 1 + col]]
if row % 2 == 0:
if col % 2 == 0:
op = 1
else:
op = 3
else:
if col % 2 == 0:
op = 3
else:
op = 1
elif operator == | |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
References
.. [burt2020svgp]
<NAME> and <NAME> and <NAME>,
Convergence of Sparse Variational Inference in Gaussian Process Regression,
Journal of Machine Learning Research, 2020,
http://jmlr.org/papers/v21/19-1015.html.
.. [chen2018dpp]
<NAME> and <NAME> and <NAME>, Fast greedy MAP inference
for determinantal point process to improve recommendation diversity,
Proceedings of the 32nd International Conference on Neural Information
Processing Systems, 2018, https://arxiv.org/abs/1709.05135.
.. [hensman2013svgp]
<NAME> and <NAME> and <NAME>, Gaussian Processes
for Big Data, Proceedings of the 29th Conference on Uncertainty in
Artificial Intelligence, 2013, https://arxiv.org/abs/1309.6835.
"""
from __future__ import annotations
import copy
from typing import Optional, Type, Union
import torch
from botorch.models.gpytorch import GPyTorchModel
from botorch.models.transforms.input import InputTransform
from botorch.models.transforms.outcome import OutcomeTransform
from botorch.models.utils import validate_input_scaling
from botorch.posteriors.gpytorch import GPyTorchPosterior
from botorch.sampling import MCSampler
from gpytorch.constraints import GreaterThan
from gpytorch.distributions import MultivariateNormal
from gpytorch.kernels import Kernel, MaternKernel, ScaleKernel
from gpytorch.lazy import LazyTensor
from gpytorch.likelihoods import (
GaussianLikelihood,
Likelihood,
MultitaskGaussianLikelihood,
)
from gpytorch.means import ConstantMean, Mean
from gpytorch.models import ApproximateGP
from gpytorch.module import Module
from gpytorch.priors import GammaPrior
from gpytorch.utils.memoize import clear_cache_hook
from gpytorch.variational import (
_VariationalDistribution,
_VariationalStrategy,
CholeskyVariationalDistribution,
IndependentMultitaskVariationalStrategy,
VariationalStrategy,
)
from torch import Tensor
MIN_INFERRED_NOISE_LEVEL = 1e-4
NEG_INF = -(torch.tensor(float("inf")))
class ApproximateGPyTorchModel(GPyTorchModel):
    def __init__(
        self,
        model: Optional[ApproximateGP] = None,
        likelihood: Optional[Likelihood] = None,
        num_outputs: int = 1,
        *args,
        **kwargs,
    ) -> None:
        r"""
        Botorch wrapper class for various (variational) approximate GP models in
        gpytorch. This can either include stochastic variational GPs (SVGPs) or
        variational implementations of weight space approximate GPs.
        Args:
            model: Instance of gpytorch.approximate GP models. If omitted,
                constructs a `_SingleTaskVariationalGP`.
            likelihood: Instance of a GPyTorch likelihood. If omitted, uses
                either a `GaussianLikelihood` (if `num_outputs=1`) or a
                `MultitaskGaussianLikelihood` (if `num_outputs>1`).
            num_outputs: Number of outputs expected for the GP model.
            args: Optional positional arguments passed to the
                `_SingleTaskVariationalGP` constructor if no model is provided.
            kwargs: Optional keyword arguments passed to the
                `_SingleTaskVariationalGP` constructor if no model is provided.
        """
        super().__init__()
        if model is None:
            model = _SingleTaskVariationalGP(num_outputs=num_outputs, *args, **kwargs)
        if likelihood is None:
            if num_outputs == 1:
                likelihood = GaussianLikelihood()
            else:
                likelihood = MultitaskGaussianLikelihood(num_tasks=num_outputs)
        self.model = model
        self.likelihood = likelihood
        # Stored separately so `num_outputs` reflects the requested width
        # rather than anything inferred from the wrapped model.
        self._desired_num_outputs = num_outputs
    @property
    def num_outputs(self):
        return self._desired_num_outputs
    def posterior(
        self, X, output_indices=None, observation_noise=False, *args, **kwargs
    ) -> GPyTorchPosterior:
        self.eval()  # make sure model is in eval mode
        # input transforms are applied at `posterior` in `eval` mode, and at
        # `model.forward()` at the training time
        X = self.transform_inputs(X)
        # check for the multi-batch case for multi-outputs b/c this will throw
        # warnings
        X_ndim = X.ndim
        if self.num_outputs > 1 and X_ndim > 2:
            # Insert an output dimension so each output gets its own batch of X.
            X = X.unsqueeze(-3).repeat(*[1] * (X_ndim - 2), self.num_outputs, 1, 1)
        dist = self.model(X)
        if observation_noise:
            # Push the latent distribution through the likelihood to add noise.
            dist = self.likelihood(dist, *args, **kwargs)
        posterior = GPyTorchPosterior(mvn=dist)
        if hasattr(self, "outcome_transform"):
            posterior = self.outcome_transform.untransform_posterior(posterior)
        return posterior
    def forward(self, X, *args, **kwargs) -> MultivariateNormal:
        # At training time the input transform is applied here (see posterior).
        if self.training:
            X = self.transform_inputs(X)
        return self.model(X)
    def fantasize(self, X, sampler=MCSampler, observation_noise=True, *args, **kwargs):
        raise NotImplementedError(
            "Fantasization of approximate GPs has not been implemented yet."
        )
class _SingleTaskVariationalGP(ApproximateGP):
    def __init__(
        self,
        train_X: Tensor,
        train_Y: Optional[Tensor] = None,
        num_outputs: int = 1,
        learn_inducing_points=True,
        covar_module: Optional[Kernel] = None,
        mean_module: Optional[Mean] = None,
        variational_distribution: Optional[_VariationalDistribution] = None,
        variational_strategy: Type[_VariationalStrategy] = VariationalStrategy,
        inducing_points: Optional[Union[Tensor, int]] = None,
    ) -> None:
        r"""
        Base class wrapper for a stochastic variational Gaussian Process (SVGP)
        model [hensman2013svgp]_. Uses pivoted cholesky initialization for the
        inducing points.
        Args:
            train_X: Training inputs (due to the ability of the SVGP to sub-sample
                this does not have to be all of the training inputs).
            train_Y: Training targets (optional).
            num_outputs: Number of output responses per input.
            learn_inducing_points: If True, the inducing point locations are
                treated as learnable parameters.
            covar_module: Kernel function. If omitted, uses a `MaternKernel`.
            mean_module: Mean of GP model. If omitted, uses a `ConstantMean`.
            variational_distribution: Type of variational distribution to use
                (default: CholeskyVariationalDistribution), the properties of the
                variational distribution will encourage scalability or ease of
                optimization.
            variational_strategy: Type of variational strategy to use (default:
                VariationalStrategy). The default setting uses "whitening" of the
                variational distribution to make training easier.
            inducing_points: The number or specific locations of the inducing points.
        """
        # We use the model subclass wrapper to deal with input / outcome transforms.
        # The number of outputs will be correct here due to the check in
        # SingleTaskVariationalGP.
        input_batch_shape = train_X.shape[:-2]
        aug_batch_shape = copy.deepcopy(input_batch_shape)
        if num_outputs > 1:
            # Multi-output models are handled as an extra batch dimension.
            aug_batch_shape += torch.Size((num_outputs,))
        self._aug_batch_shape = aug_batch_shape
        if mean_module is None:
            mean_module = ConstantMean(batch_shape=self._aug_batch_shape).to(train_X)
        if covar_module is None:
            # Default kernel: ARD Matern-5/2 with the standard BoTorch priors.
            covar_module = ScaleKernel(
                base_kernel=MaternKernel(
                    nu=2.5,
                    ard_num_dims=train_X.shape[-1],
                    batch_shape=self._aug_batch_shape,
                    lengthscale_prior=GammaPrior(3.0, 6.0),
                ),
                batch_shape=self._aug_batch_shape,
                outputscale_prior=GammaPrior(2.0, 0.15),
            ).to(train_X)
            self._subset_batch_dict = {
                "mean_module.constant": -2,
                "covar_module.raw_outputscale": -1,
                "covar_module.base_kernel.raw_lengthscale": -3,
            }
        # initialize inducing points with a pivoted cholesky init if they are not given
        if not isinstance(inducing_points, Tensor):
            if inducing_points is None:
                # number of inducing points is 25% the number of data points
                # as a heuristic
                inducing_points = int(0.25 * train_X.shape[-2])
            # _select_inducing_points is defined elsewhere in this module;
            # presumably implements the pivoted-cholesky/DPP selection
            # referenced in the docstring -- TODO confirm.
            inducing_points = _select_inducing_points(
                inputs=train_X,
                covar_module=covar_module,
                num_inducing=inducing_points,
                input_batch_shape=input_batch_shape,
            )
        if variational_distribution is None:
            variational_distribution = CholeskyVariationalDistribution(
                num_inducing_points=inducing_points.shape[-2],
                batch_shape=self._aug_batch_shape,
            )
        variational_strategy = variational_strategy(
            self,
            inducing_points=inducing_points,
            variational_distribution=variational_distribution,
            learn_inducing_locations=learn_inducing_points,
        )
        # wrap variational models in independent multi-task variational strategy
        if num_outputs > 1:
            variational_strategy = IndependentMultitaskVariationalStrategy(
                base_variational_strategy=variational_strategy,
                num_tasks=num_outputs,
                task_dim=-1,
            )
        super().__init__(variational_strategy=variational_strategy)
        self.mean_module = mean_module
        self.covar_module = covar_module
    def forward(self, X) -> MultivariateNormal:
        """Return the latent GP distribution at X."""
        mean_x = self.mean_module(X)
        covar_x = self.covar_module(X)
        latent_dist = MultivariateNormal(mean_x, covar_x)
        return latent_dist
class SingleTaskVariationalGP(ApproximateGPyTorchModel):
r"""A single-task variational GP model following [hensman2013svgp]_ with pivoted
cholesky initialization following [chen2018dpp]_ and [burt2020svgp]_.
A single-task variational GP using relatively strong priors on the Kernel
hyperparameters, which work best when covariates are normalized to the unit
cube and outcomes are standardized (zero mean, unit variance).
This model works in batch mode (each batch having its own hyperparameters).
When the training observations include multiple outputs, this model will use
batching to model outputs independently. However, batches of multi-output models
are not supported at this time, if you need to use those, please use a
ModelListGP.
Use this model if you have a lot of data or if your responses are non-Gaussian.
To train this model, you should use `gpytorch.mlls.VariationalELBO` and not the
exact marginal log likelihood. Example mll:
mll = VariationalELBO(model.likelihood, model, num_data=train_X.shape[-2])
"""
def __init__(
self,
train_X: Tensor,
train_Y: Optional[Tensor] = None,
likelihood: Optional[Likelihood] = None,
num_outputs: int = 1,
learn_inducing_points: bool = True,
covar_module: Optional[Kernel] = None,
mean_module: Optional[Mean] = None,
variational_distribution: Optional[_VariationalDistribution] = None,
variational_strategy: Type[_VariationalStrategy] = VariationalStrategy,
inducing_points: Optional[Union[Tensor, int]] = None,
outcome_transform: Optional[OutcomeTransform] = None,
input_transform: Optional[InputTransform] = None,
) -> None:
r"""
A single task stochastic variational Gaussian process model (SVGP) as described
by [hensman2013svgp]_. We use pivoted cholesky initialization [burt2020svgp]_ to
initialize the inducing points of the model.
Args:
train_X: Training inputs (due to the ability of the SVGP to sub-sample
this does not have to be all of the training inputs).
train_Y: Training targets (optional).
likelihood: Instance of a GPyYorch likelihood. If omitted, uses a
either a `GaussianLikelihood` (if `num_outputs=1`) or a
`MultitaskGaussianLikelihood`(if `num_outputs>1`).
num_outputs: Number of output responses per input (default: 1).
covar_module: Kernel function. If omitted, uses a `MaternKernel`.
mean_module: Mean of GP model. If omitted, uses a `ConstantMean`.
variational_distribution: Type of variational distribution to use
(default: CholeskyVariationalDistribution), the properties of the
variational distribution will encourage scalability or ease of
optimization.
variational_strategy: Type of variational strategy to use (default:
VariationalStrategy). The default setting uses "whitening" of the
variational distribution to make training easier.
inducing_points: The number or specific locations of the inducing points.
"""
with torch.no_grad():
transformed_X = self.transform_inputs(
X=train_X, input_transform=input_transform
)
if train_Y is not None:
if outcome_transform is not None:
train_Y, _ = outcome_transform(train_Y)
self._validate_tensor_args(X=transformed_X, Y=train_Y)
validate_input_scaling(train_X=transformed_X, train_Y=train_Y)
if train_Y.shape[-1] != num_outputs:
num_outputs = train_Y.shape[-1]
self._num_outputs = num_outputs
self._input_batch_shape = train_X.shape[:-2]
aug_batch_shape = copy.deepcopy(self._input_batch_shape)
if num_outputs > 1:
aug_batch_shape += torch.Size([num_outputs])
self._aug_batch_shape = aug_batch_shape
if likelihood is None:
if num_outputs == 1:
noise_prior = GammaPrior(1.1, 0.05)
noise_prior_mode = (noise_prior.concentration - 1) / noise_prior.rate
likelihood = GaussianLikelihood(
noise_prior=noise_prior,
batch_shape=self._aug_batch_shape,
noise_constraint=GreaterThan(
MIN_INFERRED_NOISE_LEVEL,
transform=None,
initial_value=noise_prior_mode,
),
)
else:
likelihood = MultitaskGaussianLikelihood(num_tasks=num_outputs)
else:
self._is_custom_likelihood = True
model = _SingleTaskVariationalGP(
train_X=transformed_X,
train_Y=train_Y,
num_outputs=num_outputs,
learn_inducing_points=learn_inducing_points,
covar_module=covar_module,
mean_module=mean_module,
variational_distribution=variational_distribution,
| |
12.0, 1.0)
n = self.nss
U = b[1] * self.A
V = b[0] * self.ident
U[:n] += np.dot(self.ssA, b[3] * self.A2)
V[:n] += b[2] * self.A2
return U, V
    def pade5(self):
        """Return (U, V) for the degree-5 Pade approximant of expm(A).

        Coefficients are the standard Pade-13 family values (Higham 2005).
        Only the first `self.nss` rows receive the higher-power terms,
        exploiting the state-space structure where `self.ssA` holds the
        dense row block -- presumably rows beyond nss are trivial; TODO
        confirm against the _ExpmPadeHelper_SS constructor.
        """
        b = (30240.0, 15120.0, 3360.0, 420.0, 30.0, 1.0)
        n = self.nss
        U = b[1] * self.A
        V = b[0] * self.ident
        U[:n] += np.dot(self.ssA, b[5] * self.A4 + b[3] * self.A2)
        V[:n] += b[4] * self.A4 + b[2] * self.A2
        return U, V
    def pade7(self):
        """Return (U, V) for the degree-7 Pade approximant of expm(A),
        restricting the high-power corrections to the first `self.nss`
        rows (state-space structure)."""
        b = (17297280.0, 8648640.0, 1995840.0, 277200.0, 25200.0, 1512.0, 56.0, 1.0)
        n = self.nss
        U = b[1] * self.A
        V = b[0] * self.ident
        U[:n] += np.dot(self.ssA, (b[7] * self.A6 + b[5] * self.A4 + b[3] * self.A2))
        V[:n] += b[6] * self.A6 + b[4] * self.A4 + b[2] * self.A2
        return U, V
    def pade9(self):
        """Return (U, V) for the degree-9 Pade approximant of expm(A),
        restricting the high-power corrections to the first `self.nss`
        rows (state-space structure)."""
        b = (
            17643225600.0,
            8821612800.0,
            2075673600.0,
            302702400.0,
            30270240.0,
            2162160.0,
            110880.0,
            3960.0,
            90.0,
            1.0,
        )
        n = self.nss
        U = b[1] * self.A
        V = b[0] * self.ident
        U[:n] += np.dot(
            self.ssA,
            (b[9] * self.A8 + b[7] * self.A6 + b[5] * self.A4 + b[3] * self.A2),
        )
        V[:n] += b[8] * self.A8 + b[6] * self.A6 + b[4] * self.A4 + b[2] * self.A2
        return U, V
    def pade13_scaled(self, s):
        """Return (U, V) for the degree-13 Pade approximant of expm(A / 2**s).

        The precomputed powers A2/A4/A6 are rescaled by the matching power
        of 2**-s instead of being recomputed.  As in the lower orders, the
        dense corrections apply only to the first `self.nss` rows.
        """
        b = (
            64764752532480000.0,
            32382376266240000.0,
            7771770303897600.0,
            1187353796428800.0,
            129060195264000.0,
            10559470521600.0,
            670442572800.0,
            33522128640.0,
            1323241920.0,
            40840800.0,
            960960.0,
            16380.0,
            182.0,
            1.0,
        )
        # Scaled matrix and scaled powers: (A/2^s)^k == A^k * 2^(-k*s).
        B = self.A * 2 ** -s
        B2 = self.A2 * 2 ** (-2 * s)
        B4 = self.A4 * 2 ** (-4 * s)
        B6 = self.A6 * 2 ** (-6 * s)
        n = self.nss
        U = b[1] * B
        V = b[0] * self.ident
        # Horner-style split: the top half of the polynomial is multiplied by B6.
        U2 = np.dot(B6[:, :n], b[13] * B6 + b[11] * B4 + b[9] * B2)
        V2 = np.dot(B6[:, :n], b[12] * B6 + b[10] * B4 + b[8] * B2)
        U[:n] += np.dot(B[:n, :n], U2 + b[7] * B6 + b[5] * B4 + b[3] * B2)
        V[:n] += V2 + b[6] * B6 + b[4] * B4 + b[2] * B2
        return U, V
def _expm_SS(A, ssA, order):  # , use_exact_onenorm='auto'):
    """Compute expm(A) for a matrix with state-space structure.

    Mirrors the scaling-and-squaring algorithm of scipy.linalg.expm
    (Al-Mohy & Higham): pick the lowest Pade order whose backward-error
    bound (the eta thresholds below) is satisfied, otherwise scale by
    2**-s, use order 13, and square s times.  `mf` is presumably an alias
    for scipy's matfuncs module (provides _ell and _solve_P_Q) -- TODO
    confirm at the module's import block.
    """
    # Track functions of A to help compute the matrix exponential.
    h = _ExpmPadeHelper_SS(A, ssA, order)
    structure = None
    # Try Pade order 3.
    eta_1 = max(h.d4_loose, h.d6_loose)
    if eta_1 < 1.495585217958292e-002 and mf._ell(h.A, 3) == 0:
        U, V = h.pade3()
        return mf._solve_P_Q(U, V, structure=structure)
    # Try Pade order 5.
    eta_2 = max(h.d4_tight, h.d6_loose)
    if eta_2 < 2.539398330063230e-001 and mf._ell(h.A, 5) == 0:
        U, V = h.pade5()
        return mf._solve_P_Q(U, V, structure=structure)
    # Try Pade orders 7 and 9.
    eta_3 = max(h.d6_tight, h.d8_loose)
    if eta_3 < 9.504178996162932e-001 and mf._ell(h.A, 7) == 0:
        U, V = h.pade7()
        return mf._solve_P_Q(U, V, structure=structure)
    if eta_3 < 2.097847961257068e000 and mf._ell(h.A, 9) == 0:
        U, V = h.pade9()
        return mf._solve_P_Q(U, V, structure=structure)
    # Use Pade order 13.
    eta_4 = max(h.d8_loose, h.d10_loose)
    eta_5 = min(eta_3, eta_4)
    theta_13 = 4.25
    # Choose s so the scaled matrix norm falls under theta_13.
    s = max(int(np.ceil(np.log2(eta_5 / theta_13))), 0)
    s = s + mf._ell(2 ** -s * h.A, 13)
    U, V = h.pade13_scaled(s)
    X = mf._solve_P_Q(U, V, structure=structure)
    # X = r_13(A)^(2^s) by repeated squaring.
    for _ in range(s):
        X = X.dot(X)
    return X
def getEPQ2(A, h, order=1, B=None, half=False):
"""
Returns E, P, Q for the exponential solver given the state-space
matrix `A`.
Parameters
----------
A : 2d ndarray
The state-space matrix: ``xdot = A x + B u``
h : scalar
Time step.
order : integer, optional
- 0 for the zero order hold (force stays constant across
time step)
- 1 for the 1st order hold (force can vary linearly across
time step)
B : d2 ndarray or None; optional
If array, it multiplies the inputs; if None, it is assumed
identity.
half : bool; optional
If `B` is a 2d ndarray, `half` is ignored. Otherwise, if
`half` is False, a full size identity (same size as `A`) is
used for `B`. If `half` is True, only the first half of the
columns are retained (which is handy for converting a 2nd
order ODE into a 1st order ODE as
:class:`pyyeti.ode.SolveExp2` does -- where there are force
inputs only for the first half of the equations).
Returns
-------
E, P, Q : 2d ndarrays, except if ``order == 0``, ``Q = 0.``
These are the coefficient matrices used to solve the ODE::
for j in range(nt):
d[:, j+1] = E*d[:, j] + P*F[:, j] + Q*F[:, j+1]
Notes
-----
Normally, :func:`getEPQ` would be called and that routine will
call this one or :func:`getEPQ1`.
This routine is an alternative to :func:`getEPQ1` and is
generally slower but more robust for large time steps. (If `B` has
only a few columns, it could also be faster than
:func:`getEPQ1`.) `E` is the matrix exponential ``exp(A*h)`` and
`P` and `Q` are functions of the integral(s) of the matrix
exponential. They are calculated as follows (text from
:func:`scipy.signal.lsim`).
If order == 0::
Zero-order hold
Algorithm: to integrate from time 0 to time dt, we solve
xdot = A x + B u, x(0) = x0
udot = 0, u(0) = u0.
Solution is
[ x(dt) ] [ A*dt B*dt ] [ x0 ]
[ u(dt) ] = exp [ 0 0 ] [ u0 ]
The `E`, and `P` matrices are partitions of the matrix
exponential and `Q` is zero.
If order == 1::
Linear interpolation between steps
Algorithm: to integrate from time 0 to time dt, with
linear interpolation between inputs u(0) = u0 and u(dt) = u1,
we solve:
xdot = A x + B u, x(0) = x0
udot = (u1 - u0) / dt, u(0) = u0.
Solution is
[ x(dt) ] [ A*dt B*dt 0 ] [ x0 ]
[ u(dt) ] = exp [ 0 0 I ] [ u0 ]
[u1 - u0] [ 0 0 0 ] [u1 - u0]
The `E`, `P` and `Q` matrices are partitions of the matrix
exponential.
See also
--------
:func:`getEPQ1`, :func:`getEPQ_pow`,
:class:`pyyeti.ode.SolveExp1`, :class:`pyyeti.ode.SolveExp2`
Examples
--------
>>> from pyyeti import expmint
>>> import numpy as np
>>> import scipy.linalg as la
>>> np.set_printoptions(4)
>>> A = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
>>> B = np.array([[0, 1, 0]]).T
>>> E, P, Q = expmint.getEPQ2(A, .05, order=1, B=B)
>>> E
array([[ 1.0996, 0.1599, 0.2202],
[ 0.3099, 1.3849, 0.46 ],
[ 0.5202, 0.61 , 1.6998]])
>>> P
array([[ 0.0024],
[ 0.0308],
[ 0.0091]])
>>> Q
array([[ 0.0011],
[ 0.0276],
[ 0.0041]])
>>> E, P, Q = expmint.getEPQ2(A, .05, order=0, B=B)
>>> E
array([[ 1.0996, 0.1599, 0.2202],
[ 0.3099, 1.3849, 0.46 ],
[ 0.5202, 0.61 , 1.6998]])
>>> P
array([[ 0.0034],
[ 0.0583],
[ 0.0133]])
>>> Q
0.0
"""
n = A.shape[0]
if B is not None:
i = B.shape[1]
else:
if half:
if n & 1:
raise ValueError(
"`A` must have an even number "
"of rows/cols (or use ``half=False``"
)
i = n // 2
else:
i = n
B = np.eye(i)
r = B.shape[0]
Ah = A * h
Bh = B * h
if order == 1:
N = n + 2 * i
M = np.zeros((N, N), float)
M[:n, :n] = Ah
M[:r, n : n + i] = Bh
M[n : n + i, n + i :] = np.eye(i)
# start = time.time()
# EM1 = la.expm(M, order)
# print('1 la.expm et = ', time.time()-start)
# start = time.time()
EM = _expm_SS(M, Ah, order)
# print('1 expm_SS et = ', time.time()-start)
# print('error :', abs(EM-EM1).max())
E = EM[:n, :n]
Q = EM[:n, n + i :]
P = EM[:n, n : n + i] - Q
elif order == 0:
M = np.zeros((n + i, n + i), float)
M[:n, :n] = Ah
M[:r, n:] = Bh
# start = time.time()
# EM1 = la.expm(M, order)
# print('0 la.expm et = ', time.time()-start)
# start = time.time()
EM | |
<gh_stars>0
'''
Copyright {2017} {<NAME> | <EMAIL>}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
#if __name__ == '__main__':from GUI_layout import *
import pygame
from pygame.locals import *
import time,random
from OpenGL.GL import *
from OpenGL.GLU import *
from math import *
from geometry import *
from import_export import *
from GA import *
'''
==========================================
camera class
==========================================
'''
class Cam:
    """Simple free/orbiting camera for the OpenGL viewport.

    `pos` is the camera position, `rot` the view angles in degrees, and
    `center`/`radius` describe the orbit sphere.
    """
    def __init__(self,pos=(0,0,0),rot=(0,0),center=(0,0,0)):
        self.pos = list(pos)   #The sphere's center
        self.rot = list(rot)   #The spherical coordinates' angles (degrees).
        self.radius = 3.0      #The sphere's radius
        self.center = list(center)
    def update(self,dt,key):
        """Translate the camera along the axes from the pressed arrow/Q/E keys;
        `dt` scales the speed, `key` is a pygame key-state mapping."""
        s = dt*10
        if key[pygame.K_DOWN]: self.pos[2] -= s
        if key[pygame.K_UP]: self.pos[2] += s
        if key[pygame.K_q]: self.pos[1] += s
        if key[pygame.K_e]: self.pos[1] -= s
        if key[pygame.K_LEFT]: self.pos[0] -= s
        if key[pygame.K_RIGHT]: self.pos[0] += s
    def rotateCam(self,dt,key,dtheta,radius=1):
        """Rotate `pos` about the origin by +/- dtheta radians: W/S rotate in
        the YZ plane, A/D in the XZ plane.  `dt`, `radius` and the computed
        `d` are currently unused -- NOTE(review): likely leftovers from the
        commented-out variant below."""
        c1,s1 = math.cos(dtheta),math.sin(dtheta)
        c2,s2 = math.cos(-dtheta),math.sin(-dtheta)
        d = math.sqrt((self.pos[0]**2)+(self.pos[1]**2)+(self.pos[2]**2))
        if key[pygame.K_w]:
            temp = self.pos[1]
            #self.pos[1] = d*c1 - d*s1
            #self.pos[2] = temp*s1 + d*c1
            self.pos[1] = self.pos[1]*c1 - self.pos[2]*s1
            self.pos[2] = temp*s1 + self.pos[2]*c1
        if key[pygame.K_s]:
            temp = self.pos[1]
            self.pos[1] = self.pos[1]*c2 - self.pos[2]*s2
            self.pos[2] = temp*s2 + self.pos[2]*c2
        if key[pygame.K_a]:
            temp = self.pos[0]
            self.pos[0] = self.pos[0]*c1 - self.pos[2]*s1
            self.pos[2] = temp*s1 + self.pos[2]*c1
        if key[pygame.K_d]:
            temp = self.pos[0]
            self.pos[0] = self.pos[0]*c2 - self.pos[2]*s2
            self.pos[2] = temp*s2 + self.pos[2]*c2
        #if key[pygame.K_c]:
        #    temp = self.pos[0]
        #    self.pos[0] = self.pos[0]*c1 + self.pos[1]*s1
        #    self.pos[1] = -temp*s1 + self.pos[1]*c1
    def updateGL(self,mouse_buttons,mouse_rel,key):
        """Accumulate mouse drag into the view angles (left button held) and
        move along Z with Q/E at a fixed speed."""
        if mouse_buttons[0]:
            self.rot[0] += mouse_rel[0]
            self.rot[1] += mouse_rel[1]
        s = 0.015*10
        if key[pygame.K_q]: self.pos[2] -= s
        if key[pygame.K_e]: self.pos[2] += s
'''
==========================================
colors class
==========================================
'''
class colors(object):
    """Named RGB color constants used throughout the GUI.

    Channel values are capped at 254 (just under pure white/full channel),
    matching the project's convention.
    """
    def __init__(self):
        palette = {
            'WHITE': (254, 254, 254),
            'BLACK': (0, 0, 0),
            'RED': (254, 0, 0),
            'BLUE': (0, 0, 254),
            'GREEN': (0, 254, 0),
            'GRAY': (100, 100, 100),
            'YELLOW': (254, 254, 0),
            'MAGENTA': (254, 0, 254),
        }
        for name, rgb in palette.items():
            setattr(self, name, rgb)
'''
==========================================
global functions
==========================================
'''
def coords():
    """Draw the world-axis gizmo: unit lines along +X (red), +Y (green)
    and +Z (blue), then reset the GL color to white."""
    #Start drawing lines. Each subsequent pair of glVertex*() calls will draw one line.
    glBegin(GL_LINES)
    #Change the color to red. All subsequent geometry we draw will be red.
    glColor3f(1,0,0)
    #Make two vertices, thereby drawing a (red) line.
    glVertex(0,0,0); glVertex3f(1,0,0)
    #Change the color to green. All subsequent geometry we draw will be green.
    glColor3f(0,1,0)
    #Make two vertices, thereby drawing a (green) line.
    glVertex(0,0,0); glVertex3f(0,1,0)
    #Change the color to blue. All subsequent geometry we draw will be blue.
    glColor3f(0,0,1)
    #Make two vertices, thereby drawing a (blue) line.
    glVertex(0,0,0); glVertex3f(0,0,1)
    #Reset the color to white. Strictly speaking this isn't required, but
    #forgetting to reset the color can be a hard-to-track-down bug
    #(e.g. when combining with texturing).
    glColor3f(1,1,1)
    #We're done drawing lines; tell OpenGL so.
    glEnd()
def timeline():
    """Advance every registered object by one simulation step.

    For each entry of the global _object_sequence_ (entries look like
    [id, object, ...]), integrate linear and angular motion, then apply
    the resulting per-step deltas to the object's geometry via the
    translate/rotate helpers.

    Improvements vs. the original: idiomatic `not x.isnull()` instead of
    `x.isnull() == False`, and the angular branch no longer reuses the
    misleading names oldPos/newPos/dx for orientation values.
    """
    global _object_sequence_
    for entry in _object_sequence_:
        obj = entry[1]
        # Linear motion: translate geometry by this step's position delta.
        if not obj.velocity.isnull():
            old_pos = obj.pos.get()
            obj.updatePos()
            new_pos = obj.pos.get()
            translate3d_t(obj, (new_pos[0] - old_pos[0],
                                new_pos[1] - old_pos[1],
                                new_pos[2] - old_pos[2]))
        # Angular motion: rotate geometry by this step's orientation delta.
        if not obj.omega.isnull():
            old_theta = obj.theta.get()
            obj.updateTheta()
            new_theta = obj.theta.get()
            rotate3d(obj, 'x', new_theta[0] - old_theta[0])
            rotate3d(obj, 'y', new_theta[1] - old_theta[1])
            rotate3d(obj, 'z', new_theta[2] - old_theta[2])
def mouseTracker(LastPosition):
    """Return (dx, dy): mouse movement in pixels since LastPosition."""
    cur_x, cur_y = pygame.mouse.get_pos()
    return cur_x - LastPosition[0], cur_y - LastPosition[1]
# distance calculator function
def calDistance(a, b):
    """Return the Euclidean distance between points a and b.

    Generalized: coordinates are paired with zip(), so this now works for
    points of any (matching) dimension; for 3-component points the result
    is identical to the original hard-coded formula.
    """
    return math.sqrt(sum((ai - bi) ** 2 for ai, bi in zip(a, b)))
def calDistance2d(a, b):
    """Return the planar (x, y) Euclidean distance between a and b."""
    dx = a[0] - b[0]
    dy = a[1] - b[1]
    return math.sqrt(dx ** 2 + dy ** 2)
# this function finds the geometrical center of the surface
def calCenter(surface):
    """Return the geometric center (centroid) of a sequence of 3D vertices.

    The accumulators start at 0.0 so the final division is true float
    division (matters under Python 2 with integer vertices).
    """
    count = len(surface)
    x_sum = sum((v[0] for v in surface), 0.0)
    y_sum = sum((v[1] for v in surface), 0.0)
    z_sum = sum((v[2] for v in surface), 0.0)
    return x_sum / count, y_sum / count, z_sum / count
def screenToWorld(cords):
    """Map 2D screen coordinates to world coordinates on the z=0 plane.

    Scales pixels into world units, recenters on the half screen extent,
    then offsets by the camera position. Same arithmetic as the original,
    expressed as single expressions per axis.
    """
    global pixelFactor, scalex, scaley
    wx = cords[0] / pixelFactor + scalex / 2 + cam.pos[0]
    wy = cords[1] / pixelFactor + scaley / 2 + cam.pos[1]
    return [wx, wy, 0]
def worldToScreen(cords):
    """Project world coordinates onto screen pixel coordinates.

    Shifts into camera-relative space, applies a simple perspective divide
    with a fixed factor of 200 (acting as a focal length in pixels), then
    recenters on the screen midpoint (cx, cy).

    NOTE(review): raises ZeroDivisionError when the camera-relative depth z
    is 0, and under Python 2 `200/z` floors for integer z — confirm callers
    guarantee a usable depth. `pixelFactor` is declared global but only the
    commented-out variant writes it.
    """
    global pixelFactor,cx,cy
    x,y,z = cords[0],cords[1],cords[2]
    # Move into camera space. NOTE(review): z is *added* while x/y are
    # subtracted — presumably the camera z is stored negated; confirm.
    x,y,z = x-cam.pos[0],y-cam.pos[1],z+cam.pos[2]
    #pixelFactor = (pixelFactor*cam.pos[2])/z
    # Perspective scale factor for this depth.
    f = 200/z
    #x,y = x*pixelFactor,y*pixelFactor
    x,y = x*f,y*f
    # Recenter on the screen midpoint and quantize to whole pixels.
    x,y = cx+int(x),cy+int(y)
    #x,y = scalex/2+x,scaley/2+y
    return [x,y]
'''
==========================================
input function
==========================================
'''
def fetchKey():
    """Pop and return the oldest entry of the global key queue.

    Returns None when the queue is empty.

    Bug fixes vs. the original: the global declaration was misspelled
    (`_key_sequence` without the trailing underscore), and `return k` was
    placed after `break` on the same line, making it unreachable — the
    function always returned None even though callers index the result.
    """
    global _key_sequence_
    if _key_sequence_:
        return _key_sequence_.pop(0)
    return None
def pushKey():
    """Snapshot the current keyboard state and append it to the key queue.

    Queue entries have the form [_id_, key_state] with an integer id.

    Bug fixes vs. the original: `_id_ = k[0][0]` indexed into the integer
    id itself (a TypeError), and the new id was derived from the queue's
    FIRST entry, so ids collided once more than one entry was queued; the
    next id now follows the LAST entry's id.
    """
    global _key_sequence_
    # get_pressed() returns the full key-state sequence (truthy when
    # the keyboard is available, as in the original check).
    if pygame.key.get_pressed():
        if _key_sequence_:
            next_id = _key_sequence_[-1][0] + 1
        else:
            next_id = 0
        key = pygame.key.get_pressed()
        _key_sequence_.append([next_id, key])
def keyOpration():
    """Consume the oldest queued entry of _key_sequence_ (entries: [_id_, key]).

    A letter entry (a-z) is simply consumed; a CTRL entry additionally
    consumes the following entry as the letter half of a CTRL+letter chord.

    Fixes vs. the original:
    - fetchKey() already removes the entry it returns, so the original's
      extra `_key_sequence_.remove(k)` calls (including removing `k` a
      SECOND time inside the CTRL branch, where `k2` was clearly intended)
      were guaranteed ValueErrors; they are dropped.
    - the 26-way if/elif ladder per letter is collapsed into one
      membership test over the pygame letter keycodes.
    - a None result from fetchKey() (empty queue) no longer crashes.

    NOTE(review): pushKey() stores the full pygame.key.get_pressed()
    snapshot as k[1], so comparing k[1] against individual keycodes may
    never match — confirm the intended queue payload.
    """
    global _key_sequence_
    # Letter keycodes built from the pygame constants themselves; a tuple
    # is used so membership relies on == (no hashing of k[1] required).
    letter_keys = tuple(getattr(pygame, 'K_' + c)
                        for c in 'abcdefghijklmnopqrstuvwxyz')
    k = fetchKey()
    if k is None:
        return
    if k[1] in letter_keys:
        # Letter entry: consumed with no further action.
        return
    if k[1] == pygame.K_RCTRL or k[1] == pygame.K_LCTRL:
        # CTRL starts a chord: the next queued entry is its letter half.
        k2 = fetchKey()
        if k2 is not None and k2[1] in letter_keys:
            return
def _input_(key,mouse_rel,mouse_buttons):
global cube1,pointer3d,point,_object_sequence_
if key[pygame.K_x]:
for o in _object_sequence_:
if o[3] == True:
rotate3d(o[1],'x',dtheta)
if key[pygame.K_y]:
for o in _object_sequence_:
if o[3] == True:
rotate3d(o[1],'y',dtheta)
if key[pygame.K_z]:
for o in _object_sequence_:
if o[3] == True:
rotate3d(o[1],'z',dtheta)
if key[pygame.K_f]:
for o in _object_sequence_:
if o[3] == True:
translateConnectedObjects(o[1],[0,1,0])
if key[pygame.K_h]:
for o in _object_sequence_:
if o[3] == True:
translateConnectedObjects(o[1],[1,0,0])
if key[pygame.K_g]:
for o in _object_sequence_:
if o[3] == True:
scale3d(o[1],(0.1,0.1,0.1))
if key[pygame.K_p]:
for o in _object_sequence_:
if o[3] == True:
if len(o[1].joints):
print o[1].joints
else:
translate3d(o[1],(1,0,0))
if key[pygame.K_n]:createNewObject('cube') # creating the new object
if key[pygame.K_l]:
v = vector((0,1,0),(0,0,0))
for o in _object_sequence_:
if o[3] == True:
o[1].applyAcc(9.87,v)# apply gravity
if key[pygame.K_o]:
v = vector((0,4,0),(0,0,0))
for o in _object_sequence_:
if o[3] == True:
o[1].applyForce1((1,1,0),1,v) # apply the torque
if key[pygame.K_u]:
v = vector((0,1,1),(0,0,0))
for o in _object_sequence_:
if o[3] == True:
o[1].applyAngAcc(0.11,v)# apply gravity
if key[pygame.K_v]:deselectAllObjects()# deselects all the objects
if key[pygame.K_k]:gaHolder.append(initGA())
if key[pygame.K_j]:livePopulation()
if key[pygame.K_c]:
# printing out the data of all the objects
for o in _object_sequence_:
print (o[0],' | ',o[2],' | ',o[3])
if key[pygame.K_t]:
#_selected_object_sequence_
s = _selected_object_sequence_[-1]
# fetch the last selected object as master
master = fetchObjectById(s)
# filling the master joints
for o in _object_sequence_:
if o[0] != s:
# found the object
if o[3] == True: # if the object is selected
l = calDistance([master.pos.x,master.pos.y,master.pos.z],[o[1].pos.x,o[1].pos.y,o[1].pos.z])
j = joints(o[0],'rigid',0,l) # creating a new joint
master.joints.append(j) # appened a new joint in the list of joints
if mouse_buttons[0]:
# getting the current position of the mouse
p = pygame.mouse.get_pos()
# convert the screen coordinates to world co-ordinates
loc = screenToWorld(p)
# translatting the 3d pointer to the location of the mouse
translate3d(pointer3d,(loc[0],loc[1],loc[2]))
# print the mouse position for any use
print p
if mouse_buttons[2]:
#print "right key confirmed"
p = pygame.mouse.get_pos()
# convert the screen coordinates to world co-ordinates
loc = screenToWorld(p)
# searching for objects those are under the radius of pointer (screen coords)
for | |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from openstack.compute.v2 import _proxy
from openstack.compute.v2 import aggregate
from openstack.compute.v2 import availability_zone as az
from openstack.compute.v2 import extension
from openstack.compute.v2 import flavor
from openstack.compute.v2 import hypervisor
from openstack.compute.v2 import image
from openstack.compute.v2 import keypair
from openstack.compute.v2 import limits
from openstack.compute.v2 import server
from openstack.compute.v2 import server_group
from openstack.compute.v2 import server_interface
from openstack.compute.v2 import server_ip
from openstack.compute.v2 import server_remote_console
from openstack.compute.v2 import service
from openstack.tests.unit import test_proxy_base
class TestComputeProxy(test_proxy_base.TestProxyBase):
    """Shared fixture for compute proxy tests: builds a v2 compute Proxy."""

    def setUp(self):
        """Create the Proxy under test on top of the base class session."""
        # Python 3 zero-argument super(); the file is py3-only (it imports
        # mock from unittest), so the two-argument form is redundant.
        super().setUp()
        self.proxy = _proxy.Proxy(self.session)
class TestFlavor(TestComputeProxy):
    """Tests for flavor CRUD, extra_specs fetching, and tenant access."""

    def test_flavor_create(self):
        self.verify_create(self.proxy.create_flavor, flavor.Flavor)

    def test_flavor_delete(self):
        self.verify_delete(self.proxy.delete_flavor, flavor.Flavor, False)

    def test_flavor_update(self):
        self.verify_update(self.proxy.update_flavor, flavor.Flavor, False)

    def test_flavor_delete_ignore(self):
        self.verify_delete(self.proxy.delete_flavor, flavor.Flavor, True)

    def test_flavor_find(self):
        self.verify_find(self.proxy.find_flavor, flavor.Flavor)

    def test_flavor_find_query(self):
        # Extra query parameters must be forwarded alongside ignore_missing.
        self.verify_find(
            self.proxy.find_flavor, flavor.Flavor,
            method_kwargs={"a": "b"},
            expected_kwargs={"a": "b", "ignore_missing": False}
        )

    def test_flavor_find_fetch_extra(self):
        """fetch extra_specs is triggered"""
        with mock.patch(
            'openstack.compute.v2.flavor.Flavor.fetch_extra_specs'
        ) as mocked:
            # Flavor without extra_specs + get_extra_specs flag -> a
            # fetch_extra_specs round-trip must happen after the find.
            res = flavor.Flavor()
            mocked.return_value = res
            self._verify2(
                'openstack.proxy.Proxy._find',
                self.proxy.find_flavor,
                method_args=['res', True, True],
                expected_result=res,
                expected_args=[flavor.Flavor, 'res'],
                expected_kwargs={'ignore_missing': True}
            )
            mocked.assert_called_once()

    def test_flavor_find_skip_fetch_extra(self):
        """fetch extra_specs not triggered"""
        with mock.patch(
            'openstack.compute.v2.flavor.Flavor.fetch_extra_specs'
        ) as mocked:
            # extra_specs already present on the resource -> no extra fetch.
            res = flavor.Flavor(extra_specs={'a': 'b'})
            mocked.return_value = res
            self._verify2(
                'openstack.proxy.Proxy._find',
                self.proxy.find_flavor,
                method_args=['res', True],
                expected_result=res,
                expected_args=[flavor.Flavor, 'res'],
                expected_kwargs={'ignore_missing': True}
            )
            mocked.assert_not_called()

    def test_flavor_get_no_extra(self):
        """fetch extra_specs not triggered"""
        with mock.patch(
            'openstack.compute.v2.flavor.Flavor.fetch_extra_specs'
        ) as mocked:
            res = flavor.Flavor()
            mocked.return_value = res
            self._verify2(
                'openstack.proxy.Proxy._get',
                self.proxy.get_flavor,
                method_args=['res'],
                expected_result=res,
                expected_args=[flavor.Flavor, 'res']
            )
            mocked.assert_not_called()

    def test_flavor_get_fetch_extra(self):
        """fetch extra_specs is triggered"""
        with mock.patch(
            'openstack.compute.v2.flavor.Flavor.fetch_extra_specs'
        ) as mocked:
            res = flavor.Flavor()
            mocked.return_value = res
            self._verify2(
                'openstack.proxy.Proxy._get',
                self.proxy.get_flavor,
                method_args=['res', True],
                expected_result=res,
                expected_args=[flavor.Flavor, 'res']
            )
            mocked.assert_called_once()

    def test_flavor_get_skip_fetch_extra(self):
        """fetch extra_specs not triggered"""
        with mock.patch(
            'openstack.compute.v2.flavor.Flavor.fetch_extra_specs'
        ) as mocked:
            res = flavor.Flavor(extra_specs={'a': 'b'})
            mocked.return_value = res
            self._verify2(
                'openstack.proxy.Proxy._get',
                self.proxy.get_flavor,
                method_args=['res', True],
                expected_result=res,
                expected_args=[flavor.Flavor, 'res']
            )
            mocked.assert_not_called()

    # NOTE(review): 'auto_spec' is not a mock.patch parameter (the real one
    # is spelled 'autospec'); as written it is merely set as an attribute on
    # the created mock, so no spec is applied. Confirm before renaming:
    # autospec=True records `self` in call args for patched methods and
    # would break the assert_called_with expectations below.
    @mock.patch("openstack.proxy.Proxy._list", auto_spec=True)
    @mock.patch("openstack.compute.v2.flavor.Flavor.fetch_extra_specs",
                auto_spec=True)
    def test_flavors_detailed(self, fetch_mock, list_mock):
        res = self.proxy.flavors(details=True)
        for r in res:
            self.assertIsNotNone(r)
        fetch_mock.assert_not_called()
        list_mock.assert_called_with(
            flavor.Flavor,
            base_path="/flavors/detail"
        )

    @mock.patch("openstack.proxy.Proxy._list", auto_spec=True)
    @mock.patch("openstack.compute.v2.flavor.Flavor.fetch_extra_specs",
                auto_spec=True)
    def test_flavors_not_detailed(self, fetch_mock, list_mock):
        res = self.proxy.flavors(details=False)
        for r in res:
            self.assertIsNotNone(r)
        fetch_mock.assert_not_called()
        list_mock.assert_called_with(
            flavor.Flavor,
            base_path="/flavors"
        )

    @mock.patch("openstack.proxy.Proxy._list", auto_spec=True)
    @mock.patch("openstack.compute.v2.flavor.Flavor.fetch_extra_specs",
                auto_spec=True)
    def test_flavors_query(self, fetch_mock, list_mock):
        # get_extra_specs=True -> each listed flavor triggers a fetch, and
        # the extra query parameter is forwarded to the list call.
        res = self.proxy.flavors(details=False, get_extra_specs=True, a="b")
        for r in res:
            fetch_mock.assert_called_with(self.proxy)
        list_mock.assert_called_with(
            flavor.Flavor,
            base_path="/flavors",
            a="b"
        )

    @mock.patch("openstack.proxy.Proxy._list", auto_spec=True)
    @mock.patch("openstack.compute.v2.flavor.Flavor.fetch_extra_specs",
                auto_spec=True)
    def test_flavors_get_extra(self, fetch_mock, list_mock):
        res = self.proxy.flavors(details=False, get_extra_specs=True)
        for r in res:
            fetch_mock.assert_called_with(self.proxy)
        list_mock.assert_called_with(
            flavor.Flavor,
            base_path="/flavors"
        )

    def test_flavor_get_access(self):
        self._verify("openstack.compute.v2.flavor.Flavor.get_access",
                     self.proxy.get_flavor_access,
                     method_args=["value"],
                     expected_args=[])

    def test_flavor_add_tenant_access(self):
        self._verify("openstack.compute.v2.flavor.Flavor.add_tenant_access",
                     self.proxy.flavor_add_tenant_access,
                     method_args=["value", "fake-tenant"],
                     expected_args=["fake-tenant"])

    def test_flavor_remove_tenant_access(self):
        self._verify("openstack.compute.v2.flavor.Flavor.remove_tenant_access",
                     self.proxy.flavor_remove_tenant_access,
                     method_args=["value", "fake-tenant"],
                     expected_args=["fake-tenant"])

    def test_flavor_fetch_extra_specs(self):
        self._verify("openstack.compute.v2.flavor.Flavor.fetch_extra_specs",
                     self.proxy.fetch_flavor_extra_specs,
                     method_args=["value"],
                     expected_args=[])

    def test_create_flavor_extra_specs(self):
        specs = {
            'a': 'b'
        }
        self._verify("openstack.compute.v2.flavor.Flavor.create_extra_specs",
                     self.proxy.create_flavor_extra_specs,
                     method_args=["value", specs],
                     expected_kwargs={"specs": specs})

    def test_get_flavor_extra_specs_prop(self):
        self._verify(
            "openstack.compute.v2.flavor.Flavor.get_extra_specs_property",
            self.proxy.get_flavor_extra_specs_property,
            method_args=["value", "prop"],
            expected_args=["prop"])

    def test_update_flavor_extra_specs_prop(self):
        self._verify(
            "openstack.compute.v2.flavor.Flavor.update_extra_specs_property",
            self.proxy.update_flavor_extra_specs_property,
            method_args=["value", "prop", "val"],
            expected_args=["prop", "val"])

    def test_delete_flavor_extra_specs_prop(self):
        self._verify(
            "openstack.compute.v2.flavor.Flavor.delete_extra_specs_property",
            self.proxy.delete_flavor_extra_specs_property,
            method_args=["value", "prop"],
            expected_args=["prop"])
class TestKeyPair(TestComputeProxy):
    """Tests for keypair operations, including the user_id pass-through."""

    def test_keypair_create(self):
        self.verify_create(self.proxy.create_keypair, keypair.Keypair)

    def test_keypair_delete(self):
        self.verify_delete(self.proxy.delete_keypair, keypair.Keypair, False)

    def test_keypair_delete_ignore(self):
        self.verify_delete(self.proxy.delete_keypair, keypair.Keypair, True)

    def test_keypair_delete_user_id(self):
        # user_id must be forwarded verbatim to the delete call.
        self.verify_delete(
            self.proxy.delete_keypair, keypair.Keypair,
            True,
            method_kwargs={'user_id': 'fake_user'},
            expected_kwargs={'user_id': 'fake_user'}
        )

    def test_keypair_find(self):
        self.verify_find(self.proxy.find_keypair, keypair.Keypair)

    def test_keypair_find_user_id(self):
        self.verify_find(
            self.proxy.find_keypair, keypair.Keypair,
            method_kwargs={'user_id': 'fake_user'},
            expected_kwargs={'user_id': 'fake_user'}
        )

    def test_keypair_get(self):
        self.verify_get(self.proxy.get_keypair, keypair.Keypair)

    def test_keypair_get_user_id(self):
        self.verify_get(
            self.proxy.get_keypair, keypair.Keypair,
            method_kwargs={'user_id': 'fake_user'},
            expected_kwargs={'user_id': 'fake_user'}
        )

    def test_keypairs(self):
        self.verify_list_no_kwargs(self.proxy.keypairs, keypair.Keypair)

    def test_keypairs_user_id(self):
        self.verify_list(
            self.proxy.keypairs, keypair.Keypair,
            method_kwargs={'user_id': 'fake_user'},
            expected_kwargs={'user_id': 'fake_user'}
        )
class TestAggregate(TestComputeProxy):
    """Tests for host aggregate CRUD and aggregate actions."""

    def test_aggregate_create(self):
        self.verify_create(self.proxy.create_aggregate, aggregate.Aggregate)

    def test_aggregate_delete(self):
        self.verify_delete(
            self.proxy.delete_aggregate, aggregate.Aggregate, False)

    def test_aggregate_delete_ignore(self):
        self.verify_delete(
            self.proxy.delete_aggregate, aggregate.Aggregate, True)

    def test_aggregate_find(self):
        self.verify_find(self.proxy.find_aggregate, aggregate.Aggregate)

    def test_aggregates(self):
        self.verify_list_no_kwargs(self.proxy.aggregates, aggregate.Aggregate)

    def test_aggregate_get(self):
        self.verify_get(self.proxy.get_aggregate, aggregate.Aggregate)

    def test_aggregate_update(self):
        self.verify_update(self.proxy.update_aggregate, aggregate.Aggregate)

    def test_aggregate_add_host(self):
        self._verify("openstack.compute.v2.aggregate.Aggregate.add_host",
                     self.proxy.add_host_to_aggregate,
                     method_args=["value", "host"],
                     expected_args=["host"])

    def test_aggregate_remove_host(self):
        self._verify("openstack.compute.v2.aggregate.Aggregate.remove_host",
                     self.proxy.remove_host_from_aggregate,
                     method_args=["value", "host"],
                     expected_args=["host"])

    def test_aggregate_set_metadata(self):
        self._verify("openstack.compute.v2.aggregate.Aggregate.set_metadata",
                     self.proxy.set_aggregate_metadata,
                     method_args=["value", {'a': 'b'}],
                     expected_args=[{'a': 'b'}])

    def test_aggregate_precache_image(self):
        # A single image id is normalized into a one-element list of dicts.
        self._verify(
            "openstack.compute.v2.aggregate.Aggregate.precache_images",
            self.proxy.aggregate_precache_images,
            method_args=["value", '1'],
            expected_args=[[{'id': '1'}]])

    def test_aggregate_precache_images(self):
        # A list of ids maps to a list of {'id': ...} dicts.
        self._verify(
            "openstack.compute.v2.aggregate.Aggregate.precache_images",
            self.proxy.aggregate_precache_images,
            method_args=["value", ['1', '2']],
            expected_args=[[{'id': '1'}, {'id': '2'}]])
class TestService(TestComputeProxy):
    """Tests for compute service enable/disable/force-down.

    Method names ending in _252/_253 exercise both sides of a microversion
    gate (supports_microversion is mocked to False/True respectively).
    """

    def test_services(self):
        self.verify_list_no_kwargs(
            self.proxy.services, service.Service)

    # Older API (gate mocked False): enable goes through the per-service
    # action on the Service resource, carrying host and binary.
    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=False)
    def test_enable_service_252(self, mv_mock):
        self._verify2(
            'openstack.compute.v2.service.Service.enable',
            self.proxy.enable_service,
            method_args=["value", "host1", "nova-compute"],
            expected_args=[self.proxy, "host1", "nova-compute"]
        )

    # Newer API (gate mocked True): enable becomes an update with a
    # status field.
    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=True)
    def test_enable_service_253(self, mv_mock):
        self._verify2(
            'openstack.proxy.Proxy._update',
            self.proxy.enable_service,
            method_args=["value"],
            method_kwargs={},
            expected_args=[service.Service, "value"],
            expected_kwargs={'status': 'enabled'}
        )

    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=False)
    def test_disable_service_252(self, mv_mock):
        # Older API: disabled_reason defaults to None in the action call.
        self._verify2(
            'openstack.compute.v2.service.Service.disable',
            self.proxy.disable_service,
            method_args=["value", "host1", "nova-compute"],
            expected_args=[self.proxy, "host1", "nova-compute", None])

    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=True)
    def test_disable_service_253(self, mv_mock):
        # Newer API: disable is an update carrying status plus the reason.
        self._verify2(
            'openstack.proxy.Proxy._update',
            self.proxy.disable_service,
            method_args=["value"],
            method_kwargs={'disabled_reason': 'some_reason'},
            expected_args=[service.Service, "value"],
            expected_kwargs={
                'status': 'disabled',
                'disabled_reason': 'some_reason'
            }
        )

    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=False)
    def test_force_service_down_252(self, mv_mock):
        self._verify2(
            'openstack.compute.v2.service.Service.set_forced_down',
            self.proxy.update_service_forced_down,
            method_args=["value", "host1", "nova-compute"],
            expected_args=[self.proxy, "host1", "nova-compute", True])

    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=False)
    def test_force_service_down_252_empty_vals(self, mv_mock):
        # host/binary are required on the old API when only an id is given.
        self.assertRaises(
            ValueError,
            self.proxy.update_service_forced_down,
            "value", None, None
        )

    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=False)
    def test_force_service_down_252_empty_vals_svc(self, mv_mock):
        # ...but a dict carrying host/binary is acceptable in their place.
        self._verify2(
            'openstack.compute.v2.service.Service.set_forced_down',
            self.proxy.update_service_forced_down,
            method_args=[{'host': 'a', 'binary': 'b'}, None, None],
            expected_args=[self.proxy, None, None, True])

    def test_find_service(self):
        self.verify_find(
            self.proxy.find_service,
            service.Service,
        )

    def test_find_service_args(self):
        # Additional filters (host) are forwarded to the find call.
        self.verify_find(
            self.proxy.find_service,
            service.Service,
            method_kwargs={'host': 'h1'},
            expected_kwargs={'host': 'h1'}
        )
class TestHypervisor(TestComputeProxy):
    """Tests for hypervisor listing, hostname search, and retrieval."""

    def test_hypervisors_not_detailed(self):
        self.verify_list(self.proxy.hypervisors, hypervisor.Hypervisor,
                         method_kwargs={"details": False})

    def test_hypervisors_detailed(self):
        self.verify_list(self.proxy.hypervisors, hypervisor.HypervisorDetail,
                         method_kwargs={"details": True})

    # Gate mocked False: detail listing must use the /detail base path.
    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=False)
    def test_hypervisors_search_before_253_no_qp(self, sm):
        self.verify_list(
            self.proxy.hypervisors,
            hypervisor.Hypervisor,
            method_kwargs={'details': True},
            base_path='/os-hypervisors/detail'
        )

    # Gate mocked False: hostname search is encoded in the URL path.
    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=False)
    def test_hypervisors_search_before_253(self, sm):
        self.verify_list(
            self.proxy.hypervisors,
            hypervisor.Hypervisor,
            method_kwargs={'hypervisor_hostname_pattern': 'substring'},
            base_path='/os-hypervisors/substring/search'
        )

    # Gate mocked True: hostname search becomes a query parameter instead.
    @mock.patch('openstack.utils.supports_microversion', autospec=True,
                return_value=True)
    def test_hypervisors_search_after_253(self, sm):
        self.verify_list(
            self.proxy.hypervisors,
            hypervisor.Hypervisor,
            method_kwargs={'hypervisor_hostname_pattern': 'substring'},
            base_path=None,
            expected_kwargs={'hypervisor_hostname_pattern': 'substring'}
        )

    def test_find_hypervisor_detail(self):
        # Default find goes through the detail listing.
        self.verify_find(self.proxy.find_hypervisor,
                         hypervisor.Hypervisor,
                         expected_kwargs={
                             'list_base_path': '/os-hypervisors/detail',
                             'ignore_missing': False})

    def test_find_hypervisor_no_detail(self):
        self.verify_find(self.proxy.find_hypervisor,
                         hypervisor.Hypervisor,
                         method_kwargs={'details': False},
                         expected_kwargs={
                             'list_base_path': None,
                             'ignore_missing': False})

    def test_get_hypervisor(self):
        self.verify_get(self.proxy.get_hypervisor,
                        hypervisor.Hypervisor)

    def test_get_hypervisor_uptime(self):
        self._verify(
            "openstack.compute.v2.hypervisor.Hypervisor.get_uptime",
            self.proxy.get_hypervisor_uptime,
            method_args=["value"],
            expected_args=[])
class TestCompute(TestComputeProxy):
    """Tests for the remaining compute resources: extensions, images,
    limits, server interfaces/IPs, and server lifecycle actions."""

    def test_extension_find(self):
        self.verify_find(self.proxy.find_extension, extension.Extension)

    def test_extensions(self):
        self.verify_list_no_kwargs(self.proxy.extensions, extension.Extension)

    def test_image_delete(self):
        self.verify_delete(self.proxy.delete_image, image.Image, False)

    def test_image_delete_ignore(self):
        self.verify_delete(self.proxy.delete_image, image.Image, True)

    def test_image_find(self):
        self.verify_find(self.proxy.find_image, image.Image)

    def test_image_get(self):
        self.verify_get(self.proxy.get_image, image.Image)

    def test_images_detailed(self):
        self.verify_list(self.proxy.images, image.ImageDetail,
                         method_kwargs={"details": True, "query": 1},
                         expected_kwargs={"query": 1})

    def test_images_not_detailed(self):
        self.verify_list(self.proxy.images, image.Image,
                         method_kwargs={"details": False, "query": 1},
                         expected_kwargs={"query": 1})

    def test_limits_get(self):
        self.verify_get(self.proxy.get_limits, limits.Limits, value=[])

    def test_server_interface_create(self):
        self.verify_create(self.proxy.create_server_interface,
                           server_interface.ServerInterface,
                           method_kwargs={"server": "test_id"},
                           expected_kwargs={"server_id": "test_id"})

    def test_server_interface_delete(self):
        # Make the proxy resolve URI attributes to the raw value passed in.
        self.proxy._get_uri_attribute = lambda *args: args[1]
        interface_id = "test_interface_id"
        server_id = "test_server_id"
        test_interface = server_interface.ServerInterface(id=interface_id)
        test_interface.server_id = server_id
        # Case1: ServerInterface instance is provided as value
        self._verify2("openstack.proxy.Proxy._delete",
                      self.proxy.delete_server_interface,
                      method_args=[test_interface],
                      method_kwargs={"server": server_id},
                      expected_args=[server_interface.ServerInterface],
                      expected_kwargs={"server_id": server_id,
                                       "port_id": interface_id,
                                       "ignore_missing": True})
        # Case2: ServerInterface ID is provided as value
        self._verify2("openstack.proxy.Proxy._delete",
                      self.proxy.delete_server_interface,
                      method_args=[interface_id],
                      method_kwargs={"server": server_id},
                      expected_args=[server_interface.ServerInterface],
                      expected_kwargs={"server_id": server_id,
                                       "port_id": interface_id,
                                       "ignore_missing": True})

    def test_server_interface_delete_ignore(self):
        self.proxy._get_uri_attribute = lambda *args: args[1]
        self.verify_delete(self.proxy.delete_server_interface,
                           server_interface.ServerInterface, True,
                           method_kwargs={"server": "test_id"},
                           expected_args=[server_interface.ServerInterface],
                           expected_kwargs={"server_id": "test_id",
                                            "port_id": "resource_or_id"})

    def test_server_interface_get(self):
        self.proxy._get_uri_attribute = lambda *args: args[1]
        interface_id = "test_interface_id"
        server_id = "test_server_id"
        test_interface = server_interface.ServerInterface(id=interface_id)
        test_interface.server_id = server_id
        # Case1: ServerInterface instance is provided as value
        self._verify2('openstack.proxy.Proxy._get',
                      self.proxy.get_server_interface,
                      method_args=[test_interface],
                      method_kwargs={"server": server_id},
                      expected_args=[server_interface.ServerInterface],
                      expected_kwargs={"port_id": interface_id,
                                       "server_id": server_id})
        # Case2: ServerInterface ID is provided as value
        self._verify2('openstack.proxy.Proxy._get',
                      self.proxy.get_server_interface,
                      method_args=[interface_id],
                      method_kwargs={"server": server_id},
                      expected_args=[server_interface.ServerInterface],
                      expected_kwargs={"port_id": interface_id,
                                       "server_id": server_id})

    def test_server_interfaces(self):
        self.verify_list(self.proxy.server_interfaces,
                         server_interface.ServerInterface,
                         method_args=["test_id"],
                         expected_kwargs={"server_id": "test_id"})

    def test_server_ips_with_network_label(self):
        self.verify_list(self.proxy.server_ips, server_ip.ServerIP,
                         method_args=["test_id"],
                         method_kwargs={"network_label": "test_label"},
                         expected_kwargs={"server_id": "test_id",
                                          "network_label": "test_label"})

    def test_server_ips_without_network_label(self):
        # network_label defaults to None when not supplied.
        self.verify_list(self.proxy.server_ips, server_ip.ServerIP,
                         method_args=["test_id"],
                         expected_kwargs={"server_id": "test_id",
                                          "network_label": None})

    def test_server_create_attrs(self):
        self.verify_create(self.proxy.create_server, server.Server)

    def test_server_delete(self):
        self.verify_delete(self.proxy.delete_server, server.Server, False)

    def test_server_delete_ignore(self):
        self.verify_delete(self.proxy.delete_server, server.Server, True)

    def test_server_force_delete(self):
        # Third positional argument (force=True) routes to force_delete.
        self._verify("openstack.compute.v2.server.Server.force_delete",
                     self.proxy.delete_server,
                     method_args=["value", False, True])

    def test_server_find(self):
        self.verify_find(self.proxy.find_server, server.Server)

    def test_server_get(self):
        self.verify_get(self.proxy.get_server, server.Server)

    def test_servers_detailed(self):
        self.verify_list(self.proxy.servers, server.Server,
                         method_kwargs={"details": True,
                                        "changes_since": 1, "image": 2},
                         expected_kwargs={"changes_since": 1, "image": 2,
                                          "base_path": "/servers/detail"})

    def test_servers_not_detailed(self):
        self.verify_list(self.proxy.servers, server.Server,
                         method_kwargs={"details": False,
                                        "changes_since": 1, "image": 2},
                         expected_kwargs={"changes_since": 1, "image": 2})

    def test_server_update(self):
        self.verify_update(self.proxy.update_server, server.Server)

    def test_server_wait_for(self):
        value = server.Server(id='1234')
        # Defaults: wait for ACTIVE, fail on ERROR, 2s interval, 120s wait.
        self.verify_wait_for_status(
            self.proxy.wait_for_server,
            method_args=[value],
            expected_args=[value, 'ACTIVE', ['ERROR'], 2, 120])

    def test_server_resize(self):
        self._verify("openstack.compute.v2.server.Server.resize",
                     self.proxy.resize_server,
                     method_args=["value", "test-flavor"],
                     expected_args=["test-flavor"])

    def test_server_confirm_resize(self):
        self._verify("openstack.compute.v2.server.Server.confirm_resize",
                     self.proxy.confirm_server_resize,
                     method_args=["value"])

    def test_server_revert_resize(self):
        self._verify("openstack.compute.v2.server.Server.revert_resize",
                     self.proxy.revert_server_resize,
                     method_args=["value"])

    def test_server_rebuild(self):
        # NOTE(review): 'id' shadows the builtin within this test method.
        id = 'test_image_id'
        image_obj = image.Image(id='test_image_id')
        # Case1: image object is provided
        # NOTE: Inside of Server.rebuild is where image_obj gets converted
        # to an ID instead of object.
        self._verify('openstack.compute.v2.server.Server.rebuild',
                     self.proxy.rebuild_server,
                     method_args=["value", "test_server", "test_pass"],
                     method_kwargs={"metadata": {"k1": "v1"},
                                    "image": image_obj},
                     expected_args=["test_server", "test_pass"],
                     expected_kwargs={"metadata": {"k1": "v1"},
                                      "image": image_obj})
        # Case2: image name or id is provided
        self._verify('openstack.compute.v2.server.Server.rebuild',
                     self.proxy.rebuild_server,
                     method_args=["value", "test_server", "test_pass"],
                     method_kwargs={"metadata": {"k1": "v1"},
                                    "image": id},
                     expected_args=["test_server", "test_pass"],
                     expected_kwargs={"metadata": {"k1": "v1"},
                                      "image": id})

    def test_add_fixed_ip_to_server(self):
        self._verify("openstack.compute.v2.server.Server.add_fixed_ip",
                     self.proxy.add_fixed_ip_to_server,
                     method_args=["value", "network-id"],
                     expected_args=["network-id"])

    def test_fixed_ip_from_server(self):
        self._verify("openstack.compute.v2.server.Server.remove_fixed_ip",
                     self.proxy.remove_fixed_ip_from_server,
                     method_args=["value", "address"],
                     expected_args=["address"])

    def test_floating_ip_to_server(self):
        # fixed_address defaults to None when not supplied.
        self._verify("openstack.compute.v2.server.Server.add_floating_ip",
                     self.proxy.add_floating_ip_to_server,
                     method_args=["value", "floating-ip"],
                     expected_args=["floating-ip"],
                     expected_kwargs={'fixed_address': None})

    def test_add_floating_ip_to_server_with_fixed_addr(self):
        self._verify("openstack.compute.v2.server.Server.add_floating_ip",
                     self.proxy.add_floating_ip_to_server,
                     method_args=["value", "floating-ip", 'fixed-addr'],
                     expected_args=["floating-ip"],
                     expected_kwargs={'fixed_address': 'fixed-addr'})

    def test_remove_floating_ip_from_server(self):
        self._verify("openstack.compute.v2.server.Server.remove_floating_ip",
                     self.proxy.remove_floating_ip_from_server,
                     method_args=["value", "address"],
                     expected_args=["address"])

    def test_server_backup(self):
        self._verify("openstack.compute.v2.server.Server.backup",
                     self.proxy.backup_server,
                     method_args=["value", "name", "daily", 1],
                     expected_args=["name", "daily", 1])
def test_server_pause(self):
| |
from threading import Thread
# from tqdm import tqdm
# from torch.utils.data import Dataset
# from PIL import Image
# from typing import Callable, Optional, List, Tuple
# import glob
# from torch import nn
# from torchvision.models.resnet import ResNet, Bottleneck
# from torch import Tensor
# import torchvision
# import numpy as np
# import cv2
# import uuid
# # --------------- hy ---------------
# class HomographicAlignment:
# """
# Apply homographic alignment on background to match with the source image.
# """
# def __init__(self):
# self.detector = cv2.ORB_create()
# self.matcher = cv2.DescriptorMatcher_create(cv2.DESCRIPTOR_MATCHER_BRUTEFORCE)
# def __call__(self, src, bgr):
# src = np.asarray(src)
# bgr = np.asarray(bgr)
# keypoints_src, descriptors_src = self.detector.detectAndCompute(src, None)
# keypoints_bgr, descriptors_bgr = self.detector.detectAndCompute(bgr, None)
# matches = self.matcher.match(descriptors_bgr, descriptors_src, None)
# matches.sort(key=lambda x: x.distance, reverse=False)
# num_good_matches = int(len(matches) * 0.15)
# matches = matches[:num_good_matches]
# points_src = np.zeros((len(matches), 2), dtype=np.float32)
# points_bgr = np.zeros((len(matches), 2), dtype=np.float32)
# for i, match in enumerate(matches):
# points_src[i, :] = keypoints_src[match.trainIdx].pt
# points_bgr[i, :] = keypoints_bgr[match.queryIdx].pt
# H, _ = cv2.findHomography(points_bgr, points_src, cv2.RANSAC)
# h, w = src.shape[:2]
# bgr = cv2.warpPerspective(bgr, H, (w, h))
# msk = cv2.warpPerspective(np.ones((h, w)), H, (w, h))
# # For areas that is outside of the background,
# # We just copy pixels from the source.
# bgr[msk != 1] = src[msk != 1]
# src = Image.fromarray(src)
# bgr = Image.fromarray(bgr)
# return src, bgr
# class Refiner(nn.Module):
# # For TorchScript export optimization.
# __constants__ = ['kernel_size', 'patch_crop_method', 'patch_replace_method']
# def __init__(self,
# mode: str,
# sample_pixels: int,
# threshold: float,
# kernel_size: int = 3,
# prevent_oversampling: bool = True,
# patch_crop_method: str = 'unfold',
# patch_replace_method: str = 'scatter_nd'):
# super().__init__()
# assert mode in ['full', 'sampling', 'thresholding']
# assert kernel_size in [1, 3]
# assert patch_crop_method in ['unfold', 'roi_align', 'gather']
# assert patch_replace_method in ['scatter_nd', 'scatter_element']
# self.mode = mode
# self.sample_pixels = sample_pixels
# self.threshold = threshold
# self.kernel_size = kernel_size
# self.prevent_oversampling = prevent_oversampling
# self.patch_crop_method = patch_crop_method
# self.patch_replace_method = patch_replace_method
# channels = [32, 24, 16, 12, 4]
# self.conv1 = nn.Conv2d(channels[0] + 6 + 4, channels[1], kernel_size, bias=False)
# self.bn1 = nn.BatchNorm2d(channels[1])
# self.conv2 = nn.Conv2d(channels[1], channels[2], kernel_size, bias=False)
# self.bn2 = nn.BatchNorm2d(channels[2])
# self.conv3 = nn.Conv2d(channels[2] + 6, channels[3], kernel_size, bias=False)
# self.bn3 = nn.BatchNorm2d(channels[3])
# self.conv4 = nn.Conv2d(channels[3], channels[4], kernel_size, bias=True)
# self.relu = nn.ReLU(True)
# def forward(self,
# src: torch.Tensor,
# bgr: torch.Tensor,
# pha: torch.Tensor,
# fgr: torch.Tensor,
# err: torch.Tensor,
# hid: torch.Tensor):
# H_full, W_full = src.shape[2:]
# H_half, W_half = H_full // 2, W_full // 2
# H_quat, W_quat = H_full // 4, W_full // 4
# src_bgr = torch.cat([src, bgr], dim=1)
# if self.mode != 'full':
# err = F.interpolate(err, (H_quat, W_quat), mode='bilinear', align_corners=False)
# ref = self.select_refinement_regions(err)
# idx = torch.nonzero(ref.squeeze(1))
# idx = idx[:, 0], idx[:, 1], idx[:, 2]
# if idx[0].size(0) > 0:
# x = torch.cat([hid, pha, fgr], dim=1)
# x = F.interpolate(x, (H_half, W_half), mode='bilinear', align_corners=False)
# x = self.crop_patch(x, idx, 2, 3 if self.kernel_size == 3 else 0)
# y = F.interpolate(src_bgr, (H_half, W_half), mode='bilinear', align_corners=False)
# y = self.crop_patch(y, idx, 2, 3 if self.kernel_size == 3 else 0)
# x = self.conv1(torch.cat([x, y], dim=1))
# x = self.bn1(x)
# x = self.relu(x)
# x = self.conv2(x)
# x = self.bn2(x)
# x = self.relu(x)
# x = F.interpolate(x, 8 if self.kernel_size == 3 else 4, mode='nearest')
# y = self.crop_patch(src_bgr, idx, 4, 2 if self.kernel_size == 3 else 0)
# x = self.conv3(torch.cat([x, y], dim=1))
# x = self.bn3(x)
# x = self.relu(x)
# x = self.conv4(x)
# out = torch.cat([pha, fgr], dim=1)
# out = F.interpolate(out, (H_full, W_full), mode='bilinear', align_corners=False)
# out = self.replace_patch(out, x, idx)
# pha = out[:, :1]
# fgr = out[:, 1:]
# else:
# pha = F.interpolate(pha, (H_full, W_full), mode='bilinear', align_corners=False)
# fgr = F.interpolate(fgr, (H_full, W_full), mode='bilinear', align_corners=False)
# else:
# x = torch.cat([hid, pha, fgr], dim=1)
# x = F.interpolate(x, (H_half, W_half), mode='bilinear', align_corners=False)
# y = F.interpolate(src_bgr, (H_half, W_half), mode='bilinear', align_corners=False)
# if self.kernel_size == 3:
# x = F.pad(x, (3, 3, 3, 3))
# y = F.pad(y, (3, 3, 3, 3))
# x = self.conv1(torch.cat([x, y], dim=1))
# x = self.bn1(x)
# x = self.relu(x)
# x = self.conv2(x)
# x = self.bn2(x)
# x = self.relu(x)
# if self.kernel_size == 3:
# x = F.interpolate(x, (H_full + 4, W_full + 4))
# y = F.pad(src_bgr, (2, 2, 2, 2))
# else:
# x = F.interpolate(x, (H_full, W_full), mode='nearest')
# y = src_bgr
# x = self.conv3(torch.cat([x, y], dim=1))
# x = self.bn3(x)
# x = self.relu(x)
# x = self.conv4(x)
# pha = x[:, :1]
# fgr = x[:, 1:]
# ref = torch.ones((src.size(0), 1, H_quat, W_quat), device=src.device, dtype=src.dtype)
# return pha, fgr, ref
# def select_refinement_regions(self, err: torch.Tensor):
# """
# Select refinement regions.
# Input:
# err: error map (B, 1, H, W)
# Output:
# ref: refinement regions (B, 1, H, W). FloatTensor. 1 is selected, 0 is not.
# """
# if self.mode == 'sampling':
# # Sampling mode.
# b, _, h, w = err.shape
# err = err.view(b, -1)
# idx = err.topk(self.sample_pixels // 16, dim=1, sorted=False).indices
# ref = torch.zeros_like(err)
# ref.scatter_(1, idx, 1.)
# if self.prevent_oversampling:
# ref.mul_(err.gt(0).float())
# ref = ref.view(b, 1, h, w)
# else:
# # Thresholding mode.
# ref = err.gt(self.threshold).float()
# return ref
# def crop_patch(self,
# x: torch.Tensor,
# idx: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],
# size: int,
# padding: int):
# """
# Crops selected patches from image given indices.
# Inputs:
# x: image (B, C, H, W).
# idx: selection indices Tuple[(P,), (P,), (P,),], where the 3 values are (B, H, W) index.
# size: center size of the patch, also stride of the crop.
# padding: expansion size of the patch.
# Output:
# patch: (P, C, h, w), where h = w = size + 2 * padding.
# """
# if padding != 0:
# x = F.pad(x, (padding,) * 4)
# if self.patch_crop_method == 'unfold':
# # Use unfold. Best performance for PyTorch and TorchScript.
# return x.permute(0, 2, 3, 1) \
# .unfold(1, size + 2 * padding, size) \
# .unfold(2, size + 2 * padding, size)[idx[0], idx[1], idx[2]]
# elif self.patch_crop_method == 'roi_align':
# # Use roi_align. Best compatibility for ONNX.
# idx = idx[0].type_as(x), idx[1].type_as(x), idx[2].type_as(x)
# b = idx[0]
# x1 = idx[2] * size - 0.5
# y1 = idx[1] * size - 0.5
# x2 = idx[2] * size + size + 2 * padding - 0.5
# y2 = idx[1] * size + size + 2 * padding - 0.5
# boxes = torch.stack([b, x1, y1, x2, y2], dim=1)
# return torchvision.ops.roi_align(x, boxes, size + 2 * padding, sampling_ratio=1)
# else:
# # Use gather. Crops out patches pixel by pixel.
# idx_pix = self.compute_pixel_indices(x, idx, size, padding)
# pat = torch.gather(x.view(-1), 0, idx_pix.view(-1))
# pat = pat.view(-1, x.size(1), size + 2 * padding, size + 2 * padding)
# return pat
# def replace_patch(self,
# x: torch.Tensor,
# y: torch.Tensor,
# idx: Tuple[torch.Tensor, torch.Tensor, torch.Tensor]):
# """
# Replaces patches back into image given index.
# Inputs:
# x: image (B, C, H, W)
# y: patches (P, C, h, w)
# idx: selection indices Tuple[(P,), (P,), (P,)] where the 3 values are (B, H, W) index.
# Output:
# image: (B, C, H, W), where patches at idx locations are replaced with y.
# """
# xB, xC, xH, xW = x.shape
# yB, yC, yH, yW = y.shape
# if self.patch_replace_method == 'scatter_nd':
# # Use scatter_nd. Best performance for PyTorch and TorchScript. Replacing patch by patch.
# x = x.view(xB, xC, xH // yH, yH, xW // yW, yW).permute(0, 2, 4, 1, 3, 5)
# x[idx[0], idx[1], idx[2]] = y
# x = x.permute(0, 3, 1, 4, 2, 5).view(xB, xC, xH, xW)
# return x
# else:
# # Use scatter_element. Best compatibility for ONNX. Replacing pixel by pixel.
# idx_pix = self.compute_pixel_indices(x, idx, size=4, padding=0)
# return x.view(-1).scatter_(0, idx_pix.view(-1), y.view(-1)).view(x.shape)
# def compute_pixel_indices(self,
# x: torch.Tensor,
# idx: Tuple[torch.Tensor, torch.Tensor, torch.Tensor],
# size: int,
# padding: int):
# """
# Compute selected pixel indices in the tensor.
# Used for crop_method == 'gather' and replace_method == 'scatter_element', which crop and replace pixel by pixel.
# Input:
# x: image: (B, C, H, W)
# idx: selection indices Tuple[(P,), (P,), (P,),], where the 3 values are (B, H, W) index.
# size: center size of the patch, also stride of the crop.
# padding: expansion size of the patch.
# Output:
# idx: (P, C, O, O) long tensor where O is the output size: size + 2 * padding, P is number of patches.
# the element are indices pointing to the input x.view(-1).
# """
# B, C, H, W = x.shape
# S, P = size, padding
# O = S + 2 * P
# | |
# Source repository: Subibhu/ZeBot
from telethon import events
import random, re
from userbot.utils import admin_cmd
import asyncio
@borg.on(admin_cmd("fbanbisi ?(.*)"))
async def _(event):
    """Fed-ban a target across a fixed list of federations.

    Parses the ban target from the command argument, then for each
    federation ID joins it (`/joinfed <id>`) and issues `/fban <target>`
    in the fed-bot chat, pausing one second between messages so the bot
    can keep up.  Edits the triggering message before and after.
    """
    if event.fwd_from:
        return
    await event.edit("Bringing Justice...")
    FBAN = event.pattern_match.group(1)
    chat = -1001390230877  # chat hosting the federation bot
    # Federation IDs, kept in the original send order.
    fed_ids = [
        "a26a769d-616f-486f-b89f-fac0cb0a5c11",
        "88c4a4fe-4a8a-4880-87d5-d83554455918",
        "37708662-274e-4264-a61f-26ce50fcacda",
        "ca91a4e6-d6fa-4209-b8b1-6584d8f1dbf3",
        "8eb707da-3894-4611-8766-48d408324969",
        "a4bcd10a-8f14-4bf3-9226-13fb873a0316",
        "0b3371e6-0b20-4fb6-9285-6689aa9035aa",
        "b65416ba-6b72-487a-8cf9-0d9b857cd234",
        "54cef9da-112c-4d63-9fa0-dc0fb3ced3fd",
        "76f8fd60-1745-410a-975b-a021ce6da365",
        "abe54d90-d67a-4ce8-afbc-0cd827084ecf",
        "5fcbde53-88cb-40a9-916f-f04223476663",
        "c06a32d6-c9b9-4840-91e9-29b4de14ceff",
        "e1c0cf1f-220a-46bb-b508-46eb6b1ecf55",
        "8feb6ad8-1655-4dbc-9dc4-3d0546a3c1e1",
        "5d47a47c-9ee1-405b-bb33-d4cadfd0c102",
        "a3048abf-43d1-4291-a92e-822e883b2ae0",
        "924e05f0-2c93-4a81-a91a-a9cb4ff1993c",
        "0e32f678-9c5f-47d5-b512-a3a607f1fd99",
        "7389172d-b5ad-42bb-80af-5d0b50605390",
        "1f1cc20a-9f44-43ec-b2c0-9a4713b4fddb",
        "ca7a9f6e-3316-431c-b6d8-4bb929922bc4",
        "f0321efe-83d3-4dde-aef0-fdb36df5bc50",
        "5e9e0a69-15cb-4998-bbd0-4e782ba8b649",
        "52ad135b-a917-4d08-9d9d-d7adca31d4ac",
        "b71af67b-c39b-4606-b7ae-6de20880229a",
        "a2f1c742-f322-42d4-9966-7e2e0ad0ee70",
        "a9e59bba-5725-45a4-82f0-95412986f838",
        "e3e97adc-9e83-43a7-abf8-b2839ab4481f",
        "fc7d5b2b-fbde-47ad-b11a-8d6edaaeb69f",
        "dc8fccc6-5505-4337-afe8-26843bc899fa",
        "4591dedc-8b3b-4bfc-91a2-b13bfc7f2d78",
        "84a464bb-61f5-49e9-b8b7-90d321fe458e",
        "96bf9e56-cfcd-483d-afb4-c63dfcfc5dc8",
        "f665c8c9-9502-4e1b-9dc7-64337de00bfc",
        "c896db4f-4ea4-4251-95e0-defccdc2b7ef",
        "7d054dd3-b578-4fca-925d-55e87170732d",
        "3901c154-532b-4075-98b2-bbb889d614c9",
        "10b712f6-53c6-4e68-9224-b84b21b198d0",
        "b67015c6-a867-4dc2-839d-a03b13089c48",
        "26fedd5f-9666-4733-8f33-7b667b99abb6",
        "25f073df-1b7f-41ac-8f05-6489207e613a",
        "306e1ae6-ca72-4618-9474-046cbcb04fa2",
        "955a6ec4-2ca5-4f60-a1fe-9dc89efde33a",
        "955fa90c-0235-4df9-ac8e-95825d441e0a",
        "cc1fa3af-06d2-4aaa-8572-0d25bb7b7b51",
    ]
    # Skip when the argument looks like a command/mention prefix rather
    # than a plain target.
    # NOTE(review): indentation was flattened in this copy of the file;
    # this assumes the guard wraps the whole join/ban sequence, matching
    # the common upstream layout of these plugins — confirm against the
    # original source.
    if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
        await borg.send_message(chat, f"/start")
        await asyncio.sleep(1)
        for fed_id in fed_ids:
            # Trailing space after the fed ID reproduced from the original
            # messages byte-for-byte.
            await borg.send_message(chat, f"/joinfed {fed_id} ")
            await asyncio.sleep(1)
            await borg.send_message(chat, f"/fban {FBAN}")
            await asyncio.sleep(1)
        # Extra settling delays kept from the original sequence.
        await asyncio.sleep(1)
        await asyncio.sleep(10)
        await event.edit("Justice Has Been Served")
@borg.on(admin_cmd("unfbanbisi ?(.*)"))
async def _(event):
if event.fwd_from:
return
await event.edit("Bringing Justice...")
UNFBAN = event.pattern_match.group(1)
chat = -1001390230877
if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
await borg.send_message(chat, f"/start")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed a26a769d-616f-486f-b89f-fac0cb0a5c11 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 88c4a4fe-4a8a-4880-87d5-d83554455918 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 37708662-274e-4264-a61f-26ce50fcacda ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed ca91a4e6-d6fa-4209-b8b1-6584d8f1dbf3 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 8eb707da-3894-4611-8766-48d408324969 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed a4bcd10a-8f14-4bf3-9226-13fb873a0316 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 0b3371e6-0b20-4fb6-9285-6689aa9035aa ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed b65416ba-6b72-487a-8cf9-0d9b857cd234 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 54cef9da-112c-4d63-9fa0-dc0fb3ced3fd ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 76f8fd60-1745-410a-975b-a021ce6da365 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed abe54d90-d67a-4ce8-afbc-0cd827084ecf ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 5fcbde53-88cb-40a9-916f-f04223476663 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed c06a32d6-c9b9-4840-91e9-29b4de14ceff ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed e1c0cf1f-220a-46bb-b508-46eb6b1ecf55 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 8feb6ad8-1655-4dbc-9dc4-3d0546a3c1e1 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 5d47a47c-9ee1-405b-bb33-d4cadfd0c102 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed a3048abf-43d1-4291-a92e-822e883b2ae0 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 924e05f0-2c93-4a81-a91a-a9cb4ff1993c ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 0e32f678-9c5f-47d5-b512-a3a607f1fd99 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 7389172d-b5ad-42bb-80af-5d0b50605390 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 1f1cc20a-9f44-43ec-b2c0-9a4713b4fddb ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed ca7a9f6e-3316-431c-b6d8-4bb929922bc4 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed f0321efe-83d3-4dde-aef0-fdb36df5bc50 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 5e9e0a69-15cb-4998-bbd0-4e782ba8b649 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 52ad135b-a917-4d08-9d9d-d7adca31d4ac ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed b71af67b-c39b-4606-b7ae-6de20880229a ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed a2f1c742-f322-42d4-9966-7e2e0ad0ee70 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed a9e59bba-5725-45a4-82f0-95412986f838 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed e3e97adc-9e83-43a7-abf8-b2839ab4481f ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed fc7d5b2b-fbde-47ad-b11a-8d6edaaeb69f ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed dc8fccc6-5505-4337-afe8-26843bc899fa ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 4591dedc-8b3b-4bfc-91a2-b13bfc7f2d78 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 84a464bb-61f5-49e9-b8b7-90d321fe458e ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 96bf9e56-cfcd-483d-afb4-c63dfcfc5dc8 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed f665c8c9-9502-4e1b-9dc7-64337de00bfc ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed c896db4f-4ea4-4251-95e0-defccdc2b7ef ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 7d054dd3-b578-4fca-925d-55e87170732d ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 3901c154-532b-4075-98b2-bbb889d614c9 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 10b712f6-53c6-4e68-9224-b84b21b198d0 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed b67015c6-a867-4dc2-839d-a03b13089c48 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 26fedd5f-9666-4733-8f33-7b667b99abb6 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 25f073df-1b7f-41ac-8f05-6489207e613a ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 306e1ae6-ca72-4618-9474-046cbcb04fa2 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 955a6ec4-2ca5-4f60-a1fe-9dc89efde33a ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed 955fa90c-0235-4df9-ac8e-95825d441e0a ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await borg.send_message(chat, '/joinfed cc1fa3af-06d2-4aaa-8572-0d25bb7b7b51 ')
await asyncio.sleep(1)
await borg.send_message(chat, f"/unfban {UNFBAN}")
await asyncio.sleep(1)
await asyncio.sleep(10)
await | |
# Source repository: PavanKishore21/probability
# File: tensorflow_probability/python/experimental/vi/automatic_structured_vi.py
# Copyright 2021 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Utilities for constructing structured surrogate posteriors."""
from __future__ import absolute_import
from __future__ import division
# [internal] enable type annotations
from __future__ import print_function
import collections
import copy
import functools
import inspect
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import util as tfp_util
from tensorflow_probability.python.bijectors import identity
from tensorflow_probability.python.bijectors import restructure
from tensorflow_probability.python.bijectors import scale as scale_lib
from tensorflow_probability.python.bijectors import shift
from tensorflow_probability.python.bijectors import sigmoid
from tensorflow_probability.python.distributions import beta
from tensorflow_probability.python.distributions import chi2
from tensorflow_probability.python.distributions import exponential
from tensorflow_probability.python.distributions import gamma
from tensorflow_probability.python.distributions import half_normal
from tensorflow_probability.python.distributions import independent
from tensorflow_probability.python.distributions import joint_distribution_auto_batched
from tensorflow_probability.python.distributions import joint_distribution_coroutine
from tensorflow_probability.python.distributions import markov_chain
from tensorflow_probability.python.distributions import sample
from tensorflow_probability.python.distributions import transformed_distribution
from tensorflow_probability.python.distributions import truncated_normal
from tensorflow_probability.python.distributions import uniform
from tensorflow_probability.python.internal import distribution_util
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import samplers
# Public names exported by this module.
__all__ = [
    'register_asvi_substitution_rule',
    'build_asvi_surrogate_posterior'
]
# Shorthand for marking root (unconditioned) nodes in coroutine-style
# joint distributions.
Root = joint_distribution_coroutine.JointDistributionCoroutine.Root
# Distribution-constructor kwargs that carry no statistical information
# (presumably excluded when building trainable surrogate parameters —
# confirm against build_asvi_surrogate_posterior).
_NON_STATISTICAL_PARAMS = [
    'name', 'validate_args', 'allow_nan_stats', 'experimental_use_kahan_sum',
    'reinterpreted_batch_ndims', 'dtype', 'force_probs_to_zero_outside_support',
    'num_probit_terms_approx'
]
# Statistical parameters that are nevertheless held fixed rather than trained.
_NON_TRAINABLE_PARAMS = ['low', 'high']
# Pair of variables parameterizing one ASVI convex-combination update:
# prior_weight * prior_param + (1 - prior_weight) * mean_field_parameter.
ASVIParameters = collections.namedtuple(
    'ASVIParameters', ['prior_weight', 'mean_field_parameter'])
# Registry of transformations that are applied to distributions in the prior
# before defining the surrogate family.
ASVI_SURROGATE_SUBSTITUTIONS = {}
def _as_substituted_distribution(distribution):
  """Applies all substitution rules that match a distribution.

  Rules are applied in registry order; each matching rule sees the result
  of any earlier substitutions.
  """
  result = distribution
  for matches, substitute in ASVI_SURROGATE_SUBSTITUTIONS.items():
    if matches(result):
      result = substitute(result)
  return result
def register_asvi_substitution_rule(condition, substitution_fn):
  """Registers a rule for substituting distributions in ASVI surrogates.

  Args:
    condition: Python `callable` that takes a Distribution instance and
      returns a Python `bool` indicating whether or not to substitute it.
      May also be a class type such as `tfd.Normal`, in which case the
      condition is interpreted as
      `lambda distribution: isinstance(distribution, class)`.
    substitution_fn: Python `callable` that takes a Distribution
      instance and returns a new Distribution instance used to define
      the ASVI surrogate posterior. Note that this substitution does not modify
      the original model.

  #### Example

  To use a Normal surrogate for all location-scale family distributions, we
  could register the substitution:

  ```python
  tfp.experimental.vi.register_asvi_substitution_rule(
      condition=lambda distribution: (
          hasattr(distribution, 'loc') and hasattr(distribution, 'scale')),
      substitution_fn=lambda distribution: (
          # Invoking the event space bijector applies any relevant constraints,
          # e.g., that HalfCauchy samples must be `>= loc`.
          distribution.experimental_default_event_space_bijector()(
              tfd.Normal(loc=distribution.loc, scale=distribution.scale))))
  ```

  This rule will fire when ASVI encounters a location-scale distribution,
  and instructs ASVI to build a surrogate 'as if' the model had just used a
  (possibly constrained) Normal in its place. Note that we could have used a
  more precise condition, e.g., to limit the substitution to distributions with
  a specific `name`, if we had reason to think that a Normal distribution would
  be a good surrogate for some model variables but not others.
  """
  global ASVI_SURROGATE_SUBSTITUTIONS
  if inspect.isclass(condition):
    # A bare class is shorthand for an `isinstance` check; bind the class
    # as a default argument to avoid late-binding surprises.
    condition = lambda distribution, cls=condition: isinstance(  # pylint: disable=g-long-lambda
        distribution, cls)
  ASVI_SURROGATE_SUBSTITUTIONS[condition] = substitution_fn
# Default substitutions attempt to express distributions using the most
# flexible available parameterization.
# pylint: disable=g-long-lambda
# HalfNormal -> TruncatedNormal: adds a trainable location; the upper bound
# is fixed at 10 prior scales.
register_asvi_substitution_rule(
    half_normal.HalfNormal,
    lambda dist: truncated_normal.TruncatedNormal(
        loc=0., scale=dist.scale, low=0., high=dist.scale * 10.))
# Uniform -> affinely transformed Beta over the same [low, high] support.
register_asvi_substitution_rule(
    uniform.Uniform,
    lambda dist: shift.Shift(dist.low)(
        scale_lib.Scale(dist.high - dist.low)(
            beta.Beta(concentration0=tf.ones_like(dist.mean()),
                      concentration1=1.))))
# Exponential(rate) is the concentration=1 special case of Gamma.
register_asvi_substitution_rule(
    exponential.Exponential,
    lambda dist: gamma.Gamma(concentration=1., rate=dist.rate))
# Chi2(df) is Gamma(concentration=df/2, rate=1/2).
register_asvi_substitution_rule(
    chi2.Chi2,
    lambda dist: gamma.Gamma(concentration=0.5 * dist.df, rate=0.5))
# pylint: enable=g-long-lambda
# TODO(kateslin): Add support for models with prior+likelihood written as
# a single JointDistribution.
def build_asvi_surrogate_posterior(
    prior,
    mean_field=False,
    initial_prior_weight=0.5,
    seed=None,
    name=None):
  """Builds a structured surrogate posterior inspired by conjugate updating.

  ASVI, or Automatic Structured Variational Inference, was proposed by
  Ambrogioni et al. (2020) [1] as a method of automatically constructing a
  surrogate posterior with the same structure as the prior. It does this by
  reparameterizing the variational family of the surrogate posterior by
  structuring each parameter according to the equation

  ```none
  prior_weight * prior_parameter + (1 - prior_weight) * mean_field_parameter
  ```

  In this equation, `prior_parameter` is a vector of prior parameters and
  `mean_field_parameter` is a vector of trainable parameters with the same
  domain as `prior_parameter`. `prior_weight` is a vector of learnable
  parameters where `0. <= prior_weight <= 1.`. When `prior_weight =
  0`, the surrogate posterior will be a mean-field surrogate, and when
  `prior_weight = 1.`, the surrogate posterior will be the prior. This convex
  combination equation, inspired by conjugacy in exponential families, thus
  allows the surrogate posterior to balance between the structure of the prior
  and the structure of a mean-field approximation.

  Args:
    prior: tfd.JointDistribution instance of the prior.
    mean_field: Optional Python boolean. If `True`, creates a degenerate
      surrogate distribution in which all variables are independent,
      ignoring the prior dependence structure. Default value: `False`.
    initial_prior_weight: Optional float value (either static or tensor value)
      on the interval [0, 1]. A larger value creates an initial surrogate
      distribution with more dependence on the prior structure. Default value:
      `0.5`.
    seed: PRNG seed; see `tfp.random.sanitize_seed` for details.
    name: Optional string. Default value: `build_asvi_surrogate_posterior`.

  Returns:
    surrogate_posterior: A `tfd.JointDistributionCoroutineAutoBatched` instance
      whose samples have shape and structure matching that of `prior`.

  Raises:
    TypeError: The `prior` argument cannot be a nested `JointDistribution`.

  ### Examples

  Consider a Brownian motion model expressed as a JointDistribution:

  ```python
  prior_loc = 0.
  innovation_noise = .1

  def model_fn():
    new = yield tfd.Normal(loc=prior_loc, scale=innovation_noise)
    for i in range(4):
      new = yield tfd.Normal(loc=new, scale=innovation_noise)

  prior = tfd.JointDistributionCoroutineAutoBatched(model_fn)
  ```

  Let's use variational inference to approximate the posterior. We'll build a
  surrogate posterior distribution by feeding in the prior distribution.

  ```python
  surrogate_posterior = tfp.experimental.vi.build_asvi_surrogate_posterior(
      prior)
  ```

  This creates a trainable joint distribution, defined by variables in
  `surrogate_posterior.trainable_variables`. We use `fit_surrogate_posterior`
  to fit this distribution by minimizing a divergence to the true posterior.

  ```python
  losses = tfp.vi.fit_surrogate_posterior(
    target_log_prob_fn,
    surrogate_posterior=surrogate_posterior,
    num_steps=100,
    optimizer=tf.optimizers.Adam(0.1),
    sample_size=10)

  # After optimization, samples from the surrogate will approximate
  # samples from the true posterior.
  samples = surrogate_posterior.sample(100)
  posterior_mean = [tf.reduce_mean(x) for x in samples]
  posterior_std = [tf.math.reduce_std(x) for x in samples]
  ```

  #### References

  [1]: <NAME>, <NAME>, <NAME>. Automatic structured
       variational inference. _arXiv preprint arXiv:2002.00643_, 2020
       https://arxiv.org/abs/2002.00643
  """
  with tf.name_scope(name or 'build_asvi_surrogate_posterior'):
    # Delegate the recursive construction to the module helper; the partial
    # fixes the convex-update rule used for each base (non-joint, non-meta)
    # distribution encountered.
    surrogate_posterior, variables = _asvi_surrogate_for_distribution(
        dist=prior,
        base_distribution_surrogate_fn=functools.partial(
            _asvi_convex_update_for_base_distribution,
            mean_field=mean_field,
            initial_prior_weight=initial_prior_weight),
        seed=seed)
    # Attach the variables to the returned distribution so they share its
    # lifetime (presumably how `trainable_variables` finds them — TODO
    # confirm against the JointDistribution implementation).
    surrogate_posterior.also_track = variables
    return surrogate_posterior
def _asvi_surrogate_for_distribution(dist,
base_distribution_surrogate_fn,
sample_shape=None,
variables=None,
seed=None):
"""Recursively creates ASVI surrogates, and creates new variables if needed.
Args:
dist: a `tfd.Distribution` instance.
base_distribution_surrogate_fn: Callable to build a surrogate posterior
for a 'base' (non-meta and non-joint) distribution, with signature
`surrogate_posterior, variables = base_distribution_fn(
dist, sample_shape=None, variables=None, seed=None)`.
sample_shape: Optional `Tensor` shape of samples drawn from `dist` by
`tfd.Sample` wrappers. If not `None`, the surrogate's event will include
independent sample dimensions, i.e., it will have event shape
`concat([sample_shape, dist.event_shape], axis=0)`.
Default value: `None`.
variables: Optional nested structure of `tf.Variable`s returned from a
previous call to `_asvi_surrogate_for_distribution`. If `None`,
new variables will be created; otherwise, constructs a surrogate posterior
backed by the passed-in variables.
Default value: `None`.
seed: PRNG seed; see `tfp.random.sanitize_seed` for details.
Returns:
surrogate_posterior: Instance of `tfd.Distribution` representing a trainable
surrogate posterior distribution, with the same structure and `name` as
`dist`.
variables: Nested structure of `tf.Variable` trainable parameters for the
surrogate posterior. If `dist` is a base distribution, this is
a `dict` of `ASVIParameters` instances. If `dist` is a joint
distribution, this is a `dist.dtype` structure of such `dict`s.
"""
# Pass args to any nested surrogates.
build_nested_surrogate = functools.partial(
_asvi_surrogate_for_distribution,
base_distribution_surrogate_fn=base_distribution_surrogate_fn,
sample_shape=sample_shape,
seed=seed)
# Apply any substitutions, while attempting to preserve the original name.
dist = _set_name(_as_substituted_distribution(dist), name=_get_name(dist))
# Handle wrapper ("meta") distributions.
if isinstance(dist, markov_chain.MarkovChain):
return _asvi_surrogate_for_markov_chain(
dist=dist,
variables=variables,
base_distribution_surrogate_fn=base_distribution_surrogate_fn,
sample_shape=sample_shape,
seed=seed)
if isinstance(dist, sample.Sample):
dist_sample_shape = distribution_util.expand_to_vector(dist.sample_shape)
nested_surrogate, variables = build_nested_surrogate( # pylint: disable=redundant-keyword-arg
dist=dist.distribution,
variables=variables,
sample_shape=(
dist_sample_shape if sample_shape is None
else ps.concat([sample_shape, dist_sample_shape], axis=0)))
surrogate_posterior = independent.Independent(
nested_surrogate,
reinterpreted_batch_ndims=ps.rank_from_shape(dist_sample_shape),
name=_get_name(dist))
# Treat distributions that subclass TransformedDistribution with their own
# parameters (e.g., Gumbel, Weibull, MultivariateNormal*, etc) as their
# own type of base distribution, rather than as explicit TDs.
elif type(dist) == transformed_distribution.TransformedDistribution: # pylint: disable=unidiomatic-typecheck
nested_surrogate, variables = build_nested_surrogate(dist.distribution,
variables=variables)
surrogate_posterior = transformed_distribution.TransformedDistribution(
nested_surrogate,
bijector=dist.bijector,
name=_get_name(dist))
elif isinstance(dist, independent.Independent):
nested_surrogate, variables = build_nested_surrogate(dist.distribution,
variables=variables)
surrogate_posterior = independent.Independent(
| |
0.2,
)
diag = Line(
rect.get_corner(DOWN+LEFT),
rect.get_corner(UP+RIGHT),
color = YELLOW
)
bottom = Line(rect.get_left(), rect.get_right())
bottom.move_to(rect.get_bottom())
bottom.set_color(tex_to_color_map["x"])
side = Line(rect.get_bottom(), rect.get_top())
side.move_to(rect.get_right())
side.set_color(tex_to_color_map["y"])
x = TexMobject("x")
x.next_to(rect.get_bottom(), UP, SMALL_BUFF)
y = TexMobject("y")
y.next_to(rect.get_right(), LEFT, SMALL_BUFF)
hyp = TexMobject("\\sqrt", "{x", "^2 + ", "y", "^2}")
hyp.set_color_by_tex_to_color_map(tex_to_color_map)
hyp.next_to(ORIGIN, UP)
hyp.rotate(diag.get_angle())
hyp.shift(diag.get_center())
group = VGroup(rect, bottom, side, diag, x, y, hyp)
self.add(rect)
for line, tex in (bottom, x), (side, y), (diag, hyp):
self.play(
ShowCreation(line),
Write(tex, run_time = 1)
)
self.wait()
self.play(
group.rotate, 0.45*np.pi, LEFT,
group.shift, 2*DOWN
)
corner = diag.get_end()
z_line = Line(corner, corner + 3*UP)
z_line.set_color(tex_to_color_map["z"])
z = TexMobject("z")
z.set_color(tex_to_color_map["z"])
z.next_to(z_line, RIGHT)
dot = Dot(z_line.get_end())
three_d_diag = Line(diag.get_start(), z_line.get_end())
three_d_diag.set_color(MAROON_B)
self.play(
ShowCreation(z_line),
ShowCreation(dot),
Write(z, run_time = 1)
)
self.play(ShowCreation(three_d_diag))
self.wait()
full_group = VGroup(group, z_line, z, three_d_diag, dot)
self.play(Rotating(
full_group, radians = -np.pi/6,
axis = UP,
run_time = 10,
))
self.wait()
class ThreeDBoxFormulas(Scene):
    """Write out ||(1, 1, 1)|| = sqrt(3) and the inner-sphere radius sqrt(3)-1."""

    def construct(self):
        norm_question = TexMobject("||(1, 1, 1)||", "=", "???")
        norm_answer = TexMobject(
            "||(1, 1, 1)||", "&=", "\\sqrt{1^2 + 1^2 + 1^2}\\\\",
            "&= \\sqrt{3}\\\\", "&\\approx", "1.73",
        )
        norm_question.to_corner(UP+LEFT)
        norm_answer.to_corner(UP+LEFT)

        inner_radius_eq = TexMobject(
            "\\text{Inner radius}", "&=", "\\sqrt{3} - 1\\\\",
            "&\\approx", "0.73"
        )
        # Positioned relative to the answer *before* the shift below, so the
        # inner-radius line does not follow the horizontal nudge (as in the
        # original ordering).
        inner_radius_eq.next_to(norm_answer, DOWN, LARGE_BUFF, LEFT)
        inner_radius_eq.set_color(GREEN_C)
        VGroup(norm_question, norm_answer).shift(0.55*RIGHT)

        self.play(Write(norm_question))
        self.wait(2)
        self.play(ReplacementTransform(norm_question, norm_answer))
        self.wait(2)
        self.play(Write(inner_radius_eq))
        self.wait(2)
class AskAboutHigherDimensions(TeacherStudentsScene):
    """Teacher poses the N-dimensional question; a student guesses sqrt(N)-1."""

    def construct(self):
        self.teacher_says("What happens for \\\\ higher dimensions?")
        self.change_student_modes(*3*["pondering"])
        self.wait(2)

        # The middle student ventures the general formula...
        self.student_thinks(
            "$\\sqrt{N} - 1$",
            target_mode = "happy",
            student_index = 1,
        )
        self.wait()

        # ...then second-guesses their own thought bubble.
        guessing_student = self.students[1]
        self.play(guessing_student.change, "confused", guessing_student.bubble)
        self.wait(3)
class TenSliders(SliderScene):
    # Ten sliders wandering under random ambient acceleration for 30 seconds.
    CONFIG = {
        "n_sliders" : 10,
        "run_time": 30,
        "slider_spacing" : 0.75,
        "ambient_acceleration_magnitude" : 2.0,
    }
    def construct(self):
        # (sic) "ambiant" matches the method name defined on SliderScene.
        self.initialize_ambiant_slider_movement()
        self.wait(self.run_time)
        self.wind_down_ambient_movement()
class TwoDBoxWithSliders(TwoDimensionalCase):
    """2D corner-circle demo driven by sliders.

    Walks through: an off-center circle, recentering it, reading off the
    per-coordinate "real estate" (squared distance terms), swapping to the
    top-right corner circle, revealing the inner circle, its tangent point,
    perturbing the point, and bounding the inner circle's real estate.
    Methods run in the order listed in `construct` and share state through
    attributes (`self.question`, `self.re_line`, ...), so the ordering is
    load-bearing.
    """
    CONFIG = {
        "slider_config" : {
            "include_real_estate_ticks" : True,
            "tick_frequency" : 1,
            "numbers_with_elongated_ticks" : [],
            "tick_size" : 0.1,
            "dial_color" : YELLOW,
            "x_min" : -2,
            "x_max" : 2,
            "unit_size" : 1.5,
        },
        # Circle initially centered at the bottom-right corner (1, -1).
        "center_point" : [1, -1],
    }
    def setup(self):
        TwoDimensionalCase.setup(self)
        ## Correct from previous setup: drop the equation and nudge everything
        ## right to make room.
        self.remove(self.equation)
        self.sliders.shift(RIGHT)
        VGroup(*self.get_top_level_mobjects()).shift(RIGHT)
        x_slider = self.sliders[0]
        # Re-anchor the x-slider's number labels to the left of their ticks.
        for number in x_slider.numbers:
            value = int(number.get_tex_string())
            number.next_to(
                x_slider.number_to_point(value),
                LEFT, MED_SMALL_BUFF
            )
        self.plane.axes.set_color(BLUE)
        ## Add box material: grey copies of the circle at the other three
        ## corners, plus the bounding square.
        corner_circles = VGroup(*[
            self.circle.copy().move_to(
                self.plane.coords_to_point(*coords)
            ).set_color(GREY)
            for coords in ((1, 1), (-1, 1), (-1, -1))
        ])
        line = Line(
            self.plane.coords_to_point(-1, -1),
            self.plane.coords_to_point(1, 1),
        )
        # The square is sized by stretching it onto the box diagonal.
        box = Square(color = RED)
        box.replace(line, stretch = True)
        self.add(box, corner_circles)
        self.box = box
        self.corner_circles = corner_circles
    def construct(self):
        self.ask_about_off_center_circle()
        self.recenter_circle()
        self.write_x_and_y_real_estate()
        self.swap_with_top_right_circle()
        self.show_center_circle()
        self.describe_tangent_point()
        self.perterb_point()
        self.wander_on_inner_circle()
        self.ask_about_inner_real_estate()
    def ask_about_off_center_circle(self):
        # Pose the question while the sliders wander ambiently.
        question = TextMobject("Off-center circles?")
        question.next_to(self.plane, UP)
        self.initialize_ambiant_slider_movement()
        self.play(Write(question))
        self.wait(4)
        self.wind_down_ambient_movement()
        self.question = question
    def recenter_circle(self):
        # Slide the circle (and each slider's ticks/dial) to the origin,
        # wander there, then slide everything back to the original center.
        original_center_point = self.center_point
        self.play(
            self.circle.move_to, self.plane.coords_to_point(0, 0),
            Animation(self.sliders),
            *[
                ApplyMethod(
                    mob.shift,
                    slider.number_to_point(0)-slider.number_to_point(slider.center_value)
                )
                for slider in self.sliders
                for mob in [slider.real_estate_ticks, slider.dial]
            ]
        )
        self.center_point = [0, 0]
        for x, slider in zip(self.center_point, self.sliders):
            slider.center_value = x
        self.initialize_ambiant_slider_movement()
        self.wait(7)
        self.wind_down_ambient_movement()
        self.play(
            self.circle.move_to,
            self.plane.coords_to_point(*original_center_point),
            Animation(self.sliders),
            *[
                ApplyMethod(
                    mob.shift,
                    slider.number_to_point(x)-slider.number_to_point(0)
                )
                for x, slider in zip(original_center_point, self.sliders)
                for mob in [slider.real_estate_ticks, slider.dial]
            ]
        )
        self.center_point = original_center_point
        for x, slider in zip(self.center_point, self.sliders):
            slider.center_value = x
        self.initialize_ambiant_slider_movement()
        self.wait(5)
    def write_x_and_y_real_estate(self):
        # Label the per-coordinate squared-distance terms; morph the x-phrase
        # into the y-phrase (and its brace) rather than showing both at once.
        phrases = VGroup(
            TextMobject("$x$", "real estate:", "$(x-1)^2$"),
            TextMobject("$y$", "real estate:", "$(y+1)^2$"),
        )
        phrases.next_to(self.plane, UP)
        phrases[0].set_color_by_tex("x", GREEN)
        phrases[1].set_color_by_tex("y", RED)
        x_brace, y_brace = [
            Brace(slider.real_estate_ticks, RIGHT)
            for slider in self.sliders
        ]
        x_brace.set_color(GREEN)
        y_brace.set_color(RED)
        self.play(FadeOut(self.question))
        self.play(
            Write(phrases[0]),
            GrowFromCenter(x_brace)
        )
        self.wait(3)
        self.play(
            Transform(*phrases),
            Transform(x_brace, y_brace)
        )
        self.wait(5)
        self.wind_down_ambient_movement(wait = False)
        self.play(*list(map(FadeOut, [x_brace, phrases[0]])))
    def swap_with_top_right_circle(self):
        # Exchange the active circle with the (1, 1) corner circle and move
        # the y-slider's center accordingly.
        alt_circle = self.corner_circles[0]
        slider = self.sliders[1]
        self.play(
            self.circle.move_to, alt_circle,
            alt_circle.move_to, self.circle,
            Animation(slider),
            *[
                ApplyMethod(
                    mob.shift,
                    slider.number_to_point(1) - slider.number_to_point(-1)
                )
                for mob in (slider.real_estate_ticks, slider.dial)
            ]
        )
        slider.center_value = 1
        self.center_point[1] = 1
        self.initialize_ambiant_slider_movement()
        self.wait(3)
    def show_center_circle(self):
        # The inner circle: radius sqrt(2) - 1, tangent to the corner circles.
        origin = self.plane.coords_to_point(0, 0)
        radius = get_norm(
            self.plane.coords_to_point(np.sqrt(2)-1, 0) - origin
        )
        circle = Circle(radius = radius, color = GREEN)
        circle.move_to(origin)
        self.play(FocusOn(circle))
        self.play(GrowFromCenter(circle, run_time = 2))
        self.wait(3)
    def describe_tangent_point(self):
        # Wait for the ambient motion to wander near the tangent point
        # (1 - sqrt(2)/2, 1 - sqrt(2)/2) before snapping the dials onto it.
        target_vector = np.array([
            1-np.sqrt(2)/2, 1-np.sqrt(2)/2
        ])
        point = self.plane.coords_to_point(*target_vector)
        origin = self.plane.coords_to_point(0, 0)
        h_line = Line(point[1]*UP + origin[0]*RIGHT, point)
        v_line = Line(point[0]*RIGHT+origin[1]*UP, point)
        while get_norm(self.get_vector()-target_vector) > 0.5:
            self.wait()
        self.wind_down_ambient_movement(0)
        self.reset_dials(target_vector)
        self.play(*list(map(ShowCreation, [h_line, v_line])))
        self.wait()
        re_line = DashedLine(
            self.sliders[0].dial.get_left() + MED_SMALL_BUFF*LEFT,
            self.sliders[1].dial.get_right() + MED_SMALL_BUFF*RIGHT,
        )
        words = TextMobject("Evenly shared \\\\ real estate")
        words.scale(0.8)
        words.next_to(re_line, RIGHT)
        self.play(ShowCreation(re_line))
        self.play(Write(words))
        self.wait()
        self.evenly_shared_words = words
        self.re_line = re_line
    def perterb_point(self):
        # (sic) method name kept as-is; it is part of the construct sequence.
        # Perturb dials away from the even split (0.7 / 0.3 of the real estate).
        target_vector = np.array([
            1 - np.sqrt(0.7),
            1 - np.sqrt(0.3),
        ])
        ghost_dials = VGroup(*[
            slider.dial.copy()
            for slider in self.sliders
        ])
        ghost_dials.set_fill(WHITE, opacity = 0.75)
        self.add_foreground_mobjects(ghost_dials)
        self.reset_dials(target_vector)
        self.wait()
        # Comment on the real estate exchange between coordinates.
        x_words = TextMobject("Gain expensive \\\\", "real estate")
        y_words = TextMobject("Give up cheap \\\\", "real estate")
        VGroup(x_words, y_words).scale(0.8)
        x_words.next_to(self.re_line, UP+LEFT)
        x_words.shift(SMALL_BUFF*(DOWN+LEFT))
        y_words.next_to(self.re_line, UP+RIGHT)
        y_words.shift(MED_LARGE_BUFF*UP)
        x_arrow, y_arrow = [
            Arrow(
                words[1].get_edge_center(vect), self.sliders[i].dial,
                tip_length = 0.15,
            )
            for i, words, vect in zip(
                (0, 1), [x_words, y_words], [RIGHT, LEFT]
            )
        ]
        self.play(
            Write(x_words, run_time = 2),
            ShowCreation(x_arrow)
        )
        self.wait()
        self.play(FadeOut(self.evenly_shared_words))
        self.play(
            Write(y_words, run_time = 2),
            ShowCreation(y_arrow)
        )
        self.wait(2)
        # Swap perspective: cross out the old verbs and exchange them, while
        # moving the real-estate ticks to measure from 0 instead of 1.
        word_starts = VGroup(y_words[0], x_words[0])
        crosses = VGroup()
        new_words = VGroup()
        for w1, w2 in zip(word_starts, reversed(word_starts)):
            crosses.add(Cross(w1))
            w1_copy = w1.copy()
            w1_copy.generate_target()
            w1_copy.target.next_to(w2, UP, SMALL_BUFF)
            new_words.add(w1_copy)
        self.play(*[
            ApplyMethod(
                slider.real_estate_ticks.shift,
                slider.number_to_point(0)-slider.number_to_point(1)
            )
            for slider in self.sliders
        ])
        self.wait()
        self.play(ShowCreation(crosses))
        self.play(
            LaggedStartMap(MoveToTarget, new_words),
            Animation(crosses)
        )
        self.wait(3)
        # Return to the evenly-shared position, now centered at the origin.
        target_vector = np.array(2*[1-np.sqrt(0.5)])
        self.play(LaggedStartMap(FadeOut, VGroup(*[
            ghost_dials,
            x_words, y_words,
            x_arrow, y_arrow,
            crosses, new_words,
        ])))
        self.remove_foreground_mobjects(ghost_dials)
        self.reset_dials(target_vector)
        self.center_point = np.zeros(2)
        for x, slider in zip(self.center_point, self.sliders):
            slider.center_value = x
        self.set_to_vector(target_vector)
        self.total_real_estate = self.get_current_total_real_estate()
        self.wait(2)
    def wander_on_inner_circle(self):
        # Ambient motion now constrained to the inner circle's real estate.
        self.initialize_ambiant_slider_movement()
        self.wait(9)
    def ask_about_inner_real_estate(self):
        # Compare the inner circle's x^2 + y^2 against 0.5^2 + 0.5^2.
        question = TextMobject("What is \\\\ $x^2 + y^2$?")
        question.next_to(self.re_line, RIGHT)
        rhs = TexMobject("<0.5^2 + 0.5^2")
        rhs.scale(0.8)
        rhs.next_to(question, DOWN)
        rhs.to_edge(RIGHT)
        half_line = Line(*[
            slider.number_to_point(0.5) + MED_LARGE_BUFF*vect
            for slider, vect in zip(self.sliders, [LEFT, RIGHT])
        ])
        half = TexMobject("0.5")
        half.scale(self.sliders[0].number_scale_val)
        half.next_to(half_line, LEFT, SMALL_BUFF)
        target_vector = np.array(2*[1-np.sqrt(0.5)])
        # Let the ambient wander approach the target before snapping to it.
        while get_norm(target_vector - self.get_vector()) > 0.5:
            self.wait()
        self.wind_down_ambient_movement(0)
        self.reset_dials(target_vector)
        self.play(Write(question))
        self.wait(3)
        self.play(
            ShowCreation(half_line),
            Write(half)
        )
        self.wait()
        self.play(Write(rhs))
        self.wait(3)
class AskWhy(TeacherStudentsScene):
    """A confused student interjects to ask why."""

    def construct(self):
        prompt = "Wait, why?"
        self.student_says(prompt, target_mode = "confused")
        self.wait(3)
class MentionComparisonToZeroPointFive(TeacherStudentsScene):
    """Teacher teases that comparing against 0.5 will pay off later."""

    def construct(self):
        # Single literal, byte-equal to the original concatenated string.
        self.teacher_says(
            "Comparing to $0.5$ will \\\\be surprisingly useful!",
            target_mode = "hooray",
        )
        self.change_student_modes("happy", "happy", "happy")
        self.wait(3)
class ThreeDBoxExampleWithSliders(SliderScene):
    """3D corner-sphere demo with three sliders.

    Names the sphere at (1,1,1), finds its closest point to the origin,
    compares coordinates to 0.5, reframes that point as lying on the inner
    sphere, and bounds the inner sphere's real estate by 3*(0.5)^2.
    Methods share state via attributes (`self.re_line`, `self.arrow`, ...),
    so the `construct` ordering is load-bearing.
    """
    CONFIG = {
        "n_sliders" : 3,
        "slider_config" : {
            "x_min" : -2,
            "x_max" : 2,
            "unit_size" : 1.5,
        },
        # Sphere centered at the (1, 1, 1) corner.
        "center_point" : np.ones(3),
    }
    def setup(self):
        SliderScene.setup(self)
        self.sliders.shift(2*RIGHT)
    def construct(self):
        self.initialize_ambiant_slider_movement()
        self.name_corner_sphere()
        self.point_out_closest_point()
        self.compare_to_halfway_point()
        self.reframe_as_inner_sphere_point()
        self.place_bound_on_inner_real_estate()
        self.comment_on_inner_sphere_smallness()
    def name_corner_sphere(self):
        # Label the corner sphere and point at the x-slider's upper numbers.
        sphere_name = TextMobject(
            """Sphere with radius 1\\\\
            centered at (1, 1, 1)"""
        )
        sphere_name.to_corner(UP+LEFT)
        arrow = Arrow(
            sphere_name, VGroup(*self.sliders[0].numbers[-2:]),
            color = BLUE
        )
        self.play(
            LaggedStartMap(FadeIn, sphere_name,),
            ShowCreation(arrow, rate_func = squish_rate_func(smooth, 0.7, 1)),
            run_time = 3
        )
        self.wait(5)
        self.sphere_name = sphere_name
        self.arrow = arrow
    def point_out_closest_point(self):
        # Closest point to the origin: each coordinate is 1 - sqrt(1/3),
        # i.e. the real estate 1 is split evenly three ways.
        target_x = 1-np.sqrt(1./3)
        target_vector = np.array(3*[target_x])
        re_words = TextMobject(
            "$x$, $y$ and $z$ each have \\\\",
            "$\\frac{1}{3}$", "units of real estate"
        )
        re_words.to_corner(UP+LEFT)
        re_line = DashedLine(*[
            self.sliders[i].number_to_point(target_x) + MED_SMALL_BUFF*vect
            for i, vect in [(0, LEFT), (2, RIGHT)]
        ])
        new_arrow = Arrow(
            re_words.get_corner(DOWN+RIGHT), re_line.get_left(),
            color = BLUE
        )
        self.wind_down_ambient_movement()
        self.play(*[
            ApplyMethod(slider.set_value, x)
            for x, slider in zip(target_vector, self.sliders)
        ])
        self.play(ShowCreation(re_line))
        self.play(
            FadeOut(self.sphere_name),
            Transform(self.arrow, new_arrow),
        )
        self.play(LaggedStartMap(FadeIn, re_words))
        self.wait(2)
        self.re_words = re_words
        self.re_line = re_line
    def compare_to_halfway_point(self):
        # Mark the 0.5 level, slide all dials there, cross out the 1/3 claim,
        # then restore the previous dial values.
        half_line = Line(*[
            self.sliders[i].number_to_point(0.5)+MED_SMALL_BUFF*vect
            for i, vect in [(0, LEFT), (2, RIGHT)]
        ])
        half_line.set_stroke(MAROON_B, 6)
        half_label = TexMobject("0.5")
        half_label.scale(self.sliders[0].number_scale_val)
        half_label.next_to(half_line, LEFT, MED_SMALL_BUFF)
        half_label.set_color(half_line.get_color())
        curr_vector = self.get_vector()
        # NOTE(review): this local is never used below (dials are set to the
        # literal 0.5 instead).
        target_vector = 0.5*np.ones(3)
        ghost_dials = VGroup(*[
            slider.dial.copy().set_fill(WHITE, 0.5)
            for slider in self.sliders
        ])
        cross = Cross(self.re_words.get_parts_by_tex("frac"))
        new_re = TexMobject("(0.5)^2 = 0.25")
        new_re.next_to(cross, DOWN, MED_SMALL_BUFF, LEFT)
        new_re.set_color(MAROON_B)
        self.play(
            FadeOut(self.arrow),
            Write(half_label, run_time = 1),
            ShowCreation(half_line)
        )
        self.wait()
        self.add(ghost_dials)
        self.play(*[
            ApplyMethod(slider.set_value, 0.5)
            for slider in self.sliders
        ])
        self.play(ShowCreation(cross))
        self.play(Write(new_re))
        self.wait(3)
        self.play(
            FadeOut(new_re),
            FadeOut(cross),
            *[
                ApplyMethod(slider.set_value, x)
                for x, slider in zip(curr_vector, self.sliders)
            ]
        )
    def reframe_as_inner_sphere_point(self):
        # Re-center the real-estate ticks at 0: the same point is now read as
        # a point on the inner sphere.
        s = self.sliders[0]
        shift_vect = s.number_to_point(0)-s.number_to_point(1)
        curr_vector = self.get_vector()
        self.set_center_point(np.zeros(3))
        self.set_to_vector(curr_vector)
        self.total_real_estate = self.get_current_total_real_estate()
        all_re_ticks = VGroup(*[
            slider.real_estate_ticks
            for slider in self.sliders
        ])
        inner_sphere_words = TextMobject(
            "Also a point on \\\\", "the inner sphere"
        )
        inner_sphere_words.next_to(self.re_line, RIGHT)
        question = TextMobject("How much \\\\", "real estate?")
        question.next_to(self.re_line, RIGHT, MED_LARGE_BUFF)
        self.play(
            Animation(self.sliders),
            FadeOut(self.re_words),
            LaggedStartMap(
                ApplyMethod, all_re_ticks,
                lambda m : (m.shift, shift_vect)
            )
        )
        self.initialize_ambiant_slider_movement()
        self.play(Write(inner_sphere_words))
        self.wait(5)
        self.wind_down_ambient_movement(0)
        self.play(*[
            ApplyMethod(slider.set_value, x)
            for slider, x in zip(self.sliders, curr_vector)
        ])
        self.play(ReplacementTransform(
            inner_sphere_words, question
        ))
        self.wait(2)
        self.re_question = question
    def place_bound_on_inner_real_estate(self):
        # Each coordinate is below 0.5, so the total is below 3*(0.5)^2.
        bound = TexMobject(
            "&< 3(0.5)^2 ",
            "= 0.75"
        )
        bound.next_to(self.re_question, DOWN)
        bound.to_edge(RIGHT)
        self.play(Write(bound))
        self.wait(2)
    def comment_on_inner_sphere_smallness(self):
        self.initialize_ambiant_slider_movement()
        self.wait(15)
class Rotating3DCornerSphere(ExternallyAnimatedScene):
    # Placeholder: the footage for this scene is produced externally.
    pass
class | |
'fps': 60},
21: {'desc': 'stretching',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
22: {'desc': 'limping, hurt right leg',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
23: {'desc': 'limping, hurt right leg',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
24: {'desc': 'limping, hurt right leg',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
25: {'desc': 'careful walk and search',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
26: {'desc': 'careful walk and search',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
27: {'desc': 'careful walk and search',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
28: {'desc': 'careful walk',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
29: {'desc': 'creeping walk',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
30: {'desc': 'creeping walk',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
31: {'desc': 'silent walk',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
32: {'desc': 'creeping walk',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
33: {'desc': 'creeping with limp',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60},
34: {'desc': 'creep and pause, repeat',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 60}}},
78: {'desc': 'walking',
'motions': {1: {'desc': 'LeftTightTurn CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
2: {'desc': 'RightWideTurn CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
3: {'desc': 'LeftWideTurn CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
4: {'desc': 'SuperTightLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
5: {'desc': 'SuperTightRight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
6: {'desc': 'RunningStraight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
7: {'desc': 'RunningWideRight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
8: {'desc': 'RunningWideLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
9: {'desc': 'RunningTigherLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
10: {'desc': 'RunningTighterRight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
11: {'desc': 'calibration',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
12: {'desc': 'RunningNoBall CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
13: {'desc': 'OffensiveMoveSpinLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
14: {'desc': 'OffensiveMoveSpinLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
15: {'desc': 'OffensiveMoveGoRight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
16: {'desc': 'OffensiveMoveGoLeft CleanedGRs',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
17: {'desc': 'OffensiveMoveSpinRight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
18: {'desc': 'WalkEvasiveLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
19: {'desc': 'WalkEvasiveRight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
20: {'desc': 'FeintLeftMoveRight CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
21: {'desc': 'FeintRightMoveLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
22: {'desc': 'FakeShotBreakRight CleanedGRs',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
23: {'desc': 'FakeShotBreakLeft CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
24: {'desc': 'DefensiveStraightNoStop CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
25: {'desc': 'DefensiveStraightWithStop CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
26: {'desc': 'DefensiveRightStopToStop CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
27: {'desc': 'DefensiveLeftStopToStop CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
28: {'desc': 'DefensiveMoveZigZag CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
29: {'desc': 'DefensiveMoveSideToSide CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
30: {'desc': 'DefensiveMoveSideToSide CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
31: {'desc': 'Pivoting CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
32: {'desc': 'SraightDriveFromStop Cleaned GRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
33: {'desc': 'RightDrive (left then right) Cleaned GRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
34: {'desc': 'LeftDrive (right then left) Cleaned GRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120},
35: {'desc': 'RightTightTurn CleanedGRS',
'files': ['tvd', 'c3d', 'amc', 'avi'],
'fps': 120}}},
79: {'desc': 'actor everyday activities',
'motions': {1: {'desc': 'chopping wood',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
2: {'desc': 'swimming',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
3: {'desc': 'swimming',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
4: {'desc': 'digging',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
5: {'desc': 'sewing',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
6: {'desc': 'hand shake',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
7: {'desc': 'lost marker',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
8: {'desc': 'boxing',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
9: {'desc': 'slicing bread',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
10: {'desc': 'chopping onions',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
11: {'desc': 'Subject calibration',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
12: {'desc': 'eating dinner',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
13: {'desc': 'mixing batter',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
14: {'desc': 'making dough',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
15: {'desc': 'eating a sandwich',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
16: {'desc': 'buying something',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
17: {'desc': 'playing violin',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
18: {'desc': 'playing drums',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
19: {'desc': 'playing piano',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
20: {'desc': 'hanging a picture',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
21: {'desc': 'putting on a pull over',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
22: {'desc': 'range of motion',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
23: {'desc': 'putting on a jacket',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
24: {'desc': 'putting on button up sweater',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
25: {'desc': 'moving heavy box',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
26: {'desc': 'planting a tree',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
27: {'desc': 'planting a plant',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
28: {'desc': 'putting on a ball cap',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
29: {'desc': 'putting on a dress',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
30: {'desc': 'pushing a swing',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
31: {'desc': 'writing on a chalkboard',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
32: {'desc': 'writing on a chalkboard',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
33: {'desc': 'chug a beer',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
34: {'desc': 'fishing',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
35: {'desc': 'fishing',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
36: {'desc': 'answering the phone',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
37: {'desc': 'dialing phone',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
38: {'desc': 'drinking water',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
39: {'desc': 'sipping martinee',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
40: {'desc': 'drinking a soda',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
41: {'desc': 'sipping coffee',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
42: {'desc': 'eating soup',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
43: {'desc': 'painting',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
44: {'desc': 'washing a window',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
45: {'desc': 'bear',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
46: {'desc': 'bear',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
47: {'desc': 'chicken',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
48: {'desc': 'elephant',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
49: {'desc': 'monkey',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
50: {'desc': 'mouse',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
51: {'desc': 'chipmunk',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
52: {'desc': 'prairie dog',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
53: {'desc': 'dog',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
54: {'desc': 'snake',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
55: {'desc': 'sweeping',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
56: {'desc': 'T-rex',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
57: {'desc': 'fish',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
58: {'desc': 'bird',
'files': ['tvd', 'c3d', 'amc', 'mpg', 'avi'],
'fps': 60},
59: {'desc': 'cat',
'files': | |
import asyncio
import inspect
import json
from typing import Callable, Dict, List, Optional
import aiohttp_cors
import websockets
import rlp
from aiohttp import web
from async_armor import armor
from decorator import decorator
from jsonrpcserver import config
from jsonrpcserver.async_methods import AsyncMethods
from jsonrpcserver.exceptions import InvalidParams, InvalidRequest, ServerError
from quarkchain.cluster.master import MasterServer
from quarkchain.cluster.rpc import AccountBranchData
from quarkchain.cluster.slave import SlaveServer
from quarkchain.core import (
Address,
Branch,
Log,
MinorBlock,
RootBlock,
SerializedEvmTransaction,
TokenBalanceMap,
TransactionReceipt,
TypedTransaction,
Constant,
MinorBlockHeader,
PoSWInfo,
)
from quarkchain.evm.transactions import Transaction as EvmTransaction
from quarkchain.evm.utils import denoms, is_numeric
from quarkchain.p2p.p2p_manager import P2PManager
from quarkchain.utils import Logger, token_id_decode, token_id_encode
from cachetools import LRUCache
import uuid
from quarkchain.cluster.log_filter import LogFilter
from quarkchain.cluster.subscription import SUB_LOGS
# defaults
# Gas values applied when a request omits startgas / gasprice.
DEFAULT_STARTGAS = 100 * 1000
DEFAULT_GASPRICE = 10 * denoms.gwei

# Allow 16 MB request for submitting big blocks
# TODO: revisit this parameter
JSON_RPC_CLIENT_REQUEST_MAX_SIZE = 16 * 1024 * 1024

# Disable jsonrpcserver logging
config.log_requests = False
config.log_responses = False

# All-zero transaction id ("0x000...0"); presumably a placeholder for
# absent tx ids — confirm against call sites.
EMPTY_TX_ID = "0x" + "0" * Constant.TX_ID_HEX_LENGTH
def quantity_decoder(hex_str, allow_optional=False):
    """Decode a "0x"-prefixed hex string into an integer quantity.

    Returns None when `allow_optional` is true and `hex_str` is None.
    Raises InvalidParams on a missing prefix, missing digits, or bad hex.
    """
    if hex_str is None and allow_optional:
        return None
    # Require the "0x" prefix plus at least one hex digit.
    if not hex_str.startswith("0x") or len(hex_str) <= 2:
        raise InvalidParams("Invalid quantity encoding")
    try:
        return int(hex_str, 16)
    except ValueError:
        raise InvalidParams("Invalid quantity encoding")
def quantity_encoder(i):
    """Encode the integer quantity `i` as a "0x"-prefixed hex string."""
    assert is_numeric(i)  # sanity check only; stripped under -O
    return hex(i)
def data_decoder(hex_str, allow_optional=False):
    """Decode unformatted "0x"-prefixed hex data into raw bytes.

    When `allow_optional` is true, a None input decodes to None.
    Raises InvalidParams on a missing prefix or non-hex characters.
    """
    if hex_str is None and allow_optional:
        return None
    if not hex_str.startswith("0x"):
        raise InvalidParams("Invalid hex_str encoding")
    try:
        # Everything after the "0x" prefix must be valid hex digits.
        return bytes.fromhex(hex_str[2:])
    except Exception:
        raise InvalidParams("Invalid hex_str hex encoding")
def data_encoder(data_bytes):
    """Encode raw bytes as a "0x"-prefixed hex string."""
    return "0x{}".format(data_bytes.hex())
def address_decoder(hex_str):
    """Decode a "0x" hex string into a 24-byte serialized address.

    The zero-length (empty) address is also accepted.
    """
    addr_bytes = data_decoder(hex_str)
    if len(addr_bytes) not in (0, 24):
        raise InvalidParams("Addresses must be 24 or 0 bytes long")
    return addr_bytes
def address_encoder(addr_bytes):
    """Encode a 24-byte serialized address as "0x"-prefixed hex."""
    assert len(addr_bytes) == 24
    return "0x" + addr_bytes.hex()
def recipient_decoder(hex_str, allow_optional=False):
    """Decode a 20-byte recipient (or the empty recipient) from "0x" hex.

    When `allow_optional` is true, a None input decodes to None.
    """
    if hex_str is None and allow_optional:
        return None
    recipient_bytes = data_decoder(hex_str)
    if len(recipient_bytes) not in (0, 20):
        raise InvalidParams("Addresses must be 20 or 0 bytes long")
    return recipient_bytes
def recipient_encoder(recipient_bytes):
    """Encode a 20-byte recipient as "0x"-prefixed hex."""
    assert len(recipient_bytes) == 20
    return "0x" + recipient_bytes.hex()
def full_shard_key_decoder(hex_str):
    """Decode a 4-byte "0x" hex string into an integer full shard key."""
    raw = data_decoder(hex_str)
    if len(raw) != 4:
        raise InvalidParams("Full shard id must be 4 bytes")
    return int.from_bytes(raw, byteorder="big")
def full_shard_key_encoder(full_shard_key):
    """Encode a full shard key as 4 big-endian bytes of "0x" hex."""
    return "0x" + full_shard_key.to_bytes(4, byteorder="big").hex()
def id_encoder(hash_bytes, full_shard_key):
    """Encode hash and full_shard_key into a single "0x" hex id."""
    suffix = full_shard_key.to_bytes(4, byteorder="big")
    return "0x" + (hash_bytes + suffix).hex()
def id_decoder(hex_str):
    """Decode a 36-byte id into (hash, full_shard_key)."""
    raw = data_decoder(hex_str)
    if len(raw) != 36:
        raise InvalidParams("Invalid id encoding")
    # First 32 bytes are the hash, last 4 the big-endian full shard key.
    return raw[:32], int.from_bytes(raw[32:], byteorder="big")
def hash_decoder(hex_str):
    """Decode a 32-byte block hash from "0x" hex."""
    hash_bytes = data_decoder(hex_str)
    if len(hash_bytes) != 32:
        raise InvalidParams("Hashes must be 32 bytes long")
    return hash_bytes
def signature_decoder(hex_str):
    """Decode a 65-byte block signature; falsy input yields None."""
    if not hex_str:
        return None
    sig = data_decoder(hex_str)
    if len(sig) != 65:
        raise InvalidParams("Signature must be 65 bytes long")
    return sig
def bool_decoder(data):
    """Pass through a boolean parameter, rejecting every other type."""
    if isinstance(data, bool):
        return data
    raise InvalidParams("Parameter must be boolean")
def _add_posw_info_to_resp(d: Dict, diff: int, posw_info: PoSWInfo):
    """Fold PoSW mining statistics into the response dict `d` in place."""
    d.update(
        effectiveDifficulty=quantity_encoder(posw_info.effective_difficulty),
        poswMineableBlocks=quantity_encoder(posw_info.posw_mineable_blocks),
        poswMinedBlocks=quantity_encoder(posw_info.posw_mined_blocks),
        # PoSW lowered the difficulty iff the effective value is below diff.
        stakingApplied=posw_info.effective_difficulty < diff,
    )
def root_block_encoder(block, extra_info):
    """Encode a root block (header + minor block headers) as a JSON dict.

    :param block: a RootBlock
    :param extra_info: optional PoSWInfo; when truthy, PoSW fields are added
    :returns: a json encodable dictionary
    """
    header = block.header
    d = {
        "id": data_encoder(header.get_hash()),
        "height": quantity_encoder(header.height),
        "hash": data_encoder(header.get_hash()),
        "sealHash": data_encoder(header.get_hash_for_mining()),
        "hashPrevBlock": data_encoder(header.hash_prev_block),
        "idPrevBlock": data_encoder(header.hash_prev_block),
        "nonce": quantity_encoder(header.nonce),
        "hashMerkleRoot": data_encoder(header.hash_merkle_root),
        "miner": address_encoder(header.coinbase_address.serialize()),
        "coinbase": balances_encoder(header.coinbase_amount_map),
        "difficulty": quantity_encoder(header.difficulty),
        "timestamp": quantity_encoder(header.create_time),
        "size": quantity_encoder(len(block.serialize())),
        "minorBlockHeaders": [],
        "signature": data_encoder(header.signature),
    }
    if extra_info:
        _add_posw_info_to_resp(d, header.difficulty, extra_info)
    # NOTE: this loop rebinds `header`; the root header is not used below.
    for header in block.minor_block_header_list:
        h = minor_block_header_encoder(header)
        d["minorBlockHeaders"].append(h)
    return d
def minor_block_encoder(block, include_transactions=False, extra_info=None):
    """Encode a block as JSON object.

    :param block: a :class:`ethereum.block.Block`
    :param include_transactions: if true transaction details are included, otherwise
        only their hashes
    :param extra_info: MinorBlockExtraInfo
    :returns: a json encodable dictionary
    """
    header = block.header
    meta = block.meta

    # Start from the header fields and add meta/body-level information.
    header_info = minor_block_header_encoder(header)
    d = {
        **header_info,
        "hashMerkleRoot": data_encoder(meta.hash_merkle_root),
        "hashEvmStateRoot": data_encoder(meta.hash_evm_state_root),
        "gasUsed": quantity_encoder(meta.evm_gas_used),
        "size": quantity_encoder(len(block.serialize())),
    }
    if include_transactions:
        d["transactions"] = []
        for i, _ in enumerate(block.tx_list):
            d["transactions"].append(tx_encoder(block, i))
    else:
        # Only the (hash + shard key) ids, not the full tx bodies.
        d["transactions"] = [
            id_encoder(tx.get_hash(), block.header.branch.get_full_shard_id())
            for tx in block.tx_list
        ]
    if extra_info:
        _add_posw_info_to_resp(d, header.difficulty, extra_info)
    return d
def minor_block_header_encoder(header: MinorBlockHeader) -> Dict:
    """Encode a minor block header as a JSON-ready dict."""
    d = {
        "id": id_encoder(header.get_hash(), header.branch.get_full_shard_id()),
        "height": quantity_encoder(header.height),
        "hash": data_encoder(header.get_hash()),
        "fullShardId": quantity_encoder(header.branch.get_full_shard_id()),
        "chainId": quantity_encoder(header.branch.get_chain_id()),
        "shardId": quantity_encoder(header.branch.get_shard_id()),
        "hashPrevMinorBlock": data_encoder(header.hash_prev_minor_block),
        # Previous block id is hash + this header's full shard id.
        "idPrevMinorBlock": id_encoder(
            header.hash_prev_minor_block, header.branch.get_full_shard_id()
        ),
        "hashPrevRootBlock": data_encoder(header.hash_prev_root_block),
        "nonce": quantity_encoder(header.nonce),
        "miner": address_encoder(header.coinbase_address.serialize()),
        "coinbase": balances_encoder(header.coinbase_amount_map),
        "difficulty": quantity_encoder(header.difficulty),
        "extraData": data_encoder(header.extra_data),
        "gasLimit": quantity_encoder(header.evm_gas_limit),
        "timestamp": quantity_encoder(header.create_time),
    }
    return d
def tx_encoder(block, i):
    """Encode a transaction as JSON object.

    `transaction` is the `i`th transaction in `block`.
    """
    tx = block.tx_list[i]
    evm_tx = tx.tx.to_evm_tx()
    branch = block.header.branch
    return {
        "id": id_encoder(tx.get_hash(), evm_tx.from_full_shard_key),
        "hash": data_encoder(tx.get_hash()),
        "nonce": quantity_encoder(evm_tx.nonce),
        # Transactions carry no own timestamp; the block's is reported.
        "timestamp": quantity_encoder(block.header.create_time),
        "fullShardId": quantity_encoder(branch.get_full_shard_id()),
        "chainId": quantity_encoder(branch.get_chain_id()),
        "shardId": quantity_encoder(branch.get_shard_id()),
        "blockId": id_encoder(block.header.get_hash(), branch.get_full_shard_id()),
        "blockHeight": quantity_encoder(block.header.height),
        "transactionIndex": quantity_encoder(i),
        "from": data_encoder(evm_tx.sender),
        "to": data_encoder(evm_tx.to),
        "fromFullShardKey": full_shard_key_encoder(evm_tx.from_full_shard_key),
        "toFullShardKey": full_shard_key_encoder(evm_tx.to_full_shard_key),
        "value": quantity_encoder(evm_tx.value),
        "gasPrice": quantity_encoder(evm_tx.gasprice),
        "gas": quantity_encoder(evm_tx.startgas),
        "data": data_encoder(evm_tx.data),
        "networkId": quantity_encoder(evm_tx.network_id),
        "transferTokenId": quantity_encoder(evm_tx.transfer_token_id),
        "gasTokenId": quantity_encoder(evm_tx.gas_token_id),
        # Human-readable token names alongside the numeric ids.
        "transferTokenStr": token_id_decode(evm_tx.transfer_token_id),
        "gasTokenStr": token_id_decode(evm_tx.gas_token_id),
        "r": quantity_encoder(evm_tx.r),
        "s": quantity_encoder(evm_tx.s),
        "v": quantity_encoder(evm_tx.v),
    }
def tx_detail_encoder(tx):
    """Encode a transaction detail object as JSON object. Used for indexing server."""
    return {
        "txId": id_encoder(tx.tx_hash, tx.from_address.full_shard_key),
        "fromAddress": address_encoder(tx.from_address.serialize()),
        # "0x" stands in for a missing recipient (contract creation, etc. —
        # confirm against the indexing-server producer).
        "toAddress": address_encoder(tx.to_address.serialize())
        if tx.to_address
        else "0x",
        "value": quantity_encoder(tx.value),
        "transferTokenId": quantity_encoder(tx.transfer_token_id),
        "transferTokenStr": token_id_decode(tx.transfer_token_id),
        "gasTokenId": quantity_encoder(tx.gas_token_id),
        "gasTokenStr": token_id_decode(tx.gas_token_id),
        "blockHeight": quantity_encoder(tx.block_height),
        "timestamp": quantity_encoder(tx.timestamp),
        "success": tx.success,
        "isFromRootChain": tx.is_from_root_chain,
        "nonce": quantity_encoder(tx.nonce),
    }
def loglist_encoder(loglist: List[Log], is_removed: bool = False):
    """Encode a list of EVM logs as JSON-ready dicts.

    `is_removed` is reported verbatim in every entry's "removed" field.
    """
    def _encode_one(log):
        return {
            "logIndex": quantity_encoder(log.log_idx),
            "transactionIndex": quantity_encoder(log.tx_idx),
            "transactionHash": data_encoder(log.tx_hash),
            "blockHash": data_encoder(log.block_hash),
            "blockNumber": quantity_encoder(log.block_number),
            "blockHeight": quantity_encoder(log.block_number),
            "address": data_encoder(log.recipient),
            "recipient": data_encoder(log.recipient),
            "data": data_encoder(log.data),
            "topics": [data_encoder(topic) for topic in log.topics],
            "removed": is_removed,
        }

    return [_encode_one(l) for l in loglist]
def receipt_encoder(block: MinorBlock, i: int, receipt: TransactionReceipt):
    """Encode the receipt for the `i`th transaction of `block` as a dict."""
    tx_id, tx_hash = None, None  # if empty, will be populated at call site
    if i < len(block.tx_list):
        tx = block.tx_list[i]
        evm_tx = tx.tx.to_evm_tx()
        tx_id = id_encoder(tx.get_hash(), evm_tx.from_full_shard_key)
        tx_hash = data_encoder(tx.get_hash())

    resp = {
        "transactionId": tx_id,
        "transactionHash": tx_hash,
        "transactionIndex": quantity_encoder(i),
        "blockId": id_encoder(
            block.header.get_hash(), block.header.branch.get_full_shard_id()
        ),
        "blockHash": data_encoder(block.header.get_hash()),
        "blockHeight": quantity_encoder(block.header.height),
        "blockNumber": quantity_encoder(block.header.height),
        "cumulativeGasUsed": quantity_encoder(receipt.gas_used),
        # Per-tx gas is the delta against the previous cumulative total.
        "gasUsed": quantity_encoder(receipt.gas_used - receipt.prev_gas_used),
        # success is stored as a byte flag: b"\x01" means success.
        "status": quantity_encoder(1 if receipt.success == b"\x01" else 0),
        "contractAddress": (
            address_encoder(receipt.contract_address.serialize())
            if not receipt.contract_address.is_empty()
            else None
        ),
        "logs": loglist_encoder(receipt.logs),
        "timestamp": quantity_encoder(block.header.create_time),
    }
    return resp
def balances_encoder(balances: TokenBalanceMap) -> List[Dict]:
    """Encode a token balance map as a list of token/balance dicts."""
    return [
        {
            "tokenId": quantity_encoder(token_id),
            "tokenStr": token_id_decode(token_id),
            "balance": quantity_encoder(amount),
        }
        for token_id, amount in balances.balance_map.items()
    ]
def decode_arg(name, decoder, allow_optional=False):
    """Create a decorator that applies `decoder` to argument `name`.

    When `allow_optional` is set, the decoder is invoked with
    allow_optional=True so a None argument passes through as None.
    """

    # `@decorator` (third-party) preserves the wrapped signature, which
    # inspect.getcallargs relies on to locate `name`.
    @decorator
    def new_f(f, *args, **kwargs):
        call_args = inspect.getcallargs(f, *args, **kwargs)
        call_args[name] = (
            decoder(call_args[name], allow_optional=True)
            if allow_optional
            else decoder(call_args[name])
        )
        return f(**call_args)

    return new_f
def encode_res(encoder):
    """Create a decorator that applies `encoder` to the return value of the
    decorated function.

    The wrapped function must be a coroutine; its awaited result is passed
    through `encoder` before being returned.
    """

    @decorator
    async def new_f(f, *args, **kwargs):
        res = await f(*args, **kwargs)
        return encoder(res)

    return new_f
def block_height_decoder(data):
    """Decode a block height parameter.

    None and "latest" map to None (the chain tip), "earliest" maps to 0,
    and anything else is decoded as a hex quantity.
    """
    if data in (None, "latest"):
        return None
    if data == "earliest":
        return 0
    # TODO: support pending
    return quantity_decoder(data)
def shard_id_decoder(data):
    """Decode a shard id quantity; any malformed input yields None."""
    try:
        result = quantity_decoder(data)
    except Exception:
        result = None
    return result
def eth_address_to_quarkchain_address_decoder(hex_str):
    """Decode a 20-byte Ethereum address into a QuarkChain address.

    Appends the default full shard key 0x00000001 to the Ethereum
    recipient, producing the 24-byte serialized QuarkChain form.
    Raises InvalidParams when the input is not exactly 40 hex characters.
    """
    eth_hex = hex_str[2:]
    if len(eth_hex) != 40:
        # Fixed message: the previous "40 or 0 bytes" text misstated the
        # check — exactly 40 hex chars (20 bytes) are required here.
        raise InvalidParams("Addresses must be 40 hex characters (20 bytes) long")
    return address_decoder("0x" + eth_hex + "00000001")
def _parse_log_request(
    params: Dict, addr_decoder: Callable[[str], bytes]
) -> (bytes, bytes):
    """Returns addresses and topics from a EVM log request.

    Despite the annotation, the actual return value is a pair of lists:
    (list of Address objects, list of topic groups, where each group is a
    list of raw 32-byte topic values).  "address" may be a single string
    or a list of strings; each "topics" entry may likewise be a string or
    a list (an OR-group, matching Ethereum's eth_getLogs convention —
    confirm against the filter consumer).
    """
    addresses, topics = [], []
    if "address" in params:
        if isinstance(params["address"], str):
            addresses = [Address.deserialize(addr_decoder(params["address"]))]
        elif isinstance(params["address"], list):
            addresses = [
                Address.deserialize(addr_decoder(a)) for a in params["address"]
            ]
    if "topics" in params:
        for topic_item in params["topics"]:
            if isinstance(topic_item, str):
                topics.append([data_decoder(topic_item)])
            elif isinstance(topic_item, list):
                topics.append([data_decoder(tp) for tp in topic_item])
    return addresses, topics
# Module-level registries of JSON-RPC methods; JSONRPCHttpServer.__init__
# binds each registered function to a server instance via func.__get__.
public_methods = AsyncMethods()
private_methods = AsyncMethods()
# noinspection PyPep8Naming
class JSONRPCHttpServer:
    @classmethod
    def start_public_server(cls, env, master_server):
        """Create and start the public JSON-RPC server from cluster config."""
        server = cls(
            env,
            master_server,
            env.cluster_config.JSON_RPC_PORT,
            env.cluster_config.JSON_RPC_HOST,
            public_methods,
        )
        server.start()
        return server
    @classmethod
    def start_private_server(cls, env, master_server):
        """Create and start the private JSON-RPC server from cluster config."""
        server = cls(
            env,
            master_server,
            env.cluster_config.PRIVATE_JSON_RPC_PORT,
            env.cluster_config.PRIVATE_JSON_RPC_HOST,
            private_methods,
        )
        server.start()
        return server
    @classmethod
    def start_test_server(cls, env, master_server):
        """Start a server exposing BOTH public and private methods.

        Intended for tests only: the combined registry is served on the
        public JSON-RPC host/port.
        """
        methods = AsyncMethods()
        for method in public_methods.values():
            methods.add(method)
        for method in private_methods.values():
            methods.add(method)
        server = cls(
            env,
            master_server,
            env.cluster_config.JSON_RPC_PORT,
            env.cluster_config.JSON_RPC_HOST,
            methods,
        )
        server.start()
        return server
    def __init__(
        self, env, master_server: MasterServer, port, host, methods: AsyncMethods
    ):
        """Set up the server state and bind RPC handlers to this instance.

        :param env: cluster environment/configuration object
        :param master_server: master used to service the RPC calls
        :param port: TCP port to listen on
        :param host: host/interface to bind
        :param methods: registry of unbound handler functions
        """
        self.loop = asyncio.get_event_loop()
        self.port = port
        self.host = host
        self.env = env
        self.master = master_server
        self.counters = dict()  # per-method call counts (see __handle)

        # Bind RPC handler functions to this instance
        self.handlers = AsyncMethods()
        for rpc_name in methods:
            func = methods[rpc_name]
            # func.__get__ turns the bare function into a bound method.
            self.handlers[rpc_name] = func.__get__(self, self.__class__)
async def __handle(self, request):
request = await request.text()
Logger.info(request)
d = dict()
try:
d = json.loads(request)
except Exception:
pass
method = d.get("method", "null")
if method in self.counters:
self.counters[method] += 1
else:
self.counters[method] = 1
# | |
# <gh_stars>1-10  (repository-metadata artifact; kept as a comment so the file parses)
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Helper functions and classes used by the Monorail Issue Tracker pages.
This module has functions that are reused in multiple servlets or
other modules.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import collections
import logging
import re
import urllib
from google.appengine.api import app_identity
import settings
from framework import authdata
from framework import exceptions
from framework import filecontent
from framework import framework_bizobj
from framework import framework_constants
from framework import framework_helpers
from framework import framework_views
from framework import permissions
from framework import sorting
from framework import template_helpers
from framework import urls
from proto import tracker_pb2
from services import client_config_svc
from tracker import field_helpers
from tracker import tracker_bizobj
from tracker import tracker_constants
# HTML input field names for blocked on and blocking issue refs.
BLOCKED_ON = 'blocked_on'
BLOCKING = 'blocking'

# This string is used in HTML form element names to identify custom fields.
# E.g., a value for a custom field with field_id 12 would be specified in
# an HTML form element with name="custom_12".
_CUSTOM_FIELD_NAME_PREFIX = 'custom_'

# When the attachment quota gets within 1MB of the limit, stop offering
# users the option to attach files.
_SOFT_QUOTA_LEEWAY = 1024 * 1024

# Accessors for sorting built-in fields.  Each entry maps an issue to a
# sort key; entries marked "And postprocessor" yield a user ID that
# SORTABLE_FIELDS_POSTPROCESSORS below turns into the final key.
SORTABLE_FIELDS = {
    'project': lambda issue: issue.project_name,
    'id': lambda issue: issue.local_id,
    'owner': tracker_bizobj.GetOwnerId,  # And postprocessor
    'reporter': lambda issue: issue.reporter_id,  # And postprocessor
    'component': lambda issue: issue.component_ids,
    'cc': tracker_bizobj.GetCcIds,  # And postprocessor
    'summary': lambda issue: issue.summary.lower(),
    'stars': lambda issue: issue.star_count,
    'attachments': lambda issue: issue.attachment_count,
    'opened': lambda issue: issue.opened_timestamp,
    'closed': lambda issue: issue.closed_timestamp,
    'modified': lambda issue: issue.modified_timestamp,
    'status': tracker_bizobj.GetStatus,
    'blocked': lambda issue: bool(issue.blocked_on_iids),
    'blockedon': lambda issue: issue.blocked_on_iids or sorting.MAX_STRING,
    'blocking': lambda issue: issue.blocking_iids or sorting.MAX_STRING,
    'mergedinto': lambda issue: issue.merged_into or sorting.MAX_STRING,
    'ownermodified': lambda issue: issue.owner_modified_timestamp,
    'statusmodified': lambda issue: issue.status_modified_timestamp,
    'componentmodified': lambda issue: issue.component_modified_timestamp,
    'ownerlastvisit': tracker_bizobj.GetOwnerId,  # And postprocessor
    }

# Some fields take a user ID from the issue and then use that to index
# into a dictionary of user views, and then get a field of the user view
# as the value to sort key.
SORTABLE_FIELDS_POSTPROCESSORS = {
    'owner': lambda user_view: user_view.email,
    'reporter': lambda user_view: user_view.email,
    'cc': lambda user_view: user_view.email,
    'ownerlastvisit': lambda user_view: -user_view.user.last_visit_timestamp,
    }

# Here are some restriction labels to help people do the most common things
# that they might want to do with restrictions.
_FREQUENT_ISSUE_RESTRICTIONS = [
    (permissions.VIEW, permissions.EDIT_ISSUE,
     'Only users who can edit the issue may access it'),
    (permissions.ADD_ISSUE_COMMENT, permissions.EDIT_ISSUE,
     'Only users who can edit the issue may add comments'),
    ]

# These issue restrictions should be offered as examples whenever the project
# does not have any custom permissions in use already.
_EXAMPLE_ISSUE_RESTRICTIONS = [
    (permissions.VIEW, 'CoreTeam',
     'Custom permission CoreTeam is needed to access'),
    ]
# Namedtuples that hold data parsed from post_data.
ParsedComponents = collections.namedtuple(
    'ParsedComponents', 'entered_str, paths, paths_remove')
ParsedFields = collections.namedtuple(
    'ParsedFields',
    'vals, vals_remove, fields_clear, '
    'phase_vals, phase_vals_remove')
ParsedUsers = collections.namedtuple(
    'ParsedUsers', 'owner_username, owner_id, cc_usernames, '
    'cc_usernames_remove, cc_ids, cc_ids_remove')
ParsedBlockers = collections.namedtuple(
    'ParsedBlockers', 'entered_str, iids, dangling_refs')
ParsedHotlistRef = collections.namedtuple(
    'ParsedHotlistRef', 'user_email, hotlist_name')
ParsedHotlists = collections.namedtuple(
    'ParsedHotlists', 'entered_str, hotlist_refs')
# ParsedIssue aggregates everything ParseIssueRequest extracts from a
# single issue-entry/update form post.
ParsedIssue = collections.namedtuple(
    'ParsedIssue', 'summary, comment, is_description, status, users, labels, '
    'labels_remove, components, fields, template_name, attachments, '
    'kept_attachments, blocked_on, blocking, hotlists')
def ParseIssueRequest(cnxn, post_data, services, errors, default_project_name):
  """Parse all the possible arguments out of the request.

  Args:
    cnxn: connection to SQL database.
    post_data: HTML form information.
    services: Connections to persistence layer.
    errors: object to accumulate validation error info.
    default_project_name: name of the project that contains the issue.

  Returns:
    A namedtuple with all parsed information.  User IDs are looked up, but
    also the strings are returned to allow bouncing the user back to correct
    any errors.
  """
  summary = post_data.get('summary', '')
  comment = post_data.get('comment', '')
  is_description = bool(post_data.get('description', ''))
  status = post_data.get('status', '')
  template_name = urllib.unquote_plus(post_data.get('template_name', ''))
  component_str = post_data.get('components', '')
  label_strs = post_data.getall('label')

  if is_description:
    # Highlight lines that came from the issue template.
    tmpl_txt = post_data.get('tmpl_txt', '')
    comment = MarkupDescriptionOnInput(comment, tmpl_txt)

  # Components may be separated by commas, semicolons, or whitespace.
  comp_paths, comp_paths_remove = _ClassifyPlusMinusItems(
      re.split('[,;\s]+', component_str))
  parsed_components = ParsedComponents(
      component_str, comp_paths, comp_paths_remove)
  labels, labels_remove = _ClassifyPlusMinusItems(label_strs)
  parsed_fields = _ParseIssueRequestFields(post_data)
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = _ParseIssueRequestAttachments(post_data)
  kept_attachments = _ParseIssueRequestKeptAttachments(post_data)
  parsed_users = _ParseIssueRequestUsers(cnxn, post_data, services)
  parsed_blocked_on = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKED_ON)
  parsed_blocking = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKING)
  parsed_hotlists = _ParseHotlists(post_data)

  parsed_issue = ParsedIssue(
      summary, comment, is_description, status, parsed_users, labels,
      labels_remove, parsed_components, parsed_fields, template_name,
      attachments, kept_attachments, parsed_blocked_on, parsed_blocking,
      parsed_hotlists)
  return parsed_issue
def MarkupDescriptionOnInput(content, tmpl_text):
  """Return HTML for the content of an issue description or comment.

  Args:
    content: the text submitted by the user; any user-entered markup
        has already been escaped.
    tmpl_text: the initial text that was put into the textarea.

  Returns:
    The description content text with template lines highlighted.
  """
  # Non-empty, stripped template lines are the candidate prefixes.
  template_lines = [ln.strip() for ln in tmpl_text.split('\n') if ln.strip()]
  return '\n'.join(
      _MarkupDescriptionLineOnInput(entered, template_lines)
      for entered in content.split('\n'))
def _MarkupDescriptionLineOnInput(line, tmpl_lines):
"""Markup one line of an issue description that was just entered.
Args:
line: string containing one line of the user-entered comment.
tmpl_lines: list of strings for the text of the template lines.
Returns:
The same user-entered line, or that line highlighted to
indicate that it came from the issue template.
"""
for tmpl_line in tmpl_lines:
if line.startswith(tmpl_line):
return '<b>' + tmpl_line + '</b>' + line[len(tmpl_line):]
return line
def _ClassifyPlusMinusItems(add_remove_list):
"""Classify the given plus-or-minus items into add and remove lists."""
add_remove_set = {s.strip() for s in add_remove_list}
add_strs = [s for s in add_remove_set if s and not s.startswith('-')]
remove_strs = [s[1:] for s in add_remove_set if s[1:] and s.startswith('-')]
return add_strs, remove_strs
def _ParseHotlists(post_data):
  """Parse the 'hotlists' form field into a ParsedHotlists namedtuple.

  Accepts "email:hotlist" full references and bare "hotlist" short
  references, separated by commas, semicolons, or whitespace.
  """
  entered_str = post_data.get('hotlists', '').strip()
  hotlist_refs = []
  for ref_str in re.split('[,;\s]+', entered_str):
    if not ref_str:
      continue
    email, colon, name = ref_str.partition(':')
    if colon and email:
      # E-mail isn't empty; full reference.
      hotlist_refs.append(ParsedHotlistRef(email, name))
    elif colon:
      # ":hotlist" with an empty e-mail; treat as a short reference.
      hotlist_refs.append(ParsedHotlistRef(None, name))
    else:
      # Bare short reference.
      hotlist_refs.append(ParsedHotlistRef(None, ref_str))
  return ParsedHotlists(entered_str, hotlist_refs)
def _ParseIssueRequestFields(post_data):
  """Iterate over post_data and return custom field values found in it.

  Form keys look like "custom_<field_id>" or, for phase-specific values,
  "custom_<field_id>_<phase_name>".  A companion "op_custom_..." key set
  to 'remove' or 'clear' controls how the values are applied.
  """
  field_val_strs = {}
  field_val_strs_remove = {}
  phase_field_val_strs = collections.defaultdict(dict)
  phase_field_val_strs_remove = collections.defaultdict(dict)
  for key in post_data.keys():
    if key.startswith(_CUSTOM_FIELD_NAME_PREFIX):
      val_strs = [v for v in post_data.getall(key) if v]
      if val_strs:
        try:
          field_id = int(key[len(_CUSTOM_FIELD_NAME_PREFIX):])
          phase_name = None
        except ValueError:  # key must be in format <field_id>_<phase_name>
          field_id, phase_name = key[len(_CUSTOM_FIELD_NAME_PREFIX):].split(
              '_', 1)
          field_id = int(field_id)
        if post_data.get('op_' + key) == 'remove':
          if phase_name:
            phase_field_val_strs_remove[field_id][phase_name] = val_strs
          else:
            field_val_strs_remove[field_id] = val_strs
        else:
          if phase_name:
            phase_field_val_strs[field_id][phase_name] = val_strs
          else:
            field_val_strs[field_id] = val_strs

  # TODO(jojwang): monorail:5154, no support for clearing phase field values.
  fields_clear = []
  op_prefix = 'op_' + _CUSTOM_FIELD_NAME_PREFIX
  for op_key in post_data.keys():
    if op_key.startswith(op_prefix):
      if post_data.get(op_key) == 'clear':
        field_id = int(op_key[len(op_prefix):])
        fields_clear.append(field_id)

  return ParsedFields(
      field_val_strs, field_val_strs_remove, fields_clear,
      phase_field_val_strs, phase_field_val_strs_remove)
def _ParseIssueRequestAttachments(post_data):
  """Extract and clean-up any attached files from the post data.

  Args:
    post_data: dict w/ values from the user's HTTP POST form data.

  Returns:
    [(filename, filecontents, mimetype), ...] with items for each attachment.
  """
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = []
  for i in range(1, 16):
    if 'file%s' % i in post_data:
      item = post_data['file%s' % i]
      # A plain string means no file was uploaded for this field
      # (Python 2: basestring covers str and unicode).
      if isinstance(item, basestring):
        continue
      if '\\' in item.filename:  # IE insists on giving us the whole path.
        item.filename = item.filename[item.filename.rindex('\\') + 1:]
      if not item.filename:
        continue  # Skip any FILE fields that were not filled in.
      attachments.append((
          item.filename, item.value,
          filecontent.GuessContentTypeFromFilename(item.filename)))

  return attachments
def _ParseIssueRequestKeptAttachments(post_data):
"""Extract attachment ids for attachments kept when updating description
Args:
post_data: dict w/ values from the user's HTTP POST form data.
Returns:
a list of attachment ids for kept attachments
"""
kept_attachments = post_data.getall('keep-attachment')
return [int(aid) for aid in kept_attachments]
def _ParseIssueRequestUsers(cnxn, post_data, services):
"""Extract usernames from the POST data, categorize them, and look up IDs.
Args:
cnxn: connection to SQL database.
post_data: dict w/ data from the HTTP POST.
services: Services.
Returns:
A namedtuple (owner_username, owner_id, cc_usernames, cc_usernames_remove,
cc_ids, cc_ids_remove), containing:
- issue owner's name and user ID, if any
- the list of all cc'd usernames
- the user IDs to add or remove from the issue CC list.
Any of these user IDs may be None if the corresponding username
| |
import tensorflow as tf
import math as m
from tensorflow.python import keras
import numpy as np
import math
def sinusoid(max_seq, embedding_dim):
    """Build a (1, max_seq, embedding_dim) sinusoidal position table.

    Even dimensions are sines and odd dimensions are cosines: the odd
    indices fold a pi/2 phase shift (and the matching frequency
    correction) into the sine argument.
    """
    def _entry(pos, i):
        # Identical expression (and evaluation order) to the classic
        # 10000^(-i/d) frequency schedule with the odd-index phase shift.
        return m.sin(
            pos * m.exp(-m.log(10000) * i / embedding_dim) * m.exp(
                m.log(10000) / embedding_dim * (i % 2)) + 0.5 * m.pi * (i % 2)
        )

    table = [
        [_entry(pos, i) for i in range(embedding_dim)]
        for pos in range(max_seq)
    ]
    return np.array([table])
class ExpandDims(keras.layers.Layer):
    """Keras layer that inserts a size-1 dimension at `axis`."""

    def __init__(self, axis=-1, **kwargs):
        super().__init__(**kwargs)
        self.axis = axis  # position of the inserted dimension

    def call(self, inputs, **kwargs):
        return tf.expand_dims(inputs, axis=self.axis)
# TODO : reduce time complexity
class PositionEmbedding(keras.layers.Layer):
    """Adds a fixed sinusoidal positional encoding to its input.

    The (1, max_seq, embedding_dim) table is precomputed once at
    construction time and broadcast-added over the batch dimension.
    """

    def __init__(self, max_seq, embedding_dim, **kwargs):
        super().__init__(**kwargs)
        # Reuse the module-level `sinusoid` helper instead of duplicating
        # the same nested-comprehension formula inline; the expression is
        # identical, so the resulting values are bit-for-bit the same.
        self.positional_embedding = tf.constant(
            sinusoid(max_seq, embedding_dim), dtype=tf.float32)

    def call(self, inputs, **kwargs):
        # Requires inputs with sequence length == max_seq; broadcasts over batch.
        return tf.add(inputs, self.positional_embedding)
class PositionEmbeddingV2(keras.layers.Layer):
    """Adds the standard Transformer sinusoidal encoding to its input.

    Unlike PositionEmbedding, this computes sin over the even angle
    columns and cos over the odd ones and concatenates them, following
    the TensorFlow tutorial formulation.
    """

    def __init__(self, max_seq, embedding_dim, **kwargs):
        super(PositionEmbeddingV2, self).__init__(**kwargs)
        angle_rads = PositionEmbeddingV2.__get_angles(np.arange(max_seq)[:, np.newaxis],
                                                      np.arange(embedding_dim)[np.newaxis, :],
                                                      embedding_dim)
        # apply sin to even indices in the array; 2i
        sines = np.sin(angle_rads[:, 0::2])
        # apply cos to odd indices in the array; 2i+1
        cosines = np.cos(angle_rads[:, 1::2])
        pos_encoding = np.concatenate([sines, cosines], axis=-1)
        # Leading axis of size 1 so the table broadcasts over the batch.
        pos_encoding = pos_encoding[np.newaxis, ...]
        self.sinusoid = tf.cast(pos_encoding, dtype=tf.float32)

    @staticmethod
    def __get_angles(pos, i, d_model):
        # angle = pos / 10000^(2*(i//2)/d_model)
        angle_rates = 1 / np.power(10000, (2 * (i // 2)) / np.float32(d_model))
        return pos * angle_rates

    def call(self, inputs, **kwargs):
        return tf.add(inputs, self.sinusoid)
class DynamicPositionEmbedding(keras.layers.Layer):
    """Sinusoidal positional encoding cropped to the input's length.

    Unlike PositionEmbedding, the table covers `max_seq` positions and is
    sliced at call time, so any input with sequence length <= max_seq works.
    """

    def __init__(self, embedding_dim, max_seq=2048, **kwargs):
        super().__init__(**kwargs)
        # Reuse the module-level `sinusoid` helper instead of duplicating
        # the same nested-comprehension formula inline; the expression is
        # identical, so the resulting values are bit-for-bit the same.
        self.positional_embedding = tf.constant(
            sinusoid(max_seq, embedding_dim), dtype=tf.float32)

    def call(self, inputs, **kwargs):
        # Crop the table to the current sequence length, then broadcast-add.
        return tf.add(inputs, self.positional_embedding[:, :inputs.shape[1], :])
class BaselineAttention(keras.layers.Layer):
    """Multi-head scaled dot-product attention without relative terms.

    NOTE(review): Wq/Wk project to d/2 while Wv projects to d, so query
    and key heads are half-width — presumably intentional; confirm.
    """

    def __init__(self, h, d, max_seq=2048, **kwargs):
        super().__init__(**kwargs)
        self.len_k = None
        self.max_seq = None
        self.E = None
        self.h = h  # number of attention heads
        self.d = d  # model dimension
        self.dh = d // h  # per-head dimension, used for the sqrt scaling
        self.Wq = keras.layers.Dense(int(self.d / 2))
        self.Wk = keras.layers.Dense(int(self.d / 2))
        self.Wv = keras.layers.Dense(int(self.d))
        self.fc = keras.layers.Dense(d)  # output projection
        self.max_seq = max_seq

    def build(self, input_shape):
        # input_shape holds the shapes of [Q, K, V]; index [1][1] is K's length.
        self.len_k = input_shape[1][1]
        # self.max_seq = max(input_shape[0][1], input_shape[1][1], input_shape[2][1])

    def call(self, inputs, mask=None, weight_out=False, **kwargs):
        """
        :param inputs: a list of tensors. i.e) [Q, K, V]
        :param mask: mask tensor
        :param weight_out: decide whether to also use the attention weights
        :param kwargs:
        :return: final tensor ( output of attention )
        """
        # Project and split each of Q, K, V into h heads.
        q = inputs[0]
        q = self.Wq(q)
        q = tf.reshape(q, (q.shape[0], q.shape[1], self.h, -1))
        q = tf.transpose(q, (0, 2, 1, 3))  # batch, h, seq, dh

        k = inputs[1]
        k = self.Wk(k)
        k = tf.reshape(k, (k.shape[0], k.shape[1], self.h, -1))
        k = tf.transpose(k, (0, 2, 1, 3))

        v = inputs[2]
        v = self.Wv(v)
        v = tf.reshape(v, (v.shape[0], v.shape[1], self.h, -1))
        v = tf.transpose(v, (0, 2, 1, 3))

        Kt = tf.transpose(k, [0, 1, 3, 2])
        QKt = tf.matmul(q, Kt)
        logits = QKt
        logits = logits / math.sqrt(self.dh)  # scaled dot-product

        if mask is not None:
            # Large negative bias removes masked positions from the softmax.
            logits += (tf.cast(mask, tf.float32) * -1e9)

        attention_weights = tf.nn.softmax(logits, -1)
        attention = tf.matmul(attention_weights, v)

        # Merge the heads back and apply the final projection.
        out = tf.transpose(attention, (0, 2, 1, 3))
        out = tf.reshape(out, (out.shape[0], -1, self.d))

        out = self.fc(out)
        return out, attention_weights
class RelativeGlobalAttention(keras.layers.Layer):
"""
from Music Transformer ( Huang et al, 2018 )
[paper link](https://arxiv.org/pdf/1809.04281.pdf)
"""
    def __init__(self, h=4, d=256, add_emb=False, max_seq=2048, **kwargs):
        """h: head count; d: model dim; add_emb: enable extra embedding (unused here)."""
        super().__init__(**kwargs)
        self.len_k = None
        self.max_seq = max_seq
        self.E = None  # relative-position embedding table, created in build()
        self.h = h
        self.d = d
        self.dh = d // h  # per-head dimension
        self.Wq = keras.layers.Dense(int(self.d))
        self.Wk = keras.layers.Dense(int(self.d))
        self.Wv = keras.layers.Dense(int(self.d))
        self.fc = keras.layers.Dense(d)  # output projection
        self.additional = add_emb
        if self.additional:
            # Placeholder for the additional embedding; never assigned a
            # value in the visible code.
            self.Radd = None
def build(self, input_shape):
self.shape_q = input_shape[0][1]
self.shape_k = input_shape[1][1]
# self.max_seq = max(input_shape[0][1], input_shape[1][1], input_shape[2][1])
self.E = self.add_weight('emb', shape=[self.max_seq, int(self.dh)])
def call(self, inputs, mask=None, **kwargs):
"""
:param inputs: a list of tensors. i.e) [Q, K, V]
:param mask: mask tensor
:param kwargs:
:return: final tensor ( output of attention )
"""
q = inputs[0]
q = self.Wq(q)
q = tf.reshape(q, (q.shape[0], q.shape[1], self.h, -1))
q = tf.transpose(q, (0, 2, 1, 3)) # batch, h, seq, dh
k = inputs[1]
k = self.Wk(k)
k = tf.reshape(k, (k.shape[0], k.shape[1], self.h, -1))
k = tf.transpose(k, (0, 2, 1, 3))
v = inputs[2]
v = self.Wv(v)
v = tf.reshape(v, (v.shape[0], v.shape[1], self.h, -1))
v = tf.transpose(v, (0, 2, 1, 3))
self.len_k = k.shape[2]
self.len_q = q.shape[2]
E = self._get_left_embedding(self.len_q, self.len_k)
QE = tf.einsum('bhld,md->bhlm', q, E)
QE = self._qe_masking(QE)
# print(QE.shape)
Srel = self._skewing(QE)
Kt = tf.transpose(k,[0, 1, 3, 2])
QKt = tf.matmul(q, Kt)
logits = QKt + Srel
logits = logits / math.sqrt(self.dh)
if mask is not None:
logits += (tf.cast(mask, tf.float32) * -1e9)
attention_weights = tf.nn.softmax(logits, -1)
# tf.print('logit result: \n', logits, output_stream=sys.stdout)
attention = tf.matmul(attention_weights, v)
# tf.print('attention result: \n', attention, output_stream=sys.stdout)
out = tf.transpose(attention, (0, 2, 1, 3))
out = tf.reshape(out, (out.shape[0], -1, self.d))
out = self.fc(out)
return out, attention_weights
def _get_left_embedding(self, len_q, len_k):
starting_point = max(0,self.max_seq-len_q)
e = self.E[starting_point:,:]
return e
@staticmethod
def _qe_masking(qe):
mask = tf.sequence_mask(
tf.range(qe.shape[-1] -1, qe.shape[-1] - qe.shape[-2] -1, -1), qe.shape[-1])
mask = tf.logical_not(mask)
mask = tf.cast(mask, tf.float32)
return mask * qe
def _skewing(self, tensor: tf.Tensor):
padded = tf.pad(tensor, [[0, 0], [0,0], [0, 0], [1, 0]])
reshaped = tf.reshape(padded, shape=[-1, padded.shape[1], padded.shape[-1], padded.shape[-2]])
Srel = reshaped[:, :, 1:, :]
# print('Sre: {}'.format(Srel))
if self.len_k > self.len_q:
Srel = tf.pad(Srel, [[0,0], [0,0], [0,0], [0, self.len_k-self.len_q]])
elif self.len_k < self.len_q:
Srel = Srel[:,:,:,:self.len_k]
return Srel
class View1D(keras.layers.Layer):
    """Pick a single slice along the second dimension.

    Equivalent to ``inputs[:, axis]``: for every item in the batch, selects
    the element at ``axis``, dropping that dimension.
    """

    def __init__(self, axis=-1, **kwargs):
        super().__init__(**kwargs)
        self.axis = axis

    def call(self, inputs, **kwargs):
        selected = inputs[:, self.axis]
        return selected
class EncoderLayer(keras.layers.Layer):
    """One Transformer encoder block: relative self-attention + feed-forward.

    Each sub-layer is followed by dropout, a residual connection and layer
    normalization (post-norm arrangement).
    """

    def __init__(self, d_model, rate=0.1, h=16, additional=False, max_seq=2048):
        super(EncoderLayer, self).__init__()
        self.d_model = d_model
        self.rga = RelativeGlobalAttention(h=h, d=d_model, max_seq=max_seq, add_emb=additional)
        self.FFN_pre = keras.layers.Dense(self.d_model // 2, activation=tf.nn.relu)
        self.FFN_suf = keras.layers.Dense(self.d_model)
        self.layernorm1 = keras.layers.LayerNormalization(epsilon=1e-6)
        self.layernorm2 = keras.layers.LayerNormalization(epsilon=1e-6)
        self.dropout1 = keras.layers.Dropout(rate)
        self.dropout2 = keras.layers.Dropout(rate)

    def call(self, x, mask=None, training=False, **kwargs):
        # Self-attention sub-layer (Q = K = V = x).
        attended, attn_weights = self.rga([x, x, x], mask)
        attended = self.dropout1(attended, training=training)
        normed = self.layernorm1(attended + x)
        # Position-wise feed-forward sub-layer.
        hidden = self.FFN_suf(self.FFN_pre(normed))
        hidden = self.dropout2(hidden, training=training)
        return self.layernorm2(normed + hidden), attn_weights
class DecoderLayer(keras.layers.Layer):
    """One Transformer decoder block.

    Masked relative self-attention, then encoder-decoder attention (or a
    second self-attention pass when no encoder output is supplied), then a
    position-wise feed-forward network. Every sub-layer is followed by
    dropout, a residual connection and layer normalization.
    """

    def __init__(self, d_model, rate=0.1, h=16, additional=False, max_seq=2048):
        super(DecoderLayer, self).__init__()
        self.d_model = d_model
        self.rga2 = RelativeGlobalAttention(d=d_model, h=h, max_seq=max_seq, add_emb=additional)
        self.rga = RelativeGlobalAttention(d=d_model, h=h, max_seq=max_seq, add_emb=additional)
        self.FFN_pre = keras.layers.Dense(self.d_model // 2, activation=tf.nn.relu)
        self.FFN_suf = keras.layers.Dense(self.d_model)
        self.layernorm1 = keras.layers.LayerNormalization(epsilon=1e-6)
        self.layernorm2 = keras.layers.LayerNormalization(epsilon=1e-6)
        self.layernorm3 = keras.layers.LayerNormalization(epsilon=1e-6)
        self.dropout1 = keras.layers.Dropout(rate)
        self.dropout2 = keras.layers.Dropout(rate)
        self.dropout3 = keras.layers.Dropout(rate)

    def call(self, x, encode_out, mask=None, lookup_mask=None, training=False, w_out=False, **kwargs):
        # 1) Masked self-attention over the decoder input.
        self_attn, aw1 = self.rga([x, x, x], mask=lookup_mask)
        self_attn = self.dropout1(self_attn, training=training)
        block1 = self.layernorm1(self_attn + x)
        # 2) Attend over the encoder output; fall back to self-attention
        #    when running decoder-only (encode_out is None).
        if encode_out is None:
            cross_attn, aw2 = self.rga2([block1, block1, block1], mask=mask)
        else:
            cross_attn, aw2 = self.rga2([block1, encode_out, encode_out], mask=mask)
        cross_attn = self.dropout2(cross_attn, training=training)
        block2 = self.layernorm2(block1 + cross_attn)
        # 3) Position-wise feed-forward network.
        ffn = self.FFN_suf(self.FFN_pre(block2))
        ffn = self.dropout3(ffn, training=training)
        out = self.layernorm3(block2 + ffn)
        if w_out:
            return out, aw1, aw2
        return out
class Encoder(keras.layers.Layer):
    """Transformer encoder: frozen embedding + positional encoding + N layers.

    The token embedding is initialized from the precomputed ``note_emb``
    matrix and frozen (trainable=False), so pretrained note embeddings are
    used verbatim.

    Fixes relative to the previous version: removed the dead ``if True:``
    conditional and commented-out alternatives, and replaced the unused loop
    index with direct iteration over the layers.
    """

    def __init__(self, note_emb, num_layers, d_model, input_vocab_size, rate=0.1, max_len=None):
        super(Encoder, self).__init__()
        self.d_model = d_model
        self.num_layers = num_layers
        self.input_vocab_size = input_vocab_size
        self.rate = rate

        # Initializer that ignores the requested shape/dtype and returns the
        # precomputed embedding matrix verbatim; with trainable=False the
        # pretrained values are preserved.
        def emb(shape, dtype):
            return note_emb

        self.embedding = keras.layers.Embedding(
            input_vocab_size, d_model, embeddings_initializer=emb, trainable=False)
        self.max_len = max_len
        self.pos_encoding = DynamicPositionEmbedding(self.d_model, max_seq=max_len)
        # One attention head per 64 model dimensions.
        self.enc_layers = [
            EncoderLayer(d_model, rate, h=self.d_model // 64, additional=False, max_seq=max_len)
            for _ in range(num_layers)
        ]
        self.dropout = keras.layers.Dropout(rate)

    def call(self, x, mask=None, training=False):
        """Encode token ids x; returns (encoded, per-layer attention weights)."""
        weights = []
        # Embed tokens and scale by sqrt(d_model) before adding positions.
        x = self.embedding(x)  # (batch_size, input_seq_len, d_model)
        x *= tf.math.sqrt(tf.cast(self.d_model, tf.float32))
        x = self.pos_encoding(x)
        x = self.dropout(x, training=training)
        for layer in self.enc_layers:
            x, w = layer(x, mask, training=training)
            weights.append(w)
        return x, weights  # (batch_size, input_seq_len, d_model)

    def get_config(self):
        # NOTE(review): note_emb is not serialized here, so from_config()
        # cannot reconstruct this layer as-is -- confirm intended usage.
        config = super().get_config().copy()
        config.update({
            'd_model': self.d_model,
            'num_layers': self.num_layers,
            'input_vocab_size': self.input_vocab_size,
            'rate': self.rate,
            'max_len': self.max_len,
        })
        return config
class Decoder(keras.layers.Layer):
def __init__(self, num_layers, d_model, input_vocab_size,
rate=0.1, max_len=None):
super(Decoder, self).__init__()
self.d_model = d_model
self.num_layers = num_layers
self.embedding = keras.layers.Embedding(input_vocab_size, d_model)
if True:
self.pos_encoding = DynamicPositionEmbedding(self.d_model, max_seq=max_len)
self.dec_layers = [DecoderLayer(d_model, rate, h=self.d_model // 64, additional=False, max_seq=max_len)
for i in range(num_layers)]
self.dropout = keras.layers.Dropout(rate)
def call(self, x, mask, lookup_mask, training, enc_output=None):
weights = []
# adding embedding and position encoding.
x = self.embedding(x) # (batch_size, input_seq_len, d_model)
x *= tf.math.sqrt(tf.cast(self.d_model, tf.float32))
x = self.pos_encoding(x)
| |
noqa: E501
collection_formats = {}
path_params = {}
if 'server_id' in params:
path_params['server_id'] = params['server_id'] # noqa: E501
query_params = []
if 'zone' in params:
query_params.append(('zone', params['zone'])) # noqa: E501
if 'dnssec' in params:
query_params.append(('dnssec', params['dnssec'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/servers/{server_id}/zones', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Zone]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def notify_zone(self, server_id, zone_id, **kwargs): # noqa: E501
"""Send a DNS NOTIFY to all slaves. # noqa: E501
Fails when zone kind is not Master or Slave, or master and slave are disabled in the configuration. Only works for Slave if renotify is on. Clients MUST NOT send a body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.notify_zone(server_id, zone_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server_id: The id of the server to retrieve (required)
:param str zone_id: The id of the zone to retrieve (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.notify_zone_with_http_info(server_id, zone_id, **kwargs) # noqa: E501
else:
(data) = self.notify_zone_with_http_info(server_id, zone_id, **kwargs) # noqa: E501
return data
def notify_zone_with_http_info(self, server_id, zone_id, **kwargs): # noqa: E501
"""Send a DNS NOTIFY to all slaves. # noqa: E501
Fails when zone kind is not Master or Slave, or master and slave are disabled in the configuration. Only works for Slave if renotify is on. Clients MUST NOT send a body. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.notify_zone_with_http_info(server_id, zone_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server_id: The id of the server to retrieve (required)
:param str zone_id: The id of the zone to retrieve (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['server_id', 'zone_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method notify_zone" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'server_id' is set
if ('server_id' not in params or
params['server_id'] is None):
raise ValueError("Missing the required parameter `server_id` when calling `notify_zone`") # noqa: E501
# verify the required parameter 'zone_id' is set
if ('zone_id' not in params or
params['zone_id'] is None):
raise ValueError("Missing the required parameter `zone_id` when calling `notify_zone`") # noqa: E501
collection_formats = {}
path_params = {}
if 'server_id' in params:
path_params['server_id'] = params['server_id'] # noqa: E501
if 'zone_id' in params:
path_params['zone_id'] = params['zone_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/servers/{server_id}/zones/{zone_id}/notify', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_zone(self, server_id, zone_id, zone_struct, **kwargs): # noqa: E501
"""Creates/modifies/deletes RRsets present in the payload and their comments. Returns 204 No Content on success. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_zone(server_id, zone_id, zone_struct, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server_id: The id of the server to retrieve (required)
:param str zone_id: (required)
:param Zone zone_struct: The zone struct to patch with (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_zone_with_http_info(server_id, zone_id, zone_struct, **kwargs) # noqa: E501
else:
(data) = self.patch_zone_with_http_info(server_id, zone_id, zone_struct, **kwargs) # noqa: E501
return data
def patch_zone_with_http_info(self, server_id, zone_id, zone_struct, **kwargs): # noqa: E501
"""Creates/modifies/deletes RRsets present in the payload and their comments. Returns 204 No Content on success. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_zone_with_http_info(server_id, zone_id, zone_struct, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server_id: The id of the server to retrieve (required)
:param str zone_id: (required)
:param Zone zone_struct: The zone struct to patch with (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['server_id', 'zone_id', 'zone_struct'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_zone" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'server_id' is set
if ('server_id' not in params or
params['server_id'] is None):
raise ValueError("Missing the required parameter `server_id` when calling `patch_zone`") # noqa: E501
# verify the required parameter 'zone_id' is set
if ('zone_id' not in params or
params['zone_id'] is None):
raise ValueError("Missing the required parameter `zone_id` when calling `patch_zone`") # noqa: E501
# verify the required parameter 'zone_struct' is set
if ('zone_struct' not in params or
params['zone_struct'] is None):
raise ValueError("Missing the required parameter `zone_struct` when calling `patch_zone`") # noqa: E501
collection_formats = {}
path_params = {}
if 'server_id' in params:
path_params['server_id'] = params['server_id'] # noqa: E501
if 'zone_id' in params:
path_params['zone_id'] = params['zone_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'zone_struct' in params:
body_params = params['zone_struct']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIKeyHeader'] # noqa: E501
return self.api_client.call_api(
'/servers/{server_id}/zones/{zone_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def put_zone(self, server_id, zone_id, zone_struct, **kwargs): # noqa: E501
"""Modifies basic zone data (metadata). # noqa: E501
Allowed fields in client body: all except id, url and name. Returns 204 No Content on success. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_zone(server_id, zone_id, zone_struct, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server_id: The id of the server to retrieve (required)
:param str zone_id: (required)
:param Zone zone_struct: The zone struct to patch with (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_zone_with_http_info(server_id, zone_id, zone_struct, **kwargs) # noqa: E501
else:
(data) = self.put_zone_with_http_info(server_id, zone_id, zone_struct, **kwargs) # noqa: E501
return data
def put_zone_with_http_info(self, server_id, zone_id, zone_struct, **kwargs): # noqa: E501
"""Modifies basic zone data (metadata). # noqa: E501
Allowed fields in client body: all except id, url and name. Returns 204 No Content on success. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_zone_with_http_info(server_id, zone_id, zone_struct, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server_id: The id of the server to retrieve (required)
:param str zone_id: (required)
:param Zone zone_struct: The zone struct to patch with (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['server_id', 'zone_id', 'zone_struct'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key | |
print doubles_output
print '***********************************'
outfile = params['TEMPDIR'] + params['ROOT'] + '.conv'
combine_cats(doubles_output,outfile,search_params)
#outfile_field = params['TEMPDIR'] + params['ROOT'] + '.field'
#command = 'ldacdeltab -i ' + outfile + ' -t FIELDS -o ' + outfile_field
#utilities.run(command)
command = 'ldactoasc -b -q -i ' + outfile + ' -t OBJECTS\
-k ALPHA_J2000 DELTA_J2000 > ' + outfile.replace('conv','pos')
print command
utilities.run(command)
command = 'mkreg.pl -c -rad 8 -xcol 0 -ycol 1 -wcs -colour green ' + outfile.replace('conv','pos')
print command
utilities.run(command)
print outfile
command = 'ldaccalc -i ' + outfile + ' -o ' + params['TEMPDIR'] + params['ROOT'] + '.newpos -t OBJECTS -c "(Xpos + ' + str(float(search_params['CRPIX1ZERO']) - float(crpix['CRPIX1'])) + ');" -k FLOAT -n Xpos_ABS "" -c "(Ypos + ' + str(float(search_params['CRPIX2ZERO']) - float(crpix['CRPIX2'])) + ');" -k FLOAT -n Ypos_ABS "" -c "(Ypos*0 + ' + str(params['NUM']) + ');" -k FLOAT -n CHIP "" '
print command
utilities.run(command)
except:
print traceback.print_exc(file=sys.stdout)
sys.exit(0)
if not trial:
sys.exit(0)
# print sys.exc_info()
# print 'finishing'
# sys.exit(0)
#sys.exit(0)
print children
for child in children:
print 'waiting for', child
os.waitpid(child,0)
print 'finished waiting'
pasted_cat = path + 'PHOTOMETRY/ILLUMINATION/' + 'pasted_' + SUPA + '_' + search_params['filter'] + '_' + str(search_params['ROTATION']) + '.cat'
print pasted_cat
os.system('mkdir -p ' + path + 'PHOTOMETRY/ILLUMINATION/')
from glob import glob
outcat = search_params['TEMPDIR'] + 'tmppaste_' + SUPA + '.cat'
newposlist = glob(search_params['TEMPDIR'] + SUPA + '*newpos')
print search_params['TEMPDIR'] + SUPA + '*newpos'
if len(newposlist) > 1:
#command = 'ldacpaste -i ' + search_params['TEMPDIR'] + SUPA + '*newpos -o ' + pasted_cat
#print command
files = glob(search_params['TEMPDIR'] + SUPA + '*newpos')
print files, search_params['TEMPDIR'] + SUPA + '*newpos'
paste_cats(files,pasted_cat)
else:
command = 'cp ' + newposlist[0] + ' ' + pasted_cat
utilities.run(command)
save_exposure({'pasted_cat':pasted_cat,'resam':0},SUPA,FLAT_TYPE)
command = "rm -rf " + search_params['TEMPDIR']
os.system(command)
#fs = glob.glob(subpath+pprun+'/SCIENCE_DOMEFLAT*.tarz'.replace('.tarz',''))
#if len(fs) > 0:
# os.system('tar xzvf ' + fs[0])
#fs = glob.glob(subpath+pprun+'/SCIENCE_SKYFLAT*.tarz'.replace('.tarz',''))
#fs = glob.glob(subpath+pprun+'/SCIENCE_SKYFLAT*.tarz')
#if len(fs) > 0:
# os.system('tar xzvf ' + fs[0])
#return exposures, LENGTH1, LENGTH2
def get_sdss_obj(SUPA, FLAT_TYPE):
    # Download (if not already cached) the SDSS star reference catalog
    # covering the sky footprint of exposure SUPA, then record its path via
    # save_exposure().
    dict = get_files(SUPA,FLAT_TYPE)  # NOTE(review): shadows the builtin `dict`
    search_params = initialize(dict['filter'],dict['OBJNAME'])
    search_params.update(dict)
    ROTATION = str(search_params['ROTATION']) #exposures[exposure]['keywords']['ROTATION']
    import os
    # Reference catalog paths are keyed by object name and rotation angle.
    starcat ='/nfs/slac/g/ki/ki05/anja/SUBARU/%(OBJNAME)s/PHOTOMETRY/sdssstar%(ROTATION)s.cat' % {'ROTATION':ROTATION,'OBJNAME':search_params['OBJNAME']}
    galaxycat ='/nfs/slac/g/ki/ki05/anja/SUBARU/%(OBJNAME)s/PHOTOMETRY/sdssgalaxy%(ROTATION)s.cat' % {'ROTATION':ROTATION,'OBJNAME':search_params['OBJNAME']}
    path='/nfs/slac/g/ki/ki05/anja/SUBARU/%(OBJNAME)s/' % {'OBJNAME':search_params['OBJNAME']}
    # NOTE(review): the literal below has no %(...)s conversions, so applying
    # the % mapping is a no-op.
    illum_path='/nfs/slac/g/ki/ki05/anja/SUBARU/ILLUMINATION/' % {'OBJNAME':search_params['OBJNAME']}
    #os.system('mkdir -p ' + path + 'PHOTOMETRY/ILLUMINATION/')
    os.system('mkdir -p ' + path + 'PHOTOMETRY/ILLUMINATION/STAR/')
    os.system('mkdir -p ' + path + 'PHOTOMETRY/ILLUMINATION/GALAXY/')
    from glob import glob
    print starcat
    # Galaxy retrieval is currently disabled; only stars are fetched.
    for type,cat in [['star',starcat]]: #,['galaxy',galaxycat]]:
        catalog = search_params['pasted_cat'] #exposures[exposure]['pasted_cat']
        # Pad the exposure footprint by 0.2 deg on each side for the query.
        ramin,ramax, decmin, decmax = coordinate_limits(catalog)
        limits = {'ramin':ramin-0.2,'ramax':ramax+0.2,'decmin':decmin-0.2,'decmax':decmax+0.2}
        print ramin,ramax, decmin, decmax
        # Only retrieve from the server when the catalog is not cached yet.
        if len(glob(cat)) == 0:
            #os.system('rm ' + cat)
            image = search_params['files'][0]
            print image
            import retrieve_test
            retrieve_test.run(image,cat,type,limits)
    # `cat` is the loop variable from the (single-element) loop above.
    save_exposure({'starcat':cat},SUPA,FLAT_TYPE)
def match_simple(SUPA,FLAT_TYPE):
    # Match the exposure's pasted object catalog against the SDSS star
    # catalog with match_simple.sh, writing the matched catalog under
    # PHOTOMETRY/ILLUMINATION/ and symlinking it into the shared
    # ILLUMINATION tree; the result path is recorded via save_exposure().
    dict = get_files(SUPA,FLAT_TYPE)  # NOTE(review): shadows the builtin `dict`
    search_params = initialize(dict['filter'],dict['OBJNAME'])
    search_params.update(dict)
    ROTATION = str(search_params['ROTATION']) #exposures[exposure]['keywords']['ROTATION']
    import os
    starcat ='/nfs/slac/g/ki/ki05/anja/SUBARU/%(OBJNAME)s/PHOTOMETRY/sdssstar%(ROTATION)s.cat' % {'ROTATION':ROTATION,'OBJNAME':search_params['OBJNAME']}
    galaxycat ='/nfs/slac/g/ki/ki05/anja/SUBARU/%(OBJNAME)s/PHOTOMETRY/sdssgalaxy%(ROTATION)s.cat' % {'ROTATION':ROTATION,'OBJNAME':search_params['OBJNAME']}
    path='/nfs/slac/g/ki/ki05/anja/SUBARU/%(OBJNAME)s/' % {'OBJNAME':search_params['OBJNAME']}
    # NOTE(review): no %(...)s conversions in this literal; the % is a no-op.
    illum_path='/nfs/slac/g/ki/ki05/anja/SUBARU/ILLUMINATION/' % {'OBJNAME':search_params['OBJNAME']}
    #os.system('mkdir -p ' + path + 'PHOTOMETRY/ILLUMINATION/')
    os.system('mkdir -p ' + path + 'PHOTOMETRY/ILLUMINATION/STAR/')
    os.system('mkdir -p ' + path + 'PHOTOMETRY/ILLUMINATION/GALAXY/')
    from glob import glob
    print starcat
    # Galaxy matching is currently disabled; only stars are processed.
    for type,cat in [['star',starcat]]: #,['galaxy',galaxycat]]:
        catalog = search_params['pasted_cat'] #exposures[exposure]['pasted_cat']
        # Pad the exposure footprint by 0.2 deg on each side for the query.
        ramin,ramax, decmin, decmax = coordinate_limits(catalog)
        limits = {'ramin':ramin-0.2,'ramax':ramax+0.2,'decmin':decmin-0.2,'decmax':decmax+0.2}
        print ramin,ramax, decmin, decmax
        # NOTE(review): `if 1:` forces a re-download every run; the cached
        # check (len(glob(cat)) == 0) is commented out -- confirm intent.
        if 1: #len(glob(cat)) == 0:
            #os.system('rm ' + cat)
            image = search_params['files'][0]
            print image
            import retrieve_test
            retrieve_test.run(image,cat,type,limits)
        filter = search_params['filter'] #exposures[exposure]['keywords']['filter']
        #GABODSID = exposures[exposure]['keywords']['GABODSID']
        OBJECT = search_params['OBJECT'] #exposures[exposure]['keywords']['OBJECT']
        print catalog
        # Output catalog plus a per-filter/rotation/object symlink tree.
        outcat = path + 'PHOTOMETRY/ILLUMINATION/' + type + '/' + 'matched_' + SUPA + '_' + filter + '_' + ROTATION + '_' + type + '.cat'
        outcat_dir = path + 'PHOTOMETRY/ILLUMINATION/' + type + '/' + ROTATION + '/' + OBJECT + '/'
        os.system('mkdir -p ' + outcat_dir)
        file = 'matched_' + SUPA + '.cat'
        linkdir = illum_path + '/' + filter + '/' + ROTATION + '/' + OBJECT + '/'
        #outcatlink = linkdir + 'matched_' + exposure + '_' + OBJNAME + '_' + GABODSID + '.cat'
        outcatlink = linkdir + 'matched_' + SUPA + '_' + search_params['OBJNAME'] + '_' + type + '.cat'
        os.system('mkdir -p ' + linkdir)
        # Remove any stale output before re-matching and re-linking.
        os.system('rm ' + outcat)
        command = 'match_simple.sh ' + catalog + ' ' + cat + ' ' + outcat
        print command
        os.system(command)
        os.system('rm ' + outcatlink)
        command = 'ln -s ' + outcat + ' ' + outcatlink
        print command
        os.system(command)
        save_exposure({'matched_cat_' + type:outcat},SUPA,FLAT_TYPE)
        print type, 'TYPE!'
        print outcat, type
        #exposures[exposure]['matched_cat_' + type] = outcat
    #return exposures
def phot(SUPA,FLAT_TYPE):
dict = get_files(SUPA,FLAT_TYPE)
print dict.keys()
search_params = initialize(dict['filter'],dict['OBJNAME'])
search_params.update(dict)
filter = dict['filter']
import utilities
info = {'B':{'filter':'g','color1':'gmr','color2':'umg','EXTCOEFF':-0.2104,'COLCOEFF':0.0},\
'W-J-B':{'filter':'g','color1':'gmr','color2':'umg','EXTCOEFF':-0.2104,'COLCOEFF':0.0},\
'W-J-V':{'filter':'g','color1':'gmr','color2':'rmi','EXTCOEFF':-0.1202,'COLCOEFF':0.0},\
'W-C-RC':{'filter':'r','color1':'rmi','color2':'gmr','EXTCOEFF':-0.0925,'COLCOEFF':0.0},\
'W-C-IC':{'filter':'i','color1':'imz','color2':'rmi','EXTCOEFF':-0.02728,'COLCOEFF':0.0},\
'W-S-Z+':{'filter':'z','color1':'imz','color2':'rmi','EXTCOEFF':0.0,'COLCOEFF':0.0}}
import mk_saturation_plot,os,re
os.environ['BONN_TARGET'] = search_params['OBJNAME']
os.environ['INSTRUMENT'] = 'SUBARU'
stars_0 = []
stars_90 = []
ROTATION = dict['ROTATION']
print ROTATION
import os
ppid = str(os.getppid())
from glob import glob
for im_type in ['']: #,'D','S']:
for type in ['star']: #,'galaxy']:
file = dict['matched_cat_' + type]
print file
print file
if type == 'galaxy':
mag='MAG_AUTO' + im_type
magerr='MAGERR_AUTO' + im_type
class_star = "<0.9"
if type == 'star':
mag='MAG_APER2' + im_type
magerr='MAGERR_APER2' + im_type
class_star = ">0.9"
print 'filter', filter
os.environ['BONN_FILTER'] = filter
filt = re.split('_',filter)[0]
d = info[filt]
print file
utilities.run('ldacfilter -i ' + file + ' -o ' + search_params['TEMPDIR'] + 'good.stars' + ' -t PSSC\
-c "(Flag!=-99);"',['' + search_params['TEMPDIR'] + 'good.stars'])
utilities.run('ldacfilter -i ' + search_params['TEMPDIR'] + 'good.stars -o ' + search_params['TEMPDIR'] + 'good.colors -t PSSC\
-c "((((SEx_' + mag + '!=0 AND ' + d['color1'] + '<900) AND ' + d['color1'] + '!=0) AND ' + d['color1'] + '>-900) AND ' + d['color1'] + '!=0);"',['' + search_params['TEMPDIR'] + 'good.colors'])
print '' + search_params['TEMPDIR'] + 'good.colors'
utilities.run('ldaccalc -i ' + search_params['TEMPDIR'] + 'good.colors -t PSSC -c "(' + d['filter'] + 'mag - SEx_' + mag + ');" -k FLOAT -n magdiff "" -o ' + search_params['TEMPDIR'] + 'all.diffA.cat' ,[search_params['TEMPDIR'] + 'all.diffA.cat'] )
median = get_median('' + search_params['TEMPDIR'] + 'all.diffA.cat','magdiff')
utilities.run('ldacfilter -i ' + search_params['TEMPDIR'] + 'all.diffA.cat -o ' + search_params['TEMPDIR'] + 'all.diffB.cat -t PSSC\
-c "((magdiff > ' + str(median -1.25) + ') AND (magdiff < ' + str(median + 1.25) + '));"',['' + search_params['TEMPDIR'] + 'good.colors'])
utilities.run('ldaccalc -i ' + search_params['TEMPDIR'] + 'all.diffB.cat -t PSSC -c "(SEx_MaxVal + SEx_BackGr);" -k FLOAT -n MaxVal "" -o ' + search_params['TEMPDIR'] + 'all.diff.cat' ,['' + search_params['TEMPDIR'] + 'all.diff.cat'] )
command = 'ldactoasc -b -q -i ' + search_params['TEMPDIR'] + 'all.diff.cat -t PSSC -k SEx_' + mag + ' ' + d['filter'] + 'mag SEx_FLUX_RADIUS ' + im_type + ' SEx_CLASS_STAR' + im_type + ' ' + d['filter'] + 'err ' + d['color1'] + ' MaxVal > ' + search_params['TEMPDIR'] + 'mk_sat_all'
#print command
#raw_input()
utilities.run(command,['' + search_params['TEMPDIR'] + 'mk_sat_all'] )
import commands
length = commands.getoutput('wc -l ' + search_params['TEMPDIR'] + 'mk_sat_all')
print 'TOTAL # of STARS:', length
cuts_to_make = ['MaxVal>27500.0','Clean!=1','SEx_IMAFLAGS_ISO'+im_type + '!=0','SEx_CLASS_STAR'+im_type+ class_star,'SEx_Flag'+im_type+'!=0',]
files = ['' + search_params['TEMPDIR'] + 'mk_sat_all']
titles = ['raw']
for cut in cuts_to_make:
#print 'making cut:', cut
cut_name = cut.replace('>','').replace('<','')
os.system('rm ' + cut_name)
command = 'ldacfilter -i ' + search_params['TEMPDIR'] + 'all.diff.cat -o ' + search_params['TEMPDIR'] + '' + cut_name + ' -t PSSC\
-c "(' + cut + ');"'
utilities.run(command,['' + search_params['TEMPDIR'] + '' + cut_name])
import glob
#print len(glob.glob('' + search_params['TEMPDIR'] + '' + cut_name)), glob.glob('' + search_params['TEMPDIR'] + '' + cut_name)
if len(glob.glob('' + search_params['TEMPDIR'] + '' + cut_name)) > 0:
utilities.run('ldactoasc -b -q -i ' + search_params['TEMPDIR'] + '' + cut_name + ' -t PSSC\
-k SEx_' + mag + ' ' + d['filter'] + 'mag SEx_FLUX_RADIUS SEx_CLASS_STAR ' + d['filter'] + 'err ' + d['color1'] + ' > ' + search_params['TEMPDIR'] + '' + cut_name + '.cat',['' + search_params['TEMPDIR'] + '' + cut_name + '.cat'])
length = commands.getoutput('wc -l ' + search_params['TEMPDIR'] + '' + cut_name + '.cat')
print 'TOTAL # of STARS CUT:', | |
else:
username, password = decoded.split(':')
if DEBUG: print(f'access: tokenForRequest: searching for token for Authorization: Basic {username}:xxxxxx header')
if self.userAndPasswordCorrect(username, password):
# _tokenForUser already includes the default set, so just return.
return self._tokenForUser(username)
else:
self.igor.app.raiseHTTPError('401 Unauthorized', headers={'WWW_Authenticate' : 'Basic realm="igor"'})
# Add more here for other methods
return _combineTokens(token, self._defaultToken())
user = self.igor.app.getSessionItem('user')
if user:
if DEBUG: print(f'access: tokenForRequest: returning token for session.user {user}')
return self._tokenForUser(user)
# xxxjack should we allow carrying tokens in cookies?
if DEBUG: print('access: no token found for request {}'.format(headers.get('PATH_INFO', '???')), 'returning', self._defaultToken())
return self._defaultToken()
    def externalTokenForHost(self, host, token=None):
        """If an external token for the given host is available (with the current token) return it"""
        # If the current token gives access to the plugindata for the plugin with this <host> field we also allow access.
        # xxxjack whether we should check for GET access or something else is open to discussion
        pluginElements = self.igor.database.getElements(f"/data/plugindata/*[host='{host}']", 'get', token)
        for pe in pluginElements:
            pluginName = pe.tagName
            # NOTE(review): `token` is rebound on each iteration, so each
            # plugin token is layered on top of the previous one -- confirm
            # this accumulation is intended rather than testing each plugin
            # independently.
            token = self.tokenForPlugin(pluginName, token)
        # NOTE(review): if token is still None here (no matching plugins and
        # no token passed in) the next line raises AttributeError -- verify
        # callers always supply a token in that case.
        tid = token._hasExternalRepresentationFor(host)
        if not tid:
            print(f'access: WARNING: requested external token for request to {host} but not available')
            # Implicitly returns None when no external representation exists.
            return
        extToken = token._getTokenWithIdentifier(tid)
        assert extToken
        rv = extToken._getExternalRepresentation()
        assert rv
        return rv
def tokensForSubject(self, sub, token):
"""Return list of token descriptions (accessible via token) valid for subject sub"""
# First get the list of all tokens valid for this subject (we filter later for accessible tokens)
idExpr = f"au:access/au:exportedCapabilities/au:capability[sub='{sub}']/cid"
idList = self.igor.database.getValues(idExpr, _accessSelfToken, namespaces=NAMESPACES)
# Now attempt to get each of these through the token we carry
rv = []
for _, tokId in idList:
tok = token._getTokenWithIdentifier(tokId)
if tok:
rv = rv + tok._getTokenDescription()
return rv
    def _externalAccessToken(self, data):
        """Internal method - Create a token from the given "Authorization: bearer" data"""
        # Decode the bearer payload into a claims dict (raises HTTP errors on failure).
        content = self._decodeIncomingData(data)
        cid = content.get('cid')
        if not cid:
            print(f'access: ERROR: no cid on bearer token {content}')
            self.igor.app.raiseHTTPError('400 Missing cid on key')
        # NOTE(review): `singleton` is a module-level global, not `self` — confirm it
        # is always initialized before external tokens can arrive.
        if singleton._isTokenOnRevokeList(cid):
            print(f'access: ERROR: token has been revoked: {content}')
            self.igor.app.raiseHTTPError('400 Revoked token')
        return ExternalAccessTokenImplementation(content)
def getTokenDescription(self, token, tokenId=None):
"""Returns a list of dictionaries which describe the tokens"""
if tokenId:
originalToken = token
token = token._getTokenWithIdentifier(tokenId)
if not token:
identifiers = originalToken.getIdentifiers()
print(f'\taccess: getTokenDescription: no such token ID: {tokenId}. Tokens:')
for i in identifiers:
print(f'\t\t{i}')
self.igor.app.raiseHTTPError(f'404 No such token: {tokenId}')
return token._getTokenDescription()
def newToken(self, token, tokenId, newOwner, newPath=None, **kwargs):
"""Create a new token based on an existing token. Returns ID of new token."""
assert self.igor
assert self.igor.database
#
# Split remaining args into rights and other content
#
newRights = {}
content = {}
for k, v in list(kwargs.items()):
# Note delegate right is checked implicitly, below.
if k in NORMAL_OPERATIONS:
newRights[k] = v
else:
content[k] = v
#
# Check that original token exists, and allows this delegation
#
originalToken = token
token = token._getTokenWithIdentifier(tokenId)
if newPath == None:
newPath = token._getObject()
if not token:
identifiers = originalToken.getIdentifiers()
print(f'\taccess: newToken: no such token ID: {tokenId}. Tokens:')
for i in identifiers:
print(f'\t\t{i}')
self.igor.app.raiseHTTPError(f'404 No such token: {tokenId}')
if not token._allowsDelegation(newPath, newRights, content.get('aud')):
self.igor.app.raiseHTTPError('401 Delegation not allowed')
#
# Check the new parent exists
#
parentElement = self.igor.database.getElements(newOwner, 'post', _accessSelfToken, namespaces=NAMESPACES)
if len(parentElement) != 1:
if DEBUG_DELEGATION: print(f'access: newToken: no unique destination {newOwner}')
self.igor.app.raiseNotfound()
parentElement = parentElement[0]
#
# Construct the data for the new token.
#
newId = 'c%d' % random.getrandbits(64)
token._addChild(newId)
tokenData = dict(cid=newId, obj=newPath, parent=tokenId)
moreData = token._getExternalContent()
for k, v in list(moreData.items()):
if not k in tokenData:
tokenData[k] = v
tokenData.update(newRights)
tokenData.update(content)
element = self.igor.database.elementFromTagAndData("capability", tokenData, namespace=AU_NAMESPACE)
#
# Insert into the tree
#
parentElement.appendChild(element)
self.igor.database.setChanged()
#
# Save
#
self._clearTokenCaches()
self._save()
#
# If the new token may affect actions we should update the actions
#
if newOwner.startswith('/data/actions') or newOwner.startswith('actions'):
self.igor.internal.queue('updateActions', _accessSelfToken)
#
# Return the ID
#
return newId
    def createTokensNeededByElement(self, needElementList, token):
        """Create tokens (if they don't exist yet) based on a list of needCapability elements.

        For each element: when the element's parent already carries a compatible
        token nothing is done; otherwise a new token is delegated from the
        caller-supplied *token*. Raises a 401 when *token* cannot satisfy a need.
        """
        toCreate = []
        for needElement in needElementList:
            parentElement = needElement.parentNode
            # xxxjack this is a hack. The au:needCapability will be in an <action> or in the plugindata for the element
            if parentElement.tagName == 'action':
                parentToken = self.tokenForAction(parentElement)
                newOwner = self.igor.database.getXPathForElement(parentElement)
            else:
                parentToken = self.tokenForPlugin(parentElement.tagName)
                newOwner = self.igor.database.getXPathForElement(parentElement)
            # The need dict carries the required rights; 'obj' is the target path.
            need = self.igor.database.tagAndDictFromElement(needElement)[1]
            path = need.pop('obj')
            if self.findCompatibleTokens(parentToken, path, **need):
                # The tokens in the parent of the needCapability element already allows it. Nothing to do.
                continue
            # Otherwise we have to create it from the tokens we are carrying
            compatibleTokenIDs = self.findCompatibleTokens(token, path, **need)
            if not compatibleTokenIDs:
                self.igor.app.raiseHTTPError(f"401 No rights to create capability for {self.igor.database.getXPathForElement(needElement)}")
            # Remember for later creation
            toCreate.append((compatibleTokenIDs[0], path, need, newOwner))
        # Now create all the needed capabilities
        if not toCreate:
            return
        for tokenId, newPath, need, newOwner in toCreate:
            self.newToken(token, tokenId, newOwner, newPath, **need)
        self._clearTokenCaches()
def findCompatibleTokens(self, token, newPath, **kwargs):
"""Return list of token IDs that allow the given operation."""
assert self.igor
assert self.igor.database
#
# Get rights from the args
#
newRights = {}
for k, v in list(kwargs.items()):
# Note delegate right is checked implicitly, below.
if k in NORMAL_OPERATIONS:
newRights[k] = v
rv = []
for tID in token.getIdentifiers():
t = token._getTokenWithIdentifier(tID)
if not t: continue
if t._allowsDelegation(newPath, newRights, kwargs.get('aud')):
rv = rv + t.getIdentifiers()
return rv
def passToken(self, token, tokenId, newOwner):
"""Pass token ownership to a new owner. Token must be in the set of tokens that can be passed."""
originalToken = token
tokenToPass = token._getTokenWithIdentifier(tokenId)
if not tokenToPass:
identifiers = originalToken.getIdentifiers()
print(f'\taccess: passToken: no such token ID: {tokenId}. Tokens:')
for i in identifiers:
print(f'\t\t{i}')
self.igor.app.raiseHTTPError(f"401 No such token: {tokenId}")
oldOwner = tokenToPass._getOwner()
if not oldOwner:
self.igor.app.raiseHTTPError(f"401 Not owner of token {tokenId}")
if oldOwner == newOwner:
return ''
if not tokenToPass._setOwner(newOwner):
self.igor.app.raiseHTTPError(f"401 Cannot move token {tokenId} to new owner {newOwner}")
token._removeToken(tokenId)
#
# Save
#
self._clearTokenCaches()
self._save()
def revokeToken(self, token, parentId, tokenId):
"""Revoke a token"""
parentToken = token._getTokenWithIdentifier(parentId)
if not parentToken:
identifiers = token.getIdentifiers()
print(f'\taccess: revokeToken: no such token ID: {parentId}. Tokens:')
for i in identifiers:
print(f'\t\t{i}')
self.igor.app.raiseHTTPError(f"404 No such parent token: {parentId}")
self._revokeRecursive(parentToken, tokenId, raiseError=True)
#
# Save
#
self._clearTokenCaches()
self._save()
def _revokeRecursive(self, parentToken, childTokenId, raiseError=False):
"""Helper for revoking a token"""
childToken = parentToken._getTokenWithIdentifier(childTokenId)
if not childToken:
print(f'\taccess: revokeToken: no such token ID: {childTokenId}. Tokens:')
for i in identifiers:
print(f'\t\t{i}')
if raiseError:
self.igor.app.raiseHTTPError(f"404 No such token: {childTokenId}")
print('Warning: ignored unknown token during recursive revoke')
return
# First do the recursion
grandChildren = childToken._getChildIdList()
for grandChildId in grandChildren:
self._revokeRecursive(childToken, grandChildId)
self._addToRevokeList(childTokenId, childToken.content.get('nva'))
childToken._revoke()
parentToken._delChild(childTokenId)
    def exportToken(self, token, tokenId, subject=None, lifetime=None, **kwargs):
        """Create an external representation of this token, destined for the given subject.

        Creates a child token owned by the external-token store, fills in the
        JWT-style claims (sub, nvb, nva, aud, iss) and returns its external
        (serialized) representation.
        """
        #
        # Add keys needed for external token
        #
        if subject:
            kwargs['sub'] = subject
        if not lifetime:
            lifetime = 60*60*24*365 # One year
        lifetime = int(lifetime)
        # nvb ("not valid before") is backdated one second to tolerate clock skew.
        kwargs['nvb'] = str(int(time.time())-1)
        kwargs['nva'] = str(int(time.time()) + lifetime)
        if 'aud' in kwargs:
            audience = kwargs['aud']
        else:
            audience = self.getSelfAudience()
        # NOTE(review): a caller-supplied 'aud' is remembered in `audience` but then
        # unconditionally overwritten with our own audience on the next line, while
        # the assert below still checks the original `audience` — confirm intended.
        kwargs['aud'] = self.getSelfAudience()
        kwargs['iss'] = self.getSelfIssuer()
        #
        # Create the new token
        #
        # xxxjack we should check whehter the given external token already exists and
        # simply return the external representation if it does...
        #
        newTokenId = self.newToken(token, tokenId, self._getExternalTokenOwner(), **kwargs)
        tokenToExport = token._getTokenWithIdentifier(newTokenId)
        if not tokenToExport:
            # The new token is a grandchild of our token, so we may not be able to get it directly.
            # Try harder.
            parentToken = token._getTokenWithIdentifier(tokenId)
            tokenToExport = parentToken._getTokenWithIdentifier(newTokenId)
            if not tokenToExport:
                self.igor.app.raiseHTTPError(f'500 created token {newTokenId} but it does not exist')
        #
        # Create the external representation
        #
        assert tokenToExport
        assert tokenToExport._hasExternalRepresentationFor(audience)
        externalRepresentation = tokenToExport._getExternalRepresentation()
        #
        # Save
        #
        self._save()
        return externalRepresentation
def externalRepresentation(self, token, tokenId):
"""Return external representation for given token"""
tokenToExport = token._getTokenWithIdentifier(tokenId, recursive=True)
if not tokenToExport:
identifiers = token.getIdentifiers()
print(f'\taccess: externalRepresentation: no such token ID: {tokenId}. Tokens:')
for i in identifiers:
print(f'\t\t{i}')
self.igor.app.raiseHTTPError(f"401 No such token: {tokenId}")
assert tokenToExport._hasExternalRepresentationFor(self.getSelfAudience())
externalRepresentation = tokenToExport._getExternalRepresentation()
return externalRepresentation
    def _getExternalTokenOwner(self):
        """Return the location where we store external tokens (XPath of the exportedCapabilities element)."""
        return '/data/au:access/au:exportedCapabilities'
| |
= True
self.cpx_adapter = cpx_adapter
self.cpx_cst = cpx_adapter.cpx_cst
self._initialize_constants_from_cplex()
cpx = cpx_adapter.cpx
self._cplex_location = cpx_adapter.cplex_location
self._model = mdl
self._saved_log_output = True # initialization from model is deferred (pickle)
self._cpx_version_as_tuple = tuple(float(x) for x in cpx.get_version().split("."))
self._allocate_one_index = self.allocate_one_index_return
self._allocate_range_index = self.allocate_range_index_return
# deferred bounds changes, as dicts {var: num}
self._var_lb_changed = {}
self._var_ub_changed = {}
self._cplex = cpx
# self._solve_count = 0
self._last_solve_status = None
self._last_solve_details = None
# for unpickling, remember to resync with model
self._sync_mode = _CplexSyncMode.InSync
# index of benders long annotation
self._benders_anno_idx = -1
# callback connector
self._ccb = None
    def _mark_as_out_of_sync(self):
        """Record that this engine no longer mirrors the docplex model (resync needed)."""
        self._sync_mode = _CplexSyncMode.OutOfSync
@classmethod
def _cpx_set_all_streams(cls, cpx, ofs):
cpx.set_log_stream(ofs)
cpx.set_results_stream(ofs)
cpx.set_error_stream(ofs)
cpx.set_warning_stream(ofs)
    @classmethod
    def cpx_get_all_streams(cls, cpx):
        """Return the raw file objects behind the four CPLEX streams.

        Order: log, results, error, warning. An entry is None when the stream
        wrapper carries no ``_file`` attribute.
        """
        # returns an array of streams in the order: log, result, error, warning
        streams = [cpx._env._get_log_stream(),
                   cpx._env._get_results_stream(),
                   cpx._env._get_error_stream(),
                   cpx._env._get_warning_stream()]
        # Unwrap each stream wrapper to its underlying file object, if any.
        return [x._file if hasattr(x, '_file') else None for x in streams]
@classmethod
def cpx_set_all_streams(cls, cpx, streams, error_handler):
if len(streams) != 4:
error_handler.fatal("Wrong number of streams, should be 4: {0!s}", len(streams))
else:
cpx.set_log_stream(streams[0])
cpx.set_results_stream(streams[1])
cpx.set_error_stream(streams[2])
cpx.set_warning_stream(streams[3])
def set_streams(self, outs):
self_log_output = self._saved_log_output
if self_log_output != outs:
self._cpx_set_all_streams(self._cplex, outs)
self._saved_log_output = outs
def get_var_index(self, dvar): # pragma: no cover
self._resync_if_needed()
dvar_name = dvar.name
if not dvar_name:
self.error_handler.fatal("cannot query index for anonymous object: {0!s}", (dvar,))
else:
return self._cplex.variables.get_indices(dvar_name)
def get_ct_index(self, ct): # pragma: no cover
self._resync_if_needed()
ctname = ct.name
if not ctname:
self.error_handler.fatal("cannot query index for anonymous constraint: {0!s}", (ct,))
self_cplex = self._cplex
ctscope = ct.cplex_scope
if ctscope is CplexScope.LINEAR_CT_SCOPE:
return self_cplex.linear_constraints.get_indices(ctname)
elif ctscope is CplexScope.IND_CT_SCOPE:
return self_cplex.indicator_constraints.get_indices(ctname)
elif ctscope is CplexScope.QUAD_CT_SCOPE:
return self_cplex.quadratic_constraints.get_indices(ctname)
elif ctscope is CplexScope.PWL_CT_SCOPE:
return self_cplex.pwl_constraints.get_indices(ctname)
else:
self.error_handler.fatal("unrecognized constraint to query index: {0!s}", ct)
    @classmethod
    def sync_data_differ_stop_here(cls, cpx_data, mdl_data):
        """Debug hook invoked when CPLEX and docplex data disagree; breakpoint target."""
        # put breakpoint here
        pass
def _check_one_constraint_index(self, cpx_linear, ct, prec=1e-6):
def sparse_to_terms(indices_, koefs_):
terms = sorted( zip(indices_, koefs_),key=lambda t: t[0] )
# terms = [(ix, k) for ix, k in zip(indices_, koefs_)]
# terms.sort(key=lambda t: t[0])
return terms
# assert idx > 0
cpx_row = cpx_linear.get_rows(ct.index)
cpx_terms = sparse_to_terms(cpx_row.ind, cpx_row.val)
mdl_idxs, mdl_coefs = self.linear_ct_to_cplex(ct)
mdl_terms = sparse_to_terms(mdl_idxs, mdl_coefs)
assert len(cpx_terms) == len(mdl_terms)
for cpxt, mdlt in zip(cpx_terms, mdl_terms):
assert cpxt[0] == mdlt[0]
assert abs(cpxt[1] - mdlt[1]) <= prec
    def check_constraint_indices(self, cts, ctscope):
        """Cross-check docplex constraints *cts* against the CPLEX model for *ctscope*.

        Compares counts, names and (when the interface exposes a RHS) the
        right-hand sides; for linear constraints additionally deep-checks the
        coefficient rows of the first, middle and last constraints.
        """
        mdl = self._model
        interface = self._scope_interface(ctscope)
        cpx_num = interface.get_num()
        l_cts = list(cts)
        if len(l_cts) != cpx_num:
            mdl.error("Sizes differ: cplex: {0}, docplex: {1}".format(cpx_num, len(l_cts)))
        # NOTE(review): if cpx_num exceeds len(l_cts) the l_cts[c] below will
        # raise IndexError after the error() call — confirm error() aborts.
        for c in range(cpx_num):
            mdl_ct = l_cts[c]
            try:
                cpx_name = interface.get_names(c)
            except self.cpx_adapter.CplexSolverError:
                # Name lookup failed (presumably an unnamed constraint): treat as anonymous.
                cpx_name = None
            mdl_name = mdl_ct.name
            if mdl_name and cpx_name != mdl_name:
                self.sync_data_differ_stop_here(cpx_name, mdl_name)
                mdl.error("Names differ: index: {0}, cplex: {1}, docplex: {2}".format(c, cpx_name, mdl_name))
            # Some constraint interfaces expose no RHS accessor; skip the check then.
            if hasattr(interface, "get_rhs"):
                cpx_rhs = interface.get_rhs(c)
                mdl_rhs = mdl_ct.cplex_num_rhs()
                if abs(cpx_rhs - mdl_rhs) >= 1e-6:
                    self.sync_data_differ_stop_here(cpx_rhs, mdl_rhs)
                    mdl.error("RHS differ: index: {0}, cplex: {1}, docplex: {2}".format(c, cpx_rhs, mdl_rhs))
        if cpx_num and ctscope == CplexScope.LINEAR_CT_SCOPE:
            # Spot-check full coefficient rows at the first, last and middle indices.
            full_check_indices = set()
            full_check_indices.add(0)
            if cpx_num > 1:
                full_check_indices.add(cpx_num - 1)
            if cpx_num >= 4:
                full_check_indices.add(int(cpx_num / 2))
            cpxlinear = interface
            for j in full_check_indices:
                ct = mdl.get_constraint_by_index(j)
                self._check_one_constraint_index(cpxlinear, ct)
def check_var_indices(self, dvars): # pragma: no cover
for dvar in dvars:
# assuming dvar has a name
model_index = dvar.index
cpx_index = self.get_var_index(dvar)
if model_index != cpx_index: # pragma: nocover
self._model.error("indices differ, obj: {0!s}, docplex={1}, CPLEX={2}", dvar, model_index,
cpx_index)
    @property
    def error_handler(self):
        """The error handler of the wrapped docplex model."""
        return self._model.error_handler
    @property
    def logger(self):
        """Alias of :attr:`error_handler`: the model's error handler doubles as logger."""
        return self._model.error_handler
    def get_cplex(self):
        """
        Returns the underlying CPLEX object

        :return: the native CPLEX instance this engine wraps.
        """
        return self._cplex
    def get_cplex_location(self):
        """Return the CPLEX installation location recorded from the adapter at construction."""
        return self._cplex_location
    def get_infinity(self):
        """Return the value the CPLEX module uses to represent infinity."""
        return self.cpx_adapter.cplex_module.infinity
def _create_cpx_multitype_vartype_list(self, vartypes):
# vartypes is a list of model variable types
# if all continuous return []
if all(mvt.cplex_typecode == 'C' for mvt in vartypes):
return ""
else:
# return a list of 'B', 'C', 'I' symbols
return "".join(mvt.cplex_typecode for mvt in vartypes)
@classmethod
def compute_cpx_vartype(cls, vartype, size):
if vartype == 'C':
return ''
else:
return vartype * size
def create_one_variable(self, vartype, lb, ub, name):
lbs = [float(lb)]
ubs = [float(ub)]
names = [name]
indices = self.create_variables(1, vartype, lbs, ubs, names)
assert 1 == len(indices)
return indices[0]
def create_variables(self, nb_vars, vartype, lbs, ubs, names):
self._resync_if_needed()
cpx_types = self.compute_cpx_vartype(vartype.cplex_typecode, nb_vars)
if not cpx_types:
if not (lbs or ubs):
# force at least one list with correct size.
lbs = [0] * nb_vars
return self._create_cpx_variables(nb_vars, cpx_types, lbs, ubs, names)
def create_multitype_variables(self, nb_vars, vartypes, lbs, ubs, names):
self._resync_if_needed()
cpx_types = self._create_cpx_multitype_vartype_list(vartypes)
return self._create_cpx_variables(nb_vars, cpx_types, lbs, ubs, names)
def _create_cpx_variables(self, nb_vars, cpx_vartypes, lbs, ubs, names):
ret_add = self.fast_add_cols(cpx_vartypes, lbs, ubs, list(names))
return self._allocate_range_index(size=nb_vars, ret_value=ret_add)
def _apply_var_fn(self, dvars, args, setter_fn, getter_fn=None):
cpxvars = self._cplex.variables
indices = [_v.index for _v in dvars]
# noinspection PyArgumentList
setter_fn(cpxvars, zip(indices, args))
if getter_fn:
return getter_fn(cpxvars, indices)
else:
return None
# TODO: to be removed, does not seem to be used ?
# _getset_map = {"lb": (cplex._internal._subinterfaces.VariablesInterface.set_lower_bounds,
# cplex._internal._subinterfaces.VariablesInterface.get_lower_bounds),
# "ub": (cplex._internal._subinterfaces.VariablesInterface.set_upper_bounds,
# cplex._internal._subinterfaces.VariablesInterface.get_upper_bounds),
# "name": (cplex._internal._subinterfaces.VariablesInterface.set_names,
# cplex._internal._subinterfaces.VariablesInterface.get_names)}
    def rename_var(self, dvar, new_name):
        """Rename variable *dvar* to *new_name* via the fast column-name setter."""
        # self._cplex.variables.set_names([(dvar.index, new_name or "")])
        self._fast_set_col_name(dvar._index, new_name)
def fast_set_var_types(self, dvars, vartypes):
cpx = self._cplex
cpx_adapter = self.cpx_adapter
cpx_adapter.chgctype(cpx._env._e, cpx._lp,
[dv.index for dv in dvars],
"".join(vt.cplex_typecode for vt in vartypes)
)
def change_var_types(self, dvars, newtypes): # pragma: no cover
if self.procedural:
self.fast_set_var_types(dvars, newtypes)
else:
# noinspection PyArgumentList
sparses = [(dv.index, vt.cplex_typecode) for (dv, vt) in zip(dvars, newtypes)]
self._cplex.variables.set_types(sparses)
def set_var_lb(self, dvar, lb):
self._resync_if_needed()
self_var_lbs = self._var_lb_changed
self_var_lbs[dvar] = float(lb)
def change_var_lbs(self, dvar_lb_dict):
self._resync_if_needed()
self_var_lbs = self._var_lb_changed
# naive code
self_var_lbs.update(dvar_lb_dict)
def change_var_ubs(self, dvar_ub_dict):
self._resync_if_needed()
self_var_ubs = self._var_ub_changed
# naive code
self_var_ubs.update(dvar_ub_dict)
def set_var_ub(self, dvar, ub):
self._resync_if_needed()
self_var_ubs = self._var_ub_changed
self_var_ubs[dvar] = float(ub) # force float here: numpy types will crash
def make_attribute_map_from_scope_fn(self, mdl, cplexfn, scope):
# transforms an array of cplex values into a map
# using the scope object as a mapper
all_values = cplexfn()
return self.make_attribute_map_from_scope_list(mdl, all_values, scope)
@classmethod
def make_attribute_map_from_scope_list(cls, mdl, values, scope, keep_zeros=False):
value_map = {}
for ix, cplex_value in enumerate(values):
mobj = scope(ix)
if mobj is None:
mdl.error("No {0} with index: {1} - caution".format(scope.qualifier, ix))
elif keep_zeros or cplex_value:
value_map[mobj] = cplex_value
return value_map
def get_all_reduced_costs(self, mdl):
return self.make_attribute_map_from_scope_fn(mdl, self._cplex.solution.get_reduced_costs, mdl._var_scope)
def get_all_dual_values(self, mdl):
return self.make_attribute_map_from_scope_fn(mdl, self._cplex.solution.get_dual_values, mdl._linct_scope)
def get_all_slack_values(self, mdl):
lin_slacks = self.make_attribute_map_from_scope_fn(mdl, self._cplex.solution.get_linear_slacks,
mdl._linct_scope)
quad_slacks = self.make_attribute_map_from_scope_fn(mdl, self._cplex.solution.get_quadratic_slacks,
mdl._quadct_scope)
ind_slacks = self.make_attribute_map_from_scope_fn(mdl, self._cplex.solution.get_indicator_slacks,
mdl._logical_scope)
# dict : cplex_scope -> dict from obj to slack
return {CplexScope.LINEAR_CT_SCOPE: lin_slacks,
CplexScope.QUAD_CT_SCOPE: quad_slacks,
CplexScope.IND_CT_SCOPE: ind_slacks}
    def get_basis(self, mdl):
        """Return (variable status map, linear-constraint status map) for the current basis.

        Returns two empty dicts when CPLEX reports that no basis exists
        (error code 1262).
        """
        try:
            status_vars, status_licnts = self._cplex.solution.basis.get_basis()
            # keep_zeros=True: a zero status value is still meaningful here.
            var_statuses_map = self.make_attribute_map_from_scope_list(mdl, status_vars, mdl._var_scope,
                                                                       keep_zeros=True)
            status_linearct_map = self.make_attribute_map_from_scope_list(mdl, status_licnts, mdl._linct_scope,
                                                                          keep_zeros=True)
            return var_statuses_map, status_linearct_map
        except self.cpx_adapter.CplexError as cpxe:
            if cpxe.args[2] == 1262: # code 1262 is "no basis exists"
                return {}, {}
            else: # pragma: no cover
                raise
# the returned list MUST be of size 2 otherwise the wrapper will crash.
_trivial_linexpr = [[], []]
@classmethod
def linear_ct_to_cplex(cls, linear_ct):
# INTERNAL
assert linear_ct.is_linear(), "Not a linear constraint: {0}".format(linear_ct)
return cls.make_cpx_linear_from_exprs(linear_ct.get_left_expr(), linear_ct.get_right_expr())
    def make_cpx_linear_from_one_expr(self, expr):
        """Return the CPLEX [indices, coefs] pair for a single expression (no right-hand side)."""
        return self.make_cpx_linear_from_exprs(left_expr=expr, right_expr=None)
    @classmethod
    def make_cpx_linear_from_exprs(cls, left_expr, right_expr):
        """Build the CPLEX [indices, coefficients] pair for left_expr - right_expr.

        Three cases: constant (or absent) right side, constant left side, or
        both sides carrying variables, in which case net coefficients are
        computed. Always returns a 2-element list.
        """
        indices = []
        coefs = []
        if right_expr is None or right_expr.is_constant():
            # Only the left side carries variables; preallocate exact-size lists.
            nb_terms = left_expr.number_of_terms()
            if nb_terms:
                indices = [-1] * nb_terms
                coefs = [0.0] * nb_terms
                for i, (dv, k) in enumerate(left_expr.iter_terms()):
                    indices[i] = dv._index
                    coefs[i] = float(k)
        elif left_expr.is_constant():
            # Only the right side carries variables; its coefficients are negated.
            nb_terms = right_expr.number_of_terms()
            if nb_terms:
                indices = [-1] * nb_terms
                coefs = [0] * nb_terms
                for i, (dv, k) in enumerate(right_expr.iter_terms()):
                    indices[i] = dv._index
                    coefs[i] = -float(k)
        else:
            # hard to guess array size here:
            # we could allocate size(left) + size(right) and truncate, but??
            for dv, k in BinaryConstraint._generate_net_linear_coefs2_unsorted(left_expr, right_expr):
                indices.append(dv._index)
                coefs.append(float(k))
        # all_indices_coefs is a list of (index, coef) 2-tuples
        if indices:
            # CPLEX requires two lists: one for indices, one for coefs
            # we use zip to unzip the tuples
            return [indices, coefs]
        else:
            # the returned list MUST be of size 2 otherwise the wrapper will crash.
            return cls._trivial_linexpr
    @staticmethod
    def make_cpx_ct_rhs_from_exprs(left_expr, right_expr):
        """Return the constraint right-hand side: right constant minus left constant."""
        return right_expr.get_constant() - left_expr.get_constant()
    def __index_problem_stop_here(self):
        """Debugging hook: set a breakpoint here to catch index problems."""
        # put a breakpoint here if index problems occur
        pass # pragma: no cover
def _make_cplex_linear_ct(self, cpx_lin_expr, cpx_sense, rhs, name):
# INTERNAL
cpx_rhs = [float(rhs)] # if not a float, cplex crashes baaaadly
cpxnames = [name] if name | |
response_type='list[ProcessDefinitionStatisticsResultDto]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_process_definitions(self, **kwargs): # noqa: E501
"""Get List # noqa: E501
Queries for process definitions that fulfill given parameters. Parameters may be the properties of process definitions, such as the name, key or version. The size of the result set can be retrieved by using the [Get Definition Count](https://docs.camunda.org/manual/7.13/reference/rest/process-definition/get-query-count/) method. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_process_definitions(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str process_definition_id: Filter by process definition id.
:param str process_definition_id_in: Filter by a comma-separated list of process definition ids.
:param str name: Filter by process definition name.
:param str name_like: Filter by process definition names that the parameter is a substring of.
:param str deployment_id: Filter by the deployment the id belongs to.
:param datetime deployed_after: Filter by the deploy time of the deployment the process definition belongs to. Only selects process definitions that have been deployed after (exclusive) a specific time. By [default](https://docs.camunda.org/manual/7.13/reference/rest/overview/date-format/), the date must have the format `yyyy-MM-dd'T'HH:mm:ss.SSSZ`, e.g., `2013-01-23T14:42:45.546+0200`.
:param datetime deployed_at: Filter by the deploy time of the deployment the process definition belongs to. Only selects process definitions that have been deployed at a specific time (exact match). By [default](https://docs.camunda.org/manual/7.13/reference/rest/overview/date-format/), the date must have the format `yyyy-MM-dd'T'HH:mm:ss.SSSZ`, e.g., `2013-01-23T14:42:45.546+0200`.
:param str key: Filter by process definition key, i.e., the id in the BPMN 2.0 XML. Exact match.
:param str keys_in: Filter by a comma-separated list of process definition keys.
:param str key_like: Filter by process definition keys that the parameter is a substring of.
:param str category: Filter by process definition category. Exact match.
:param str category_like: Filter by process definition categories that the parameter is a substring of.
:param int version: Filter by process definition version.
:param bool latest_version: Only include those process definitions that are latest versions. Value may only be `true`, as `false` is the default behavior.
:param str resource_name: Filter by the name of the process definition resource. Exact match.
:param str resource_name_like: Filter by names of those process definition resources that the parameter is a substring of.
:param str startable_by: Filter by a user name who is allowed to start the process.
:param bool active: Only include active process definitions. Value may only be `true`, as `false` is the default behavior.
:param bool suspended: Only include suspended process definitions. Value may only be `true`, as `false` is the default behavior.
:param str incident_id: Filter by the incident id.
:param str incident_type: Filter by the incident type. See the [User Guide](https://docs.camunda.org/manual/7.13/user-guide/process-engine/incidents/#incident-types) for a list of incident types.
:param str incident_message: Filter by the incident message. Exact match.
:param str incident_message_like: Filter by the incident message that the parameter is a substring of.
:param str tenant_id_in: Filter by a comma-separated list of tenant ids. A process definition must have one of the given tenant ids.
:param bool without_tenant_id: Only include process definitions which belong to no tenant. Value may only be true, as false is the default behavior.
:param bool include_process_definitions_without_tenant_id: Include process definitions which belong to no tenant. Can be used in combination with `tenantIdIn`. Value may only be `true`, as `false` is the default behavior.
:param str version_tag: Filter by the version tag.
:param str version_tag_like: Filter by the version tag that the parameter is a substring of.
:param bool without_version_tag: Only include process definitions without a `versionTag`.
:param bool startable_in_tasklist: Filter by process definitions which are startable in Tasklist..
:param bool not_startable_in_tasklist: Filter by process definitions which are not startable in Tasklist.
:param bool startable_permission_check: Filter by process definitions which the user is allowed to start in Tasklist. If the user doesn't have these permissions the result will be empty list. The permissions are: * `CREATE` permission for all Process instances * `CREATE_INSTANCE` and `READ` permission on Process definition level
:param str sort_by: Sort the results lexicographically by a given criterion. Must be used in conjunction with the sortOrder parameter.
:param str sort_order: Sort the results in a given order. Values may be asc for ascending order or desc for descending order. Must be used in conjunction with the sortBy parameter.
:param int first_result: Pagination of results. Specifies the index of the first result to return.
:param int max_results: Pagination of results. Specifies the maximum number of results to return. Will return less results if there are no more results left.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[ProcessDefinitionDto]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_process_definitions_with_http_info(**kwargs) # noqa: E501
def get_process_definitions_with_http_info(self, **kwargs): # noqa: E501
"""Get List # noqa: E501
Queries for process definitions that fulfill given parameters. Parameters may be the properties of process definitions, such as the name, key or version. The size of the result set can be retrieved by using the [Get Definition Count](https://docs.camunda.org/manual/7.13/reference/rest/process-definition/get-query-count/) method. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_process_definitions_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str process_definition_id: Filter by process definition id.
:param str process_definition_id_in: Filter by a comma-separated list of process definition ids.
:param str name: Filter by process definition name.
:param str name_like: Filter by process definition names that the parameter is a substring of.
:param str deployment_id: Filter by the deployment the id belongs to.
:param datetime deployed_after: Filter by the deploy time of the deployment the process definition belongs to. Only selects process definitions that have been deployed after (exclusive) a specific time. By [default](https://docs.camunda.org/manual/7.13/reference/rest/overview/date-format/), the date must have the format `yyyy-MM-dd'T'HH:mm:ss.SSSZ`, e.g., `2013-01-23T14:42:45.546+0200`.
:param datetime deployed_at: Filter by the deploy time of the deployment the process definition belongs to. Only selects process definitions that have been deployed at a specific time (exact match). By [default](https://docs.camunda.org/manual/7.13/reference/rest/overview/date-format/), the date must have the format `yyyy-MM-dd'T'HH:mm:ss.SSSZ`, e.g., `2013-01-23T14:42:45.546+0200`.
:param str key: Filter by process definition key, i.e., the id in the BPMN 2.0 XML. Exact match.
:param str keys_in: Filter by a comma-separated list of process definition keys.
:param str key_like: Filter by process definition keys that the parameter is a substring of.
:param str category: Filter by process definition category. Exact match.
:param str category_like: Filter by process definition categories that the parameter is a substring of.
:param int version: Filter by process definition version.
:param bool latest_version: Only include those process definitions that are latest versions. Value may only be `true`, as `false` is the default behavior.
:param str resource_name: Filter by the name of the process definition resource. Exact match.
:param str resource_name_like: Filter by names of those process definition resources that the parameter is a substring of.
:param str startable_by: Filter by a user name who is allowed to start the process.
:param bool active: Only include active process definitions. Value may only be `true`, as `false` is the default behavior.
:param bool suspended: Only include suspended process definitions. Value may only be `true`, as `false` is the default behavior.
:param str incident_id: Filter by the incident id.
:param str incident_type: Filter by the incident type. See the [User Guide](https://docs.camunda.org/manual/7.13/user-guide/process-engine/incidents/#incident-types) for a list of incident types.
:param str incident_message: Filter by the incident message. Exact match.
:param str incident_message_like: Filter by the incident message that the parameter is a substring of.
:param str tenant_id_in: Filter by a comma-separated list of tenant ids. A process definition must | |
p in model.parameters() if p.requires_grad)}')
if(config.train):
MRPC_train_data_frame = utils.get_MRPC_data_frame(config.MRPC_train_data_path)
train_LM_dataset = MRPCLMDataset(MRPC_train_data_frame, tokenizer)
train_dataset_aux_1 = MRPCAuxDataset(root='../data/glue_data/MRPC/train/', data_frame=MRPC_train_data_frame, DT_G=DT_G, is_sentence_1=True)
train_dataset_aux_2 = MRPCAuxDataset(root='../data/glue_data/MRPC/train/', data_frame=MRPC_train_data_frame, DT_G=DT_G, is_sentence_2=True)
train_loader = GraphDataLoader(WrapperDataset(train_LM_dataset, train_dataset_aux_1, train_dataset_aux_2), batch_size=config.batch_size)
MRPC_dev_data_frame = utils.get_MRPC_data_frame(config.MRPC_dev_data_path)
dev_LM_dataset = MRPCLMDataset(MRPC_dev_data_frame, tokenizer)
dev_dataset_aux_1 = MRPCAuxDataset(root='../data/glue_data/MRPC/dev/', data_frame=MRPC_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
dev_dataset_aux_2 = MRPCAuxDataset(root='../data/glue_data/MRPC/dev/', data_frame=MRPC_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1, dev_dataset_aux_2), batch_size=config.batch_size)
no_decay = ['bias', 'LayerNorm.weight', 'layer_norm.weight']
optimizer_grouped_parameters = [{"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0,}, {"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],},]
optimizer = AdamW(optimizer_grouped_parameters, lr=config.learning_rate, betas=(config.beta_1, config.beta_2), weight_decay=config.weight_decay)
t_total = (len(train_loader.dataset) // config.batch_size) * float(config.epochs)
scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=config.warmup_ratio*t_total, num_training_steps=t_total)
loss_fn = nn.CrossEntropyLoss()
print(f'Training: {config.experiment}')
train(model=model, train_loader=train_loader, loss_fn=loss_fn, optimizer=optimizer, scheduler=scheduler, dev_loader=dev_loader)
else:
MRPC_dev_data_frame = utils.get_MRPC_data_frame(config.MRPC_dev_data_path)
dev_LM_dataset = MRPCLMDataset(MRPC_dev_data_frame, tokenizer)
dev_dataset_aux_1 = MRPCAuxDataset(root='../data/glue_data/MRPC/dev/', data_frame=MRPC_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
dev_dataset_aux_2 = MRPCAuxDataset(root='../data/glue_data/MRPC/dev/', data_frame=MRPC_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1, dev_dataset_aux_2), batch_size=config.batch_size)
loss_fn = nn.CrossEntropyLoss()
model.load_state_dict(torch.load(config.model_name))
print(f'Testing')
return test(model=model, test_loader=dev_loader, loss_fn=loss_fn)
def SST_2_main():
    """Run the SST-2 experiment: train with per-epoch dev evaluation, or
    load a saved checkpoint and run a single test pass, depending on
    ``config.train``."""
    assert config.experiment == 'SST_2'
    tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=True)
    DT_G = utils.load_DT()
    model = SentenceLevelNet(tokenizer=tokenizer, num_output_classes=len(config.SST_2_labels))
    model.to(config.device)
    print(f'Total number of parameters: {sum(p.numel() for p in model.parameters())}')
    print(f'Total number of trainable parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')
    if config.train:
        train_df = utils.get_SST_2_data_frame(config.SST_2_train_data_path)
        train_lm = SST_2LMDataset(train_df, tokenizer)
        train_aux = SST_2AuxDataset(root='../data/glue_data/SST-2/train/', data_frame=train_df, DT_G=DT_G, is_sentence_1=True)
        train_loader = GraphDataLoader(WrapperDataset(train_lm, train_aux), batch_size=config.batch_size)
        dev_df = utils.get_SST_2_data_frame(config.SST_2_dev_data_path)
        dev_lm = SST_2LMDataset(dev_df, tokenizer)
        dev_aux = SST_2AuxDataset(root='../data/glue_data/SST-2/dev/', data_frame=dev_df, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_lm, dev_aux), batch_size=config.batch_size)
        # Biases and normalisation weights are exempt from weight decay.
        no_decay = ['bias', 'LayerNorm.weight', 'layer_norm.weight']
        decay_free, decayed = [], []
        for pname, param in model.named_parameters():
            (decay_free if any(nd in pname for nd in no_decay) else decayed).append(param)
        grouped = [{"params": decay_free, "weight_decay": 0.0}, {"params": decayed}]
        optimizer = AdamW(grouped, lr=config.learning_rate, betas=(config.beta_1, config.beta_2), weight_decay=config.weight_decay)
        # Total steps = full batches per epoch * epochs; warmup is a fraction.
        t_total = (len(train_loader.dataset) // config.batch_size) * float(config.epochs)
        scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=config.warmup_ratio * t_total, num_training_steps=t_total)
        loss_fn = nn.CrossEntropyLoss()
        print(f'Training: {config.experiment}')
        train(model=model, train_loader=train_loader, loss_fn=loss_fn, optimizer=optimizer, scheduler=scheduler, dev_loader=dev_loader)
    else:
        dev_df = utils.get_SST_2_data_frame(config.SST_2_dev_data_path)
        dev_lm = SST_2LMDataset(dev_df, tokenizer)
        dev_aux = SST_2AuxDataset(root='../data/glue_data/SST-2/dev/', data_frame=dev_df, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_lm, dev_aux), batch_size=config.batch_size)
        loss_fn = nn.CrossEntropyLoss()
        model.load_state_dict(torch.load(config.model_name))
        print(f'Testing')
        return test(model=model, test_loader=dev_loader, loss_fn=loss_fn)
def CoLA_main():
    """Run the CoLA experiment end to end: train (with dev evaluation) when
    ``config.train`` is set, otherwise evaluate a saved checkpoint."""
    assert config.experiment == 'CoLA'
    tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
    DT_G = utils.load_DT()
    model = SentenceLevelNet(tokenizer=tokenizer, num_output_classes=len(config.CoLA_labels))
    model.to(config.device)
    print(f'Total number of parameters: {sum(p.numel() for p in model.parameters())}')
    print(f'Total number of trainable parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')
    if config.train:
        train_df = utils.get_CoLA_data_frame(config.CoLA_train_data_path)
        train_lm = CoLA_LMDataset(train_df, tokenizer)
        train_aux = CoLAAuxDataset(root='../data/glue_data/CoLA/train/', data_frame=train_df, DT_G=DT_G, is_sentence_1=True)
        train_loader = GraphDataLoader(WrapperDataset(train_lm, train_aux), batch_size=config.batch_size)
        dev_df = utils.get_CoLA_data_frame(config.CoLA_dev_data_path)
        dev_lm = CoLA_LMDataset(dev_df, tokenizer)
        dev_aux = CoLAAuxDataset(root='../data/glue_data/CoLA/dev/', data_frame=dev_df, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_lm, dev_aux), batch_size=config.batch_size)
        # Exclude biases and normalisation weights from weight decay.
        no_decay = ('bias', 'LayerNorm.weight', 'layer_norm.weight')
        named = list(model.named_parameters())
        param_groups = [
            {"params": [p for n, p in named if any(nd in n for nd in no_decay)], "weight_decay": 0.0},
            {"params": [p for n, p in named if not any(nd in n for nd in no_decay)]},
        ]
        optimizer = AdamW(param_groups, lr=config.learning_rate, betas=(config.beta_1, config.beta_2), weight_decay=config.weight_decay)
        t_total = (len(train_loader.dataset) // config.batch_size) * float(config.epochs)
        scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=config.warmup_ratio * t_total, num_training_steps=t_total)
        loss_fn = nn.CrossEntropyLoss()
        print(f'Training: {config.experiment}')
        train(model=model, train_loader=train_loader, loss_fn=loss_fn, optimizer=optimizer, scheduler=scheduler, dev_loader=dev_loader)
    else:
        dev_df = utils.get_CoLA_data_frame(config.CoLA_dev_data_path)
        dev_lm = CoLA_LMDataset(dev_df, tokenizer)
        dev_aux = CoLAAuxDataset(root='../data/glue_data/CoLA/dev/', data_frame=dev_df, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_lm, dev_aux), batch_size=config.batch_size)
        loss_fn = nn.CrossEntropyLoss()
        model.load_state_dict(torch.load(config.model_name))
        print(f'Testing')
        return test(model=model, test_loader=dev_loader, loss_fn=loss_fn)
def WNLI_translated_main():
    """Train or evaluate the sentence-pair model on Hindi-translated WNLI.

    When ``config.train`` is set, trains with dev-set evaluation; otherwise
    loads ``config.model_name`` and runs a single test pass on the dev set.
    """
    assert(config.experiment == 'wnli_translated')
    tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
    DT_G = utils.load_DT()
    # NOTE(review): output size reuses config.SST_2_labels rather than a
    # WNLI-specific label list — presumably both are binary, but confirm a
    # dedicated WNLI label list was not intended.
    model = SentenceLevelNet(tokenizer=tokenizer, num_output_classes=len(config.SST_2_labels))
    model.to(config.device)
    print(f'Total number of parameters: {sum(p.numel() for p in model.parameters())}')
    print(f'Total number of trainable parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')
    if(config.train):
        WNLI_translated_train_data_frame = utils.get_WNLI_translated_data_frame(config.WNLI_translated_train_data_path)
        train_LM_dataset = WNLI_TranslatedLMDataset(WNLI_translated_train_data_frame, tokenizer)
        # Two auxiliary graph datasets: one per sentence of the pair.
        train_dataset_aux_1 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/train/', data_frame=WNLI_translated_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_dataset_aux_2 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/train/', data_frame=WNLI_translated_train_data_frame, DT_G=DT_G, is_sentence_2=True)
        train_loader = GraphDataLoader(WrapperDataset(train_LM_dataset, train_dataset_aux_1, train_dataset_aux_2), batch_size=config.batch_size)
        WNLI_translated_dev_data_frame = utils.get_WNLI_translated_data_frame(config.WNLI_translated_dev_data_path)
        dev_LM_dataset = WNLI_TranslatedLMDataset(WNLI_translated_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/dev/', data_frame=WNLI_translated_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/dev/', data_frame=WNLI_translated_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1, dev_dataset_aux_2), batch_size=config.batch_size)
        # Biases and normalisation weights are exempt from weight decay.
        no_decay = ['bias', 'LayerNorm.weight', 'layer_norm.weight']
        optimizer_grouped_parameters = [{"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0,}, {"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],},]
        optimizer = AdamW(optimizer_grouped_parameters, lr=config.learning_rate, betas=(config.beta_1, config.beta_2), weight_decay=config.weight_decay)
        # Total steps = full batches per epoch * epochs; warmup is a fraction.
        t_total = (len(train_loader.dataset) // config.batch_size) * float(config.epochs)
        scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=config.warmup_ratio*t_total, num_training_steps=t_total)
        loss_fn = nn.CrossEntropyLoss()
        print(f'Training: {config.experiment}')
        train(model=model, train_loader=train_loader, loss_fn=loss_fn, optimizer=optimizer, scheduler=scheduler, dev_loader=dev_loader)
    else:
        WNLI_translated_dev_data_frame = utils.get_WNLI_translated_data_frame(config.WNLI_translated_dev_data_path)
        dev_LM_dataset = WNLI_TranslatedLMDataset(WNLI_translated_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/dev/', data_frame=WNLI_translated_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_dataset_aux_2 = WNLI_TranslatedAuxDataset(root='../data/wnli-translated/hi/dev/', data_frame=WNLI_translated_dev_data_frame, DT_G=DT_G, is_sentence_2=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1, dev_dataset_aux_2), batch_size=config.batch_size)
        loss_fn = nn.CrossEntropyLoss()
        model.load_state_dict(torch.load(config.model_name))
        print(f'Testing')
        return test(model=model, test_loader=dev_loader, loss_fn=loss_fn)
def IITP_product_reviews_main():
    """Train or evaluate sentiment classification on IITP product reviews (Hindi).

    ``config.train`` selects training (with dev evaluation) versus loading
    ``config.model_name`` and running a single test pass.
    """
    assert(config.experiment == 'iitp_product')
    tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
    DT_G = utils.load_DT()
    model = SentenceLevelNet(tokenizer=tokenizer, num_output_classes=len(config.IITP_product_reviews_labels))
    model.to(config.device)
    print(f'Total number of parameters: {sum(p.numel() for p in model.parameters())}')
    print(f'Total number of trainable parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')
    if(config.train):
        IITP_product_reviews_train_data_frame = utils.get_IITP_product_reviews_data_frame(config.IITP_product_reviews_train_data_path)
        train_LM_dataset = IITP_Product_ReviewsLMDataset(IITP_product_reviews_train_data_frame, tokenizer)
        train_dataset_aux_1 = IITP_Product_ReviewsAuxDataset(root='../data/iitp-product-reviews/hi/train/', data_frame=IITP_product_reviews_train_data_frame, DT_G=DT_G, is_sentence_1=True)
        train_loader = GraphDataLoader(WrapperDataset(train_LM_dataset, train_dataset_aux_1), batch_size=config.batch_size)
        IITP_product_reviews_dev_data_frame = utils.get_IITP_product_reviews_data_frame(config.IITP_product_reviews_dev_data_path)
        dev_LM_dataset = IITP_Product_ReviewsLMDataset(IITP_product_reviews_dev_data_frame, tokenizer)
        # NOTE(review): the dev frame's graph cache is rooted at .../hi/test/
        # here but at .../hi/valid/ in the eval branch below — confirm which
        # split these cached graphs actually belong to.
        dev_dataset_aux_1 = IITP_Product_ReviewsAuxDataset(root='../data/iitp-product-reviews/hi/test/', data_frame=IITP_product_reviews_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1), batch_size=config.batch_size)
        # Biases and normalisation weights are exempt from weight decay.
        no_decay = ['bias', 'LayerNorm.weight', 'layer_norm.weight']
        optimizer_grouped_parameters = [{"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0,}, {"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],},]
        optimizer = AdamW(optimizer_grouped_parameters, lr=config.learning_rate, betas=(config.beta_1, config.beta_2), weight_decay=config.weight_decay)
        t_total = (len(train_loader.dataset) // config.batch_size) * float(config.epochs)
        scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=config.warmup_ratio*t_total, num_training_steps=t_total)
        loss_fn = nn.CrossEntropyLoss()
        print(f'Training: {config.experiment}')
        train(model=model, train_loader=train_loader, loss_fn=loss_fn, optimizer=optimizer, scheduler=scheduler, dev_loader=dev_loader)
    else:
        IITP_product_reviews_dev_data_frame = utils.get_IITP_product_reviews_data_frame(config.IITP_product_reviews_dev_data_path)
        dev_LM_dataset = IITP_Product_ReviewsLMDataset(IITP_product_reviews_dev_data_frame, tokenizer)
        dev_dataset_aux_1 = IITP_Product_ReviewsAuxDataset(root='../data/iitp-product-reviews/hi/valid/', data_frame=IITP_product_reviews_dev_data_frame, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1), batch_size=config.batch_size)
        loss_fn = nn.CrossEntropyLoss()
        model.load_state_dict(torch.load(config.model_name))
        print(f'Testing')
        return test(model=model, test_loader=dev_loader, loss_fn=loss_fn)
def MIDAS_discourse_main():
    """Train or evaluate discourse-mode classification on the MIDAS corpus.

    Unlike the data-frame based tasks, this task is loaded from JSON
    (``utils.get_MIDAS_discourse_json``) and passed to the datasets as
    ``json_data``.
    """
    assert(config.experiment == 'midas_discourse')
    tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
    DT_G = utils.load_DT()
    model = SentenceLevelNet(tokenizer=tokenizer, num_output_classes=len(config.MIDAS_discourse_labels))
    model.to(config.device)
    print(f'Total number of parameters: {sum(p.numel() for p in model.parameters())}')
    print(f'Total number of trainable parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')
    if(config.train):
        MIDAS_discourse_train_json = utils.get_MIDAS_discourse_json(config.MIDAS_discourse_train_json_path)
        train_LM_dataset = MIDAS_DiscourseLMDataset(MIDAS_discourse_train_json, tokenizer)
        train_dataset_aux_1 = MIDAS_DiscourseAuxDataset(root='../data/midas-discourse/hi/train/', json_data=MIDAS_discourse_train_json, DT_G=DT_G, is_sentence_1=True)
        train_loader = GraphDataLoader(WrapperDataset(train_LM_dataset, train_dataset_aux_1), batch_size=config.batch_size)
        MIDAS_discourse_dev_json = utils.get_MIDAS_discourse_json(config.MIDAS_discourse_dev_json_path)
        dev_LM_dataset = MIDAS_DiscourseLMDataset(MIDAS_discourse_dev_json, tokenizer)
        # NOTE(review): the dev JSON's graph cache is rooted at .../hi/test/
        # here but at .../hi/dev/ in the eval branch below — confirm which
        # root holds the graphs for the dev split.
        dev_dataset_aux_1 = MIDAS_DiscourseAuxDataset(root='../data/midas-discourse/hi/test/', json_data=MIDAS_discourse_dev_json, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1), batch_size=config.batch_size)
        # Biases and normalisation weights are exempt from weight decay.
        no_decay = ['bias', 'LayerNorm.weight', 'layer_norm.weight']
        optimizer_grouped_parameters = [{"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0,}, {"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],},]
        optimizer = AdamW(optimizer_grouped_parameters, lr=config.learning_rate, betas=(config.beta_1, config.beta_2), weight_decay=config.weight_decay)
        t_total = (len(train_loader.dataset) // config.batch_size) * float(config.epochs)
        scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=config.warmup_ratio*t_total, num_training_steps=t_total)
        loss_fn = nn.CrossEntropyLoss()
        print(f'Training: {config.experiment}')
        train(model=model, train_loader=train_loader, loss_fn=loss_fn, optimizer=optimizer, scheduler=scheduler, dev_loader=dev_loader)
    else:
        MIDAS_discourse_dev_json = utils.get_MIDAS_discourse_json(config.MIDAS_discourse_dev_json_path)
        dev_LM_dataset = MIDAS_DiscourseLMDataset(MIDAS_discourse_dev_json, tokenizer)
        dev_dataset_aux_1 = MIDAS_DiscourseAuxDataset(root='../data/midas-discourse/hi/dev/', json_data=MIDAS_discourse_dev_json, DT_G=DT_G, is_sentence_1=True)
        dev_loader = GraphDataLoader(WrapperDataset(dev_LM_dataset, dev_dataset_aux_1), batch_size=config.batch_size)
        loss_fn = nn.CrossEntropyLoss()
        model.load_state_dict(torch.load(config.model_name))
        print(f'Testing')
        return test(model=model, test_loader=dev_loader, loss_fn=loss_fn)
def DPIL_subtask_1_main():
    """Train or evaluate the sentence-pair model for DPIL subtask 1.

    ``config.train`` selects training with dev evaluation versus loading
    ``config.model_name`` and running a single test pass.
    """
    assert config.experiment == 'dpil_subtask_1'
    tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
    DT_G = utils.load_DT()
    model = SentenceLevelNet(tokenizer=tokenizer, num_output_classes=len(config.DPIL_subtask_1_labels))
    model.to(config.device)
    print(f'Total number of parameters: {sum(p.numel() for p in model.parameters())}')
    print(f'Total number of trainable parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad)}')

    def build_loader(path, root):
        # One LM dataset plus one auxiliary graph dataset per sentence.
        frame = utils.get_DPIL_data_frame(path)
        lm = DPIL_Subtask_1LMDataset(frame, tokenizer)
        aux_1 = DPIL_Subtask_1AuxDataset(root=root, data_frame=frame, DT_G=DT_G, is_sentence_1=True)
        aux_2 = DPIL_Subtask_1AuxDataset(root=root, data_frame=frame, DT_G=DT_G, is_sentence_2=True)
        return GraphDataLoader(WrapperDataset(lm, aux_1, aux_2), batch_size=config.batch_size)

    loss_fn = nn.CrossEntropyLoss()
    if config.train:
        train_loader = build_loader(config.DPIL_subtask_1_train_path, '../data/DPIL_csv/subtask_1/train/')
        dev_loader = build_loader(config.DPIL_subtask_1_dev_path, '../data/DPIL_csv/subtask_1/test/')
        # Biases and normalisation weights are exempt from weight decay.
        no_decay = ['bias', 'LayerNorm.weight', 'layer_norm.weight']
        grouped = [
            {"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0},
            {"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)]},
        ]
        optimizer = AdamW(grouped, lr=config.learning_rate, betas=(config.beta_1, config.beta_2), weight_decay=config.weight_decay)
        t_total = (len(train_loader.dataset) // config.batch_size) * float(config.epochs)
        scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=config.warmup_ratio * t_total, num_training_steps=t_total)
        print(f'Training: {config.experiment}')
        train(model=model, train_loader=train_loader, loss_fn=loss_fn, optimizer=optimizer, scheduler=scheduler, dev_loader=dev_loader)
    else:
        dev_loader = build_loader(config.DPIL_subtask_1_dev_path, '../data/DPIL_csv/subtask_1/test/')
        model.load_state_dict(torch.load(config.model_name))
        print(f'Testing')
        return test(model=model, test_loader=dev_loader, loss_fn=loss_fn)
def DPIL_subtask_2_main():
assert(config.experiment == 'dpil_subtask_2')
tokenizer = AutoTokenizer.from_pretrained(config.lm_model_name, do_lower_case=False)
DT_G = utils.load_DT()
model = SentenceLevelNet(tokenizer=tokenizer, num_output_classes=len(config.DPIL_subtask_2_labels))
model.to(config.device)
print(f'Total number of parameters: {sum(p.numel() for p in model.parameters())}')
print(f'Total number of trainable parameters: {sum(p.numel() | |
Image(unchecked_image_path, .25 * cm, .25 * cm),
Paragraph('Reading', styles["Line_Label"]),
Image(unchecked_image_path, .25 * cm, .25 * cm),
Paragraph('<b>Child has Birth Certificate:</b>', styles["Line_Label"]),
Paragraph('Yes', styles["Line_Label"]),
get_check(ovc_data['bcert'], 'AYES'),
Paragraph('No', styles["Line_Label"]),
get_check(ovc_data['bcert'], 'ANNO'),
Paragraph('Refer to CRD.', styles["Line_Data_Small"]),
]]
# Education / birth-certificate row: fixed column widths (in cm) keep the
# checkbox images aligned with their labels.
# t1 = Table(data1, colWidths=(3 * cm, None, 4.5 * cm,))
t1 = Table(data1, colWidths=(
    2.4 * cm, 1.4 * cm, 0.6 * cm, 1.4 * cm, 0.6 * cm, 1.2 * cm, 0.6 * cm,
    1.4 * cm, 0.6 * cm, 1.4 * cm, 0.6 * cm, 2.4 * cm,
    1.0 * cm, 0.6 * cm, 0.8 * cm, 0.6 * cm, 2.0 * cm))
# Inner grid lines only around columns 0-1 and 10-12; a box frames the row.
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (1, 0), 0.25, colors.black),
    ('INNERGRID', (10, 0), (12, 0), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
# SIBLINGS: grey section banner, header row, then eight fixed data rows.
data1 = [[Paragraph('<b>SIBLINGS</b>', styles["Line_Title"])]]
t1 = Table(data1, colWidths=(None,), rowHeights = [0.5 * cm])
t1.setStyle(TableStyle([
    ('BACKGROUND', (0, 0), (-1, -1), '#a7a5a5'),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
# Header row for the siblings grid.
data1 = [[Paragraph('<b>No.</b>', styles["Line_Label"]),
          Paragraph('<b>Name</b>', styles["Line_Label"]),
          Paragraph('<b>D.O.B</b>', styles["Line_Label"]),
          Paragraph('<b>Sex</b>', styles["Line_Label"]),
          Paragraph('<b>Name of School</b>', styles["Line_Label"]),
          Paragraph('<b>Class</b>', styles["Line_Label"]),
          Paragraph('<b>Remarks</b>', styles["Line_Label"])],
         ]
t1 = Table(data1, colWidths=(
    0.9 * cm, 5.0 * cm, 2.5 * cm, 1.5 * cm, 5 * cm,
    1.5 * cm, 3.2 * cm), rowHeights = [0.6 * cm])
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
# Eight fixed sibling slots (keys 1..8); school and class cells are left
# blank for manual completion.
siblings = ovc_items['siblings']
items = [{'sibling': i} for i in range (1, 9)]
data1 = [[Paragraph(str(product['sibling']), styles["Line_Data"]),
          Paragraph(str(siblings[product['sibling']]['name']), styles["Line_Data"]),
          Paragraph(str(siblings[product['sibling']]['dob']), styles["Line_Data"]),
          Paragraph(str(siblings[product['sibling']]['sex']), styles["Line_Data"]),
          '','',
          Paragraph(str(siblings[product['sibling']]['remark']), styles["Line_Data"])] for product in items]
t1 = Table(data1, colWidths=(
    0.9 * cm, 5.0 * cm, 2.5 * cm, 1.5 * cm, 5 * cm, 1.5 * cm, 3.2 * cm))
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
# HOME PARTICULARS: section banner, location rows, then family/income status.
data1 = [[Paragraph('<b>HOME PARTICULARS OF THE CHILD</b>', styles["Line_Title"])]]
t1 = Table(data1, colWidths=(None,), rowHeights = [0.5 * cm])
t1.setStyle(TableStyle([
    ('BACKGROUND', (0, 0), (-1, -1), '#a7a5a5'),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
# County / sub-county / village row; the village value is rendered blank.
data1 = [[Paragraph('<b>County:<br/></b>', styles["Line_Label"]),
          Paragraph(ovc_data['child_county'], styles["Line_Data_Small"]),
          Paragraph('<b>Sub-County:</b>', styles["Line_Label"]),
          Paragraph(ovc_data['child_sub_county'], styles["Line_Data_Small"]),
          Paragraph('<b>Village/Estate:</b>', styles["Line_Label"]),
          Paragraph('', styles["Line_Data_Small"])
          ]]
t1 = Table(data1, colWidths=(
    2.4 * cm, 4.3 * cm, 2.1 * cm, 4.0 * cm, 2.5 * cm,
    4.3 * cm), rowHeights = [0.6 * cm])
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
# Ward / nearest landmark row; the landmark value is rendered blank.
data1 = [[Paragraph('<b>Ward:</b>', styles["Line_Label"]),
          Paragraph(ovc_data['child_ward'], styles["Line_Data_Small"]),
          Paragraph('<b>Nearest Land Mark:</b>', styles["Line_Label"]),
          Paragraph('', styles["Line_Data_Small"])
          ]]
# t1 = Table(data1, colWidths=(3 * cm, None, 4.5 * cm,))
t1 = Table(data1, colWidths=(2.4 * cm, 4.3 * cm, 2.1 * cm, 10.8 * cm))
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
hes_txt = '<b>Household Economic Status (Income):</b>'
# Family-status and household-income checkboxes.
# NOTE(review): "Parents living together" is checked against an empty code
# '' while the other options use codes ('FSPN', 'LINC', ...); confirm the
# intended code for this option.
data1 = [[Paragraph('<b>Family Status:</b>', styles["Line_Label"]),
          Paragraph('Parents living together', styles["Line_Label"]),
          get_check(ovc_data['family_status'], ''),
          Paragraph('Parents not living together', styles["Line_Label"]),
          get_check(ovc_data['family_status'], 'FSPN'),
          Paragraph(hes_txt, styles["Line_Label"]),
          Paragraph('Low', styles["Line_Label"]),
          get_check(ovc_data['hes_status'], 'LINC'),
          Paragraph('Middle', styles["Line_Label"]),
          get_check(ovc_data['hes_status'], 'MINC'),
          Paragraph('High', styles["Line_Label"]),
          get_check(ovc_data['hes_status'], 'HINC'),
          Paragraph('Unknown', styles["Line_Label"]),
          get_check(ovc_data['hes_status'], 'UINC')
          ]]
# t1 = Table(data1, colWidths=(3 * cm, None, 4.5 * cm,))
t1 = Table(data1, colWidths=(
    2.4 * cm, 2.6 * cm, 0.6 * cm, 2.6 * cm, 0.6 * cm,
    3.1 * cm, 1.1 * cm, 0.6 * cm, 1.4 * cm, 0.7 * cm,
    1.1 * cm, 0.7 * cm, 1.5 * cm, 0.6 * cm))
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (1, 0), 0.25, colors.black),
    ('INNERGRID', (4, 0), (6, 0), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
# PARENTS PARTICULARS: section banner, header row, two pre-filled data rows.
data1 = [[Paragraph('<b>PARENTS PARTICULARS</b>', styles["Line_Title"])]]
t1 = Table(data1, colWidths=(None,), rowHeights=[0.5 * cm])
t1.setStyle(TableStyle([
    ('BACKGROUND', (0, 0), (-1, -1), '#a7a5a5'),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
data1 = [[Paragraph('<b>Name</b>', styles["Line_Label"]),
          Paragraph('<b>Relationship</b>', styles["Line_Label"]),
          Paragraph('<b>ID No.</b>', styles["Line_Label"]),
          Paragraph('<b>Date of Birth</b>', styles["Line_Label"]),
          Paragraph('<b>Telephone</b>', styles["Line_Label"]),
          Paragraph('<b>Village/Estate</b>', styles["Line_Label"]),
          Paragraph('<b>Occupation</b>', styles["Line_Label"]),
          Paragraph('<b>Education<sup>2</sup></b>', styles["Line_Label"]),
          Paragraph('<b>Alive</b>', styles["Line_Label"])
          ]]
t1 = Table(data1, colWidths=(
    4.5 * cm, 2.0 * cm, 2.0 * cm, 2.0 * cm, 2.0 * cm, 2.1 * cm, 2.0 * cm,
    1.8 * cm, 1.1 * cm), rowHeights=[0.6 * cm])
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
# Shorthand for the data-row paragraph style.
sld = styles["Line_Data"]
story.append(t1)
# Two fixed rows: key 1 = Father, 2 = Mother; only name, relationship and
# date of birth are pre-filled; the remaining cells are left blank.
parents_items = {1: 'Father', 2: 'Mother'}
parents = ovc_data['parents']
items = [{'parent': 1}, {'parent': 2}]
data1 = [[Paragraph(str(parents[product['parent']]['name']), sld),
          Paragraph(str(parents_items[product['parent']]), sld),
          '',
          Paragraph(str(parents[product['parent']]['dob']), sld),
          '','', '', '', ''] for product in items]
t1 = Table(data1, colWidths=(4.5 * cm, 2.0 * cm, 2.0 * cm, 2.0 * cm, 2.0 * cm, 2.1 * cm, 2.0 * cm, 1.8 * cm, 1.1 * cm))
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
# CAREGIVERS: section banner, static relationship checkboxes, header row,
# and two caregiver data rows.
data1 = [[Paragraph('<b>CAREGIVER PARTICULARS</b>', styles["Line_Title"])]]
t1 = Table(data1, colWidths=(None,), rowHeights = [0.5 * cm])
t1.setStyle(TableStyle([
    ('BACKGROUND', (0, 0), (-1, -1), '#a7a5a5'),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .1 * cm))
# These boxes are always rendered unchecked (not driven by ovc_data);
# they are ticked by hand on the printed form.
data1 = [[Paragraph('<b>Relationship:</b>', styles["Line_Label"]),
          Paragraph('Foster Parent', styles["Line_Label"]),
          Image(unchecked_image_path, .25 * cm, .25 * cm),
          Paragraph('Guardian', styles["Line_Label"]),
          Image(unchecked_image_path, .25 * cm, .25 * cm),
          Paragraph('Next of Kin', styles["Line_Label"]),
          Image(unchecked_image_path, .25 * cm, .25 * cm),
          Paragraph('Select as appropriate for caregiver', styles["Line_Label"]),]
         ]
t1 = Table(data1, colWidths=(2.5 * cm, 2.0 * cm, 0.6 * cm, 1.5 * cm, 0.6 * cm, 2.0 * cm, 0.6 * cm, None))
t1.setStyle(TableStyle([
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .1 * cm))
data1 = [[Paragraph('<b>Name</b>', styles["Line_Label"]),
          Paragraph('<b>Sex</b>', styles["Line_Label"]),
          Paragraph('<b>Relationship</b>', styles["Line_Label"]),
          Paragraph('<b>ID No.</b>', styles["Line_Label"]),
          Paragraph('<b>Date of Birth</b>', styles["Line_Label"]),
          Paragraph('<b>Telephone</b>', styles["Line_Label"]),
          Paragraph('<b>Village/Estate</b>', styles["Line_Label"]),
          Paragraph('<b>Occupation</b>', styles["Line_Label"]),
          Paragraph('<b>Education<sup>2</sup></b>', styles["Line_Label"])],
         ]
t1 = Table(data1, colWidths=(
    4.5 * cm, 1.1 * cm, 2.0 * cm, 2.0 * cm, 2.0 * cm, 2.0 * cm,
    2.1 * cm, 2.0 * cm, 1.8 * cm), rowHeights=[0.6 * cm])
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
# Two caregiver slots; only the name is pre-filled.
items = [{'caregiver': 1}, {'caregiver': 2}]
caregivers = ovc_data['caregivers']
data1 = [[Paragraph(str(caregivers[product['caregiver']]['name']), sld),
          '', '', '', '', '', '', '', ''] for product in items]
t1 = Table(data1, colWidths=(
    4.5 * cm, 1.1 * cm, 2.0 * cm, 2.0 * cm, 2.0 * cm,
    2.0 * cm, 2.1 * cm, 2.0 * cm, 1.8 * cm))
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
# story.append(Spacer(0.5 * cm, 2.5 * cm))
story.append(PageBreak())
# CASE HISTORY: section banner followed by event/incident detail rows.
data1 = [[Paragraph('<b>CASE HISTORY OF THE CHILD</b>', styles["Line_Title"])]]
t1 = Table(data1, colWidths=(None,), rowHeights = [0.5 * cm])
t1.setStyle(TableStyle([
    ('BACKGROUND', (0, 0), (-1, -1), '#a7a5a5'),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
story.append(Spacer(0.1 * cm, .2 * cm))
# Date and place of the event/incident.
data1 = [[Paragraph('<b>Date of Event / incident:</b>', styles["Line_Label"]),
          Paragraph(ovc_data['case_date'], styles["Line_Data_Small"]),
          Paragraph('<b>Place of Event / incident:</b>', styles["Line_Label"]),
          Paragraph(ovc_data['case_place'], styles["Line_Data_Small"])
          ]]
# t1 = Table(data1, colWidths=(3 * cm, None, 4.5 * cm,))
t1 = Table(data1, colWidths=(2.9 * cm, 7.0 * cm, 2.6 * cm, 7.0 * cm), rowHeights = [1.2 * cm])
t1.setStyle(TableStyle([
    ('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black),
    ('BOX', (0, 0), (-1, -1), 0.25, colors.black),
    ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]))
story.append(t1)
# Alleged perpetrator and their relationship to the child.
data1 = [[Paragraph('<b>Alleged Perpetrator / Offender:</b>', styles["Line_Label"]),
          Paragraph(ovc_data['perpetrator'], styles["Line_Data_Small"]),
          Paragraph('<b>Relationship to the Child:</b>', styles["Line_Label"]),
          Paragraph(ovc_data['perpetrator_relation'], styles["Line_Data_Small"])
          ]]
# t1 = Table(data1, colWidths=(3 * cm, None, 4.5 * cm,))
t1 = Table(data1, colWidths=(2.9 * cm, | |
generator.samlize_token(self.ISSUER, self.RECIPIENT,
self.SUBJECT,
self.SUBJECT_DOMAIN,
self.ROLES, self.PROJECT,
self.PROJECT_DOMAIN)
saml_str = response.to_string()
response = etree.fromstring(saml_str)
issuer = response[0]
assertion = response[2]
self.assertEqual(self.RECIPIENT, response.get('Destination'))
self.assertEqual(self.ISSUER, issuer.text)
user_attribute = assertion[4][0]
self.assertEqual(self.SUBJECT, user_attribute[0].text)
user_domain_attribute = assertion[4][1]
self.assertEqual(self.SUBJECT_DOMAIN, user_domain_attribute[0].text)
role_attribute = assertion[4][2]
for attribute_value in role_attribute:
self.assertIn(attribute_value.text, self.ROLES)
project_attribute = assertion[4][3]
self.assertEqual(self.PROJECT, project_attribute[0].text)
project_domain_attribute = assertion[4][4]
self.assertEqual(self.PROJECT_DOMAIN, project_domain_attribute[0].text)
def test_assertion_using_explicit_namespace_prefixes(self):
    """The generated assertion must carry explicit saml/xmldsig prefixes."""

    def fake_check_output(*popenargs, **kwargs):
        # The signing command receives the assertion file as its last
        # argument; echo the file back unsigned, since the signature
        # itself is not under test here.
        assertion_path = popenargs[0][-1]
        with open(assertion_path, 'r') as handle:
            return handle.read()

    patcher = mock.patch.object(subprocess, 'check_output',
                                side_effect=fake_check_output)
    with patcher:
        response = keystone_idp.SAMLGenerator().samlize_token(
            self.ISSUER, self.RECIPIENT, self.SUBJECT, self.SUBJECT_DOMAIN,
            self.ROLES, self.PROJECT, self.PROJECT_DOMAIN)
        assertion_xml = response.assertion.to_string()
        # The assertion tag and both namespace declarations must appear
        # verbatim in the serialized XML.
        self.assertIn('<saml:Assertion', assertion_xml)
        self.assertIn('xmlns:saml="' + saml2.NAMESPACE + '"',
                      assertion_xml)
        self.assertIn('xmlns:xmldsig="' + xmldsig.NAMESPACE + '"',
                      assertion_xml)
def test_saml_signing(self):
"""Test that the SAML generator produces a SAML object.
Test the SAML generator directly by passing known arguments, the result
should be a SAML object that consistently includes attributes based on
the known arguments that were passed in.
"""
if not _is_xmlsec1_installed():
self.skipTest('xmlsec1 is not installed')
generator = keystone_idp.SAMLGenerator()
response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
self.SUBJECT, self.SUBJECT_DOMAIN,
self.ROLES, self.PROJECT,
self.PROJECT_DOMAIN)
signature = response.assertion.signature
self.assertIsNotNone(signature)
self.assertIsInstance(signature, xmldsig.Signature)
idp_public_key = sigver.read_cert_from_file(CONF.saml.certfile, 'pem')
cert_text = signature.key_info.x509_data[0].x509_certificate.text
# NOTE(stevemar): Rather than one line of text, the certificate is
# printed with newlines for readability, we remove these so we can
# match it with the key that we used.
cert_text = cert_text.replace(os.linesep, '')
self.assertEqual(idp_public_key, cert_text)
def _create_generate_saml_request(self, token_id, sp_id):
return {
"auth": {
"identity": {
"methods": [
"token"
],
"token": {
"id": token_id
}
},
"scope": {
"service_provider": {
"id": sp_id
}
}
}
}
def _fetch_valid_token(self):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=<PASSWORD>['password'],
project_id=self.project['id'])
resp = self.v3_create_token(auth_data)
token_id = resp.headers.get('X-Subject-Token')
return token_id
def _fetch_domain_scoped_token(self):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=<PASSWORD>['password'],
user_domain_id=self.domain['id'])
resp = self.v3_create_token(auth_data)
token_id = resp.headers.get('X-Subject-Token')
return token_id
def test_not_project_scoped_token(self):
"""Ensure SAML generation fails when passing domain-scoped tokens.
The server should return a 403 Forbidden Action.
"""
self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
token_id = self._fetch_domain_scoped_token()
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
with mock.patch.object(keystone_idp, '_sign_assertion',
return_value=self.signed_assertion):
self.post(self.SAML_GENERATION_ROUTE, body=body,
expected_status=http_client.FORBIDDEN)
    def test_generate_saml_route(self):
        """Test that the SAML generation endpoint produces XML.
        The SAML endpoint /v3/auth/OS-FEDERATION/saml2 should take as input,
        a scoped token ID, and a Service Provider ID.
        The controller should fetch details about the user from the token,
        and details about the service provider from its ID.
        This should be enough information to invoke the SAML generator and
        provide a valid SAML (XML) document back.
        """
        self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
        token_id = self._fetch_valid_token()
        body = self._create_generate_saml_request(token_id,
                                                  self.SERVICE_PROVDIER_ID)
        # Signing is mocked out so the test doesn't require xmlsec1.
        with mock.patch.object(keystone_idp, '_sign_assertion',
                               return_value=self.signed_assertion):
            http_response = self.post(self.SAML_GENERATION_ROUTE, body=body,
                                      response_content_type='text/xml',
                                      expected_status=http_client.OK)
        response = etree.fromstring(http_response.result)
        # Children [0] and [2] are read as the Issuer and Assertion elements;
        # child [4] of the assertion is presumably the AttributeStatement --
        # mirrors the layout asserted elsewhere in this class (verify against
        # the generator if the SAML layout changes).
        issuer = response[0]
        assertion = response[2]
        self.assertEqual(self.RECIPIENT, response.get('Destination'))
        self.assertEqual(self.ISSUER, issuer.text)
        # NOTE(stevemar): We should test this against expected values,
        # but the self.xyz attribute names are uuids, and we mock out
        # the result. Ideally we should update the mocked result with
        # some known data, and create the roles/project/user before
        # these tests run.
        user_attribute = assertion[4][0]
        self.assertIsInstance(user_attribute[0].text, str)
        user_domain_attribute = assertion[4][1]
        self.assertIsInstance(user_domain_attribute[0].text, str)
        role_attribute = assertion[4][2]
        self.assertIsInstance(role_attribute[0].text, str)
        project_attribute = assertion[4][3]
        self.assertIsInstance(project_attribute[0].text, str)
        project_domain_attribute = assertion[4][4]
        self.assertIsInstance(project_domain_attribute[0].text, str)
def test_invalid_scope_body(self):
"""Test that missing the scope in request body raises an exception.
Raises exception.SchemaValidationError() - error 400 Bad Request
"""
token_id = uuid.uuid4().hex
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
del body['auth']['scope']
self.post(self.SAML_GENERATION_ROUTE, body=body,
expected_status=http_client.BAD_REQUEST)
def test_invalid_token_body(self):
"""Test that missing the token in request body raises an exception.
Raises exception.SchemaValidationError() - error 400 Bad Request
"""
token_id = uuid.uuid4().hex
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
del body['auth']['identity']['token']
self.post(self.SAML_GENERATION_ROUTE, body=body,
expected_status=http_client.BAD_REQUEST)
def test_sp_not_found(self):
"""Test SAML generation with an invalid service provider ID.
Raises exception.ServiceProviderNotFound() - error Not Found 404
"""
sp_id = uuid.uuid4().hex
token_id = self._fetch_valid_token()
body = self._create_generate_saml_request(token_id, sp_id)
self.post(self.SAML_GENERATION_ROUTE, body=body,
expected_status=http_client.NOT_FOUND)
def test_sp_disabled(self):
"""Try generating assertion for disabled Service Provider."""
# Disable Service Provider
sp_ref = {'enabled': False}
self.federation_api.update_sp(self.SERVICE_PROVDIER_ID, sp_ref)
token_id = self._fetch_valid_token()
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
self.post(self.SAML_GENERATION_ROUTE, body=body,
expected_status=http_client.FORBIDDEN)
def test_token_not_found(self):
"""Test that an invalid token in the request body raises an exception.
Raises exception.TokenNotFound() - error Not Found 404
"""
token_id = uuid.uuid4().hex
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
self.post(self.SAML_GENERATION_ROUTE, body=body,
expected_status=http_client.NOT_FOUND)
    def test_generate_ecp_route(self):
        """Test that the ECP generation endpoint produces XML.
        The ECP endpoint /v3/auth/OS-FEDERATION/saml2/ecp should take the same
        input as the SAML generation endpoint (scoped token ID + Service
        Provider ID).
        The controller should return a SAML assertion that is wrapped in a
        SOAP envelope.
        """
        self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
        token_id = self._fetch_valid_token()
        body = self._create_generate_saml_request(token_id,
                                                  self.SERVICE_PROVDIER_ID)
        # Signing is mocked out so the test doesn't require xmlsec1.
        with mock.patch.object(keystone_idp, '_sign_assertion',
                               return_value=self.signed_assertion):
            http_response = self.post(self.ECP_GENERATION_ROUTE, body=body,
                                      response_content_type='text/xml',
                                      expected_status=http_client.OK)
        # SOAP envelope: child [0] is the Header, child [1] the Body.
        env_response = etree.fromstring(http_response.result)
        header = env_response[0]
        # Verify the relay state starts with 'ss:mem'
        prefix = CONF.saml.relay_state_prefix
        self.assertThat(header[0].text, matchers.StartsWith(prefix))
        # Verify that the content in the body matches the expected assertion
        body = env_response[1]
        response = body[0]
        issuer = response[0]
        assertion = response[2]
        self.assertEqual(self.RECIPIENT, response.get('Destination'))
        self.assertEqual(self.ISSUER, issuer.text)
        # assertion[4] is presumably the AttributeStatement -- same layout as
        # test_generate_saml_route; verify against the generator if it changes.
        user_attribute = assertion[4][0]
        self.assertIsInstance(user_attribute[0].text, str)
        user_domain_attribute = assertion[4][1]
        self.assertIsInstance(user_domain_attribute[0].text, str)
        role_attribute = assertion[4][2]
        self.assertIsInstance(role_attribute[0].text, str)
        project_attribute = assertion[4][3]
        self.assertIsInstance(project_attribute[0].text, str)
        project_domain_attribute = assertion[4][4]
        self.assertIsInstance(project_domain_attribute[0].text, str)
    @mock.patch('saml2.create_class_from_xml_string')
    @mock.patch('oslo_utils.fileutils.write_to_tempfile')
    @mock.patch.object(subprocess, 'check_output')
    def test__sign_assertion(self, check_output_mock,
                             write_to_tempfile_mock, create_class_mock):
        """_sign_assertion parses the xmlsec1 output back into an Assertion."""
        # Decorators are applied bottom-up, so check_output_mock is the
        # innermost patch and create_class_mock the outermost.
        write_to_tempfile_mock.return_value = 'tmp_path'
        check_output_mock.return_value = 'fakeoutput'
        keystone_idp._sign_assertion(self.signed_assertion)
        # The signing output must be fed to saml2 to rebuild the Assertion.
        create_class_mock.assert_called_with(saml.Assertion, 'fakeoutput')
    @mock.patch('oslo_utils.fileutils.write_to_tempfile')
    @mock.patch.object(subprocess, 'check_output')
    def test__sign_assertion_exc(self, check_output_mock,
                                 write_to_tempfile_mock):
        # If the command fails the command output is logged.
        write_to_tempfile_mock.return_value = 'tmp_path'
        sample_returncode = 1
        sample_output = self.getUniqueString()
        # Simulate xmlsec1 exiting non-zero with some diagnostic output.
        check_output_mock.side_effect = subprocess.CalledProcessError(
            returncode=sample_returncode, cmd=CONF.saml.xmlsec1_binary,
            output=sample_output)
        logger_fixture = self.useFixture(fixtures.LoggerFixture())
        self.assertRaises(exception.SAMLSigningError,
                          keystone_idp._sign_assertion,
                          self.signed_assertion)
        # The log line embeds str(CalledProcessError) plus the captured output.
        expected_log = (
            "Error when signing assertion, reason: Command '%s' returned "
            "non-zero exit status %s %s\n" %
            (CONF.saml.xmlsec1_binary, sample_returncode, sample_output))
        self.assertEqual(expected_log, logger_fixture.output)
    @mock.patch('oslo_utils.fileutils.write_to_tempfile')
    def test__sign_assertion_fileutils_exc(self, write_to_tempfile_mock):
        """A tempfile-creation failure is logged and wrapped in SAMLSigningError."""
        exception_msg = 'fake'
        write_to_tempfile_mock.side_effect = Exception(exception_msg)
        logger_fixture = self.useFixture(fixtures.LoggerFixture())
        self.assertRaises(exception.SAMLSigningError,
                          keystone_idp._sign_assertion,
                          self.signed_assertion)
        expected_log = (
            'Error when signing assertion, reason: %s\n' % exception_msg)
        self.assertEqual(expected_log, logger_fixture.output)
class IdPMetadataGenerationTests(test_v3.RestfulTestCase):
    """A class for testing Identity Provider Metadata generation."""
    # Route that serves the IdP metadata document.
    METADATA_URL = '/OS-FEDERATION/saml2/metadata'
    def setUp(self):
        """Create the MetadataGenerator under test."""
        super(IdPMetadataGenerationTests, self).setUp()
        self.generator = keystone_idp.MetadataGenerator()
    def config_overrides(self):
        """Populate every [saml] IdP option from the federation fixtures."""
        super(IdPMetadataGenerationTests, self).config_overrides()
        self.config_fixture.config(
            group='saml',
            idp_entity_id=federation_fixtures.IDP_ENTITY_ID,
            idp_sso_endpoint=federation_fixtures.IDP_SSO_ENDPOINT,
            idp_organization_name=federation_fixtures.IDP_ORGANIZATION_NAME,
            idp_organization_display_name=(
                federation_fixtures.IDP_ORGANIZATION_DISPLAY_NAME),
            idp_organization_url=federation_fixtures.IDP_ORGANIZATION_URL,
            idp_contact_company=federation_fixtures.IDP_CONTACT_COMPANY,
            idp_contact_name=federation_fixtures.IDP_CONTACT_GIVEN_NAME,
            idp_contact_surname=federation_fixtures.IDP_CONTACT_SURNAME,
            idp_contact_email=federation_fixtures.IDP_CONTACT_EMAIL,
            idp_contact_telephone=(
                federation_fixtures.IDP_CONTACT_TELEPHONE_NUMBER),
            idp_contact_type=federation_fixtures.IDP_CONTACT_TYPE)
    def test_check_entity_id(self):
        """The generated metadata advertises the configured entity ID."""
        metadata = self.generator.generate_metadata()
        self.assertEqual(federation_fixtures.IDP_ENTITY_ID, metadata.entity_id)
    def test_metadata_validity(self):
        """Call md.EntityDescriptor method that does internal verification."""
        self.generator.generate_metadata().verify()
    def test_serialize_metadata_object(self):
        """Check whether serialization doesn't raise any exceptions."""
        self.generator.generate_metadata().to_string()
        # TODO(marek-denis): Check values here
    def test_check_idp_sso(self):
        """SSO endpoint, organization and contact data match the fixtures."""
        metadata = self.generator.generate_metadata()
        idpsso_descriptor = metadata.idpsso_descriptor
        self.assertIsNotNone(metadata.idpsso_descriptor)
        self.assertEqual(federation_fixtures.IDP_SSO_ENDPOINT,
                         idpsso_descriptor.single_sign_on_service.location)
        self.assertIsNotNone(idpsso_descriptor.organization)
        organization = idpsso_descriptor.organization
        self.assertEqual(federation_fixtures.IDP_ORGANIZATION_DISPLAY_NAME,
                         organization.organization_display_name.text)
        self.assertEqual(federation_fixtures.IDP_ORGANIZATION_NAME,
                         organization.organization_name.text)
        self.assertEqual(federation_fixtures.IDP_ORGANIZATION_URL,
                         organization.organization_url.text)
        self.assertIsNotNone(idpsso_descriptor.contact_person)
        contact_person = idpsso_descriptor.contact_person
        self.assertEqual(federation_fixtures.IDP_CONTACT_GIVEN_NAME,
                         contact_person.given_name.text)
        self.assertEqual(federation_fixtures.IDP_CONTACT_SURNAME,
                         contact_person.sur_name.text)
        self.assertEqual(federation_fixtures.IDP_CONTACT_EMAIL,
                         contact_person.email_address.text)
        self.assertEqual(federation_fixtures.IDP_CONTACT_TELEPHONE_NUMBER,
                         contact_person.telephone_number.text)
        self.assertEqual(federation_fixtures.IDP_CONTACT_TYPE,
                         contact_person.contact_type)
    def test_metadata_no_organization(self):
        """Organization options unset -> no organization element emitted."""
        self.config_fixture.config(
            group='saml',
            idp_organization_display_name=None,
            idp_organization_url=None,
            idp_organization_name=None)
        metadata = self.generator.generate_metadata()
        idpsso_descriptor = metadata.idpsso_descriptor
        self.assertIsNotNone(metadata.idpsso_descriptor)
        self.assertIsNone(idpsso_descriptor.organization)
        self.assertIsNotNone(idpsso_descriptor.contact_person)
    def test_metadata_no_contact_person(self):
        """Contact options unset -> contact_person list is empty."""
        self.config_fixture.config(
            group='saml',
            idp_contact_name=None,
            idp_contact_surname=None,
            idp_contact_email=None,
            idp_contact_telephone=None)
        metadata = self.generator.generate_metadata()
        idpsso_descriptor = metadata.idpsso_descriptor
        self.assertIsNotNone(metadata.idpsso_descriptor)
        self.assertIsNotNone(idpsso_descriptor.organization)
        self.assertEqual([], idpsso_descriptor.contact_person)
    def test_metadata_invalid_contact_type(self):
        """An unknown contact type is rejected with ValidationError."""
        self.config_fixture.config(
            group='saml',
            idp_contact_type="invalid")
        self.assertRaises(exception.ValidationError,
                          self.generator.generate_metadata)
    def test_metadata_invalid_idp_sso_endpoint(self):
        """A missing SSO endpoint is rejected with ValidationError."""
        self.config_fixture.config(
            group='saml',
            idp_sso_endpoint=None)
        self.assertRaises(exception.ValidationError,
                          self.generator.generate_metadata)
    def test_metadata_invalid_idp_entity_id(self):
        """A missing entity ID is rejected with ValidationError."""
        self.config_fixture.config(
            group='saml',
            idp_entity_id=None)
        self.assertRaises(exception.ValidationError,
                          self.generator.generate_metadata)
    def test_get_metadata_with_no_metadata_file_configured(self):
        """Without idp_metadata_path configured the endpoint returns 500."""
        self.get(self.METADATA_URL,
                 expected_status=http_client.INTERNAL_SERVER_ERROR)
    def test_get_metadata(self):
        """The endpoint serves the configured metadata file as text/xml."""
        self.config_fixture.config(
            group='saml', idp_metadata_path=XMLDIR + '/idp_saml2_metadata.xml')
        r = self.get(self.METADATA_URL, response_content_type='text/xml')
        self.assertEqual('text/xml', r.headers.get('Content-Type'))
        reference_file = _load_xml('idp_saml2_metadata.xml')
        self.assertEqual(reference_file, r.result)
class ServiceProviderTests(test_v3.RestfulTestCase):
"""A test class for Service Providers."""
MEMBER_NAME = 'service_provider'
COLLECTION_NAME = 'service_providers'
SERVICE_PROVIDER_ID = 'ACME'
SP_KEYS = ['auth_url', 'id', 'enabled', 'description',
'relay_state_prefix', 'sp_url']
    def setUp(self):
        """Seed one Service Provider (ID 'ACME') for the tests to use."""
        super(ServiceProviderTests, self).setUp()
        # Add a Service Provider
        url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
        self.SP_REF = self.sp_ref()
        self.SERVICE_PROVIDER = self.put(
            url, body={'service_provider': self.SP_REF},
            expected_status=http_client.CREATED).result
def sp_ref(self):
ref = {
'auth_url': 'https://' + uuid.uuid4().hex + '.com',
'enabled': True,
'description': uuid.uuid4().hex,
'sp_url': 'https://' + uuid.uuid4().hex + '.com',
'relay_state_prefix': CONF.saml.relay_state_prefix
}
return ref
def base_url(self, suffix=None):
if suffix is not None:
return '/OS-FEDERATION/service_providers/' + str(suffix)
return '/OS-FEDERATION/service_providers'
def _create_default_sp(self, body=None):
"""Create default Service Provider."""
url = self.base_url(suffix=uuid.uuid4().hex)
if body is None:
body = self.sp_ref()
resp = self.put(url, body={'service_provider': body},
expected_status=http_client.CREATED)
return resp
def test_get_service_provider(self):
url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
resp = self.get(url)
self.assertValidEntity(resp.result['service_provider'],
keys_to_check=self.SP_KEYS)
def test_get_service_provider_fail(self):
url = self.base_url(suffix=uuid.uuid4().hex)
self.get(url, expected_status=http_client.NOT_FOUND)
def test_create_service_provider(self):
url = self.base_url(suffix=uuid.uuid4().hex)
sp = self.sp_ref()
resp = self.put(url, body={'service_provider': sp},
expected_status=http_client.CREATED)
self.assertValidEntity(resp.result['service_provider'],
keys_to_check=self.SP_KEYS)
def test_create_sp_relay_state_default(self):
"""Create an SP without relay state, should default to `ss:mem`."""
url = self.base_url(suffix=uuid.uuid4().hex)
sp = self.sp_ref()
del sp['relay_state_prefix']
resp = self.put(url, body={'service_provider': sp},
| |
# <gh_stars>0
import collections
import re
import sys
from collections import defaultdict
from pprint import pprint
def pm(lvl):
    """Print a level -- one joined row of characters per line."""
    for line in lvl:
        print(''.join(line))
def get_level(lines):
    """Parse raw input lines into a grid, padding short lines with '_'.

    Returns (m, w, h, m2): w is the longest line length, h the line count.

    NOTE(review): m and m2 are filled with the *same* row lists (t is
    appended to both), so mutating one grid mutates the other.  Also the
    outer loop runs i over the width but indexes lines[i] -- presumably
    only safe for inputs where width <= number of lines; TODO confirm
    against the real puzzle input.
    """
    # Find the widest line.
    w = 0
    h = len(lines)
    for i in range(0, len(lines)):
        if len(lines[i]) > w:
            w = len(lines[i])
    m = []
    m2 = []
    for i in range(0, w):
        t = []
        for j in range(0, h):
            mw = len(lines[i])
            # Pad positions past the end of a short line with '_'.
            if (j >= mw and mw < w):
                t.append("_")
            else:
                t.append(lines[i][j])
        m.append(t)
        m2.append(t)
    return (m, w, h, m2)
def neighbors(cell, level):
    """Return the four orthogonal neighbor coordinates of *cell*.

    *level* is accepted for interface compatibility but never consulted;
    no bounds checking is performed.
    """
    x, y = cell
    return [[x - 1, y], [x + 1, y], [x, y - 1], [x, y + 1]]
def find_portals(level, w, h):
    """Scan the grid for two-letter portal labels (part 1, single layer).

    Returns (rv2, rv3): rv2 maps a label tuple to the '.' cell adjacent to
    that label (two-ended portals get entries under both (a, b) and
    (b, a)); rv3 is the reverse map from a '.' coordinate to its label.

    NOTE(review): rv values start life as a single [x, y] list and grow by
    append when the same label pair is seen again, so len(rv[r]) encodes
    how many ends were found (2 = one end, 3 = two ends) -- fragile but
    relied on below.
    """
    rv = {}
    def m(s):
        # A portal label character is a single capital letter.
        return re.match(r"[A-Z]", s)
    for x in range(1, w-1):
        for y in range(1,h-1):
            c = level[x][y]
            ne = neighbors((x,y), level)
            if m(c):
                # c1 becomes the walkable '.' cell touching this letter.
                found = False
                c1 = None
                for n in ne:
                    if level[n[0]][n[1]] == '.':
                        found = True
                        c1 = n
                if found:
                    # sec is the second letter of the label, if adjacent.
                    sec = None
                    c2 = None
                    for n in ne:
                        if m(level[n[0]][n[1]]):
                            sec = level[n[0]][n[1]]
                            c2 = n
                    if sec:
                        if (c,sec) in rv:
                            rv[(c,sec)].append(c1)
                        else:
                            rv[(c,sec)] = c1
    #print(rv)
    #print("llllllllllllllllllllllllllllllllll")
    # Normalize: split two-ended portals into forward and reversed keys.
    rv2 = {}
    for r in rv:
        if len(rv[r]) == 2:
            rv2[r] = rv[r]
        elif len(rv[r]) == 3:
            rv2[r] = [rv[r][0], rv[r][1]]
            k = (r[1],r[0])
            rv2[k] = rv[r][2]
        else:
            print("ERROR",rv[r])
    # Reverse index: portal-end coordinate -> label tuple.
    rv3 = {}
    for k in rv2.keys():
        c = rv2[k]
        e = k
        rv3[(c[0],c[1])] = e
    return rv2, rv3
def is_inner(grid, w, h, x, y):
    """Return whether (x, y) lies on the inner ring of the donut maze.

    *grid* is unused.  The 33/40 cutoffs are tuned to the specific puzzle
    input's hole position rather than derived from the grid.
    """
    return (3 <= x <= 33 and 3 <= y <= 40
            and x + 3 <= w and y + 3 <= h)
def find_portals3(level0, w, h, layer, level1x):
    """Scan for portal labels on one recursion *layer* (part 2).

    Layer 0 scans level0, other layers scan level1x.  Each portal end is
    tagged with the layer it leads to: inner-ring ends go down (layer+1),
    outer-ring ends go up (layer-1).  Returns (portal-map, reverse map
    from coordinate to label), with a special shape for layer 0.
    """
    rv = {}
    if layer == 0:
        level = level0
    else:
        level = level1x
    def m(s):
        # A portal label character is a single capital letter.
        return re.match(r"[A-Z]", s)
    for x in range(1, w-1):
        for y in range(1,h-1):
            c = level[x][y]
            ne = neighbors((x,y), level)
            if m(c):
                found = False
                c1 = None
                for n in ne:
                    if level[n[0]][n[1]] == '.':
                        found = True
                        if is_inner(level, w, h, n[0], n[1]):
                            # NOTE(review): lx is computed but never used.
                            if layer == 0:
                                lx = 1
                            else:
                                lx = layer +1
                            c1 = [n[0],n[1],layer+1]
                        else:
                            c1 = [n[0],n[1],layer-1]
                if found:
                    sec = None
                    c2 = None
                    for n in ne:
                        if m(level[n[0]][n[1]]):
                            sec = level[n[0]][n[1]]
                            c2 = (n[0],n[1],layer)
                    if sec:
                        if (c,sec,layer) in rv:
                            #print(rv[(c,sec,layer)])
                            #print(c1)
                            rv[(c,sec, layer)].append(c1)
                        else:
                            rv[(c,sec, layer)] = c1
    #pprint(rv)
    #print("llllllllllllllllllllllllllllllllll")
    if layer == 0:
        # Layer 0 keeps raw entries; the reverse map targets layer 1.
        rv3 = {}
        for k in rv.keys():
            c = rv[k]
            e = k
            rv3[(c[0],c[1],layer+1)] = e
        #pprint(rv3)
        rv1 = {}
        for k in rv.keys():
            rv1[(k[1],k[0],k[2])] = rv[k]
        #pprint(rv1)
        #for k in rv1:
        #    rv[k] = rv1[k]
        return rv, rv3
    # Deeper layers: split two-ended portals into forward/reversed keys,
    # same len() encoding as find_portals but with a trailing layer field.
    rv2 = {}
    for r in rv:
        if len(rv[r]) == 3:
            rv2[r] = rv[r]
        elif len(rv[r]) == 4:
            rv2[r] = [rv[r][0], rv[r][1], rv[r][2]]
            k = (r[1],r[0], layer)
            rv2[k] = rv[r][3]
        else:
            print("ERROR",rv[r])
    rv3 = {}
    #pprint(rv2)
    for k in rv2.keys():
        c = rv2[k]
        e = k
        rv3[(c[0],c[1],layer)] = e
    return rv2, rv3
def bfs(grid, width, height, start, end):
    """Breadth-first search from *start* to *end* on a character grid.

    Cells holding '#' or 'B' are walls; the grid is indexed grid[y][x].
    Returns the full path as a list of (x, y) tuples including *start*,
    or None when *end* is unreachable.
    """
    origin = (start[0], start[1])
    frontier = collections.deque([[origin]])
    visited = set([origin])
    while frontier:
        trail = frontier.popleft()
        cx, cy = trail[-1]
        if cx == end[0] and cy == end[1]:
            return trail
        for nx, ny in ((cx+1, cy), (cx-1, cy), (cx, cy+1), (cx, cy-1)):
            if (0 <= nx < width and 0 <= ny < height
                    and grid[ny][nx] != '#' and grid[ny][nx] != "B"
                    and (nx, ny) not in visited):
                frontier.append(trail + [(nx, ny)])
                visited.add((nx, ny))
def bfs2(grid, width, height, start, end, polookup, pocoords):
    """BFS that additionally jumps through portals (part 1 answer).

    polookup maps a label tuple to the coordinate of the opposite end;
    pocoords maps a (row, col) coordinate to its label.  A portal jump
    prepends a dummy (0, 0) node so len(path)-1 still counts one extra
    step for taking the portal.

    NOTE(review): ``cell = grid[y2][x2]`` is read *before* the bounds
    check, so negative indices wrap around and x2 == width would raise --
    presumably the surrounding border of labels/walls prevents this in
    practice; TODO confirm.
    """
    start = (start[0], start[1])
    queue = collections.deque([[start]])
    seen = set([start])
    while queue:
        path = queue.popleft()
        x, y = path[-1]
        if x == end[0] and y == end[1]:
            return path
        for x2, y2 in ((x+1,y), (x-1,y), (x,y+1), (x,y-1)):
            seen2 = set()
            cell = grid[y2][x2]
            # Walls and label letters are not walkable.
            if 0 <= x2 < width and 0 <= y2 < height and cell != '#' and not re.match(r"[A-Z]", cell) and (x2, y2) not in seen:
                queue.append(path + [(x2, y2)])
                #print("XX", x2,y2)
                seen2.add((x2, y2))
                poc = (y2, x2)
                if poc in pocoords:
                    pol = pocoords[poc]
                    #print("Found portal",poc,"=",pol)
                    #print("P",path)
                    #print("S1",seen)
                    #print("S2",seen2)
                    #print("Q",queue)
                    #print("===")
                    # Pick whichever orientation of the label keys the map.
                    if cell == pol[0]:
                        other_end = polookup[pol]
                    else:
                        other_end = polookup[(pol[1],pol[0])]
                    #print("TO", other_end)
                    nn = path + [(other_end[1], other_end[0])]
                    nn.insert(0, (0,0))
                    #print("NN",nn)
                    queue.append(nn)
                    seen2.add((x2, y2))
            for s in seen2:
                seen.add(s)
def bfs3(grid0, grid1, width, height, start, end, numlayers=10):
    """BFS over the recursive (layered) maze of part 2.

    Precomputes per-layer portal tables with find_portals3, then searches
    states (x, y, layer), taking inner portals down a layer and outer
    portals up a layer.  Returns the path when *end* is reached on
    layer 0, or None if the queue empties (search depth is capped at
    *numlayers* levels of recursion).  Very verbose: prints every
    expanded path and portal transition.
    """
    # Build portal lookup/coordinate tables, one entry per layer 0..numlayers.
    polookup = []
    pocoords = []
    (polookup0, pocoords0) = find_portals3(grid0, width, height, 0, grid1)
    pprint(polookup0)
    pprint(pocoords0)
    polookup.append(polookup0)
    pocoords.append(pocoords0)
    print("#######EOF 0")
    for i in range(1,numlayers+1):
        (polookup1, pocoords1) = find_portals3(grid0, width, height, i, grid1)
        pprint(polookup1)
        pprint(pocoords1)
        polookup.append(polookup1)
        pocoords.append(pocoords1)
        #pprint(polookup[1])
        #pprint(pocoords[1])
        print("#######EOF",i)
    start = (start[0], start[1], 0)
    queue = collections.deque([[start]])
    seen = set([start])
    while queue:
        path = queue.popleft()
        print("P",path)
        x, y, z = path[-1]
        # The exit only counts on the outermost layer.
        if x == end[0] and y == end[1] and z == 0:
            return path
        for x2, y2, z2 in ((x+1,y,z), (x-1,y,z), (x,y+1,z), (x,y-1,z)):
            #seen2 = set()
            #print("XXX",y2,x2)
            if 0 <= x2 < width and 0 <= y2 < height:
                cell = grid0[y2][x2]
                if cell != '#' and not re.match(r"[A-Z]", cell) and (x2, y2, z2) not in seen:
                    queue.append(path + [(x2, y2, z2)])
                    #print("XX", x2,y2)
                    seen.add((x2, y2, z2))
                #poc = (y2, x2, z2)
                #if poc in pocoords[z2]:
                #    pol = pocoords[z2][poc]
                #    print("Found portal",z2,poc,"=",pol)
                #    #print("P",path)
                #    #print("S1",seen)
                #    #print("S2",seen2)
                #    #print("Q",queue)
                #    #print("===")
                #    if cell == pol[0]:
                #        other_end = polookup[z2][pol]
                #    else:
                #        try:
                #            other_end = polookup[z2][(pol[1],pol[0],z2)]
                #        except KeyError:
                #            other_end = polookup[z2][(pol[0],pol[1],z2)]
                #    print("TO", other_end)
                #    nn = path + [(other_end[1], other_end[0], z2)]
                #    nn.insert(0, (0,0,0))
                #
                #    #print("NN",nn)
                #    queue.append(nn)
                #
                #    seen2.add((x2, y2, z2))
                #    #seen2.add((x2, y2, z2+1))
                # Outward portal: step from layer z2 up to z2-1.
                pocdown = (y2, x2, z2-1)
                if z2-1 >=0 and pocdown in pocoords[z2-1]:
                    pol = pocoords[z2-1][pocdown]
                    print("Found portal",z2,">",z2-1,pocdown,"=",pol)
                    #if cell == pol[0]:
                    #    other_end = polookup[z2][pol]
                    #else:
                    #    other_end = polookup[z2][(pol[1],pol[0],z2)]
                    other_end = polookup[z2][(pol[1],pol[0],z2)]
                    # NOTE(review): mutates the shared lookup entry in place.
                    other_end[2] = z2-1
                    print("TO", other_end)
                    nn2 = path + [(other_end[1], other_end[0], z2-1)]
                    #nn2.insert(0, (0, 0, 0))
                    queue.append(nn2)
                # Inward portal: step from layer z2 down to z2+1.
                pocup = (y2, x2, z2+1)
                #print("POCUP",pocup)
                if z2+1 <= numlayers and pocup in pocoords[z2+1]:
                    pol = pocoords[z2+1][pocup]
                    print("Found portal",z2,">",z2+1,pocup,"=",pol)
                    # pocup 28 17 2
                    # pol X F 2
                    # want 21 2 2
                    other_end = polookup[z2][(pol[1],pol[0],z2)]
                    #if cell == pol[0]:
                    #    other_end = polookup[z2][pol]
                    #else:
                    #    other_end = polookup[z2][(pol[1],pol[0],z2)]
                    # NOTE(review): mutates the shared lookup entry in place.
                    other_end[2] = z2+1
                    print("TO", other_end)
                    nn2 = path + [(other_end[1], other_end[0], z2+1)]
                    #nn2.insert(0, (0, 0, 0))
                    queue.append(nn2)
            #for s in seen2:
            #    seen.add(s)
    #return
    # for x2, y2, z2 in ((x+1,y,z+1), (x-1,y,z+1), (x,y+1,z+1), (x,y-1,z+1)):
    #     if z2 > 1:
    #         continue
    #     if z2 < 1:
    #         continue
    #     seen2 = set()
    #     cell = grid1[y2][x2]
    #     if 0 <= x2 < width and 0 <= y2 < height and cell != '#' and not re.match(r"[A-Z]", cell) and (x2, y2, z2) not in seen:
    #         queue.append(path + [(x2, y2, z2)])
    #         #print("XX", x2,y2)
    #         seen2.add((x2, y2, z2))
    #         if poc in pocoords[z2]:
    #             pol = pocoords[z2][poc]
    #             #print("Found portal",poc,"=",pol)
    #             #print("P",path)
    #             #print("S1",seen)
    #             #print("S2",seen2)
    #             #print("Q",queue)
    #             #print("===")
    #             if cell == pol[0]:
    #                 other_end = polookup[z2][pol]
    #             else:
    #                 try:
    #                     other_end = polookup[z2][(pol[1],pol[0],z2)]
    #                 except KeyError:
    #                     other_end = polookup[z2][(pol[0],pol[1],z2)]
    #             #print("TO", other_end)
    #             nn = path + [(other_end[1], other_end[0]), z2]
    #             nn.insert(0, (0,0,0))
    #             #print("NN",nn)
    #             queue.append(nn)
    #             seen2.add((x2, y2, z2))
    #     for s in seen2:
    #         seen.add(s)
def find_se(level, w, h):
    """Locate the '@' (start) and '$' (end) markers in a column-major grid.

    Returns a ((sx, sy), (ex, ey)) pair of coordinate tuples; a missing
    marker is reported as None.
    """
    found_start = None
    found_end = None
    for col in range(0, h):
        for row in range(0, w):
            mark = level[row][col]
            if mark == "@":
                found_start = (col, row)
            elif mark == "$":
                found_end = (col, row)
    return (found_start, found_end)
def naive(level, start, end):
    # Unimplemented placeholder for a naive search strategy; not called
    # from any code visible in this file.
    pass
def main(lines, lines2):
    """Run part 1: a plain BFS followed by a portal-aware BFS.

    *lines2* is accepted for interface parity but not used here.
    """
    lvl, w, h, lvl2 = get_level(lines)
    pm(lvl)
    print(w, h)
    start, end = find_se(lvl2, w, h)
    print("S", start)
    print("E", end)
    plain_path = bfs(lvl2, w, h, start, end)
    print(plain_path)
    print("BFS1", len(plain_path) - 1)
    pol, poc = find_portals(lvl, w, h)
    print("###########")
    portal_path = bfs2(lvl2, w, h, start, end, pol, poc)
    print(portal_path)
    print("BFS2", len(portal_path) - 1)
def main2(lines, lines2):
(_, w, h, lvl0) = get_level(lines)
(_, _, _, lvl1) = get_level(lines2)
(start,end) = find_se(lvl0, | |
from __future__ import print_function
from six.moves import xrange
import theano
from theano.tensor import basic as T
import numpy as N
#from util import strutil
from theano.tensor.blas_headers import blas_header_text, blas_header_version
from theano.tensor.blas import ldflags
from theano.misc import strutil
from theano.gradient import grad_undefined
# Note: not a true convolution because we don't bother with flipping the kernel
# An op that takes a weight tensor W. a bias vector b, and a visible tensor V, produces a hidden unit tensor H
# Also parameterized by integer strides dr,dc,dt
# H[i,r,c,t,j] = video i within the minibatch, feature map j, location and time within feature map (r,c,t)
# W[j,k,l,m,z] = weights connecting H[i,r,c,t,j] to V[i,dr*r+k,dc*c+l,dt*t+m,z]
# b[j] = bias of feature map j
# V[i,r,c,t,j] = pixel at (r,c,t) within video featuremap j of video i within the minibatch
# i.e., H[i,j,r,c,t] = b_j + sum_k sum_l sum_m sum_z W[j,k,l,m,z] V[i,z, dr*r+k,dc*c+l,dt*t+m]
# The layouts of these variables are chosen to improve locality of reference.
# numpy seems to put the largest stride on axis 0 and decrease the stride from there. If we do convolution
# one filter at a time, one example at a time, then we want the largest strides to
# be over the examples. We want the smallest stride to be over the input channel because as we change
# the channel we re-visit the same location in the input.
# The smallest stride being over the input channel means that the weights need to be formatted with the input
# channel as the last index
# partial C / partial b_j = sum_i sum_k sum_r sum_c sum_t (partial C / partial H[i,r,c,t,k] ) * ( partial H[i,r,c,t,k] / partial b_j )
# = sum_i sum_k sum_r sum_c sum_t (partial C / partial H[i,r,c,t,k] ) * delta(k = j)
# = sum_i sum_r sum_c sum_t (partial C / partial H[i,r,c,t,j] )
# partial C / partial W[j,k,l,m,z] = sum_i sum_n sum_p sum_q sum_r (partial C /partial H[i,p,q,r,n] ) * (partial H[i,p,q,r,n] / partial W[j,k,l,m,z])
# = partial C / partial W[j,k,l,m,z] = sum_i sum_n sum_p sum_q sum_r (partial C /partial H[i,p,q,r,n] ) *
# (partial sum_s sum_u sum_v sum_a W[n,a, s,u,v] V[i, dr*p+s,dc*q+u,dt*r+v, a] ) / partial W[j,k,l,m,z])
# = partial C / partial W[j,k,l,m,z] = sum_i sum_p sum_q sum_r (partial C /partial H[i,p,q,r,j] ) *
# (partial sum_s sum_u sum_v sum_a W[j,a, s,u,v] V[i,dr*p+s,dc*q+u,dt*r+v,a] ) / partial W[j,k,l,m,z])
# = partial C / partial W[j,k,l,m,z] = sum_i sum_p sum_q sum_r (partial C /partial H[i,p,q,r,j] ) * V[i,dr*p+k,dc*q+l,dt*r+m,z]
# derivatives wrt V unimplemented for now. derivatives wrt dr, dc, dt are undefined since
# the output function is only defined when dr, dc, dt are natural numbers.
class Conv3D(theano.Op):
""" 3D `convolution` of multiple filters on a minibatch
:note: does not flip the kernel, moves kernel with a user specified stride
"""
    def __eq__(self, other):
        # Conv3D carries no parameters, so any two instances of the exact
        # same type are interchangeable.
        return type(self) == type(other)
    def __hash__(self):
        # Must agree with __eq__, which compares by type only.
        return hash(type(self))
    def __str__(self):
        # Short display name used in graph printouts.
        return "Conv3D"
    def c_code_cache_version(self):
        # Bump the leading number to invalidate cached compiled C code; the
        # second component tracks changes to the BLAS headers.
        return (3, blas_header_version())
    def make_node(self, V, W, b, d):
        """
        :param V: Visible unit, input(batch,row,column,time,in channel)
        :param W: Weights, filter(out channel,row,column,time,in channel)
        :param b: bias, shape == (W.shape[0],)
        :param d: strides when moving the filter over the input(dx,dy,dt)
        :return: an Apply node whose single output H has shape
            (batch, out row, out column, out time, out channel)
        """
        V_ = T.as_tensor_variable(V)
        W_ = T.as_tensor_variable(W)
        b_ = T.as_tensor_variable(b)
        d_ = T.as_tensor_variable(d)
        # Output may broadcast only along the batch axis (from V) and the
        # output-channel axis (from W); spatial/time axes never broadcast.
        bcast = (V_.broadcastable[0], False, False, False, W_.broadcastable[0])
        node = theano.Apply(self, inputs=[V_, W_, b_, d_],
                            outputs=[T.TensorType(V_.dtype, bcast)()])
        return node
    def grad(self, inputs, output_gradients):
        """Return gradients [dCdV, dCdW, dCdb, dCdd] of the cost wrt inputs.

        dCdV/dCdW are delegated to the companion ops ConvTransp3D and
        ConvGrad3D (defined elsewhere in this module's package); dCdb is a
        plain sum over all non-channel axes; the gradient wrt the integer
        strides d is undefined.
        """
        V, W, b, d = inputs
        dCdH , = output_gradients
        # TODO: make all of these ops support broadcasting of scalar b to
        # vector b and replace the zeros_like in all their grads
        # print dCdH.broadcastable
        # print "dCdH.broadcastable"
        # quit(-1)
        #dCdH = printing.Print("dCdH = ",["shape"])
        # Make sure the broadcasting pattern of the gradient is the same
        # as the initial variable
        dCdV = ConvTransp3D.convTransp3D(W, T.zeros_like(V[0, 0, 0, 0, :]), d, dCdH, V.shape[1:4])
        dCdV = T.patternbroadcast(dCdV, V.broadcastable)
        WShape = W.shape
        dCdW = ConvGrad3D.convGrad3D(V, d, WShape, dCdH)
        dCdW = T.patternbroadcast(dCdW, W.broadcastable)
        # Bias grad: sum dCdH over batch, row, column and time axes.
        dCdb = T.sum(dCdH, axis=(0, 1, 2, 3))
        dCdb = T.patternbroadcast(dCdb, b.broadcastable)
        dCdd = grad_undefined(self, 3, inputs[3],
                              "The gradient of Conv3D with respect to the convolution" +\
                              " stride is undefined because Conv3D is only defined for" +\
                              " integer strides.")
        # Name the gradient variables after their inputs for debuggability.
        if 'name' in dir(dCdH) and dCdH.name is not None:
            dCdH_name = dCdH.name
        else:
            dCdH_name = 'anon_dCdH'
        if 'name' in dir(V) and V.name is not None:
            V_name = V.name
        else:
            V_name = 'anon_V'
        if 'name' in dir(W) and W.name is not None:
            W_name = W.name
        else:
            W_name = 'anon_W'
        if 'name' in dir(b) and b.name is not None:
            b_name = b.name
        else:
            b_name = 'anon_b'
        dCdV.name = 'Conv3D_dCdV(dCdH='+dCdH_name+',V='+V_name+')'
        dCdW.name = 'Conv3D_dCdW(dCdH='+dCdH_name+',V='+V_name+',W='+W_name+')'
        dCdb.name = 'Conv3D_dCdb(dCdH='+dCdH_name+',V='+V_name+',W='+W_name+',b='+b_name+')'
        return [ dCdV, dCdW, dCdb, dCdd ]
    def perform(self, node, inputs, output_storage):
        """Python fallback: compute H via the computeH helper (defined
        elsewhere in this module) when the C implementation isn't used."""
        V, W, b, d = inputs
        # print "Conv3D python code"
        output_storage[0][0] = computeH(V, W, b, d)
    def infer_shape(self, node, input_shapes):
        """Symbolically compute the output shape of H.

        Each spatial/time extent follows the usual valid-convolution rule
        out = floor((in - filter) // stride) + 1.  Note the strides are
        read from the symbolic input variable d, not from d's shape.
        """
        V, W, b, d = node.inputs
        V_shape, W_shape, b_shape, d_shape = input_shapes
        dr = d[0]
        dc = d[1]
        dt = d[2]
        batch_size = V_shape[0]
        output_channels = W_shape[0]
        vidHeight = V_shape[1]
        filterHeight = W_shape[1]
        vidWidth = V_shape[2]
        filterWidth = W_shape[2]
        vidDur = V_shape[3]
        filterDur = W_shape[3]
        output_height = T.floor((vidHeight - filterHeight) // dr) + 1
        output_width = T.floor((vidWidth - filterWidth) // dc) + 1
        output_dur = T.floor((vidDur - filterDur) // dt) + 1
        rval = (batch_size, output_height, output_width, output_dur, output_channels )
        return [rval]
    def c_support_code(self):
        # Inject the BLAS prototypes needed by the generated C code.
        return blas_header_text()
    def c_libraries(self):
        # Link against the configured BLAS libraries.
        return ldflags()
    def c_compile_args(self):
        # Compiler flags only (no -l library switches).
        flags = ldflags(libs=False, flags=True)
        return flags
    def c_lib_dirs(self):
        # Library search paths for the BLAS link step.
        return ldflags(libs=False, libs_dir=True)
    def c_header_dirs(self):
        # Include search paths for the BLAS headers.
        return ldflags(libs=False, include_dir=True)
def c_code(self, node, nodename, inputs, outputs, sub):
V, W, b, d = inputs
fail = sub['fail']
H = outputs[0]
codeSource = """
///////////// < code generated by Conv3D >
//printf("\t\t\t\tConv3D c code\\n");
//Check dimensionality of inputs
if (PyArray_NDIM(%(W)s) != 5)
{
PyErr_Format(PyExc_ValueError, "Conv3D: W must be a 5 dimensional tensor");
%(fail)s
}
if (PyArray_NDIM(%(V)s) != 5)
{
PyErr_Format(PyExc_ValueError, "Conv3D: V must be a 5 dimensional tensor");
%(fail)s
}
if (PyArray_NDIM(%(b)s) != 1)
{
PyErr_Format(PyExc_ValueError,"Conv3D: b must be a vector.");
%(fail)s
}
if (PyArray_NDIM(%(d)s) != 1)
{
PyErr_Format(PyExc_ValueError,"Conv3D: d must be a vector.");
%(fail)s
}
if (PyArray_DIMS(%(d)s)[0] != 3)
{
PyErr_Format(PyExc_ValueError,"Conv3D: 3 stride length arguments expected (row, col, time) but %%li were given", (long)PyArray_DIMS(%(d)s)[0]);
%(fail)s
}
//Read and check sizes of inputs
{ // exta scope so error handler jumps don't cause errors
const int batchSize = PyArray_DIMS(%(V)s)[0];
const int outputChannels = PyArray_DIMS(%(W)s)[0];
const int inputChannels = PyArray_DIMS(%(V)s)[4];
if (PyArray_DIMS(%(W)s)[4] != inputChannels)
{
PyErr_Format(PyExc_ValueError, "Conv3D: W operates on a %%ld channel image but the image has %%d channels. Overall shape of input: (%%ld,%%ld,%%ld,%%ld,%%ld)", (long)PyArray_DIMS(%(W)s)[4], inputChannels, (long)PyArray_DIMS(%(V)s)[0], (long)PyArray_DIMS(%(V)s)[1], (long)PyArray_DIMS(%(V)s)[2], (long)PyArray_DIMS(%(V)s)[3], (long)PyArray_DIMS(%(V)s)[4]);
%(fail)s
}
if (PyArray_DIMS(%(b)s)[0] != outputChannels)
{
PyErr_Format(PyExc_ValueError, "Conv3D: b adds to a(n) %%ld channel output image but the output has %%d channels", (long)PyArray_DIMS(%(b)s)[0], outputChannels);
%(fail)s
}
{ //extra scope so error handler jumps don't cause errors
const int filterHeight = PyArray_DIMS(%(W)s)[1];
const int filterWidth = PyArray_DIMS(%(W)s)[2];
const int filterDur = PyArray_DIMS(%(W)s)[3];
const int vidHeight = PyArray_DIMS(%(V)s)[1];
const int vidWidth = PyArray_DIMS(%(V)s)[2];
const int vidDur = PyArray_DIMS(%(V)s)[3];\
if (vidHeight < filterHeight)
{
PyErr_Format(PyExc_ValueError, "W has a height of %%i but V is only %%i pixels tall",filterHeight,vidHeight);
%(fail)s
}
{ // extra scope so fail works
if (vidWidth < filterWidth)
{
PyErr_Format(PyExc_ValueError, "W has a width of %%i but V is only %%i pixels wide",filterWidth,vidWidth);
%(fail)s
}
{ // extra scope so fail works
if (vidDur < filterDur)
{
PyErr_Format(PyExc_ValueError, "W has a duration of %%i but V is only %%i pixels long",filterDur,vidDur);
%(fail)s
}
{ // extra scope so fail works
//Read and check stride arguments
const int dr = *(dtype_%(d)s*) PyArray_GETPTR1(%(d)s,0);
const int dc = *(dtype_%(d)s*) PyArray_GETPTR1(%(d)s,1);
const int dt = *(dtype_%(d)s*) PyArray_GETPTR1(%(d)s,2);
if (dr <= 0 || dc <= 0 || dt <= 0)
{
PyErr_Format(PyExc_ValueError,"Conv3D: Strides must all be positive but are %%i, %%i, %%i",dr,dc,dt);
%(fail)s
}
{ // extra scope so fail works
//Make correctly sized output
const long long outputHeight = int( (vidHeight - filterHeight) / dr )+1;
const long long outputWidth = int( (vidWidth - filterWidth) / dc )+1;
| |
route['network'] + '%' + r_dom
route['gateway'] = route['gateway'] + '%' + r_dom
else:
for n_link in routes:
for route in routes[n_link]:
if route['use_rd'] and (route['network'] == '0.0.0.0'
or route['network'] == '::'):
LOG.warning(
'excluding default route on link %s because route domains disabled',
n_link)
route['exclude'] = True
LOG.debug('resolving service resources')
dns_services = []
ntp_services = []
if 'hostname' not in services:
LOG.warning('forcing device hostname to: %s', os_md['name'])
services['hostname'] = os_md['name']
if 'domainname' not in services:
LOG.warning('forcing device domainname to .local')
services['domainname'] = 'local'
declared_hostname = "%s.%s" % (services['hostname'],
services['domainname'])
if len(declared_hostname) > 64:
max_hostname = 63 - len(services['domainname'])
services['hostname'] = services['hostname'][0:max_hostname]
for service in os_nmd['services']:
if service['type'] == 'dns' and (
bigiq_onboard_utils.is_v6(service['address'])
or bigiq_onboard_utils.is_v4(service['address'])):
LOG.debug('discovered DNS service configuration')
dns_services.append(service['address'])
if service['type'] == 'ntp' and (
bigiq_onboard_utils.is_v6(service['address'])
or bigiq_onboard_utils.is_v4(service['address'])):
LOG.debug('discovered NTP service configuration')
ntp_services.append(service['address'])
if dns_services:
services['dns_services'] = dns_services
else:
services['dns_services'] = DEFAULT_DNS_SERVERS
if ntp_services:
services['ntp_services'] = ntp_services
else:
services['ntp_services'] = DEFAULT_NTP_SERVERS
if forced_tmm_down:
bigiq_onboard_utils.start_tmm()
return {
'mgmt_link_id': m_l_id,
'mgmt_address': m_ip,
'mgmt_netmask': m_nm,
'mgmt_gateway': m_gw,
'mgmt_mtu': m_l_mtu,
'device_discovery_interface': device_discovery_interface,
'device_discovery_address': device_discovery_address,
'default_route_interface': default_route_interface,
'links': links,
'selfips': selfips,
'routes': routes,
'services': services
}
def create_mgmt_init_script(resources):
    """Create initialization scripts for the mgmt interface.

    Writes TMSH_CMD_FILE_DIR/001_mgmt_setup.sh, which disables mgmt DHCP,
    assigns the static management address/netmask, optionally creates the
    default management route, and sets the mgmt link MTU (immediately and
    persistently via /config/startup).
    """
    LOG.debug('writing out mgmt interface scripts')
    # typo fix: log messages read 'mmgt' instead of 'mgmt'
    LOG.debug('mgmt ip %s/%s', resources['mgmt_address'],
              resources['mgmt_netmask'])
    if resources['mgmt_gateway']:
        LOG.debug('mgmt gw %s', resources['mgmt_gateway'])
    tmsh_cmd_dir_exists()
    mgmt_onboard_script = TMSH_CMD_FILE_DIR + '/001_mgmt_setup.sh'
    # write out management network configuration
    with open(mgmt_onboard_script, 'w') as mgmt_script:
        mgmt_script.write("#!/bin/bash\n")
        mgmt_script.write("%s 'starting mgmt network provisioning'\n" %
                          SYSCMDS['echo'])
        mgmt_script.write(
            "%s modify sys global-settings mgmt-dhcp disabled\n" %
            SYSCMDS['tmsh'])
        mgmt_script.write("sleep 5\n")
        mgmt_script.write("%s create sys management-ip %s/%s\n" %
                          (SYSCMDS['tmsh'], resources['mgmt_address'],
                           resources['mgmt_netmask']))
        if resources['mgmt_gateway']:
            mgmt_script.write(
                "%s create sys management-route default gateway %s\n" %
                (SYSCMDS['tmsh'], resources['mgmt_gateway']))
        mgmt_script.write("%s link set %s mtu %s\n" %
                          (SYSCMDS['ip'], bigiq_onboard_utils.MGMT_DEV_NAME,
                           resources['mgmt_mtu']))
        # BUG FIX: the command persisted to /config/startup was missing the
        # 'mtu' keyword ('ip link set <dev> <mtu>' is not a valid command);
        # it must match the immediate 'ip link set <dev> mtu <mtu>' above.
        mgmt_script.write(
            "%s '%s link set %s mtu %s' >> /config/startup\n" %
            (SYSCMDS['echo'], SYSCMDS['ip'], bigiq_onboard_utils.MGMT_DEV_NAME,
             resources['mgmt_mtu']))
        mgmt_script.write("%s 'mgmt interface configured: %s/%s'\n" %
                          (SYSCMDS['echo'], resources['mgmt_address'],
                           resources['mgmt_netmask']))
def create_tmm_net_init_script(resources):
    """Write TMSH_CMD_FILE_DIR/002_network_setup.sh: L2 configuration
    (vlans and optional route domains) followed by L3 configuration
    (self-ips and static routes) for every non-management link."""
    LOG.debug('writing out non declarative network provisioning scripts')
    script_path = TMSH_CMD_FILE_DIR + '/002_network_setup.sh'
    with open(script_path, 'w') as net_script:
        emit = net_script.write
        emit("#!/bin/bash\n")
        emit("%s 'starting network provisioning'\n" % SYSCMDS['echo'])
        # L2: one vlan per non-mgmt link, plus an optional route domain
        for link_id in resources['links']:
            if link_id == resources['mgmt_link_id']:
                continue
            link = resources['links'][link_id]
            if link['tagged']:
                vlan_tmpl = ("%s create net vlan %s mtu %s interfaces "
                             "replace-all-with { %s { tagged } } tag %s\n")
            else:
                vlan_tmpl = ("%s create net vlan %s mtu %s interfaces "
                             "replace-all-with { %s } tag %s\n")
            emit(vlan_tmpl % (SYSCMDS['tmsh'], link['net_name'],
                              link['mtu'], link['interface'],
                              link['segmentation_id']))
            if link['route_domain'] > 0:
                emit("%s create net route-domain %s { id %s vlans "
                     "add { %s } }\n"
                     % (SYSCMDS['tmsh'], link['route_domain'],
                        link['route_domain'], link['net_name']))
        # L3: self-ips, then any non-excluded static routes
        for n_link in resources['selfips']:
            selfip = resources['selfips'][n_link]
            emit("%s create net self %s address %s/%s vlan %s "
                 "allow-service all\n"
                 % (SYSCMDS['tmsh'], selfip['selfip_name'],
                    selfip['ip_address'], selfip['netmask'],
                    selfip['net_name']))
        for n_link in resources['routes']:
            for route in resources['routes'][n_link]:
                if route['exclude']:
                    continue
                emit("%s create net route %s network %s/%s gw %s\n" %
                     (SYSCMDS['tmsh'], route['route_name'],
                      route['network'], route['netmask'],
                      route['gateway']))
        emit("%s 'networking configured'\n" % SYSCMDS['echo'])
def create_services_init_script(resources):
    """Write TMSH_CMD_FILE_DIR/003_services_setup.sh: hostname, DNS and
    NTP configuration, plus removal of the matching mgmt-dhcp
    request-options so DHCP cannot overwrite them."""
    LOG.debug('writing out services initialization script')
    script_path = TMSH_CMD_FILE_DIR + '/003_services_setup.sh'
    if os.path.isfile(script_path):
        bigiq_onboard_utils.del_file(script_path)
    services = resources['services']
    tmsh = SYSCMDS['tmsh']
    # Build every command up front, then write them in one call.
    command_lines = [
        "%s modify sys global-settings hostname %s.%s\n" %
        (tmsh, services['hostname'], services['domainname']),
        "%s modify sys management-dhcp sys-mgmt-dhcp-config request-options delete { host-name }\n"
        % tmsh,
        "%s modify sys dns search replace-all-with { %s }\n" %
        (tmsh, services['domainname']),
        "%s modify sys dns name-servers replace-all-with { %s }\n" %
        (tmsh, " ".join(services['dns_services'])),
        "%s modify sys management-dhcp sys-mgmt-dhcp-config request-options delete { domain-name-servers domain-name }\n"
        % tmsh,
        "%s modify sys ntp timezone %s\n" % (tmsh, DEFAULT_TIMEZONE),
        "%s modify sys ntp servers replace-all-with { %s }\n" %
        (tmsh, " ".join(services['ntp_services'])),
        "%s modify sys management-dhcp sys-mgmt-dhcp-config request-options delete { ntp-servers }\n"
        % tmsh,
        "%s 'services configured'\n" % SYSCMDS['echo'],
    ]
    with open(script_path, 'w') as services_script:
        services_script.writelines(command_lines)
def create_bigiq_init_script(resources, license_key, node_type):
    """Write the device-discovery setup script (which runs the BIG-IQ
    ansible playbook) and the playbook's variable file; returns the
    resources dict unchanged for chaining."""
    LOG.debug('writing out BIG-IQ initialization script')
    script_path = TMSH_CMD_FILE_DIR + '/004_device_discovery_setup.sh'
    if os.path.isfile(script_path):
        bigiq_onboard_utils.del_file(script_path)
    with open(script_path, 'w') as cmi_script:
        cmi_script.write("cd %s\n" % ANSIBLE_PLAYBOOK_DIR)
        cmi_script.write("%s %s\n" %
                         (SYSCMDS['ansible-playbook'], ANSIBLE_PLAYBOOK))
    LOG.debug('writing out ansible playbook variables')
    services = resources['services']
    with open(ANSIBLE_VAR_FILE, 'w') as ansible_vars:
        emit = ansible_vars.write
        emit('---\n\n')
        if license_key:
            emit('license_key: %s\n' % license_key)
        emit('node_type: %s\n' % node_type)
        emit('hostname: %s.%s\n' %
             (services['hostname'], services['domainname']))
        emit('device_discovery_interface: %s\n' %
             resources['device_discovery_interface'])
        emit('device_discovery_address: %s\n' %
             resources['device_discovery_address'])
    return resources
def create_post_onboard_script(post_onboard_commands):
    """Create the post onboard command script.

    Writes POST_ONBOARD_CMD_FILE_DIR/001_post_onboard_commands.sh, a bash
    script that runs each supplied command in order and stops at the first
    failure. No-op when post_onboard_commands is empty/None.
    """
    if post_onboard_commands:
        LOG.debug('writing out post onboard commands script')
        post_onboard_commands_script = POST_ONBOARD_CMD_FILE_DIR + \
            '/001_post_onboard_commands.sh'
        if os.path.isfile(post_onboard_commands_script):
            bigiq_onboard_utils.del_file(post_onboard_commands_script)
        with open(post_onboard_commands_script, 'w') as pob_script:
            pob_script.write("#!/bin/bash\n")
            pob_script.write(
                "%s 'starting post onboarding commands provisioning'\n\n" %
                SYSCMDS['echo'])
            pob_script.write("cmds=(\n")
            for cmd in post_onboard_commands:
                # escape double quotes so each command survives as one
                # bash array element
                pob_script.write("\"%s\"\n" % cmd.replace('"', r'\"'))
            pob_script.write(")\n\n")
            pob_script.write("for (( i = 0; i < ${#cmds[@]} ; i++ )); do\n")
            pob_script.write(" %s \"Running: ${cmds[$i]}\"\n" %
                             SYSCMDS['echo'])
            pob_script.write(" eval \"${cmds[$i]}\"\n")
            # BUG FIX: '-eq 1' only stopped on exit status 1; commands can
            # fail with any non-zero status, so test '-ne 0' instead.
            pob_script.write(" if [ $? -ne 0 ]; then\n")
            pob_script.write(
                " %s \"Command: ${cmds[$i]} Failed. Stopping execution of any further commands.\"\n"
                % SYSCMDS['echo'])
            pob_script.write(" exit 1\n")
            pob_script.write(" fi\n")
            pob_script.write("done\n\n")
def create_onboard_artifacts(resources,
license_key,
node_type,
post_onboard_commands=None):
"""Generates all needed onboarding artifacts from metadata or the environment"""
create_mgmt_init_script(resources)
create_tmm_net_init_script(resources)
create_services_init_script(resources)
create_bigiq_init_script(resources, license_key, node_type)
create_post_onboard_script(post_onboard_commands)
def create_onboard():
"""Creates the initialization script for all tasks"""
LOG.debug('writing out pre declaration onboard initialization script')
onboard_script = TMSH_CMD_FILE_DIR + '/onboard.sh'
if os.path.isfile(onboard_script):
bigiq_onboard_utils.del_file(onboard_script)
script_files = os.listdir(TMSH_CMD_FILE_DIR)
script_files.sort()
with open(onboard_script, 'w') as obs:
obs.write("#!/bin/bash\n\n")
obs.write("function check_mcpd_up() {\n")
obs.write(" checks=0\n")
obs.write(" while [ $checks -lt 120 ]; do\n")
obs.write(
" if %s -a show sys mcp-state field-fmt 2> /dev/null | %s -q running; then\n"
% (SYSCMDS['tmsh'], SYSCMDS['grep']))
obs.write(" break\n")
obs.write(" fi\n")
obs.write(" %s 'waiting for mcpd to reach running state'\n" %
SYSCMDS['echo'])
obs.write(" let checks=checks+1\n")
obs.write(" %s 10\n" % SYSCMDS['sleep'])
obs.write(" done\n")
obs.write("}\n\n")
obs.write("function exec_phases() {\n")
for script_file in script_files:
obs.write(" /bin/bash %s/%s\n" %
(TMSH_CMD_FILE_DIR, script_file))
obs.write(" %s 1 > %s\n" %
(SYSCMDS['echo'], ONBOARD_COMPLETE_FLAG_FILE))
obs.write("}\n\n")
obs.write("check_mcpd_up\n")
obs.write("exec_phases\n")
obs.write("check_mcpd_up\n")
obs.write("%s save sys config base\n" % SYSCMDS['tmsh'])
obs.write("check_mcpd_up\n")
os.chmod(onboard_script, 0775)
def create_post_onboard():
"""Creates the initialization script for all post onboarding scripts"""
LOG.debug('writing out post onboard initialization script')
onboard_script = POST_ONBOARD_CMD_FILE_DIR + '/onboard.sh'
if os.path.isfile(onboard_script):
bigiq_onboard_utils.del_file(onboard_script)
script_files = os.listdir(POST_ONBOARD_CMD_FILE_DIR)
script_files.sort()
with open(onboard_script, 'w') as obs:
obs.write("#!/bin/bash\n\n")
obs.write("function check_mcpd_up() {\n")
obs.write(" checks=0\n")
obs.write(" while [ $checks -lt 120 ]; do\n")
obs.write(
" if %s -a show sys mcp-state field-fmt 2> /dev/null | %s -q running; then\n"
% (SYSCMDS['tmsh'], SYSCMDS['grep']))
obs.write(" break\n")
obs.write(" fi\n")
obs.write(" %s 'waiting for mcpd to reach running state'\n" %
SYSCMDS['echo'])
obs.write(" let checks=checks+1\n")
obs.write(" %s 10\n" % SYSCMDS['sleep'])
obs.write(" done\n")
obs.write("}\n\n")
obs.write("function exec_phases() {\n")
for script_file in script_files:
obs.write(" /bin/bash %s/%s\n" %
(POST_ONBOARD_CMD_FILE_DIR, script_file))
obs.write(" %s 1 > %s\n" %
(SYSCMDS['echo'], POST_ONBOARD_FLAG_FILE))
obs.write("}\n\n")
obs.write("check_mcpd_up\n")
obs.write("exec_phases\n")
os.chmod(onboard_script, 0775)
def is_startup_injected(script_name):
    """Return True when script_name already appears in /config/startup.

    Pipes /config/startup through grep for script_name and counts the
    matching lines with wc -l.

    BUG FIX: the shell command was built as
    "... %s " + script_name + " ... %s" % (cat, grep, wc); '%' binds
    tighter than '+', so only the final fragment was formatted — three
    arguments against one placeholder — raising TypeError on every call.
    Format the whole command string at once instead.
    """
    shell_cmd = "%s /config/startup | %s %s | %s -l" % (
        SYSCMDS['cat'], SYSCMDS['grep'], script_name, SYSCMDS['wc'])
    injected_already = subprocess.Popen(
        shell_cmd,
        stdout=subprocess.PIPE,
        shell=True).communicate()[0].replace('\n', '')
    if injected_already == '0':
        return False
    return True
def run_post_onboard_commands():
    """Run the generated post-onboard script and wait for its completion.

    Launches POST_ONBOARD_CMD_FILE_DIR/onboard.sh in the background and
    polls for up to ONBOARD_TIMEOUT seconds for POST_ONBOARD_FLAG_FILE,
    which that script writes when it finishes (see create_post_onboard).

    Returns:
        True when the flag file appeared (commands completed), False on
        timeout.
    """
    post_onboard_script = POST_ONBOARD_CMD_FILE_DIR + '/onboard.sh'
    with open(LOG_FILE, 'a+') as onboardlog:
        LOG.info('running post onboard script')
        subprocess.call(
            [SYSCMDS['nohup'], 'sh', '-c', post_onboard_script, '&'],
            stdout=onboardlog,
            stderr=onboardlog)
        end_time = time.time() + ONBOARD_TIMEOUT
        while (end_time - time.time()) > 0:
            if not os.path.exists(POST_ONBOARD_FLAG_FILE):
                time.sleep(1)
            else:
                end_time = 0
    # BUG FIX: the flag file's presence means the script completed; the
    # original condition was inverted ('if os.path.exists(...)') and
    # reported failure exactly when the commands had succeeded.
    if not os.path.exists(POST_ONBOARD_FLAG_FILE):
        LOG.error('post onboard commands did not complete properly')
        return False
    return True
def onboard(post_onboard_enabled, phone_home_url, phone_home_url_verify_tls,
            phone_home_url_metadata, phone_home_cli):
    """Implements the onboarding business logic: run the generated
    onboard script, wait for its completion flag, optionally run the
    post-onboard commands, then report status via phone-home."""
    onboard_status = ERROR
    onboard_script = TMSH_CMD_FILE_DIR + '/onboard.sh'
    with open(LOG_FILE, 'a+') as onboardlog:
        LOG.info('running onboarding scripts')
        subprocess.call([SYSCMDS['nohup'], 'sh', '-c', onboard_script, '&'],
                        stdout=onboardlog,
                        stderr=onboardlog)
        # poll for the completion flag until the timeout expires
        deadline = time.time() + ONBOARD_TIMEOUT
        while time.time() < deadline:
            if os.path.exists(ONBOARD_COMPLETE_FLAG_FILE):
                onboard_status = SUCCESS
                break
            time.sleep(1)
    if onboard_status != SUCCESS:
        onboard_status = ERROR
        LOG.error('onboarding configuration did not complete')
    elif post_onboard_enabled and not run_post_onboard_commands():
        onboard_status = ERROR
        LOG.error('post onboarding did not complete')
    if phone_home_url:
        bigiq_onboard_utils.phone_home(phone_home_url, onboard_status,
                                       phone_home_url_verify_tls,
                                       phone_home_url_metadata)
    if phone_home_cli and onboard_status == SUCCESS:
        bigiq_onboard_utils.run_cmd(phone_home_cli)
    LOG.info('onboarding ended with status: %s', onboard_status)
def clean():
    """Delete onboarding artifacts, including (when configured) the
    cached metadata files."""
    if REMOVE_METADATA_FILES:
        for metadata_file in (NETWORK_DATA_FILE, META_DATA_FILE):
            if os.path.isfile(metadata_file):
                bigiq_onboard_utils.del_file(metadata_file)
    bigiq_onboard_utils.clean()
def handle(name, userdata, cloud, log, args):
"""Cloud-init processing function"""
| |
XXX XXXXXXX XXX XXXXXXXX XXXX
XX XXXX XXX XXXXXXXXXX XXX XX XXXXX
XXX X X XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX XXXX XX XXXXXXXX XX XXX XXXX XXXX XXXXXXXXXXXXXXXX XXXXXXX
XXXX XXXX XXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXX XXXXXXXXXX XXX
XXXXXX XX XXXXXXXX XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXX XXXX XXXXX XX XXX
XXXXX
XX XXXXXXXXXXX XXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXX X XXXX XX XXX XXXXX XX XXX XXXXXX XXXXX
XXXXXX XX XXX XXXXXX XXXXX XXXX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXX XXXX XXXX XXX
XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXX XX XXXXXXX XX XXXXX XXXX XX XX XXXX XXXXXXXX
XXX X X XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXX XX XXXXXXXXXXXXXXXX
XXX XXXXXXXXXX XXXXXXXXX XXXXXX XXXXXXXX XXX X XX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXX XXX XXXXXX XXXX XXX XXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXX XXXXXXXXXXX
XXX XXX XXXXXX XXX XXXXXX XX XXXXXXXXXXXXX XXXXXXXX XXXX XXX XXXXXXXXXX
XXXXXXXXXXX
XXX XXX XXX XX XXXXXXXXXXXXXXXXXX XXXXXXXXXX
XXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXX
XXX XXX XXXXX XXX XXXXX XX XXXXXXXXXXXXX XXXXXXXX XX XXXXXX XXX XXX XX XX
XXXXXXXXX XX XXX XXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXX X XXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXX XX XXXXX XXX XXXXXXXXXXXXXXX XXXXXXXXX XXXXXXX XXXX XXXXXXXXXXXX
XXXX XXXXXXXXX XXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXX XXXXXX XXX XXXX XXXXXX
XXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX X XXXXXXXXXX
XXX XXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXXXXX XXXXXXX XXXX
XXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXX XXXXXXXXXXXXXXXXX
XXXX XXXXX XX X XXXXXXXXXXXXX XXXXX XX XXXXXXXXXXX XXX XXXX XXX XXXXXXXXXX
XXXXX XXX XXXX XXX XXXXXXXXXX XX XX XXXXXXXXXXX XX XX X XXXXXXXXXX XXXXXXX XXXX
XX X XXXX XXXXXXXX XXXXXXX XX XXXXXX XXXX XXX X XXXXXXXXXX XXXXX XX XX XXXXX XX
X XXXXXXX XX XXXXX XXXXXX XXXXXXXXX XX XXXXXXXXXX XXXXXXX
XXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXX X
XXXXXX XXXXXXXXXXXXXXXXX XXXXXXX XXXXXXXXXX XX XXXXXXX XXX XXXX XX X XXXXXX XX
XXX XXXXXX XXXXXXXXXXXXXXXXX X XXXXXXXXXXXXXXXXX XXXXXXX XX X XXXXXX XX XXXXX
XXXXXXXX XXXXXXXXXXXXX XXXX XXXXXX XXXXXXXXX XX XX X XXXXXXXXXXXXXXXXX XXXXXX
XX XXXX XX XXXX XXXXXX
XXXX XXXXXX XXXXXXX X XXXXXXXXXXXXXX XXXXXXXX XXXX X XXX XX XXXX XXX XXXXXXXXX
XXX XXX XXX XXXXXX XXX XXXXX XXXX XXX XXX XXXXXXXXXXXXXXXX XXXXXXXXXXX
XXX XXXX X XXXXXXXXXXX XXXXXXXX
XXX XXXXXXXXXX XXX XXXXXXX
XXX XXXXXXXXX XXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX XXXXX
XXX X X XXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXX
XXX XXXXXXXXXXXXXX
XXXXXXXXXXXXX XXXXX XXXXXXXXXX XXX XXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXXXXX
XXXX XXXX XXX XXXXXXXXXX XXXXX XX XXXX XX XXXXXXXXXXXXX XX XXXXXXXXXXXXXX XX
XXXXXX XXXXXX XXX XXXXX XXXX X XXXXXXX XXXXX XXXXX XXX XXXXXXXX XXXXXXXXXXXX
XXXXX XX XXXX XXXXXXXXX
XX XXXX XXXX XXXX XXXXX XXXXXXXXX XXX XXXXXXXXXXXXXXXX XXXXXXXXXX XXXXXXXX
XXXX XXX XXXXX XXXXXXXX
XXX XXXX X XXXXXXXXXXX XXX
XXX XXXXXXXXXX XXX XXXXXXX
XXX XXXXXXXXX XXXXXXXX XXXXX XXXXXXXXX
XXX XXXXXXXXXXXX XXXXX
XXX X X XXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXXX
XXX XXXXXXXXXXXXXX
XXXXXXXXXXXXX XXXXX XXXXXXXXXX XXX XXXXXXX
XXXXXXXXXXXXXXXX XXXX XXXXXX XXXXXX XXXXXXX X XXX XXX XXXXXX XXXXXXX XX XXX
XXXXXXXXX XXXX XX XXX XXXX XXXXX XXXX XXXX XXX XXXXXX XXX XXXXXXXXX XX XXXX
XXXXXXXX XX XXXX X XXXXX XX XXXXX XXXXXX XX XXX XXXXXXXXXXXXXXX XXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXX XXXXXXXX XXXX XXX XXXXXX XXXXXXXX
XXX XXXX X XXXXXXXXXXX XXXXXXXX
XXX XXXXXXXXXX XXX XXXXXXX
XXX XXXXXXXXX XXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX XXXXX
XXX XXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXXXXXXXXXXXX XXXXXX
XXX X X XXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXX
XXX XXXXXXXXXXXXXX X XXXXXXX XXXXXXX XXXXXXXXXXXXXX XXXX
XXXXXXXXXXXXX XXXXX XXXXXXXXXX XXX XXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXXXXX
XXXX XXX XXXXXXXX XX XXXXXX XXXXXXXXXXXXXXXX XXXX XXXXXXX X XXX XXX XXXXX XXX
XXXXX XXX XXXXXXX XXXX XX XXX XXXX XXXXXX XXXXXXX X XXXXX XXX XXXX XXXXXXXX
XXXXXXX XX XXXX XXXXXXXX XXX XXXX XXXXXXXXXX XXXXXXX XXXXXXX X XXXXX XXX XXX
XXXXXXXXXXXXX XXXXXX XXX XXXXXXXXXXXXXXXX XXXXXXXX XXX XXXX XX XXXXX XXXXXXX
XXX XXXX XXXXXX XXXXXX XXXXX
XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXX X XXXXXXXXXXXXXXXXX
XXX XXXXXXXXX X XXXXXXXXXXXXXXXXX
XXX XXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXX X XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXX XXXXXXXXX
XXX X X XXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXX
XXX XXXXXXXXXXXXXX
XXXXXXXXXXXXX XXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXX XXXXXXXXX
XX XXXX XXXXX XXXXXXXX XXX XXXXXXXXXXXXXXXX XXXXX XXX XXXXXXXXXXXXX XX XXX XX XX
XXXXX XXXXXXX XXXXXXX XXXXXXXXXXXXX XX XXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXX XXXXX
XXXXX XXXXXX XX XX XXXXX XXXXXXX XXXX XXXXX XXXX XXXXX XXXX XXX XXXXXXX XXXXX
XX XX XXXXX XXX XXXXXXXXXXXXXX XXXX XXXXXXXX XXXXXXX XX XXX XXXXX XXXXXXX XXX
XXXX XXXXXXX XX XXXX XXXXXXX XXXXXXXX XX XXXX XXXXX XXX XXX XXXXXX XXXXXX XXXX
XXX XXXX XXXXX XX XXX XXXXXXXXX XXXXXXXXX XXXXXXXX XXXXXXX XXXXXX
XXX XXX XXXXX XXXX XX XXXXXXX XXXXXXXXXX XXX XXXXXXXXXX XXXX XXXXXX XXXXXX XX
XXXXX XXXXX XX XXX XXX XXXX XX X XXXXX XXXXXXXXXXXX XXXXXXXXXXXX XX XXXXXXX
XXXXXXXX XXXX XXXXXXXXXXX XXXXX XXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXX XXXXX XX XXXX
XXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXX XXXX XX X XXXXXXXX XXXXXX XX XX XXXXXX XXXXXX XX XXXXX XX XX XXX
XXXXXXXXX XXXX
XXX X X XXXXXXXXXXXXX
XXX XXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXX XX XXXXX XXX XXXX XXXXXX XXXX XXXXXXX XXXX XXXX
XXXXXXXXXXXXXX XXX XXXXXXXX XX X XXXXX XX XXXXXXXXXXX XX XX
XXXXXXXX XXXXXXXXXXXXXXX XXX XXXX XXXX XX XX XXX XXXXXXXXX XXXXXXXXXX XX X
XXXXX XX XXXXXXXXXXX XX XX XXXXXXXX XXXXXXXXXXXXXXXXXXX XXXX XXXX XXXX XXXX
XXXXXXX XXXXXXXXXXX XX XXXXXXXXXXXXX
XXX XXXX X XXXXXXXXXXX XXXXXXXX
XXX XXXXXXXXXX XXX XXXXXXX
XXX XXXXXXXXX XXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX XXXXX
XXX X X XXXXXXXXXXXXXXXXX
XXX XXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXX XXXXXXX XXXXXX XX X XXXXXXXXXX XXXX XXXXXX XXXX X XXXXXXXX XXX XXXX XXXXXX
XXXXXX XXX XXXXXXXXXX
X XXX XXXXXXXXXXXX XXX XXXXXX XXXX XXXXX XXXXXXX XXX XXXXXXXXXXX XXX
XXXXXXXXXXXX XXXXX XXX XXXX XX XXXXXXX XXX XXXXXXXXXX XXX XXXXXXXXXXX
XXXX XX XX XXXXXXXX XXXXXXXXXXXXXXXX XXXX XXXX XXXX XXX XX XX XXXXX
X XXXX XXXXX XXXX XXX X XXXXXXX XXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XX
XXXXXXXXXXX XX XX XXXXXXXX XXXXXXXXXXXXXX XXX XXXXXXXXXXXXXX XX XX
XXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXXX XX XX
XXXXXXXX XXXXXXXXXXXXXXXXXXX XXXX XXXXX XXX XXXXXX XXXXXXXX XXXXXXXXX XXX XXX
XXXXXXX XXXXX XXXX XX XXX XXX X XXXXX XXXXX XX XXXXX XXXXXXXX XXXXX XXXXX
XXXXXXX XXXXXXXX
X XXX XXXX XXXXXXXX XXX XXXX XXX XX XXXXX XXXXXXXX XXXX XXX XXXXXXXXX XXXX
XX XXX XXXXXXXXXXXXXXX XXXXXX
X XXX XXXX XXXXX XXX XXXX XXXXX XX XXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXX XXX
XXXXX XXXXXXXXXX XX XXXXXXXXX XXXX XXX XXXXX XXXX XX XXXXXXXXXX XXX
XXXXXXXXXXX XX XXXXXX XXX XXXXXXXXXXXX XXX XXXXX XXXXXXX XXXXXX XXXX
XXXXX XXX XXXXXX XXXXXXXX XXXXXXXXX XXX XXX XXXX XXXXXXX XXXXXX XXXXXXXXX
X XXXX XXXX XXXXX XX XXXXXXXXXX XX XX XXXX XXXXXXXXXXX XXXX XXXXX XXXXXX
XX XXX XXXXXXXXXXX XXXX XXXXX XXX XXX XXXXXXX XXX XXXXXXX XX XXXXX XX
XXXXXXXXX XX XXXXXXXXXX XXXXXXXXX XX XXX XXXXX XXXXX XXX XXXXXX
XXXXXXXXXX XXX XXXXXXXXXXX XXXX XXX XXXXXXXX XX XXX XXXXXX XX XXXXXXXX XX
XXXXXX XXXX XXXXXXXXXX XXX XXX XXX XXXXXX XXXX XXXXXXXXX
X XXX XXXXXX XXXX XXXXX XXXXXXX XXXXXXXXX XXXXXXXXXXX XXXXXXXX XXX XXXXXXXX
XX XXXX XXXXXXX XXXXXXXXXX XXXX XX XXXXXXXXXXX XXXXXX XXXX XXX XXXXX XXXXX
XX XXXXXXXXXXXXXXXXXXXXXX
XXXXXXXX XXXXXXXXXXX XXXXXX XX XXX XXXXXXX XXXXXX XXXXX XXXX XXX XXXXXXXXX X
XXXXX XXXXX XXXXXX XXXXXX XXX XXXXXXXXXX XXXX XXXXX XX XXXXXXXXX XX X XXXXXX XX
X XXXX XXXXXXX XXX XXXX XXXXXXXXX XXXXXX XXXXXXX X XXXXXXX
XXXXXXXXXX
XXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXX XXX XXXX XX X XXXXXX XX XXXXXXX XXXXX XXXX XXXX XXXXXXX
XXXXXXXXXX XXX XXXXXXX
XXX X X XXXXXXXXXXXXX
XXX XXXXXXXX
XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXX XXXXXXX XXX XXXX XX X XXXXXX XX XXXXXXXX XXXXX XXXX XXXX
XXXXXXXX XXXXXXXXXX XXX XXXXXX XX XXXX XXXXX XXXXXXX XXX XXXXXXXX XX
XXXXXXXXXX XX XXXX XXX XXX XXXXXXX XXX XXXX XXXXXXXXXX XX XXX XXXXXXXX XXX
XXXXXXXXXXXXX
XXX X X XXXXXXXXXXXXX
XXX XXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXX | |
import pytest
from dbt.tests.fixtures.project import write_project_files
# --- dbt test fixtures: inline project-file contents written out by the
# --- test harness (see write_project_files import above). String bodies
# --- are verbatim file contents and must not be reformatted.

# Schema that documents a seed under ``models:`` — a deliberately wrong
# specification block used to exercise dbt's error handling.
wrong_specification_block__schema_yml = """
version: 2
models:
- name: some_seed
description: "This is my seed under a model"
"""
# Schema attaching the custom ``self_referential`` test to model_a.
test_context_where_subq_models__schema_yml = """
version: 2
models:
- name: model_a
tests:
- self_referential
"""
# Trivial one-row model backing the schema above.
test_context_where_subq_models__model_a_sql = """
select 1 as fun
"""
# dbt_project.yml for the macros-only ``test_utils`` package.
test_utils__dbt_project_yml = """
name: 'test_utils'
version: '1.0'
config-version: 2
profile: 'default'
macro-paths: ["macros"]
"""
# ``current_timestamp`` macro dispatched through the ``test_utils``
# namespace, with a default implementation returning now().
test_utils__macros__current_timestamp_sql = """
{% macro current_timestamp() -%}
{{ return(adapter.dispatch('current_timestamp', 'test_utils')()) }}
{%- endmacro %}
{% macro default__current_timestamp() -%}
now()
{%- endmacro %}
"""
# Generic test whose implementation is itself resolved via
# adapter.dispatch in the ``test_utils`` namespace.
test_utils__macros__custom_test_sql = """
{% macro test_dispatch(model) -%}
{{ return(adapter.dispatch('test_dispatch', macro_namespace = 'test_utils')()) }}
{%- endmacro %}
{% macro default__test_dispatch(model) %}
select {{ adapter.dispatch('current_timestamp', macro_namespace = 'test_utils')() }}
{% endmacro %}
"""
# dbt_project.yml for the macros-only ``local_dep`` package.
local_dependency__dbt_project_yml = """
name: 'local_dep'
version: '1.0'
config-version: 2
profile: 'default'
macro-paths: ["macros"]
"""
# ``equality`` generic test (taken from dbt-utils): diffs two relations
# with an except/union-all pattern and fails on any asymmetric rows.
local_dependency__macros__equality_sql = """
{#-- taken from dbt-utils --#}
{% test equality(model, compare_model, compare_columns=None) %}
{{ return(adapter.dispatch('test_equality')(model, compare_model, compare_columns)) }}
{% endtest %}
{% macro default__test_equality(model, compare_model, compare_columns=None) %}
{% set set_diff %}
count(*) + abs(
sum(case when which_diff = 'a_minus_b' then 1 else 0 end) -
sum(case when which_diff = 'b_minus_a' then 1 else 0 end)
)
{% endset %}
{#-- Needs to be set at parse time, before we return '' below --#}
{{ config(fail_calc = set_diff) }}
{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}
{%- if not execute -%}
{{ return('') }}
{% endif %}
-- setup
{%- do dbt_utils._is_relation(model, 'test_equality') -%}
{#-
If the compare_cols arg is provided, we can run this test without querying the
information schema — this allows the model to be an ephemeral model
-#}
{%- if not compare_columns -%}
{%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}
{%- set compare_columns = adapter.get_columns_in_relation(model) | map(attribute='quoted') -%}
{%- endif -%}
{% set compare_cols_csv = compare_columns | join(', ') %}
with a as (
select * from {{ model }}
),
b as (
select * from {{ compare_model }}
),
a_minus_b as (
select {{compare_cols_csv}} from a
{{ dbt_utils.except() }}
select {{compare_cols_csv}} from b
),
b_minus_a as (
select {{compare_cols_csv}} from b
{{ dbt_utils.except() }}
select {{compare_cols_csv}} from a
),
unioned as (
select 'a_minus_b' as which_diff, * from a_minus_b
union all
select 'b_minus_a' as which_diff, * from b_minus_a
)
select * from unioned
{% endmacro %}
"""
# Schema with quoted ``id`` columns on two models whose file names differ
# only in case — exercises case-sensitive model-file handling.
case_sensitive_models__schema_yml = """
version: 2
models:
- name: lowercase
columns:
- name: id
quote: true
tests:
- unique
- name: uppercase
columns:
- name: id
quote: true
tests:
- unique
"""
# NOTE(review): the ``_SQL`` suffix presumably maps to an upper-cased
# 'uppercase.SQL' file extension on disk — confirm against the harness.
case_sensitive_models__uppercase_SQL = """
select 1 as id
"""
case_sensitive_models__lowercase_sql = """
select 1 as id
"""
# Generic test that dispatches ``current_timestamp`` through the
# ``local_utils`` namespace.
test_context_macros__my_test_sql = """
{% macro test_call_pkg_macro(model) %}
select {{ adapter.dispatch('current_timestamp', macro_namespace = 'local_utils')() }}
{% endmacro %}
"""
# Generic test calling local_utils.datediff() directly.
test_context_macros__test_my_datediff_sql = """
{% macro test_my_datediff(model) %}
select {{ local_utils.datediff() }}
{% endmacro %}
"""
# Two custom schema tests: type_one unions the model with ref('model_b');
# type_two is configured to WARN severity.
test_context_macros__custom_schema_tests_sql = """
{% test type_one(model) %}
select * from (
select * from {{ model }}
union all
select * from {{ ref('model_b') }}
) as Foo
{% endtest %}
{% test type_two(model) %}
{{ config(severity = "WARN") }}
select * from {{ model }}
{% endtest %}
"""
# Schema wiring the custom tests above (type_one/type_two) to model_a and
# the namespaced tests (call_pkg_macro, test_utils.dispatch) to model_c.
test_context_models_namespaced__schema_yml = """
version: 2
models:
- name: model_a
tests:
- type_one
- type_two
- name: model_c
tests:
- call_pkg_macro
- test_utils.dispatch
"""
# Trivial one-row models referenced by the schema above.
test_context_models_namespaced__model_c_sql = """
select 1 as fun
"""
test_context_models_namespaced__model_b_sql = """
select 1 as notfun
"""
test_context_models_namespaced__model_a_sql = """
select 1 as fun
"""
# Override of dbt's get_test_sql that emits non-numeric 'x'/'y' flags —
# the "fail" variant used to verify that a broken override is surfaced.
macros_v2__override_get_test_macros_fail__get_test_sql_sql = """
{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}
select
{{ fail_calc }} as failures,
case when {{ fail_calc }} {{ warn_if }} then 'x' else 'y' end as should_warn,
case when {{ fail_calc }} {{ error_if }} then 'x' else 'y' end as should_error
from (
{{ main_sql }}
{{ "limit " ~ limit if limit != none }}
) dbt_internal_test
{% endmacro %}
"""
# Three custom generic tests: a column-value check, a rejected-values
# list check, and an 'equivalent' test that logs its validation message
# at execution time.
macros_v2__macros__tests_sql = """
{% test every_value_is_blue(model, column_name) %}
select *
from {{ model }}
where {{ column_name }} != 'blue'
{% endtest %}
{% test rejected_values(model, column_name, values) %}
select *
from {{ model }}
where {{ column_name }} in (
{% for value in values %}
'{{ value }}' {% if not loop.last %} , {% endif %}
{% endfor %}
)
{% endtest %}
{% test equivalent(model, value) %}
{% set expected = 'foo-bar' %}
{% set eq = 1 if value == expected else 0 %}
{% set validation_message -%}
'got "{{ value }}", expected "{{ expected }}"'
{%- endset %}
{% if eq == 0 and execute %}
{{ log(validation_message, info=True) }}
{% endif %}
select {{ validation_message }} as validation_error
where {{ eq }} = 0
{% endtest %}
"""
# Well-behaved override of `get_test_sql`: same shape as the built-in macro,
# emitting 1/0 flags for should_warn/should_error (the "pass" counterpart of
# the _fail override above in this file).
macros_v2__override_get_test_macros__get_test_sql_sql = """
{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}
select
{{ fail_calc }} as failures,
case when {{ fail_calc }} {{ warn_if }} then 1 else 0 end as should_warn,
case when {{ fail_calc }} {{ error_if }} then 1 else 0 end as should_error
from (
{{ main_sql }}
{{ "limit " ~ limit if limit != none }}
) dbt_internal_test
{%- endmacro %}
"""

# Generic tests exercising each per-test config knob:
# where / error_if / warn_if / limit / fail_calc.
macros_v2__custom_configs__test_sql = """
{% test where(model, column_name) %}
{{ config(where = "1 = 0") }}
select * from {{ model }}
{% endtest %}
{% test error_if(model, column_name) %}
{{ config(error_if = "<= 0", warn_if = "<= 0") }}
select * from {{ model }}
{% endtest %}
{% test warn_if(model, column_name) %}
{{ config(warn_if = "<= 0", severity = "WARN") }}
select * from {{ model }}
{% endtest %}
{% test limit(model, column_name) %}
{{ config(limit = 0) }}
select * from {{ model }}
{% endtest %}
{% test fail_calc(model, column_name) %}
{{ config(fail_calc = "count(*) - count(*)") }}
select * from {{ model }}
{% endtest %}
"""
# Namespaced variant of test_call_pkg_macro: calls the package macro via its
# explicit `test_utils.` prefix rather than adapter.dispatch.
test_context_macros_namespaced__my_test_sql = """
{% macro test_call_pkg_macro(model) %}
select {{ test_utils.current_timestamp() }}
{% endmacro %}
"""

# Same custom schema tests as the non-namespaced project (type_one unions
# with model_b; type_two is severity=WARN).
test_context_macros_namespaced__custom_schema_tests_sql = """
{% test type_one(model) %}
select * from (
select * from {{ model }}
union all
select * from {{ ref('model_b') }}
) as Foo
{% endtest %}
{% test type_two(model) %}
{{ config(severity = "WARN") }}
select * from {{ model }}
{% endtest %}
"""

# Two-row seed with one int and one string column.
seeds__some_seed_csv = """
col_int,col_str
1,hello
2,goodbye
"""
test_context_models__schema_yml = """
version: 2
models:
- name: model_a
tests:
- type_one
- type_two
- name: model_c
tests:
- call_pkg_macro
- local_utils.dispatch
- my_datediff
"""
test_context_models__model_c_sql = """
select 1 as fun
"""
test_context_models__model_b_sql = """
select 1 as notfun
"""
test_context_models__model_a_sql = """
select 1 as fun
"""
name_collision__schema_yml = """
version: 2
models:
- name: base
columns:
- name: extension_id
tests:
- not_null
- name: base_extension
columns:
- name: id
tests:
- not_null
"""
name_collision__base_sql = """
SELECT 'hello_world' AS extension_id
"""
name_collision__base_extension_sql = """
SELECT 'NOT_NULL' AS id
"""
test_context_where_subq_macros__custom_generic_test_sql = """
/*{# This test will fail if get_where_subquery() is missing from TestContext + TestMacroNamespace #}*/
{% test self_referential(model) %}
{%- set relation = api.Relation.create(schema=model.schema, identifier=model.table) -%}
{%- set columns = adapter.get_columns_in_relation(relation) -%}
{%- set columns_csv = columns | map(attribute='name') | list | join(', ') -%}
select {{ columns_csv }} from {{ model }}
limit 0
{% endtest %}
"""
invalid_schema_models__schema_yml = """
version: 2
models:
name: model
columns:
- name: Id
quote: true
tests:
- unique
- not_null
"""
invalid_schema_models__model_sql = """
select 1 as "Id"
"""
models_v2__render_test_cli_arg_models__schema_yml = """
version: 2
models:
- name: model
tests:
- equivalent:
value: "{{ var('myvar', 'baz') }}-bar"
"""
models_v2__render_test_cli_arg_models__model_sql = """
select 1 as id
"""
models_v2__override_get_test_models__schema_yml = """
version: 2
models:
- name: my_model_pass
description: "The table has 1 null values, and we're okay with that, until it's more than 1."
columns:
- name: id
description: "The number of responses for this favorite color - purple will be null"
tests:
- not_null:
error_if: '>1'
warn_if: '>1'
- name: my_model_warning
description: "The table has 1 null values, and we're okay with that, but let us know"
columns:
- name: id
description: "The number of responses for this favorite color - purple will be null"
tests:
- not_null:
error_if: '>1'
- name: my_model_failure
description: "The table has 2 null values, and we're not okay with that"
columns:
- name: id
description: "The number of responses for this favorite color - purple will be null"
tests:
- not_null:
error_if: '>1'
"""
models_v2__override_get_test_models__my_model_warning_sql = """
select * from {{ ref('my_model_pass') }}
"""
models_v2__override_get_test_models__my_model_pass_sql = """
select 1 as id
UNION ALL
select null as id
"""
models_v2__override_get_test_models__my_model_failure_sql = """
select * from {{ ref('my_model_pass') }}
UNION ALL
select null as id
"""
models_v2__models__schema_yml = """
version: 2
models:
- name: table_copy
description: "A copy of the table"
columns:
- name: id
description: "The ID"
tests:
- not_null
- unique
tags:
- table_id
- name: first_name
description: "The user's first name"
tests:
- not_null
tags:
- table_first_name
- name: ip_address
description: "The user's IP address"
tests:
- not_null
- name: updated_at
description: "The update time of the user"
tests:
- not_null
- name: email
description: "The user's email address"
tests:
- unique
- name: favorite_color
description: "The user's favorite color"
tests:
- accepted_values: {
values: ['blue', 'green'],
quote: true,
tags: table_copy_favorite_color # tags can be a single | |
# <gh_stars>0  (dataset-extraction artifact, preserved as a comment)
import multiprocessing as mp
import pandas as pd
import numpy as np
from pathlib import Path
import pickle
from datetime import datetime
import os
from ParallelPyMetaMap.altered_pymetamap.MetaMap import MetaMap
from ParallelPyMetaMap.altered_pymetamap.SubprocessBackend import SubprocessBackend
from ParallelPyMetaMap.main.output_files import output_files
from ParallelPyMetaMap.main.annotation_func import annotation_func
from ParallelPyMetaMap.main.df_semantictypes import df_semantictypes
from ParallelPyMetaMap.main.df_semgroups import df_semgroups
def ppmm(numbers_of_cores,
path_to_metamap,
column_name = 'content_text',
unique_id = 'pmid',
extension = 'txt',
extension_format = None,
restart = False,
path_to_file = None,
file = None,
composite_phrase=4,
fielded_mmi_output=False,
machine_output=False,
filename=None,
file_format='sldi',
allow_acronym_variants=False,
word_sense_disambiguation=False,
allow_large_n=False,
strict_model=False,
relaxed_model=False,
allow_overmatches=False,
allow_concept_gaps=False,
term_processing=False,
no_derivational_variants=False,
derivational_variants=False,
ignore_word_order=False,
unique_acronym_variants=False,
prefer_multiple_concepts=False,
ignore_stop_phrases=False,
compute_all_mappings=False,
prune=False,
mm_data_version=False,
mm_data_year=False,
verbose=False,
exclude_sources=[],
restrict_to_sources=[],
restrict_to_sts=[],
exclude_sts=[],
no_nums=[]):
if (fielded_mmi_output == False and machine_output == False) or (fielded_mmi_output == True and machine_output == True):
print("You need to set either fielded_mmi_output or machine_output to 'True'")
return None
exit()
if fielded_mmi_output == True:
out_form = 'mmi'
else:
out_form = 'mo'
output_files(column_name, out_form, extension)
if numbers_of_cores >= mp.cpu_count():
print('The number of cores you want to use is equal or greater than the numbers of cores in your machine. We stop the script now')
return None
exit()
elif numbers_of_cores < 4:
par_core = 1
elif numbers_of_cores > 3:
par_core = (numbers_of_cores - 2)
if path_to_file != None and file != None:
print('You need to input either a path to a Pickle object or a Pandas DataFrame. You can not input both!')
return None
exit()
elif path_to_file != None:
df = pickle.load(open(path_to_file, 'rb'))
elif type(file) == pd.core.frame.DataFrame:
df = file
else:
print('You did not input any data to process')
return None
exit()
if extension_format == 'dict' or extension_format == 'terminal' or extension_format == None:
pass
else:
print("Your extension_format parameter should be equal to 'dict' or 'terminal' for mmi output or 'None' for mo output please enter a valid parameter.")
return None
exit()
if fielded_mmi_output == True:
if extension_format == 'dict' or extension_format == 'terminal':
pass
else:
print("You are running the your code with the 'fielded_mmi_output' parameter please change the 'extension_format' to 'dict' or 'terminal'.")
return None
exit()
else:
if extension_format == None:
pass
else:
print("You are running the your code with the 'machine_output' parameter please change 'extension_format' to 'None'.")
return None
exit()
if len(df) < par_core:
par_core = len(df)
pmids = []
for i in range(len(df)):
if df[column_name][i] != df[column_name][i] or df[column_name][i] == None or df[column_name][i] == '' or df[column_name][i][:4] == 'ABS:':
pass
else:
if len(df.iloc[i][column_name].split()) > 150000:
pass
else:
pmids.append(df.iloc[i][unique_id])
if df.iloc[i][unique_id] != df.iloc[i][unique_id] or df.iloc[i][unique_id] == None or df.iloc[i][unique_id] == '':
print('Your unique identifier is empty/None/NaN, please choose a unique identifier present for each row.')
return None
exit()
if '/' in str(df.iloc[i][unique_id]):
print('Your unique identifier contains "/" please choose another unique identifier, remove the "/" from the current unique identifier or replace "/" with another character.')
return None
exit()
if len(np.unique(pmids)) == len(pmids):
df = df[df[unique_id].isin(pmids)]
else:
print('It seems that one of your unique identifier is duplicate, please choose a unique identifier present for each row.')
return None
exit()
update = False
retrieved_path = [path for path in Path(f'output_ParallelPyMetaMap_{column_name}_{out_form}/annotated_df').iterdir() if path.stem == f'annotated_{column_name}_{unique_id}_df2']
if len(retrieved_path) == 0:
update = False
elif retrieved_path[0]:
update = True
else:
update = False
if restart == True and len([name for name in os.listdir(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/') if os.path.isfile(os.path.join(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/', name))]) == 0:
print('There is/are no temporary_df(s) in the directory. The code never started to annotate. Please change the "restart" parameter to "False". You might want to check if you get another error.')
return None
exit()
if restart == False:
pass
else:
needed_restart = False
concat_df = None
if update == True:
if fielded_mmi_output == True:
df_processed = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/annotated_df/annotated_{column_name}_{unique_id}_df2.p', 'rb'))
df_processed = df_processed[['cui', 'umls_preferred_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'annotation', f'{unique_id}']]
if machine_output == True:
df_processed = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/annotated_df/annotated_{column_name}_{unique_id}_df2.p', 'rb'))
df_processed = df_processed[['cui', 'prefered_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'negation', 'trigger', 'sab', 'pos_info', 'score', f'{unique_id}']]
count_temp_files = len([name for name in os.listdir(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/') if os.path.isfile(os.path.join(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/', name))])
if count_temp_files > 1:
for i in range(count_temp_files):
df_dynamic = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/annotated_{column_name}_df2_{i+1}.p', 'rb'))
if fielded_mmi_output == True:
if df_dynamic.shape[1] == 8:
df_dynamic = df_dynamic[['cui', 'umls_preferred_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'annotation', f'{unique_id}']]
df_processed = pd.concat([df_processed, df_dynamic])
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
else:
needed_restart = True
df_dynamic['semantic_type'] = df_dynamic['semantic_type'].str.strip('[]').str.split(',')
df_semantictypes_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semantictypes.p', 'rb'))
df_semgroups_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semgroups.p', 'rb'))
full_semantic_type_name_list = []
for i in range(len(df_dynamic)):
full_semantic_type_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
full_semantic_type_name_list_current.append(df_semantictypes_df[df_semantictypes_df.abbreviation == df_dynamic.iloc[i].semantic_type[j]].full_semantic_type_name.values[0])
full_semantic_type_name_list.append(full_semantic_type_name_list_current)
df_dynamic["full_semantic_type_name"] = full_semantic_type_name_list
semantic_group_name_list = []
for i in range(len(df_dynamic)):
semantic_group_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
semantic_group_name_list_current.append(df_semgroups_df[df_semgroups_df.full_semantic_type_name == df_dynamic.iloc[i].full_semantic_type_name[j]].semantic_group_name.values[0])
semantic_group_name_list.append(semantic_group_name_list_current)
df_dynamic["semantic_group_name"] = semantic_group_name_list
df_dynamic = df_dynamic[['cui', 'umls_preferred_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'annotation', f'{unique_id}']]
df_processed = pd.concat([df_processed, df_dynamic])
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
if machine_output == True:
if df_dynamic.shape[1] == 12:
df_dynamic = df_dynamic[['cui', 'prefered_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'negation', 'trigger', 'sab', 'pos_info', 'score', f'{unique_id}']]
df_processed = pd.concat([df_processed, df_dynamic])
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
else:
needed_restart = True
df_dynamic = df_dynamic.drop_duplicates(subset=['cui', 'trigger', 'pos_info', f'{unique_id}'])
df_dynamic = df_dynamic.reset_index(drop=True)
df_dynamic['pos_info'] = df_dynamic['pos_info'].str.strip('[]').str.split(',')
aggregation_functions = {'occurrence': 'sum', 'negation': 'sum', 'sab': lambda x: list(x), 'trigger': lambda x: list(x), 'score': lambda x: list(x), 'pos_info': lambda x: list(x), 'prefered_name': 'first', 'semantic_type': 'first'}
df_dynamic = df_dynamic.groupby(['cui', f'{unique_id}']).aggregate(aggregation_functions)
df_dynamic = df_dynamic.reset_index()
df_semantictypes_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semantictypes.p', 'rb'))
df_semgroups_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semgroups.p', 'rb'))
full_semantic_type_name_list = []
for i in range(len(df_dynamic)):
full_semantic_type_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
full_semantic_type_name_list_current.append(df_semantictypes_df[df_semantictypes_df.abbreviation == df_dynamic.iloc[i].semantic_type[j]].full_semantic_type_name.values[0])
full_semantic_type_name_list.append(full_semantic_type_name_list_current)
df_dynamic["full_semantic_type_name"] = full_semantic_type_name_list
semantic_group_name_list = []
for i in range(len(df_dynamic)):
semantic_group_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
semantic_group_name_list_current.append(df_semgroups_df[df_semgroups_df.full_semantic_type_name == df_dynamic.iloc[i].full_semantic_type_name[j]].semantic_group_name.values[0])
semantic_group_name_list.append(semantic_group_name_list_current)
df_dynamic["semantic_group_name"] = semantic_group_name_list
df_dynamic = df_dynamic[['cui', 'prefered_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'negation', 'trigger', 'sab', 'pos_info', 'score', f'{unique_id}']]
df_processed = pd.concat([df_processed, df_dynamic])
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
df_processed = df_processed.drop_duplicates(subset=[f'{unique_id}', 'cui'], keep='first')
df_processed = df_processed.reset_index(drop=True)
concat_df = concat_df.drop_duplicates(subset=[f'{unique_id}', 'cui'], keep='first')
concat_df = concat_df.reset_index(drop=True)
if needed_restart == False:
print('The process seems to be done already, please set the restart parameter to "False"')
return None
exit()
else:
pickle.dump(concat_df, open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/annotated_df/annotated_{column_name}_{unique_id}_df.p', 'wb'))
pickle.dump(df_processed, open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/annotated_df/annotated_{column_name}_{unique_id}_df2.p', 'wb'))
else:
count_temp_files = len([name for name in os.listdir(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/') if os.path.isfile(os.path.join(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/', name))])
if count_temp_files > 1:
for i in range(count_temp_files):
df_dynamic = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/temporary_df/annotated_{column_name}_df2_{i+1}.p', 'rb'))
if fielded_mmi_output == True:
if df_dynamic.shape[1] == 8:
df_dynamic = df_dynamic[['cui', 'umls_preferred_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'annotation', f'{unique_id}']]
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
else:
df_dynamic['semantic_type'] = df_dynamic['semantic_type'].str.strip('[]').str.split(',')
df_semantictypes_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semantictypes.p', 'rb'))
df_semgroups_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semgroups.p', 'rb'))
full_semantic_type_name_list = []
for i in range(len(df_dynamic)):
full_semantic_type_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
full_semantic_type_name_list_current.append(df_semantictypes_df[df_semantictypes_df.abbreviation == df_dynamic.iloc[i].semantic_type[j]].full_semantic_type_name.values[0])
full_semantic_type_name_list.append(full_semantic_type_name_list_current)
df_dynamic["full_semantic_type_name"] = full_semantic_type_name_list
semantic_group_name_list = []
for i in range(len(df_dynamic)):
semantic_group_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
semantic_group_name_list_current.append(df_semgroups_df[df_semgroups_df.full_semantic_type_name == df_dynamic.iloc[i].full_semantic_type_name[j]].semantic_group_name.values[0])
semantic_group_name_list.append(semantic_group_name_list_current)
df_dynamic["semantic_group_name"] = semantic_group_name_list
df_dynamic = df_dynamic[['cui', 'umls_preferred_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'annotation', f'{unique_id}']]
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
if machine_output == True:
if df_dynamic.shape[1] == 12:
df_dynamic = df_dynamic[['cui', 'prefered_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'negation', 'trigger', 'sab', 'pos_info', 'score', f'{unique_id}']]
df_processed = pd.concat([df_processed, df_dynamic])
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
else:
df_dynamic = df_dynamic.drop_duplicates(subset=['cui', 'trigger', 'pos_info', f'{unique_id}'])
df_dynamic = df_dynamic.reset_index(drop=True)
df_dynamic['pos_info'] = df_dynamic['pos_info'].str.strip('[]').str.split(',')
aggregation_functions = {'occurrence': 'sum', 'negation': 'sum', 'sab': lambda x: list(x), 'trigger': lambda x: list(x), 'score': lambda x: list(x), 'pos_info': lambda x: list(x), 'prefered_name': 'first', 'semantic_type': 'first'}
df_dynamic = df_dynamic.groupby(['cui', f'{unique_id}']).aggregate(aggregation_functions)
df_dynamic = df_dynamic.reset_index()
df_semantictypes_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semantictypes.p', 'rb'))
df_semgroups_df = pickle.load(open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/extra_resources/df_semgroups.p', 'rb'))
full_semantic_type_name_list = []
for i in range(len(df_dynamic)):
full_semantic_type_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
full_semantic_type_name_list_current.append(df_semantictypes_df[df_semantictypes_df.abbreviation == df_dynamic.iloc[i].semantic_type[j]].full_semantic_type_name.values[0])
full_semantic_type_name_list.append(full_semantic_type_name_list_current)
df_dynamic["full_semantic_type_name"] = full_semantic_type_name_list
semantic_group_name_list = []
for i in range(len(df_dynamic)):
semantic_group_name_list_current = []
for j in range(len(df_dynamic.iloc[i].semantic_type)):
semantic_group_name_list_current.append(df_semgroups_df[df_semgroups_df.full_semantic_type_name == df_dynamic.iloc[i].full_semantic_type_name[j]].semantic_group_name.values[0])
semantic_group_name_list.append(semantic_group_name_list_current)
df_dynamic["semantic_group_name"] = semantic_group_name_list
df_dynamic = df_dynamic[['cui', 'prefered_name', 'semantic_type', 'full_semantic_type_name', 'semantic_group_name', 'occurrence', 'negation', 'trigger', 'sab', 'pos_info', 'score', f'{unique_id}']]
if type(concat_df) == None:
concat_df = df_dynamic
else:
concat_df = pd.concat([concat_df, df_dynamic])
concat_df = concat_df.drop_duplicates(subset=[f'{unique_id}', 'cui'], keep='first')
concat_df = concat_df.reset_index(drop=True)
pickle.dump(concat_df, open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/annotated_df/annotated_{column_name}_{unique_id}_df.p', 'wb'))
pickle.dump(concat_df, open(f'./output_ParallelPyMetaMap_{column_name}_{out_form}/annotated_df/annotated_{column_name}_{unique_id}_df2.p', 'wb'))
update = True
if update == | |
# <filename>VPLParser.py  <gh_stars>1-10  (dataset-extraction artifact, preserved as a comment)
# Generated from VPL.g4 by ANTLR 4.7
# encoding: utf-8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
    """Return the serialized ATN for the generated VPL parser.

    The payload is ANTLR's compressed augmented-transition-network encoding
    (opaque UTF-16 escape data produced by the ANTLR 4.7 code generator).
    It is deserialized once at class-definition time by ATNDeserializer.
    Do not edit the literals below by hand — regenerate from VPL.g4 instead.
    """
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\32")
        buf.write("\u0085\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\3\2\3\2\3\2\3\2\5\2\31\n\2\3")
        buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\5\3\5\3\5")
        buf.write("\3\5\5\5*\n\5\3\6\3\6\3\6\3\6\3\6\5\6\61\n\6\3\7\3\7\3")
        buf.write("\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7")
        buf.write("\3\7\3\7\3\7\3\7\5\7G\n\7\3\b\3\b\3\b\3\b\3\b\5\bN\n\b")
        buf.write("\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3")
        buf.write("\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t")
        buf.write("\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3")
        buf.write("\t\5\ty\n\t\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\n\u0083")
        buf.write("\n\n\3\n\2\2\13\2\4\6\b\n\f\16\20\22\2\2\2\u008a\2\30")
        buf.write("\3\2\2\2\4\32\3\2\2\2\6!\3\2\2\2\b)\3\2\2\2\n\60\3\2\2")
        buf.write("\2\fF\3\2\2\2\16M\3\2\2\2\20x\3\2\2\2\22\u0082\3\2\2\2")
        buf.write("\24\25\5\4\3\2\25\26\5\2\2\2\26\31\3\2\2\2\27\31\7\2\2")
        buf.write("\3\30\24\3\2\2\2\30\27\3\2\2\2\31\3\3\2\2\2\32\33\7\n")
        buf.write("\2\2\33\34\7\30\2\2\34\35\5\6\4\2\35\36\5\n\6\2\36\37")
        buf.write("\5\f\7\2\37 \7\13\2\2 \5\3\2\2\2!\"\7\3\2\2\"#\5\b\5\2")
        buf.write("#$\7\4\2\2$\7\3\2\2\2%*\7\30\2\2&\'\7\30\2\2\'(\7\5\2")
        buf.write("\2(*\5\b\5\2)%\3\2\2\2)&\3\2\2\2*\t\3\2\2\2+,\7\f\2\2")
        buf.write(",-\5\b\5\2-.\7\6\2\2.\61\3\2\2\2/\61\3\2\2\2\60+\3\2\2")
        buf.write("\2\60/\3\2\2\2\61\13\3\2\2\2\62\63\7\r\2\2\63\64\5\22")
        buf.write("\n\2\64\65\7\16\2\2\65\66\5\f\7\2\66\67\7\17\2\2\678\5")
        buf.write("\16\b\28G\3\2\2\29:\7\21\2\2:;\5\22\n\2;<\7\22\2\2<=\5")
        buf.write("\f\7\2=>\7\20\2\2>?\5\16\b\2?G\3\2\2\2@A\7\30\2\2AB\7")
        buf.write("\7\2\2BC\5\20\t\2CD\5\16\b\2DG\3\2\2\2EG\3\2\2\2F\62\3")
        buf.write("\2\2\2F9\3\2\2\2F@\3\2\2\2FE\3\2\2\2G\r\3\2\2\2HI\7\6")
        buf.write("\2\2IJ\5\f\7\2JK\5\16\b\2KN\3\2\2\2LN\3\2\2\2MH\3\2\2")
        buf.write("\2ML\3\2\2\2N\17\3\2\2\2OP\7\23\2\2PQ\7\3\2\2QR\5\20\t")
        buf.write("\2RS\7\5\2\2ST\5\20\t\2TU\7\4\2\2Uy\3\2\2\2VW\7\24\2\2")
        buf.write("WX\7\3\2\2XY\5\20\t\2YZ\7\5\2\2Z[\5\20\t\2[\\\7\4\2\2")
        buf.write("\\y\3\2\2\2]^\7\25\2\2^_\7\3\2\2_`\5\20\t\2`a\7\5\2\2")
        buf.write("ab\5\20\t\2bc\7\4\2\2cy\3\2\2\2de\7\26\2\2ef\7\3\2\2f")
        buf.write("g\5\20\t\2gh\7\5\2\2hi\5\20\t\2ij\7\4\2\2jy\3\2\2\2kl")
        buf.write("\7\27\2\2lm\7\3\2\2mn\5\20\t\2no\7\5\2\2op\5\20\t\2pq")
        buf.write("\7\4\2\2qy\3\2\2\2rs\7\3\2\2st\5\20\t\2tu\7\4\2\2uy\3")
        buf.write("\2\2\2vy\7\30\2\2wy\7\31\2\2xO\3\2\2\2xV\3\2\2\2x]\3\2")
        buf.write("\2\2xd\3\2\2\2xk\3\2\2\2xr\3\2\2\2xv\3\2\2\2xw\3\2\2\2")
        buf.write("y\21\3\2\2\2z{\5\20\t\2{|\7\b\2\2|}\7\31\2\2}\u0083\3")
        buf.write("\2\2\2~\177\5\20\t\2\177\u0080\7\t\2\2\u0080\u0081\7\31")
        buf.write("\2\2\u0081\u0083\3\2\2\2\u0082z\3\2\2\2\u0082~\3\2\2\2")
        buf.write("\u0083\23\3\2\2\2\t\30)\60FMx\u0082")
        return buf.getvalue()
class VPLParser ( Parser ):
    """ANTLR 4.7-generated recursive-descent parser for the VPL grammar.

    Generated from VPL.g4 — do not edit by hand; regenerate instead.
    """

    grammarFileName = "VPL.g4"

    # ATN is deserialized once and shared by all parser instances.
    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    # Prediction cache shared across instances for adaptive LL(*) lookups.
    sharedContextCache = PredictionContextCache()

    # Display names for literal tokens, indexed by token type.
    literalNames = [ "<INVALID>", "'('", "')'", "','", "';'", "'='", "'<'",
                     "'>='", "'func'", "'end'", "'var'", "'if'", "'then'",
                     "'endif'", "'endwhile'", "'while'", "'do'", "'add'",
                     "'minus'", "'mult'", "'div'", "'min'" ]

    # Symbolic names for named tokens, indexed by token type.
    symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "FUNC", "END", "VAR", "IF", "THEN", "ENDIF", "ENDWHILE",
                      "WHILE", "DO", "ADD", "MINUS", "MULT", "DIV", "MIN",
                      "IDENT", "NUM", "WS" ]

    # Rule indices (order matches ruleNames below).
    RULE_program = 0
    RULE_function_declaration = 1
    RULE_parameter = 2
    RULE_name = 3
    RULE_variable_declaration = 4
    RULE_statement = 5
    RULE_nest_statement = 6
    RULE_expression = 7
    RULE_compare = 8

    ruleNames = [ "program", "function_declaration", "parameter", "name",
                  "variable_declaration", "statement", "nest_statement",
                  "expression", "compare" ]

    EOF = Token.EOF
    # Token-type constants; T__N are the anonymous literal tokens above.
    T__0=1
    T__1=2
    T__2=3
    T__3=4
    T__4=5
    T__5=6
    T__6=7
    FUNC=8
    END=9
    VAR=10
    IF=11
    THEN=12
    ENDIF=13
    ENDWHILE=14
    WHILE=15
    DO=16
    ADD=17
    MINUS=18
    MULT=19
    DIV=20
    MIN=21
    IDENT=22
    NUM=23
    WS=24

    def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
        """Build a parser over *input*, writing diagnostics to *output*."""
        super().__init__(input, output)
        self.checkVersion("4.7")
        self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
        self._predicates = None
    class ProgramContext(ParserRuleContext):
        """Parse-tree context for the `program` rule (typed child accessors
        plus listener/visitor hooks)."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def function_declaration(self):
            # First (and only) function_declaration child, if present.
            return self.getTypedRuleContext(VPLParser.Function_declarationContext,0)

        def program(self):
            # Recursive program child (the rule is right-recursive).
            return self.getTypedRuleContext(VPLParser.ProgramContext,0)

        def EOF(self):
            return self.getToken(VPLParser.EOF, 0)

        def getRuleIndex(self):
            return VPLParser.RULE_program

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterProgram" ):
                listener.enterProgram(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitProgram" ):
                listener.exitProgram(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitProgram" ):
                return visitor.visitProgram(self)
            else:
                return visitor.visitChildren(self)
    def program(self):
        """Parse the `program` rule.

        Grammar (from the code below): program : function_declaration program
                                               | EOF ;
        Dispatches on one token of lookahead (FUNC starts a declaration,
        EOF ends the program).  `self.state = N` assignments are generated
        ATN bookkeeping and must not be reordered.
        """
        localctx = VPLParser.ProgramContext(self, self._ctx, self.state)
        self.enterRule(localctx, 0, self.RULE_program)
        try:
            self.state = 22
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [VPLParser.FUNC]:
                # Alternative 1: function_declaration followed by program.
                self.enterOuterAlt(localctx, 1)
                self.state = 18
                self.function_declaration()
                self.state = 19
                self.program()
                pass
            elif token in [VPLParser.EOF]:
                # Alternative 2: end of input.
                self.enterOuterAlt(localctx, 2)
                self.state = 21
                self.match(VPLParser.EOF)
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            # Standard generated recovery: record, report, resynchronize.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Function_declarationContext(ParserRuleContext):
        """Parse-tree context for the `function_declaration` rule."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def FUNC(self):
            return self.getToken(VPLParser.FUNC, 0)

        def IDENT(self):
            # The declared function's name token.
            return self.getToken(VPLParser.IDENT, 0)

        def parameter(self):
            return self.getTypedRuleContext(VPLParser.ParameterContext,0)

        def variable_declaration(self):
            return self.getTypedRuleContext(VPLParser.Variable_declarationContext,0)

        def statement(self):
            return self.getTypedRuleContext(VPLParser.StatementContext,0)

        def END(self):
            return self.getToken(VPLParser.END, 0)

        def getRuleIndex(self):
            return VPLParser.RULE_function_declaration

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterFunction_declaration" ):
                listener.enterFunction_declaration(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitFunction_declaration" ):
                listener.exitFunction_declaration(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitFunction_declaration" ):
                return visitor.visitFunction_declaration(self)
            else:
                return visitor.visitChildren(self)
    def function_declaration(self):
        """Parse the `function_declaration` rule.

        Grammar (from the matches below):
            function_declaration : FUNC IDENT parameter variable_declaration
                                   statement END ;
        Single alternative — each child is matched in sequence.
        """
        localctx = VPLParser.Function_declarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 2, self.RULE_function_declaration)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 24
            self.match(VPLParser.FUNC)
            self.state = 25
            self.match(VPLParser.IDENT)
            self.state = 26
            self.parameter()
            self.state = 27
            self.variable_declaration()
            self.state = 28
            self.statement()
            self.state = 29
            self.match(VPLParser.END)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ParameterContext(ParserRuleContext):
        """Parse-tree context for the `parameter` rule."""

        def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def name(self):
            return self.getTypedRuleContext(VPLParser.NameContext,0)

        def getRuleIndex(self):
            return VPLParser.RULE_parameter

        def enterRule(self, listener:ParseTreeListener):
            if hasattr( listener, "enterParameter" ):
                listener.enterParameter(self)

        def exitRule(self, listener:ParseTreeListener):
            if hasattr( listener, "exitParameter" ):
                listener.exitParameter(self)

        def accept(self, visitor:ParseTreeVisitor):
            if hasattr( visitor, "visitParameter" ):
                return visitor.visitParameter(self)
            else:
                return visitor.visitChildren(self)
    def parameter(self):
        """Parse the `parameter` rule.

        Grammar (from the matches below): parameter : '(' name ')' ;
        (T__0 is the '(' literal and T__1 is ')' per literalNames.)
        """
        localctx = VPLParser.ParameterContext(self, self._ctx, self.state)
        self.enterRule(localctx, 4, self.RULE_parameter)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 31
            self.match(VPLParser.T__0)
            self.state = 32
            self.name()
            self.state = 33
            self.match(VPLParser.T__1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class NameContext(ParserRuleContext):
    """Base parse-tree context for the labeled 'name' rule alternatives
    (ParameterNameContext / MultParameterNameContext). ANTLR-generated."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return VPLParser.RULE_name

    def copyFrom(self, ctx:ParserRuleContext):
        # used when re-labeling this context as one of its alternatives
        super().copyFrom(ctx)
class MultParameterNameContext(NameContext):
    """Labeled alternative of 'name': IDENT T__2 name (a list of names
    joined by the T__2 literal token). ANTLR-generated."""

    def __init__(self, parser, ctx:ParserRuleContext): # actually a VPLParser.NameContext
        super().__init__(parser)
        self.copyFrom(ctx)

    def IDENT(self):
        return self.getToken(VPLParser.IDENT, 0)

    def name(self):
        # the recursively parsed remainder of the name list
        return self.getTypedRuleContext(VPLParser.NameContext,0)

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterMultParameterName" ):
            listener.enterMultParameterName(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitMultParameterName" ):
            listener.exitMultParameterName(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitMultParameterName" ):
            return visitor.visitMultParameterName(self)
        else:
            return visitor.visitChildren(self)
class ParameterNameContext(NameContext):
    """Labeled alternative of 'name': a single IDENT. ANTLR-generated."""

    def __init__(self, parser, ctx:ParserRuleContext): # actually a VPLParser.NameContext
        super().__init__(parser)
        self.copyFrom(ctx)

    def IDENT(self):
        return self.getToken(VPLParser.IDENT, 0)

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterParameterName" ):
            listener.enterParameterName(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitParameterName" ):
            listener.exitParameterName(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitParameterName" ):
            return visitor.visitParameterName(self)
        else:
            return visitor.visitChildren(self)
def name(self):
    """Parse rule: name : IDENT              (# ParameterName)
                        | IDENT T__2 name    (# MultParameterName).

    The alternative is chosen by adaptive LL(*) prediction.
    ANTLR-generated; state numbers come from the serialized ATN.
    """
    localctx = VPLParser.NameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 6, self.RULE_name)
    try:
        self.state = 39
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
        if la_ == 1:
            # single identifier
            localctx = VPLParser.ParameterNameContext(self, localctx)
            self.enterOuterAlt(localctx, 1)
            self.state = 35
            self.match(VPLParser.IDENT)
            pass
        elif la_ == 2:
            # identifier, separator token, then further names (recursive)
            localctx = VPLParser.MultParameterNameContext(self, localctx)
            self.enterOuterAlt(localctx, 2)
            self.state = 36
            self.match(VPLParser.IDENT)
            self.state = 37
            self.match(VPLParser.T__2)
            self.state = 38
            self.name()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Variable_declarationContext(ParserRuleContext):
    """Parse-tree context for the 'variable_declaration' rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def VAR(self):
        return self.getToken(VPLParser.VAR, 0)

    def name(self):
        return self.getTypedRuleContext(VPLParser.NameContext,0)

    def getRuleIndex(self):
        return VPLParser.RULE_variable_declaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterVariable_declaration" ):
            listener.enterVariable_declaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitVariable_declaration" ):
            listener.exitVariable_declaration(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitVariable_declaration" ):
            return visitor.visitVariable_declaration(self)
        else:
            return visitor.visitChildren(self)
def variable_declaration(self):
    """Parse rule: variable_declaration : VAR name T__3 | (empty).

    The empty alternative is taken when the lookahead token already
    starts a statement (END/IF/WHILE/IDENT). ANTLR-generated.
    """
    localctx = VPLParser.Variable_declarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 8, self.RULE_variable_declaration)
    try:
        self.state = 46
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [VPLParser.VAR]:
            self.enterOuterAlt(localctx, 1)
            self.state = 41
            self.match(VPLParser.VAR)
            self.state = 42
            self.name()
            self.state = 43
            self.match(VPLParser.T__3)
            pass
        elif token in [VPLParser.END, VPLParser.IF, VPLParser.WHILE, VPLParser.IDENT]:
            # empty alternative: no declaration present
            self.enterOuterAlt(localctx, 2)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementContext(ParserRuleContext):
    """Base parse-tree context for the labeled 'statement' alternatives
    (Condition/Whileloop/Assign/NoneStatement). ANTLR-generated."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return VPLParser.RULE_statement

    def copyFrom(self, ctx:ParserRuleContext):
        # used when re-labeling this context as one of its alternatives
        super().copyFrom(ctx)
class WhileloopContext(StatementContext):
    """Labeled alternative of 'statement':
    WHILE compare DO statement ENDWHILE nest_statement. ANTLR-generated."""

    def __init__(self, parser, ctx:ParserRuleContext): # actually a VPLParser.StatementContext
        super().__init__(parser)
        self.copyFrom(ctx)

    def WHILE(self):
        return self.getToken(VPLParser.WHILE, 0)

    def compare(self):
        # the loop condition
        return self.getTypedRuleContext(VPLParser.CompareContext,0)

    def DO(self):
        return self.getToken(VPLParser.DO, 0)

    def statement(self):
        # the loop body
        return self.getTypedRuleContext(VPLParser.StatementContext,0)

    def ENDWHILE(self):
        return self.getToken(VPLParser.ENDWHILE, 0)

    def nest_statement(self):
        # the statement following the loop, if any
        return self.getTypedRuleContext(VPLParser.Nest_statementContext,0)

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterWhileloop" ):
            listener.enterWhileloop(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitWhileloop" ):
            listener.exitWhileloop(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitWhileloop" ):
            return visitor.visitWhileloop(self)
        else:
            return visitor.visitChildren(self)
class ConditionContext(StatementContext):
    """Labeled alternative of 'statement':
    IF compare THEN statement ENDIF nest_statement. ANTLR-generated."""

    def __init__(self, parser, ctx:ParserRuleContext): # actually a VPLParser.StatementContext
        super().__init__(parser)
        self.copyFrom(ctx)

    def IF(self):
        return self.getToken(VPLParser.IF, 0)

    def compare(self):
        # the branch condition
        return self.getTypedRuleContext(VPLParser.CompareContext,0)

    def THEN(self):
        return self.getToken(VPLParser.THEN, 0)

    def statement(self):
        # the branch body
        return self.getTypedRuleContext(VPLParser.StatementContext,0)

    def ENDIF(self):
        return self.getToken(VPLParser.ENDIF, 0)

    def nest_statement(self):
        # the statement following the conditional, if any
        return self.getTypedRuleContext(VPLParser.Nest_statementContext,0)

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCondition" ):
            listener.enterCondition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCondition" ):
            listener.exitCondition(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitCondition" ):
            return visitor.visitCondition(self)
        else:
            return visitor.visitChildren(self)
class NoneStatementContext(StatementContext):
    """Labeled alternative of 'statement': the empty statement. ANTLR-generated."""

    def __init__(self, parser, ctx:ParserRuleContext): # actually a VPLParser.StatementContext
        super().__init__(parser)
        self.copyFrom(ctx)

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterNoneStatement" ):
            listener.enterNoneStatement(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitNoneStatement" ):
            listener.exitNoneStatement(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitNoneStatement" ):
            return visitor.visitNoneStatement(self)
        else:
            return visitor.visitChildren(self)
class AssignContext(StatementContext):
    """Labeled alternative of 'statement':
    IDENT T__4 expression nest_statement (T__4 is presumably the
    assignment operator literal -- confirm against the .g4 file).
    ANTLR-generated."""

    def __init__(self, parser, ctx:ParserRuleContext): # actually a VPLParser.StatementContext
        super().__init__(parser)
        self.copyFrom(ctx)

    def IDENT(self):
        # the assignment target
        return self.getToken(VPLParser.IDENT, 0)

    def expression(self):
        # the assigned value
        return self.getTypedRuleContext(VPLParser.ExpressionContext,0)

    def nest_statement(self):
        return self.getTypedRuleContext(VPLParser.Nest_statementContext,0)

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterAssign" ):
            listener.enterAssign(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitAssign" ):
            listener.exitAssign(self)

    def accept(self, visitor:ParseTreeVisitor):
        if hasattr( visitor, "visitAssign" ):
            return visitor.visitAssign(self)
        else:
            return visitor.visitChildren(self)
def statement(self):
localctx = VPLParser.StatementContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_statement)
try:
self.state = 68
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [VPLParser.IF]:
localctx = VPLParser.ConditionContext(self, localctx)
self.enterOuterAlt(localctx, 1)
self.state = 48
self.match(VPLParser.IF)
self.state = 49
self.compare()
self.state = 50
self.match(VPLParser.THEN)
self.state = 51
self.statement()
self.state = 52
self.match(VPLParser.ENDIF)
self.state = 53
self.nest_statement()
pass
elif token in [VPLParser.WHILE]:
localctx = VPLParser.WhileloopContext(self, localctx)
self.enterOuterAlt(localctx, 2)
self.state = 55
self.match(VPLParser.WHILE)
self.state = 56
self.compare()
self.state = 57
self.match(VPLParser.DO)
self.state = 58
self.statement()
self.state = 59
self.match(VPLParser.ENDWHILE)
self.state = 60
self.nest_statement()
pass
elif token in [VPLParser.IDENT]:
localctx = VPLParser.AssignContext(self, localctx)
self.enterOuterAlt(localctx, 3)
self.state = 62
self.match(VPLParser.IDENT)
self.state = 63
self.match(VPLParser.T__4)
self.state = 64
self.expression()
self.state = 65
self.nest_statement()
pass
elif token in [VPLParser.T__3, VPLParser.END, VPLParser.ENDIF, VPLParser.ENDWHILE]:
localctx = VPLParser.NoneStatementContext(self, | |
resolved = set([])
group = []
def resolve_streaming_dependencies(node):
for e in node.outgoing():
if e.has_streaming_link():
target = e._target
resolved.add(target)
group.append(target)
resolve_streaming_dependencies(target)
for in_edge in target.incoming():
if in_edge == e:
continue
source = in_edge._source
if source == node or source in resolved:
continue
if in_edge.has_streaming_link():
resolved.add(source)
group.append(source)
for node in self.topological_order():
if node in resolved:
continue
group.append(node)
resolved.add(node)
resolve_streaming_dependencies(node)
log.debug("Expand | Creating job group: %s", group)
yield group
group = []
def exclude(self, excludes):
    """Remove the given nodes and all of their successors from the graph.

    :param excludes: list of node names or node instances
    :type excludes: list of string or nodes
    """
    if not excludes:
        return
    if not isinstance(excludes, (list, tuple)):
        excludes = [excludes]
    excludes = set(excludes)
    # index the nodes by name
    names2nodes = {}
    for node in self.nodes():
        if node._job.name is not None:
            names2nodes[node._job.name] = node

    def _recursive_remove(node, force=True):
        # remove `node` and descend into its children; a child is only
        # removed when it has at most one parent, unless forced
        parents = list(node.parents())
        if force or len(parents) <= 1:
            children = list(node.children())
            for child in children:
                _recursive_remove(child, False)
            try:
                log.info("Excluding node %s", node)
                self.remove(node)
                # check the children again, they might have become invalid
                for child in [c for c in children
                              if c._tool in self._nodes]:
                    try:
                        child._tool.validate()
                    except Exception:
                        log.info("Forcing exclude of %s, "
                                 "node became invalid",
                                 child)
                        _recursive_remove(child)
            except KeyError:
                ## ignore errors where the node was already removed
                pass

    for name in excludes:
        # accept both node names and node instances (consistent with skip())
        if isinstance(name, basestring):
            if name not in names2nodes:
                log.warn("Node marked for exclusion not found: %s", name)
                continue
            node = names2nodes[name]
        else:
            node = name
        _recursive_remove(node)
    self._update_cleanup_nodes()
def skip(self, excludes):
    """Takes a list of node names or node instances and removes the node
    and tries to connect parent and children of the node.

    :param excludes: list of node names or node instances
    :type excludes: list of string or nodes
    """
    if not excludes:
        return
    if not isinstance(excludes, (list, tuple)):
        excludes = [excludes]
    excludes = set(excludes)
    # index the nodes by name
    names2nodes = {}
    for node in self.nodes():
        if node._job.name is not None:
            names2nodes[node._job.name] = node
    for name in excludes:
        if isinstance(name, basestring) and not name in names2nodes:
            log.warn("Node marked for skip not found: %s", name)
        else:
            # resolve names to nodes; node instances are used directly
            if isinstance(name, basestring):
                node = names2nodes[name]
            else:
                node = name
            parents = list(node.parents())
            children = list(node.children())
            if len(parents) > 0 and len(children) > 0:
                # propagate all output files of the skipped node
                # back to the parent if the parent does not already
                # write a file
                out_files = list(node._tool.get_output_files())
                if len(out_files) > 0:
                    for p in parents:
                        p_files = list(p._tool.get_output_files())
                        if len(p_files) == 0:
                            out_opt = p._tool.options.get_default_output()
                            p.set(out_opt.name, out_files)
                # rewire the option links around the skipped node:
                # every (incoming source option) feeds every
                # (outgoing target option).
                # NOTE(review): the inner loop variable `link` shadows
                # the outer one; `target_option` is captured before the
                # shadowing, so behavior is correct but fragile.
                for outedge in node.outgoing():
                    for link in outedge._links:
                        target_option = link[1]
                        for inedge in node.incoming():
                            for link in inedge._links:
                                source_option, stream = link[0], link[2]
                                outedge._target.set(target_option.name,
                                                    source_option,
                                                    append=True,
                                                    allow_stream=stream)
            elif len(parents) == 0:
                # no parent but at least one child: forward the
                # default input to all children
                in_opt = node._tool.options.get_default_input()
                if in_opt:
                    for child in children:
                        child._tool.options.get_default_input().set(
                            in_opt.raw()
                        )
            elif len(children) == 0:
                # no children: push the default output up to all parents
                opt = node._tool.options.get_default_output()
                if opt:
                    for parent in parents:
                        parent._tool.options.get_default_output().set(
                            opt.raw()
                        )
            self.remove(node)
    self._update_cleanup_nodes()
def context(self, context):
    """Merge the given values into the pipeline's global context.

    The entries become available for template variable and reference
    resolution in node options.

    :param context: the context mapping; empty/None values are ignored
    """
    if not context:
        return
    self.utils._update_global_env(context)
def expand(self, context=None, validate=True, _find_dup=True,
           _check_fanout=True):
    """This modifies the current graph state and applies fan_out
    operations on nodes with singleton options that are populated with
    list.

    An exception is raised in case a node has more than one option that
    should be expanded and the number of configured elements is not the
    same.

    You can specify a ``context`` that will be used additionally to resolve
    template variables and references in node options. This allows you
    to give the template system access to your local environment. For
    example::

        >>> p = Pipeline()
        >>> a = "myinput.txt"
        >>> p.bash('wc -l ${a}')
        bash
        >>> p.expand(locals())
        False
        >>> assert p.get("bash").cmd.get() == 'wc -l myinput.txt'

    :param validate: disable validation by setting this to false
    :param context: specify a local context that is taken into account
                    in template and option rendering
    :param _find_dup: when True, also propagate link values, merge
                      duplicate nodes and run the final node validation
    :param _check_fanout: forwarded to the fanout expansion step
    :returns: True if at least one node was fanned out
    """
    log.info("Expand | Expand Graph with %d nodes", len(self))
    if context is not None:
        self.context(context)
    # add dependency edges between groups
    # when a node in a group has an incoming edge from a parent
    # outside of the group, add the edge also to any predecessor
    # of the node within the group
    self._expand_add_group_dependencies()
    # check nodes for fanout
    fanout_done = self._expand_fanout(_check_fanout)
    # for all temp jobs, find a final non-temp target
    # if we have targets, create a cleanup job, add
    # all the temp job's output files and
    # make it dependant on the temp nodes targets
    self._expand_add_cleanup_jobs()
    # iterate again to expand on pipeline of pipelines
    self._expand_sub_pipelines(validate=validate)
    if _find_dup:
        # update node option values from links
        # TODO add index to links and use it here
        # render all nodes
        log.info("Expand | Render node context for %d nodes", len(self))
        # setup nodes
        #for n in self.nodes():
            #n._tool.setup()
        # render values
        #_render_nodes(self, list(self.nodes()))
        updated = set([])
        cwd = self._cwd
        if cwd is None:
            cwd = os.getcwd()
        for node in self.topological_order():
            # ensure a working directory is set
            if node._job.working_dir is None:
                node._job.working_dir = cwd
            node._tool.options.make_absolute(node._job.working_dir)
            # push the source option values of each outgoing link into
            # the target option; a target is reset exactly once before
            # values are accumulated
            for link in [l for e in node.outgoing() for l in e._links]:
                source = link[0]
                target = link[1]
                if not target in updated:
                    target._value = []
                    updated.add(target)
                target._value.extend(source.value)
        # detect duplicates and try to merge them
        self._expand_merge_duplicates()
    # apply names from global context
    self._expand_name_jobs_by_context()
    # applied and perform the final validation on all nodes
    if _find_dup:
        log.info("Expand | Validating nodes")
        for node in self.nodes():
            #node._tool.options.make_absolute(node._job.working_dir)
            self._validate_node(node, silent=not validate)
            #self._apply_node_name(node, node._name)
    ##########################################################
    # transitive reduction of dependencies
    #
    # Currently quite inefficient implementation of transitive
    # reduction to remove edges that are redundant in the
    # graph.
    ##########################################################
    #def transitive_reduction(vertex, child, done):
        #if child in done:
            #return
        #for outedge in child.outgoing():
            #vertex._remove_edge_to(outedge._target)
            #transitive_reduction(vertex, outedge._target, done)
        #done.add(child)
    #for j in self.nodes():
        #done = set([])
        #for child in j.outgoing():
            #transitive_reduction(j, child._target, done)
    log.info("Expand | Expansion finished. Nodes: %d", len(self))
    return fanout_done
def _expand_add_group_dependencies(self):
"""Add dependency edges between groups
when a node in a group has an incoming edge from a parent
outside of the group, add the edge also to any predecessor
of the node within the group
"""
for group in self.groups():
gs = set(group)
first = group[0]
for node in group:
for parent in node.parents():
if parent not in gs:
## add an edge to the first of the group
log.debug("Expand | add group dependency %s->%s",
parent, first)
self.add_edge(parent, first)
def _expand_fanout(self, fanout):
    """Fan out nodes whose singleton options are populated with lists.

    Walks the graph in topological order and expands each node that
    exposes fanout options, raising if the configured option lengths
    are inconsistent.

    :param fanout: when falsy, fanout is skipped entirely
    :returns: True when at least one node was fanned out
    """
    if not fanout:
        log.info("Expand | Fanout disabled, updating options")
        return False
    log.info("Expand | Checking for fanout in %d nodes", len(self))
    expanded_any = False
    for candidate in self.topological_order():
        options = self._get_fanout_options(candidate)
        if options:
            # raises when the fanout option lengths do not match
            self._check_fanout_options(candidate, options)
            self._fan_out(candidate, options)
            expanded_any = True
        else:
            log.debug("Expand | No fanout options found for %s", candidate)
    return expanded_any
def _expand_add_cleanup_jobs(self):
"""For all temp jobs, find a final non-temp target
if we have targets, create a cleanup job, add
all the temp job's output files and
make it dependant on the temp nodes targets
"""
log.info("Expand | Checking for temporary jobs")
temp_nodes = set([])
targets = set([])
temp_outputs = set([])
for node in self.nodes():
if node._job.temp:
temp_nodes.add(node)
if temp_nodes:
log.info("Expand | Check temporary outputs for %d job(s)",
len(temp_nodes))
for temp_node in temp_nodes:
for opt in temp_node._tool.options.get_by_type(
jip.options.TYPE_OUTPUT):
if not opt.is_stream():
temp_outputs.add(opt)
for child in temp_node.children():
if not child._job.temp:
targets.add(child)
log.info("Expand | Found %d temporary outputs and %d targets",
len(temp_outputs), len(targets))
if len(targets) > 0:
cleanup_node = self.run(
'cleanup',
files=list(temp_outputs)
)
cleanup_node.job.threads = 1
cleanup_node.job.temp = True
cleanup_node.job.name = "cleanup"
cleanup_node._name = "cleanup"
cleanup_node.files.dependency = True
#for | |
DOI_help = item.select('a')[0]['href']
DOI = DOI_help.split("/doi/")[1]
#print(DOI)
except:
DOI = None
try:
#Title
Title = item.select('h5')[0].get_text()
except:
Title = None
try:
Author = item.select('ul')[0].get_text()
Author = Author.replace("\n", " ")
except:
Author = Nonee
#print(Author)
try:
#Date
Date = item.select('span[class="dot-separator"]')[0].get_text()
Date = Date.split(",")[0]
except:
Date = None
#print(Date)
try:
#Publisher
Publisher = item.select('span[class="epub-section__title"]')[0].get_text()
try:
Publisher = Publisher.split(":")[1]
except:
Publisher = Publisher
#print(Publisher)
except:
Publisher = None
try:
Abstract = item.select('p')[0].get_text()
#print(Abstract)
except:
Abstract = None
try:
#Link
Link = "https://dl.acm.org" + DOI_help
#print(Link)
except:
Link = None
try:
cit= item.find("span", {"class": "citation"}).get_text()
except:
cit = None
records.append({ #Liste mit Ergebnissen erweitern
"DOI" : DOI,
"Title" : Title,
"Author" : Author,
"Cited_by" : cit,
"Date" : Date,
"Publisher" : Publisher,
"Abstract" : Abstract,
"Link" : Link
})
time.sleep(2)
#Entweder nächste Seite aufrufen oder Browser beenden
try:
driver.find_element_by_class_name('pagination__btn--next').click()
except:
driver.quit()
driver.quit()
print('--------------------')
df_ACM = pd.DataFrame(records)
df_ACM = df_ACM.head(count)
df_ACM['Database'] = "ACM Digital"
df_ACM.index += 1
return df_ACM
################################################## IEEE ######################################################
def scrape_ieee (query_ieee, count):
    """Scrape search results from IEEE Xplore using a Chrome webdriver.

    :param query_ieee: search query typed into the IEEE search box
    :param count: maximum number of result rows to keep
    :return: pandas DataFrame with one row per result
             (DOI, Title, Author, Cited_by, Date, Publisher, Abstract,
             Link, Database)
    """
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    driver = webdriver.Chrome(ChromeDriverManager().install())
    # open the start page and submit the query
    driver.get('https://ieeexplore.ieee.org/')
    time.sleep(5)
    search = driver.find_element_by_class_name("Typeahead-input")
    search.send_keys(query_ieee)
    time.sleep(3)
    search.send_keys(Keys.RETURN)
    time.sleep(8)
    # total number of hits, parsed from the results header text
    Anzahl = driver.find_element_by_class_name("Dashboard-header").text
    Anzahl = Anzahl.split("of ")[1]
    Anzahl = Anzahl.split(" for")[0]
    Anzahl = Anzahl.split(" result")[0]
    try:
        Anzahl = int(Anzahl.replace(",",""))
    except:
        Anzahl = int(Anzahl)
    print("IEEE digital - Anzahl Suchergebnisse: ", Anzahl)
    records = []  # collected result rows
    # y+1 = number of result pages to visit (25 results per page)
    if count < Anzahl:
        y = int(count/25)
        print("IEEE digital - Ausgabe an Suchergebnissen: ", count)
    else:
        # loop over all available pages
        y = int(Anzahl/25)
        print("IEEE digital - Ausgabe an Suchergebnissen: ", Anzahl)
    print('IEEE digital - Seiten zu durchsuchen: ', y+1)
    # z = number of scroll steps per page (lazy loading)
    if Anzahl<25:
        z = Anzahl*2
    else:
        if count < 25:
            z = 2*count
        else:
            z = 15
    for i in range(y+1):
        time.sleep(5)
        # scroll down, otherwise only the first data container is loaded
        t = 400
        for timer in range(0,z):
            driver.execute_script("window.scrollTo(0, "+str(t)+")")
            t += 400
            time.sleep(1)
        time.sleep(2)
        page_source = driver.page_source
        soup = BeautifulSoup(page_source, 'lxml')
        for item in soup.select('div[class="List-results-items"]'):
            try:
                # title
                Title = item.select('h2')[0].get_text()
                #print(Title)
            except:
                Title = None
            try:
                # author list
                Author = item.select('p')[0].get_text()
            except:
                Author = None
            try:
                # publication date
                Date = item.select('div[class="publisher-info-container"]')[0].get_text()
                Date = Date.split("|")[0]
                Date = Date.split(": ")[1]
                #print(Date)
            except:
                Date =None
            try:
                # publisher / venue
                Publisher = item.select('div[class="description"]')[0].get_text()
                Publisher = Publisher.split("|")[0]
                Publisher = Publisher.split("Year :")[0]
            except:
                Publisher = None
            #print(Publisher)
            try:
                # abstract
                Abstract = item.select('div[class="row doc-access-tools-container"]')[0].get_text()
                Abstract = Abstract.split("Abstract")[1]
                Abstract = Abstract.split("Show More")[0]
            except:
                Abstract = None
            #print(Abstract)
            try:
                # link to the article page
                Link_help = item.select('a')[0]['href']#.get_text()
                Link = "https://ieeexplore.ieee.org" + Link_help
            except:
                Link = None
            try:
                # citation count, displayed as "Papers (N)"
                cit = item.select('a:contains("Papers")')[0].get_text()
                substring = "Papers"
                if substring in cit:
                    cit = cit.replace("Papers (","")
                    cit = cit.replace(")","")
                else:
                    cit = cit
            except:
                cit = None
            #print(Link)
            #print("---------------------")
            #print(item)
            records.append({  # append the scraped row
                #"DOI" : '',
                "Title" : Title,
                "Author" : Author,
                "Cited_by": cit,
                "Date" : Date,
                "Publisher" : Publisher,
                "Abstract" : Abstract,
                "Link" : Link
            })
        # either open the next result page or stop the browser
        try:
            driver.find_element_by_class_name('next-btn').click()
        except:
            driver.quit()
    driver.quit()
    print('--------------------')
    df_ieee = pd.DataFrame(records)
    # IEEE result listings expose no DOI here, so fill the column with NaN
    df_ieee.insert(0, 'DOI', '')
    df_ieee['DOI'] = np.nan
    df_ieee['Database'] = "IEEE"
    df_ieee = df_ieee.head(count)
    df_ieee.index += 1
    return df_ieee
################################################## Emerald ######################################################
def scrape_emerald (query_emerald, count):
    """Scrape search results from Emerald Insight using a Chrome webdriver.

    :param query_emerald: search query typed into the Emerald search box
    :param count: maximum number of result rows to keep
    :return: pandas DataFrame with one row per result
             (DOI, Title, Author, Cited_by, Date, Publisher, Abstract,
             Link, Database)
    """
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    driver = webdriver.Chrome(ChromeDriverManager().install())
    # open the start page and submit the query
    driver.get('https://www.emerald.com/insight/')
    time.sleep(5)
    search = driver.find_element_by_id("terms")
    search.send_keys(query_emerald)
    time.sleep(3)
    search.send_keys(Keys.RETURN)
    time.sleep(8)
    # total number of hits, parsed from the result-count element
    Anzahl = driver.find_element_by_class_name("intent_searchresultscount").text
    Anzahl = Anzahl.split("of ")[1]
    Anzahl = int(Anzahl)
    print("Emerald Insight - Anzahl Suchergebnisse: ", Anzahl)
    # switch to 50 results per page
    driver.find_element_by_link_text("50").click()
    time.sleep(6)
    records = []  # collected result rows
    # y+1 = number of result pages to visit (50 results per page)
    if count < Anzahl:
        y = int(count/50)
        print("Emerald Insight - Ausgabe an Suchergebnissen: ", count)
    else:
        # loop over all available pages
        y = int(Anzahl/50)
        print("Emerald Insight - Ausgabe an Suchergebnissen: ", Anzahl)
    print('Emerald Insight - Seiten zu durchsuchen: ', y+1)
    for i in range(y+1):
        time.sleep(5)
        # scroll down, otherwise only the first data container is loaded
        t = 500
        for timer in range(0,15):
            driver.execute_script("window.scrollTo(0, "+str(t)+")")
            t += 1500
            time.sleep(1)
        time.sleep(2)
        page_source = driver.page_source
        soup = BeautifulSoup(page_source, 'lxml')
        for item_2 in soup.select('div[class="intent_search_result container card-shadow is-animated Search-item__wrapper"]'):
            for item in item_2.select('div[class="d-lg-flex flex-row pb-3"]'):
                #print(item)
                try:
                    # title
                    Title = item.select('h2')[0].get_text()
                    #print(Title)
                except:
                    Title= None
                try:
                    # first author only
                    Author = item.select('p')[0].get_text()
                    Author = Author.split(",")[0]
                    Author = Author.split("and")[0]
                    #print(Author)
                except:
                    Author = None
                try:
                    # publication date
                    Date = item.select('div[class="pr-2 small"]')[0].get_text()
                    Date = Date.split("Publication date: ")[1]
                    #Date = Date.split(" ")[2]
                    #print(Date)
                except:
                    Date = None
                try:
                    # abstract
                    Abstract = item_2.select('div[class="intent_abstract pb-1 col-md-7 pt-2 pl-2 pr-4"]')[0].get_text()
                    #print(Abstract)
                except:
                    Abstract = None
                try:
                    # DOI link
                    Link = item_2.select('div[class="mb-1"]')[2].get_text()
                    Link = Link.split("DOI: ")[1]
                    #print(Link)
                except:
                    Link = None
                try:
                    # publisher / venue
                    Publisher = item_2.select('div[class="mb-1"]')[0].get_text()
                    #Publisher = Publisher.split(",")[0]
                    #print(Publisher)
                except:
                    Publisher = None
                try:
                    # bare DOI extracted from the link
                    DOI = Link.split("https://doi.org/")[1]
                    #print(DOI)
                except:
                    DOI = None
                # Emerald does not expose a citation count in the listing
                cit = None
                #print("---")
                records.append({  # append the scraped row
                    "DOI" : DOI,
                    "Title" : Title,
                    "Author" : Author,
                    "Cited_by": cit,
                    "Date" : Date,
                    "Publisher" : Publisher,
                    "Abstract" : Abstract,
                    "Link" : Link
                })
        # either open the next result page or stop the browser
        try:
            driver.find_element_by_class_name("intent_next_page_link").click()
        except:
            driver.quit()
    driver.quit()
    print('--------------------')
    df_emerald = pd.DataFrame(records)
    df_emerald['Database'] = "Emerald Insight"
    df_emerald = df_emerald.head(count)
    df_emerald.index += 1
    return df_emerald
################################################## Google Scholar ######################################################
def scrape_scholar(query_scholar, Start, count):
    """Scrape search results from Google Scholar using a Chrome webdriver.

    Waits up to 60 seconds for a possible captcha to be solved manually;
    if the result header never appears, the driver is closed and the
    underlying timeout propagates.

    :param query_scholar: search query typed into the Scholar search box
    :param Start: earliest publication year to include
    :param count: maximum number of result rows to keep
    :return: pandas DataFrame with one row per result
             (DOI, Title, Author, Date, Abstract, Link, Zitiert von)
    """
    # start year of the search window (Scholar searches up to today)
    start_date = Start
    driver = webdriver.Chrome(ChromeDriverManager().install())
    driver.get('https://scholar.google.de/')
    time.sleep(5)
    # fill the search bar and submit
    search = driver.find_element_by_id('gs_hdr_tsi')
    search.send_keys(query_scholar)
    time.sleep(5)
    search.send_keys(Keys.RETURN)
    records = []  # collected result rows
    try:
        # 60 second gate for a possible captcha: continue as soon as the
        # result header is present, otherwise time out
        WebDriverWait(driver, 60).until(
            EC.presence_of_element_located((By.ID, "gs_ab_md")))
        # restrict the time range; only the start year is needed
        time.sleep(3)
        driver.find_element_by_link_text("Zeitraum wählen...").click()
        time.sleep(2)
        start = driver.find_element_by_id('gs_as_ylo')
        start.send_keys(start_date)
        time.sleep(2)
        start.send_keys(Keys.RETURN)
        time.sleep(2)
        # total number of results; /10 gives the number of result pages
        Anzahl = driver.find_element_by_id('gs_ab_md').text
        Anzahl = Anzahl.split("Ungefähr ")[1]
        Anzahl = Anzahl.split(" Ergebnisse")[0]
        Anzahl = int(Anzahl.replace('.', ''))
        if count < Anzahl:
            pages = int(count / 10) + 1
        else:
            pages = int(Anzahl / 10) + 1
        print("Seitenanzahl der Scholar-Suche:", pages)
        print("Insgesamte Anzahl an Ergebnissen bei Google Scholar:", Anzahl)
        for _ in range(pages):
            time.sleep(2)
            # simulate scrolling so Google sees human-like behaviour
            # (offset variable renamed; it used to clobber the loop bound)
            offset = 300
            for _ in range(5):
                driver.execute_script("window.scrollTo(0, " + str(offset) + ")")
                offset += 400
                time.sleep(1)
            soup = BeautifulSoup(driver.page_source, 'lxml')
            for item in soup.select('[data-lid]'):
                try:
                    title = item.select('h3')[0].get_text()
                    link = item.select('a')[0]['href']
                    author = item.select('.gs_a')[0].get_text()
                    txt = item.select('.gs_a')[0].get_text()
                    # last number in the byline is the publication year
                    year = re.findall(r'\d+', txt)[-1]
                    abstract = item.select('.gs_rs')[0].get_text()
                    zit = item.select('.gs_fl')[1].get_text()
                except Exception:
                    # skip unparseable entries instead of appending stale
                    # values from the previous item (old behavior)
                    print('---')
                    continue
                records.append({
                    "Title": title,
                    "Author": author,
                    "Date": year,
                    "Abstract": abstract,
                    "Link": link,
                    "Zitiert von": zit
                })
            # random 2-10 second delay before the next page to avoid IP blocks
            time.sleep(random.randint(1, 9))
            # either open the next result page or stop paging
            try:
                driver.find_element_by_link_text('Weiter').click()
            except Exception:
                break
    finally:
        # quit exactly once, whether the scrape succeeded or not
        driver.quit()
    df_scholar = pd.DataFrame(records)
    df_scholar.insert(0, 'DOI', '')
    df_scholar['DOI'] = np.nan
    # strip "[PDF]"/"[HTML]" style prefixes from the titles
    df_scholar['Title'] = df_scholar["Title"].str.replace(r"\[.*\]", "", regex=True)
    df_scholar = df_scholar.head(count)
    df_scholar.index += 1
    return df_scholar
################################################## API-Scrape ######################################################
def api_scrape(query, scopus_key, sd_key, insttoken, count):
#count = 5000
try:
df_Scopus = scrape_scopus(scopus_key, query, count)
| |
import py
from rpython.jit.metainterp import compile
from rpython.jit.metainterp.optimizeopt.util import make_dispatcher_method
from rpython.jit.metainterp.resoperation import (rop, GuardResOp, ResOperation)
from rpython.jit.metainterp.resume import Snapshot
from rpython.jit.codewriter.effectinfo import EffectInfo
from rpython.jit.metainterp.history import (ConstPtr, ConstInt,Const,
AbstractValue, AbstractFailDescr)
from rpython.rtyper.lltypesystem import llmemory
from rpython.rlib.unroll import unrolling_iterable
from rpython.rlib.objectmodel import we_are_translated
# Tables of resoperations that write to (MODIFY_COMPLEX_OBJ) or read from
# (LOAD_COMPLEX_OBJ) complex heap objects such as arrays, structs and
# strings.  Each entry is a triple (opnum, i, j); from the listed ops,
# i appears to be the argument position of the object operand and j the
# position of the index operand, with -1 meaning "no index operand".
# NOTE(review): the column meanings are inferred from the op signatures --
# confirm against the code that consumes these tables.
MODIFY_COMPLEX_OBJ = [ (rop.SETARRAYITEM_GC, 0, 1)
                     , (rop.SETARRAYITEM_RAW, 0, 1)
                     , (rop.RAW_STORE, 0, 1)
                     , (rop.SETINTERIORFIELD_GC, 0, -1)
                     , (rop.SETINTERIORFIELD_RAW, 0, -1)
                     , (rop.SETFIELD_GC, 0, -1)
                     , (rop.SETFIELD_RAW, 0, -1)
                     , (rop.ZERO_PTR_FIELD, 0, -1)
                     , (rop.ZERO_ARRAY, 0, -1)
                     , (rop.STRSETITEM, 0, -1)
                     , (rop.UNICODESETITEM, 0, -1)
                     ]

LOAD_COMPLEX_OBJ = [ (rop.GETARRAYITEM_GC_I, 0, 1)
                   , (rop.GETARRAYITEM_GC_F, 0, 1)
                   , (rop.GETARRAYITEM_GC_R, 0, 1)
                   , (rop.GETARRAYITEM_RAW_I, 0, 1)
                   , (rop.GETARRAYITEM_RAW_F, 0, 1)
                   , (rop.RAW_LOAD_I, 0, 1)
                   , (rop.RAW_LOAD_F, 0, 1)
                   , (rop.GETINTERIORFIELD_GC_I, 0, 1)
                   , (rop.GETINTERIORFIELD_GC_F, 0, 1)
                   , (rop.GETINTERIORFIELD_GC_R, 0, 1)
                   , (rop.GETFIELD_GC_I, 0, -1)
                   , (rop.GETFIELD_GC_F, 0, -1)
                   , (rop.GETFIELD_GC_R, 0, -1)
                   , (rop.GETFIELD_RAW_I, 0, -1)
                   , (rop.GETFIELD_RAW_F, 0, -1)
                   , (rop.GETFIELD_RAW_R, 0, -1)
                   ]
class Path(object):
    """An ordered sequence of dependency-graph nodes (first to last)."""

    def __init__(self,path):
        self.path = path  # list of Node objects

    def second(self):
        # second node, or None if the path has fewer than two nodes
        if len(self.path) <= 1:
            return None
        return self.path[1]

    def last_but_one(self):
        # next-to-last node, or None if the path has fewer than two nodes
        if len(self.path) < 2:
            return None
        return self.path[-2]

    def last(self):
        # last node, or None for an empty path
        if len(self.path) < 1:
            return None
        return self.path[-1]

    def first(self):
        return self.path[0]

    def is_always_pure(self, exclude_first=False, exclude_last=False):
        # True when every considered node is side-effect free: guards must
        # carry a descr that exits early, all other ops must be always pure.
        # Imaginary nodes are skipped (they wrap no real operation).
        # NOTE(review): `last` is computed but never used.
        last = len(self.path)-1
        count = len(self.path)
        i = 0
        if exclude_first:
            i += 1
        if exclude_last:
            count -= 1
        while i < count:
            node = self.path[i]
            if node.is_imaginary():
                i += 1
                continue
            op = node.getoperation()
            if op.is_guard():
                descr = op.getdescr()
                if not descr:
                    return False
                assert isinstance(descr, AbstractFailDescr)
                if not descr.exits_early():
                    return False
            elif not op.is_always_pure():
                return False
            i += 1
        return True

    def set_schedule_priority(self, p):
        # assign the same scheduling priority to every node on the path
        for node in self.path:
            node.setpriority(p)

    def walk(self, node):
        # extend the path by one node
        self.path.append(node)

    def cut_off_at(self, index):
        # truncate the path, dropping everything from `index` onwards
        self.path = self.path[:index]

    def check_acyclic(self):
        """NOT_RPYTHON"""
        # debugging helper: assert that no node occurs twice on the path
        seen = set()
        for segment in self.path:
            if segment in seen:
                print "path:"
                for segment in self.path:
                    print " ->", segment
                print ""
                assert 0, "segment %s was already seen. this makes the path cyclic!" % segment
            else:
                seen.add(segment)
        return True

    def clone(self):
        # shallow copy: nodes are shared, the list is not
        return Path(self.path[:])

    def as_str(self):
        """ NOT_RPYTHON """
        return ' -> '.join([str(p) for p in self.path])
class Node(object):
    """ One trace operation as a vertex of the dependency graph.

    Forward edges (Dependency objects in adjacent_list) point to later
    operations that depend on this one; adjacent_list_back holds the
    mirrored backward edges.
    """
    def __init__(self, op, opidx):
        self.op = op                      # the wrapped operation
        self.opidx = opidx                # position of op in the trace
        self.adjacent_list = []           # forward dependency edges
        self.adjacent_list_back = []      # backward (mirror) edges
        self.memory_ref = None
        self.pack = None                  # pack this node belongs to, if any
        self.pack_position = -1
        self.emitted = False
        self.schedule_position = -1
        self.priority = 0
        self._stack = False

    def is_imaginary(self):
        # Overridden by ImaginaryNode; a real Node wraps an actual operation.
        return False

    def getoperation(self):
        return self.op

    def getindex(self):
        return self.opidx

    def getopnum(self):
        return self.op.getopnum()

    def getopname(self):
        return self.op.getopname()

    def setpriority(self, value):
        self.priority = value

    def can_be_relaxed(self):
        # Only plain boolean guards can be relaxed.
        return self.op.getopnum() in (rop.GUARD_TRUE, rop.GUARD_FALSE)

    def edge_to(self, to, arg=None, failarg=False, label=None):
        """ Ensure a dependency edge from self to `to` exists; create it
        (together with its mirror edge) or extend an existing one with the
        new argument. Returns the (forward) Dependency, or None if self is to.
        """
        if self is to:
            return
        dep = self.depends_on(to)
        if not dep:
            #if force or self.independent(idx_from, idx_to):
            dep = Dependency(self, to, arg, failarg)
            self.adjacent_list.append(dep)
            dep_back = Dependency(to, self, arg, failarg)
            dep.backward = dep_back
            to.adjacent_list_back.append(dep_back)
            if not we_are_translated():
                if label is None:
                    label = ''
                dep.label = label
        else:
            if not dep.because_of(arg):
                dep.add_dependency(self,to,arg)
            # if a fail argument is overwritten by another normal
            # dependency it will remove the failarg flag
            if not (dep.is_failarg() and failarg):
                dep.set_failarg(False)
            if not we_are_translated() and label is not None:
                _label = getattr(dep, 'label', '')
                dep.label = _label + ", " + label
        return dep

    def clear_dependencies(self):
        self.adjacent_list = []
        self.adjacent_list_back = []

    def exits_early(self):
        """ True if this node is a guard whose descr exits early. """
        if self.op.is_guard():
            descr = self.op.getdescr()
            return descr.exits_early()
        return False

    def loads_from_complex_object(self):
        # Relies on the opnum ordering in resoperation.py: reads from
        # complex objects sit between these two markers.
        return rop._ALWAYS_PURE_LAST <= self.op.getopnum() < rop._MALLOC_FIRST

    def modifies_complex_object(self):
        # Same trick as above for the write range (see MODIFY_COMPLEX_OBJ).
        return rop.SETARRAYITEM_GC <= self.op.getopnum() <= rop.UNICODESETITEM

    def side_effect_arguments(self):
        # if an item in array p0 is modified or a call contains an argument
        # it can modify it is returned in the destroyed list.
        # Returns a list of (argument, index_argument_or_None, destroyed?)
        # triples.
        args = []
        op = self.op
        if self.modifies_complex_object():
            for opnum, i, j in unrolling_iterable(MODIFY_COMPLEX_OBJ):
                if op.getopnum() == opnum:
                    op_args = op.getarglist()
                    if j == -1:
                        args.append((op.getarg(i), None, True))
                        for j in range(i+1,len(op_args)):
                            args.append((op.getarg(j), None, False))
                    else:
                        args.append((op.getarg(i), op.getarg(j), True))
                        for x in range(j+1,len(op_args)):
                            args.append((op.getarg(x), None, False))
                    return args
        # assume this destroys every argument... can be enhanced by looking
        # at the effect info of a call for instance
        for arg in op.getarglist():
            # if it is a constant argument it cannot be destroyed.
            # neither can a box float be destroyed. BoxInt can
            # contain a reference thus it is assumed to be destroyed
            if arg.is_constant() or arg.type == 'f':
                args.append((arg, None, False))
            else:
                args.append((arg, None, True))
        return args

    def provides_count(self):
        return len(self.adjacent_list)

    def provides(self):
        return self.adjacent_list

    def depends_count(self):
        return len(self.adjacent_list_back)

    def depends(self):
        return self.adjacent_list_back

    def depends_on(self, to):
        """ Does there exist a dependency from the instruction to another?
            Returns None if there is no dependency or the Dependency object in
            any other case.
        """
        for edge in self.adjacent_list:
            if edge.to is to:
                return edge
        return None

    def dependencies(self):
        return self.adjacent_list[:] + self.adjacent_list_back[:] # COPY

    def is_after(self, other):
        return self.opidx > other.opidx

    def is_before(self, other):
        return self.opidx < other.opidx

    def independent(self, other):
        """ An instruction depends on another if there is a path from
        self to other. """
        if self == other:
            return True
        # forward
        worklist = [self]
        while len(worklist) > 0:
            node = worklist.pop()
            for dep in node.provides():
                if dep.to.is_after(other):
                    continue
                if dep.points_to(other):
                    # dependent. There is a path from self to other
                    return False
                worklist.append(dep.to)
        # backward
        worklist = [self]
        while len(worklist) > 0:
            node = worklist.pop()
            for dep in node.depends():
                if dep.to.is_before(other):
                    continue
                if dep.points_to(other):
                    # dependent. There is a path from self to other
                    return False
                worklist.append(dep.to)
        return True

    def iterate_paths(self, to, backwards=False, path_max_len=-1, blacklist=False):
        """ Yield all nodes from self leading to 'to'.

            backwards: Determines the iteration direction.
            blacklist: Marks nodes that have already been visited.
                       It comes in handy if a property must hold for every path.
                       Not *every* possible instance must be iterated, but trees
                       that have already been visited can be ignored after the
                       first visit.
        """
        if self is to:
            return
        blacklist_visit = {}
        path = Path([self])
        # worklist entries are (edge index to try next, node, path length)
        worklist = [(0, self, 1)]
        while len(worklist) > 0:
            index,node,pathlen = worklist.pop()
            if backwards:
                iterdir = node.depends()
            else:
                iterdir = node.provides()
            if index >= len(iterdir):
                # all edges of node exhausted; a dead end is itself a path
                # only when searching for "any" target (to is None)
                if to is None and index == 0:
                    yield Path(path.path[:])
                if blacklist:
                    blacklist_visit[node] = None
                continue
            else:
                next_dep = iterdir[index]
                next_node = next_dep.to
                index += 1
                if index < len(iterdir):
                    # come back later for node's remaining edges
                    worklist.append((index, node, pathlen))
                else:
                    blacklist_visit[node] = None
                path.cut_off_at(pathlen)
                path.walk(next_node)
                if blacklist and next_node in blacklist_visit:
                    yield Path(path.path[:])
                    continue
                pathlen += 1

                if next_node is to or \
                   (path_max_len > 0 and pathlen >= path_max_len):
                    yield Path(path.path[:])
                    # note that the destiantion node ``to'' is never blacklisted
                    #if blacklist:
                    #    blacklist_visit[next_node] = None
                else:
                    worklist.append((0, next_node, pathlen))

    def remove_edge_to(self, node):
        """ Delete the forward edge self->node and its mirror edge. """
        i = 0
        while i < len(self.adjacent_list):
            dep = self.adjacent_list[i]
            if dep.to is node:
                del self.adjacent_list[i]
                break
            i += 1
        i = 0
        while i < len(node.adjacent_list_back):
            dep = node.adjacent_list_back[i]
            if dep.to is self:
                del node.adjacent_list_back[i]
                break
            i += 1

    def getedge_to(self, other):
        for dep in self.adjacent_list:
            if dep.to == other:
                return dep
        return None

    def __repr__(self):
        pack = ''
        if self.pack:
            pack = "p: %d" % self.pack.numops()
        return "Node(%s,%s i: %d)" % (self.op, pack, self.opidx)

    def getdotlabel(self):
        """ NOT_RPTYHON """
        op_str = str(self.op)
        if self.op.is_guard():
            args_str = []
            for arg in self.op.getfailargs():
                name = 'None'
                if arg:
                    name = arg.repr_short(arg._repr_memo)
                args_str.append(name)
            op_str += " " + ','.join(args_str)
        return "[%d] %s" % (self.opidx, op_str)
class ImaginaryNode(Node):
    """ A placeholder vertex (e.g. a label) that wraps no real operation. """
    _index = 987654321 # big enough? :)

    def __init__(self, label):
        idx = -1
        if not we_are_translated():
            # keep a dot label and a unique index for debugging/graph dumps
            self.dotlabel = label
            idx = ImaginaryNode._index
            ImaginaryNode._index += 1
        Node.__init__(self, None, idx)

    def is_imaginary(self):
        return True

    def getdotlabel(self):
        """ NOT_RPTYHON """
        return self.dotlabel
class Dependency(object):
    """ A directed edge between two Nodes of the dependency graph.

    failarg marks edges that exist only because of a guard's fail
    arguments; backward holds the mirror edge stored on the target node.
    """
    def __init__(self, at, to, arg, failarg=False):
        assert at != to
        # entries accumulated by add_dependency; because_of() matches on
        # index 1, so presumably (node, argument) pairs — add_dependency
        # is defined outside this view, confirm there.
        self.args = []
        if arg is not None:
            self.add_dependency(at, to, arg)
        self.at = at          # origin node (earlier operation)
        self.to = to          # target node (later operation)
        self.failarg = failarg
        self.backward = None

    def because_of(self, var):
        """ True if this edge was (partly) caused by the argument var. """
        for arg in self.args:
            if arg[1] == var:
                return True
        return False

    def target_node(self):
        return self.to

    def origin_node(self):
        return self.at

    def to_index(self):
        return self.to.getindex()

    def at_index(self):
        return self.at.getindex()
def points_after_to(self, to):
return self.to.opidx | |
== 0, dS == 1)
#--------------------------------------------
# This would allow existing conflicts to be
# "reseeded" and causes error in counting.
#--------------------------------------------
### w3 = (dS == 1)
n3 = w3.sum()
if (self.REPORT):
print('Number of new conflicts =', n3)
if (n3 > 0):
#------------------------------
# Update S with new conflicts
#------------------------------
self.S[ w3 ] = 1
## self.IDs[ w3 ] = np.arange( n3 ) + self.start_ID
## self.start_ID += n3
i = self.start_index
self.IDs[ w3 ] = self.ran_IDs[i:i + n3]
self.start_index += n3
### np.maximum( self.S, dS, self.S) # in place
#------------------------------------------
# New durations are Geometric random vars
#------------------------------------------
g = np.random.geometric( self.p_geom, size=n3 )
self.durs[ w3 ] = g
self.n_conflict_cells += n3
#----------------------------------
# Attempt to spread the conflicts
#------------------------------------------------
# Set spread_method == 0 to turn off spreading,
# e.g. to test against theoretical results.
#------------------------------------------------
if (self.spread_method == 1):
self.spread_conflicts1()
# elif (self.spread_method == 2):
# self.spread_conflicts2()
# elif (self.spread_method == 3):
# self.spread_conflicts3()
# else:
# pass
SAVE_S = True
if (SAVE_S):
#---------------------------------
# Write grid as binary to file
# (could use .astype('float32'))
#---------------------------------
S2 = np.float32(self.S)
S2.tofile( self.out_unit )
SAVE_IDs = True
if (SAVE_IDs):
self.IDs.tofile( self.IDs_unit )
# update_S_old()
#---------------------------------------------------------------
def update_time( self ):
    """ Advance the model clock by one timestep. """
    self.time_index = self.time_index + 1
#   update_time()
#---------------------------------------------------------------
def get_neighbor_cols_and_rows( self, w1, n1 ):
    """ Build (n1, 8) int32 arrays holding the column and row indices of
    the 8 nearest neighbors of every conflicted cell selected by the
    boolean mask w1.  Results are stored as self.cn and self.rn.
    """
    cols = self.col_grid[ w1 ]
    rows = self.row_grid[ w1 ]
    #--------------------------------------------------
    # 1st index is over grid cells that have conflict.
    # 2nd index is over the 8 nearest neighbors,
    # scanned left-to-right, top-to-bottom.
    #--------------------------------------------------
    col_offsets = (-1, 0, 1, -1, 1, -1, 0, 1)
    row_offsets = (-1, -1, -1, 0, 0, 1, 1, 1)
    cn = np.zeros( (n1, 8), dtype='int32')
    rn = np.zeros( (n1, 8), dtype='int32')
    for k in range(8):
        cn[:,k] = cols + col_offsets[k]
        rn[:,k] = rows + row_offsets[k]
    #------------------
    self.cn = cn
    self.rn = rn
#   get_neighbor_cols_and_rows()
#---------------------------------------------------------------
def get_neighbor_values( self, var, n1 ):
    """ Return an (n1, 8) float32 array whose row k holds the values of
    var at the 8 nearest neighbors of conflicted cell k, using the
    indices prepared by get_neighbor_cols_and_rows().
    """
    cn = self.cn
    rn = self.rn
    vals = np.zeros( (n1, 8), dtype='float32')
    # one column per neighbor: TL, TC, TR, LC, RC, BL, BC, BR
    for k in range(8):
        vals[:,k] = var[rn[:,k], cn[:,k]]
    return vals
#   get_neighbor_values()
#---------------------------------------------------------------
def spread_conflicts1( self, USE_LOOP=False ):
    """ Stochastically spread existing conflicts (S == 1) into S == 0
    neighbor cells.  Each neighbor is infected with a Bernoulli
    probability proportional to its U value, scaled by c_spread.
    """
    #-------------------------------------------------
    # Note: Can only spread to cells that have S=0.
    #-------------------------------------------------
    w1 = (self.S == 1)
    n1 = w1.sum()
    if (n1 == 0):
        print('No conflicts to spread at time:', self.time_index)
        return
    if (USE_LOOP):
        ID_vals = self.IDs[ w1 ]  #(for the for loop version)
    else:
        # replicate each conflict ID across the 8 neighbor columns
        ID_vals = np.tile( np.array([self.IDs[w1]]).transpose(), (1,8))
        #------------------
        # This also works
        #------------------
        # ID_vals = np.zeros((n1,8), dtype='int64')
        # ID_vals[:,0] = self.IDs[w1]
        # ID_vals[:,1] = self.IDs[w1]
        # ID_vals[:,2] = self.IDs[w1]
        # ID_vals[:,3] = self.IDs[w1]
        # ID_vals[:,4] = self.IDs[w1]
        # ID_vals[:,5] = self.IDs[w1]
        # ID_vals[:,6] = self.IDs[w1]
        # ID_vals[:,7] = self.IDs[w1]
    #---------------------------------------------
    # Get nearest neighbor values for U, S, & C1
    #---------------------------------------------
    self.get_neighbor_cols_and_rows( w1, n1 )
    #---------------------------------------------
    ## Sn  = self.get_neighbor_values( self.S,  n1)
    Un  = self.get_neighbor_values( self.U,  n1)
    ## C1n = self.get_neighbor_values( self.C1, n1)
    #------------------------------------------------
    # Compute probability of spreading to neighbors
    #------------------------------------------------
    # The "None trick" shown here allows us to do
    # the following for all k at once:
    # pn[k,:] = Un[k,:] * (c2 / Un[k,:].max() )
    # Need c2 = c_spread to be in (0,1].
    # np.amax lets us take the max along an axis.
    #------------------------------------------------
    # NOTE: Un and pn have shape = (n1, 8)
    # NOTE: pn is initialized & defaults to 0.
    #------------------------------------------------
    Un_max = np.amax( Un, axis=1 )  # a 1D array
    wg = (Un_max > 0)
    pn = np.zeros(Un.shape, dtype='float32')
    pn[ wg,: ] = self.c_spread * Un[wg,:] / (Un_max[wg,None])
    #--------------------------------------
    # Alternate method that uses U and C1
    #--------------------------------------
    # Rn = Un * C1n
    # Rn_max = np.amax( Rn, axis=1 )  # a 1D array
    # wg = (Rn_max > 0)
    # pn = np.zeros(Rn.shape, dtype='float32')
    # pn[ wg,: ] = self.c_spread * Rn[wg,:] / (Rn_max[wg,None])
    #---------------------------------------------
    # Use Bernoulli r.v.s to determine spreading
    #---------------------------------------------
    cn = self.cn
    rn = self.rn
    n_start = self.n_conflict_cells
    if (USE_LOOP):
        for k in range(n1):
            B = np.random.binomial(1, pn[k,:])  # (8 r.v.s)
            ## B = B.astype( 'uint8' )  ######
            w2 = np.logical_and( (self.S[rn[k,:], cn[k,:]] == 0),
                                 (B == 1) )
            n2 = w2.sum()
            #-----------------------------------------
            # Spread conflict to some neighbor cells
            #-----------------------------------------
            # w2 is boolean array, but this is okay
            #----------------------------------------------------
            # Set duration to be a geometric r.v.
            ## g1 = np.random.geometric( self.p_geom, size=n2 )
            ## self.durs[ rn[k,w2], cn[k,w2] ] = g1
            #----------------------------------------------------
            self.S[ rn[k,w2], cn[k,w2] ] = 1
            self.IDs[ rn[k,w2], cn[k,w2] ] = ID_vals[k]
            self.n_conflict_cells += n2
    else:
        #-------------------------------------
        # Spread conflict without a for loop
        # Much faster, and results look similar
        # See notes below re: overcounting.
        #-------------------------------------
        B = np.random.binomial(1, pn)  # (8 r.v.s)
        ## B = B.astype( 'uint8' )  ######
        w2 = np.logical_and( (self.S[rn, cn] == 0), (B == 1) )
        n2 = w2.sum()
        #-----------------------------------------
        # Spread conflict to some neighbor cells
        #-----------------------------------------
        # w2 is boolean array, but this is okay
        #-----------------------------------------
        self.S[ rn[w2], cn[w2] ] = 1
        self.IDs[ rn[w2], cn[w2] ] = ID_vals[w2]
        #--------------------------------------------
        # Without the for loop, several cells can
        # spread conflict to the same cell and this
        # next line results in over-counting:
        #    self.n_conflict_cells += n2
        #--------------------------------------------
        w_new = (self.S == 1)
        n_new = (w_new.sum() - n1)
        self.n_conflict_cells += n_new
    if (self.REPORT):
        n_spread = (self.n_conflict_cells - n_start)
        print('Number of spread conflicts =', n_spread)
        print()
#   spread_conflicts1()
#---------------------------------------------------------------
def finalize( self ):
    """ Close the output files and, when REPORT is set, print a summary
    of run time, parameters, and the final conflict-cell counts. """
    #-------------------------
    # Close the output files
    #-------------------------
    self.out_unit.close()
    self.IDs_unit.close()
    if (self.REPORT):
        print()
        run_time = (time.time() - self.start_time)
        run_time = (run_time / 60.0)
        print('run_time  =', run_time, ' [minutes]')
        print('n_steps   =', self.n_steps)
        print('c_emerge  =', self.c_emerge, 'in (0,1)')
        print('c_spread  =', self.c_spread, 'in (0,1)')
        ## print('p_geom    =', self.p_geom)
        print('p_resolve =', self.p_resolve, 'in (0,1)')
        print('time_lag  =', self.time_lag)
        #------------------------------------------
        # S has type 'uint8', so sum may not work
        #------------------------------------------
        ### n_conflict_cells = self.S.sum()
        w = (self.S == 1)
        n_conflict_cells = w.sum()
        #------------------------------------------
        print('n_conflict_cells =', n_conflict_cells)
        print('SELF CHECK...')
        print('n_conflict_cells =', self.n_conflict_cells)
        n_cells = (self.nx - 1) * (self.ny - 1)  # (exclude borders)
        f_conflict_cells = (self.n_conflict_cells / n_cells)
        print('fraction_conflict_cells =', f_conflict_cells)
        print('Finished.')
#   finalize()
#---------------------------------------------------------------
def run_model( self, cfg_file=None ):
    """ Run the full model: initialize, step n_steps times, finalize. """
    self.initialize( cfg_file )
    for _ in range( self.n_steps ):
        self.update()
    self.finalize()
#   run_model()
#-------------------------------------------------------------------
#-------------------------------------------------------------------
def get_raster_cellsize( gdal_unit ):
    """ Return (xres, |yres|) cell sizes read from a GDAL dataset's
    geotransform.  yres is stored negated in the geotransform (rows run
    north to south), hence the abs().
    """
    # geotransform = (ulx, xres, x_rotation, uly, y_rotation, yres)
    gt = gdal_unit.GetGeoTransform()
    return (gt[1], abs(gt[5]))
#   get_raster_cellsize()
#-------------------------------------------------------------------
def get_raster_bounds( ds, VERBOSE=False):
#-------------------------------------------------------------
# Note: The bounds depend on the map projection and are not
# necessarily a Geographic bounding box of lons and lats.
#-------------------------------------------------------------
# See:
# https://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html
# and search on "geotransform". An example of gdal.SetGeoTransform
# gives: [xmin, pixel_size, 0, ymax, 0, -pixel_size].
# Also says args are:
# [ulx, xDist, rtnX, uly, yDist, rtnY]
# This is consistent with information below.
#-------------------------------------------------------------
# ulx = upper left x = xmin
# uly = upper left y = ymax
# lrx = lower right x = xmax
# lry = lower right y = ymin
#-----------------------------
#----------------------------------------------------------
# Notice the strange order or parameters here is CORRECT.
# It is not: ulx, xres, xskew, uly, | |
"""
Main file of the Securesteg library.
"""
from PIL import Image
from getmac import get_mac_address as gma
import hashlib
from bitstring import ConstBitStream
from typing import Union
from securesteg import encryption
def get_bits(header_length_bits: str, header_length: str, header: str, data_length_bits: str, data_length: str, data: Union[list[int], str], is_file: bool = False) -> list[int]:
    """
    Convert the metadata and the payload into one flat list of bits.
    @:param header_length_bits -> The 3-bit length code of the header length field.
    @:param header_length -> The header length, already formatted as a binary string.
    @:param header -> The metadata to be added to the image.
    @:param data_length_bits -> The 3-bit length code of the data length field.
    @:param data_length -> The data length, already formatted as a binary string.
    @:param data -> The data to be added to the image (bits when is_file, text otherwise).
    @:param is_file -> A boolean which is True if the data passed is a file, otherwise False. [Default = False]
    @:return -> List of integers containing the bits.
    """
    # Metadata first: both length codes, then both length fields.
    bits = [int(bit) for bit in header_length_bits + data_length_bits + header_length + data_length]
    # Header characters as 8-bit binary.
    for character in header:
        bits.extend(int(bit) for bit in format(ord(character), "08b"))
    # File payloads are already bit lists; text is expanded to 8-bit chars.
    if is_file:
        bits.extend(data)
    else:
        for character in data:
            bits.extend(int(bit) for bit in format(ord(character), "08b"))
    return bits
def get_message(bits: list[int]) -> str:
    """
    Decode a bit list into its text message.
    Reads 8 bits per character and stops at the '$' sentinel (or at the
    end of the bits).
    @:param -> The bits of the image.
    @:return -> Message encoded in the image.
    """
    message = []
    for offset in range(0, len(bits), 8):
        # Join the next 8 bits and interpret them as one character code.
        letter = chr(int("".join(map(str, bits[offset:offset + 8])), 2))
        if letter == '$':  # End-of-message marker.
            break
        message.append(letter)
    return "".join(message)
def get_header(bits: list[int]) -> dict:
    """
    Parse the header bits obtained from the image into a dictionary.
    Each header line has the form "key:value".
    @:param bits -> The bits obtained from the image related to the header.
    @:return -> A Dictionary with the header values as Key-Value pair.
    """
    # Decode the header bits to text, then split into "key:value" lines;
    # only the first ':' separates key from value.
    raw = get_message(bits)
    pairs = (line.split(":", 1) for line in raw.strip().split("\n"))
    return {key: value for key, value in pairs}
def get_metadata(metadata: Union[str, list[int]], is_file: bool = False) -> tuple[str, str]:
    """
    Finds and returns the length and the length_code of the metadata.
    @:param metadata -> The data whose length_code and length is to be calculated.
    @:param is_file -> A boolean which is True if the data passed is a file, otherwise False. [Default = False]
    @:return -> A tuple (length, length_code): the length as a zero-padded
                binary string (8/16/32/64 bits, empty for no data) and the
                matching 3-bit length_code.
    """
    # Files are already lists of bits; text takes 8 bits per character.
    total_bits = len(metadata) * (1 if is_file else 8)
    # An empty payload has no length field at all.
    if total_bits == 0:
        return "", "000"
    # Pick the smallest field width (8/16/32/64 bits) that can hold the
    # length, and pair it with its 3-bit length_code.
    for width, length_bits in ((8, "001"), (16, "010"), (32, "011")):
        if total_bits < (1 << width):
            return format(total_bits, "b").zfill(width), length_bits
    return format(total_bits, "b").zfill(64), "100"
def get_length(pixel: tuple) -> int:
    """
    Read the 3-bit length code hidden in the LSBs of a pixel's first three
    channels and map it to the number of bits storing the metadata length.
    @:param pixel -> The pixel from which the length code is to be retrieved.
    @:return -> Number of bits to read to find the length of the metadata
                (-1 for any unrecognised/64-bit code).
    """
    # The code is the LSB of each of the R, G and B channels.
    code = "".join(str(channel % 2) for channel in pixel[:3])
    return {"000": 0, "001": 8, "010": 16, "011": 32}.get(code, -1)
def create_file(target_bits: list[int], file_name: str):
    """
    Reassemble a file from its bits and save it under the given name.
    @:param target_bits -> The bits from which the file is to be created.
    @:param file_name -> The name of the file.
    """
    # Join the bits into one string, then pack every 8 bits into a byte.
    bit_string = "".join(map(str, target_bits))
    buffer = bytearray(
        int(bit_string[offset:offset + 8], 2)
        for offset in range(0, len(bit_string), 8)
    )
    # Save the file.
    with open(file_name, 'bw') as file:
        file.write(buffer)
class SecureSteganography:
"""
The main SecureSteganography class.
"""
def __init__(self, image_path: str):
    """
    Initialise an empty steganography session for the given image.
    @:param image_path -> The path of the image which is to be opened.
    """
    self.image_path: str = image_path        # path of the carrier image
    self.header: dict = {}                   # metadata describing the payload
    self.is_file: Union[bool, None] = None   # whether the payload is a file
    self.data: Union[str, None] = None       # string payload (if any)
    self.file: list[int] = []                # file payload as a list of bits
    self.image: 'Image' = None               # PIL image produced by encode()
def secure(self, **kwargs) -> None:
    """
    Adds security to the image encoding.
    :param kwargs: Keyword Arguments
    """
    method = kwargs.get('security', None)
    if method == 'mac':
        # Restrict decoding to one machine, identified by its MAC address.
        self.header['security'] = 'mac'
        self.header['target'] = kwargs['target'].upper()
    elif method == 'password':
        # Store only the SHA3-256 hash of the password, never the plaintext.
        self.header['security'] = 'password'
        self.header['password'] = hashlib.sha3_256(kwargs['password'].encode()).hexdigest()
def set_data(self, data: str, is_file: bool = False, decode_save_name: str = None) -> None:
    """
    Sets the data to be hidden into the object.
    :param data: The data to be hidden (a message, or a file path when is_file is True).
    :param is_file: True if the data passed is a file, else False.
    :param decode_save_name: The name to be saved during decode.
    """
    self.is_file = is_file
    self.header["is_file"] = is_file
    if is_file:
        # Remember the name to recreate the file with, then read it as bits.
        self.header['file_name'] = decode_save_name or data
        self.file = list(map(int, ConstBitStream(filename=data).bin))
    else:
        self.data = data
def encode(self) -> None:
    """
    Encodes(Hides) the Data into the image.

    Writes the header and payload bits into the least significant bit of
    each RGB channel, 3 bits per pixel, scanning column by column.
    Raises an Exception when no data has been set, or when the message is
    too long to fit in the image.
    """
    # No data or file available. Truthiness check also catches the case
    # where set_data() was never called and self.data is still None
    # (the old `self.data == ""` test let None slip through and crash
    # later on len(self.data)).
    if not self.data and not self.file:
        raise Exception("No Data to Encode. Call set_data method to add data")
    # Create image object with image_path.
    image = Image.open(self.image_path, "r")
    # Create header string from header dictionary ("key:value\n" lines).
    header = "".join(f"{key}:{self.header[key]}\n" for key in self.header.keys())
    # Get header Length and length_code
    header_length, header_length_bits = get_metadata(header)
    # Get data length and length_code, plus the payload size in bits.
    if self.is_file:
        data_length, data_length_bits = get_metadata(self.file, True)
        payload_bits = len(self.file)
    else:
        data_length, data_length_bits = get_metadata(self.data)
        payload_bits = len(self.data) * 8
    # Total size: two 3-bit codes + both length fields + header + payload.
    total_size = 3 + len(header_length) + (len(header) * 8) + 3 + len(data_length) + payload_bits
    # Check if space exists (3 bits per pixel).
    if total_size > (image.size[0] * image.size[1]) * 3:
        raise Exception("Message is too long for this image.")
    # Get the bits of the entire message.
    if self.is_file:
        bits = get_bits(header_length_bits, header_length, header, data_length_bits, data_length, self.file, True)
    else:
        bits = get_bits(header_length_bits, header_length, header, data_length_bits, data_length, self.data)
    # Write bits to image, 3 per pixel.
    bits_written = 0
    done = False
    for i in range(image.size[0]):
        for j in range(image.size[1]):
            if bits_written < len(bits):
                # Next 3 bits, padding with 0 past the end of the message.
                a = bits[bits_written]
                b = bits[bits_written + 1] if bits_written + 1 < len(bits) else 0
                c = bits[bits_written + 2] if bits_written + 2 < len(bits) else 0
                # Force each channel's LSB to match its bit, nudging the
                # value by +1 (or -1 at 255 to avoid overflowing a byte).
                p1, p2, p3 = image.getpixel((i, j))
                if p1 % 2 != a:
                    p1 += (-1 if p1 == 255 else 1)
                if p2 % 2 != b:
                    p2 += (-1 if p2 == 255 else 1)
                if p3 % 2 != c:
                    p3 += (-1 if p3 == 255 else 1)
                # Set the pixel.
                image.putpixel((i, j), (p1, p2, p3))
                bits_written += 3
            else:
                done = True
                break
        if done:
            break
    # Keep the modified image for save().
    self.image = image
def save(self, save_path: str) -> None:
    """
    Saves the encoded image into secret image.
    :param save_path: The name and path to save the secret image.
    """
    image = self.image
    # Nothing to save before encode() has been called.
    if image is None:
        return
    image.save(save_path)
    image.close()
def encrypt(self, **kwargs) -> None:
    """
    Encrypts the data (string message).
    :param kwargs: Keyword Arguments.
    """
    # Files cannot be encrypted further; only string messages can.
    if self.is_file:
        raise Exception("Cannot Further encrypt files. Encryption possible with string data")
    if self.data is None:
        raise Exception("Data not present to encrypt. Call object.set_data(data) method")
    # Caesar Cipher Encryption: record the parameters in the header so
    # the decoder can reverse the shift, then shift the message.
    if kwargs.get("encrypt", None) == 'caesar':
        shift = kwargs.get("n", 0)
        if shift != 0:
            self.header['encrypt'] = 'caesar'
            self.header['n'] = shift
            self.data = encryption.caesar_cipher(self.data, shift)
def check_security_access(self, kwargs) -> (bool, str):
"""
Check if the user has the | |
config.my_eval_nodes]
self.eval_node_list = config.my_eval_nodes
self.global_eval_nodes = eval_node_list
if inference == False:
self.topo_order = find_topo_sort(self.eval_node_list)
else: # in inference phase
if self.config.use_sparse_pull == True or self.config.cstable_policy is not None:
# insert ps_sparse_pull_op
self.topo_order = find_topo_sort_inference(self.eval_node_list)
# fetch sparse parameter
fetch_sparse_parameter_value(self.topo_order, self.config)
else:
self.topo_order = find_topo_sort(self.eval_node_list)
# main structures, nodes' shapes and arrays
self.node_to_shape_map = {}
self.node_to_arr_map = {}
# inherit from configurations
self.comm_mode = self.config.comm_mode
self.ps_comm = self.config.ps_comm
self.nccl_comm = self.config.nccl_comm
self.comp_stream = self.config.comp_stream
self.h2d_stream = self.config.h2d_stream
self.d2h_stream = self.config.d2h_stream
self.nccl_stream = self.config.nccl_stream
self.param_psval_map = self.config.infer_ps_map if self.inference else self.config.ps_map
self.use_sparse_pull = self.config.use_sparse_pull
self.cstable_policy = self.config.cstable_policy
self.use_p2p = self.config.p2p_stream is not None
# assisting structures, improve performance
self.need_feed_nodes = []
self.param_nodes = []
self.dataloader_nodes = []
self.computing_nodes = []
for node in self.topo_order:
if isinstance(node, DataloaderOp) or isinstance(node, GNNDataLoaderOp):
self.dataloader_nodes.append(node)
elif isinstance(node, PlaceholderOp):
if node.shape is None:
self.need_feed_nodes.append(node)
elif node.trainable:
self.param_nodes.append(node)
elif not ((self.use_sparse_pull or self.cstable_policy) and isinstance(node, EmbeddingLookUp) and self.config.prefetch):
self.computing_nodes.append(node)
self.batch_num = set([node.get_batch_num(self.name)
for node in self.dataloader_nodes])
assert len(self.batch_num) <= 1, 'Batch num not conform.'
self.batch_num = None if len(
self.batch_num) == 0 else self.batch_num.pop()
self.init_need_allocation = (self.need_feed_nodes == []) and (
self.dataloader_nodes == [])
def update_executor(self, eval_node_list):
    """ Re-target this executor at a new list of evaluation nodes,
    rebuilding the topological order and the per-node bookkeeping.
    We are in inference mode iff no OptimizerOp is being evaluated.
    """
    self.eval_node_list = eval_node_list
    inference = not any(isinstance(node, OptimizerOp)
                        for node in eval_node_list)
    self.inference = inference
    if self.config.p2p_stream and self.inference == True:
        raise NotImplementedError
    if not inference:
        self.topo_order = find_topo_sort(self.eval_node_list)
    elif self.config.use_sparse_pull == True or self.config.cstable_policy is not None:
        # inference with sparse pull: insert ps_sparse_pull_op and
        # prefetch the sparse parameter values
        self.topo_order = find_topo_sort_inference(self.eval_node_list)
        fetch_sparse_parameter_value(self.topo_order, self.config)
    else:
        self.topo_order = find_topo_sort(self.eval_node_list)
    # main structures, nodes' shapes and arrays
    self.node_to_shape_map = {}
    self.node_to_arr_map = {}
    # assisting structures, improve performance
    self.need_feed_nodes = []
    self.param_nodes = []
    self.dataloader_nodes = []
    self.computing_nodes = []
    for node in self.topo_order:
        if isinstance(node, (DataloaderOp, GNNDataLoaderOp)):
            self.dataloader_nodes.append(node)
        elif isinstance(node, PlaceholderOp):
            if node.shape is None:
                self.need_feed_nodes.append(node)
            elif node.trainable:
                self.param_nodes.append(node)
        elif not ((self.use_sparse_pull or self.cstable_policy) and isinstance(node, EmbeddingLookUp) and self.config.prefetch):
            self.computing_nodes.append(node)
    # all dataloaders must agree on the number of batches
    batch_nums = set(node.get_batch_num(self.name)
                     for node in self.dataloader_nodes)
    assert len(batch_nums) <= 1, 'Batch num not conform.'
    self.batch_num = batch_nums.pop() if batch_nums else None
    self.init_need_allocation = (self.need_feed_nodes == []) and (
        self.dataloader_nodes == [])
def infer_shape(self, feed_shapes):
"""Given shapes of feed_dict nodes, infer shape for all nodes in graph.
Implementation note:
Iteratively calls node.infer_shape to infer shapes.
Node shapes stored in self.node_to_shape_map.
Parameters
----------
feed_shapes: node->shapes mapping for feed_dict nodes.
"""
self.node_to_shape_map = {}
for node in self.topo_order:
if node in feed_shapes:
self.node_to_shape_map[node] = tuple(feed_shapes[node])
else:
input_shapes = [self.node_to_shape_map[n] for n in node.inputs]
cur_shape = node.infer_shape(input_shapes)
self.node_to_shape_map[node] = cur_shape if cur_shape is None else tuple(
cur_shape)
def memory_plan(self):
"""Allocates ndarray.NDArray for every node except feed_dict nodes.
Parameters
----------
"""
for node, shape in self.node_to_shape_map.items():
if isinstance(node, PlaceholderOp):
if self.config.placeholder_to_arr_map[node] is not None:
self.node_to_arr_map[node] = self.config.placeholder_to_arr_map[node]
elif node not in self.node_to_arr_map:
self.node_to_arr_map[node] = None
elif not isinstance(node, DataloaderOp) and not isinstance(node, GNNDataLoaderOp):
# add for OptimizerOp and ParameterServerOp
if shape is None:
self.node_to_arr_map[node] = None
continue
if isinstance(node, (EmbeddingLookUp_Gradient, DataD2HSparseOp)):
self.node_to_arr_map[node] = ndarray.IndexedSlices(
dense_shape=shape)
continue
if isinstance(node, EmbeddingLookUp) and (self.use_sparse_pull or self.cstable_policy) and self.config.prefetch:
self.node_to_arr_map[node] = self.param_psval_map[node.inputs[0]]
continue
if node.on_gpu:
if node.inplace:
self.node_to_arr_map[node] = ndarray.NDArray(None)
elif self.inference and isinstance(node, DropoutOp):
self.node_to_arr_map[node] = self.node_to_arr_map[node.inputs[0]]
else:
self.node_to_arr_map[node] = ndarray.empty(
shape, ctx=node.ctx)
else:
self.node_to_arr_map[node] = ndarray.empty(
shape, ctx=node.ctx)
def run(self, eval_node_list={}, feed_dict={}, convert_to_numpy_ret_vals=False):
"""
Parameters
----------
feed_dict: a dictionary of node->np.ndarray supplied by user.
convert_to_numpy_ret_vals: whether to convert ret vals to np.array
Returns
-------
A list of values for nodes in eval_node_list. NDArray or np.ndarray.
"""
assert len(feed_dict) == len(
self.need_feed_nodes) or self.use_p2p, 'Feed dict invalid.'
if eval_node_list != {} and eval_node_list != self.eval_node_list:
self.update_executor(eval_node_list)
feed_shapes = {}
need_reallocation = self.init_need_allocation
# get feed in values
for node, value in feed_dict.items():
if self.use_p2p and node not in self.need_feed_nodes:
continue
assert node in self.need_feed_nodes, 'Only allow feed in PlaceholderOp with no values, here got %s:%s.' % (
str(type(node)), node.name)
local_shape = tuple(value.shape)
local_realloc = local_shape != self.node_to_shape_map.get(
node, None)
need_reallocation = need_reallocation or local_realloc
if node.on_cpu:
assert isinstance(value, (np.ndarray, spmatrix, ndarray.NDArray)), \
"feed_dict value type not supported"
if isinstance(value, np.ndarray):
if local_realloc:
self.node_to_arr_map[node] = ndarray.empty(
local_shape, ctx=node.ctx)
self.node_to_arr_map[node][:] = value
else:
self.node_to_arr_map[node] = value
else:
if isinstance(value, np.ndarray):
if local_realloc:
self.node_to_arr_map[node] = ndarray.array(
value, ctx=node.ctx)
else:
self.node_to_arr_map[node][:] = value
elif isinstance(value, spmatrix):
value = coo_matrix(value)
value = ndarray.sparse_array(value.data,
(value.row, value.col), shape=local_shape, ctx=node.ctx)
self.node_to_arr_map[node] = value
elif isinstance(value, ndarray.NDArray):
if value.ctx == node.ctx:
self.node_to_arr_map[node] = value
else:
if local_realloc:
self.node_to_arr_map[node] = ndarray.empty(
local_shape, ctx=node.ctx)
else:
self.node_to_arr_map[node][:] = value
elif isinstance(value, ndarray.ND_Sparse_Array):
self.node_to_arr_map[node] = value
else:
assert False, "feed_dict value type not supported"
feed_shapes[node] = local_shape
# get dataloader values
for node in self.dataloader_nodes:
local_shape = node.get_cur_shape(self.name)
local_realloc = local_shape != self.node_to_shape_map.get(
node, None)
need_reallocation = need_reallocation or local_realloc
self.node_to_arr_map[node] = node.get_arr(self.name)
feed_shapes[node] = local_shape
# reallocation, infer shapes and allocate memory
if need_reallocation:
self.init_need_allocation = False
self.infer_shape(feed_shapes)
self.memory_plan()
# computing
for node in self.computing_nodes:
if node.on_cpu and isinstance(self.node_to_arr_map[node], ndarray.NDArray):
if DNNL_LIB['cpu_ArraySet'] and not isinstance(node, DataD2HOp):
cpu_array_set(self.node_to_arr_map[node], 0.0)
else:
# here we suppose not using DNNL_LIB
# self.node_to_arr_map[node][:] = np.zeros(self.node_to_shape_map[node]).astype(np.float32)
pass
input_vals = [self.node_to_arr_map[n] for n in node.inputs]
node_val = self.node_to_arr_map[node]
for n in node.inputs:
if n.event:
n.event.sync()
if isinstance(node, (ParameterServerCommunicateOp, ParameterServerSparsePullOp)):
# Here we use d2h stream in ps op, since the stream is used for d2h data transfer.
# Please take care at this part.
node.compute(input_vals, node_val, self.d2h_stream)
elif isinstance(node, AllReduceCommunicateOp):
node.compute(input_vals, node_val, self.nccl_stream)
elif isinstance(node, DataH2DOp):
node.compute(input_vals, node_val, self.h2d_stream)
elif isinstance(node, (DataD2HOp, DataD2HSparseOp)):
node.compute(input_vals, node_val, self.d2h_stream)
elif isinstance(node, (PipelineSendOp, PipelineReceiveOp)):
node.compute(input_vals, node_val)
elif isinstance(node, (DropoutOp, Batch_NormalizationOp, Layer_NormalizationOp)):
node.compute(input_vals, node_val,
self.comp_stream, inference=self.inference)
if isinstance(node.event, Event):
# for d2h op / eval nodes / nodes before [allreduce or ps nodes or pipelinesend nodes]
node.event.record(self.comp_stream)
else:
node.compute(input_vals, node_val, self.comp_stream)
if isinstance(node.event, Event):
# for d2h op / eval nodes / nodes before [allreduce or ps nodes or pipelinesend nodes]
node.event.record(self.comp_stream)
for n in self.eval_node_list:
# every node in eval_node_list should have an event (except dataloader/optimizer...)
if n.event:
n.event.sync()
# get results
results = [self.node_to_arr_map[n] for n in self.eval_node_list]
if convert_to_numpy_ret_vals:
for i in range(len(results)):
if results[i] is not None:
results[i] = results[i].asnumpy()
# remap to original order in model parallel
if self.use_p2p:
new_results = [None for _ in self.global_eval_nodes]
for i, j in enumerate(self.run_results_indices):
new_results[j] = results[i]
results = new_results
return results
def gradients(output_node, node_list, insert_grad=None):
    """Take gradient of output node with respect to each node in node_list.

    Implements reverse-mode automatic differentiation on the symbolic
    graph: partial adjoints are accumulated per node and summed before
    being propagated to that node's inputs.

    Parameters
    ----------
    output_node: output node (or list of nodes) that we are taking derivative of.
    node_list: list of nodes that we are taking derivative wrt.
    insert_grad: used to assign gradient to output_node in model parallel;
        when given, it replaces the default ones-like seed gradient(s).

    Returns
    -------
    A list of gradient values, one for each node in node_list respectively.
    """
    # Seed the adjoint of each output with ones (or the supplied gradients).
    if isinstance(output_node, list):
        node_to_output_grads_list = {
            output_node[i]: [OnesLike.oneslike_op(output_node[i])] if insert_grad is None
            else [insert_grad[i]] for i in range(len(output_node))
        }
    else:
        node_to_output_grads_list = {
            output_node: [OnesLike.oneslike_op(output_node)] if insert_grad is None else [
                insert_grad]
        }
        # Normalize to a list so the traversal below is uniform.
        output_node = [output_node]
    node_to_output_grad = {}
    # Traverse forward graph in reverse topological order
    reverse_topo_order = reversed(find_topo_sort(output_node))
    for node in reverse_topo_order:
        # here the ctx for embedding lookup is a workaround
        # TODO: when implement PS strategy for context semantics, modify here
        if isinstance(node, EmbeddingLookUp):
            output_grad = sum_node_list(
                node_to_output_grads_list[node], node_to_output_grads_list[node][0].raw_ctx)
        else:
            output_grad = sum_node_list(
                node_to_output_grads_list[node], node.raw_ctx)
        if output_grad is None:
            # No gradient flows through this node; still make sure its
            # inputs have (possibly empty) adjoint lists registered.
            for n in node.inputs:
                if n not in node_to_output_grads_list:
                    node_to_output_grads_list[n] = []
            continue
        node_to_output_grad[node] = output_grad
        input_grads_list = node.gradient(output_grad)
        for i in range(len(node.inputs)):
            if node.inputs[i] not in node_to_output_grads_list:
                node_to_output_grads_list[node.inputs[i]] = []
            # Calculate partial adjoint for input nodes.
            node_to_output_grads_list[node.inputs[i]].append(
                input_grads_list[i])
    # NOTE(review): this raises KeyError if a node in node_list received no
    # gradient (its output_grad was None) — presumably intended; confirm.
    grad_node_list = [node_to_output_grad[node] for node in node_list]
    return grad_node_list
##################
# Helper Methods #
##################
def topo_sort_with_hook(node_list, config):
    """Run backward/forward hooks over the whole graph via DFS.

    Starts one depth-first traversal from every node in ``node_list``,
    sharing a single visited set so each node is processed once.
    """
    seen = set()
    for root in node_list:
        topo_sort_dfs_with_hook(root, seen, config)
def topo_sort_dfs_with_hook(node, visited, config):
    """DFS helper: fire ``backward_hook`` pre-visit and ``forward_hook``
    post-visit for each unvisited node.

    For placeholder nodes, also transfers the parameter tensor from the
    node into ``config.placeholder_to_arr_map`` (the node's own copy is
    cleared).
    """
    if node not in visited:
        visited.add(node)
        node.backward_hook(config)
        if isinstance(node, PlaceholderOp):
            # Hand parameter ownership over from the node to the config.
            config.placeholder_to_arr_map[node] = node.tensor_value
            node.tensor_value = None
        for predecessor in node.inputs:
            topo_sort_dfs_with_hook(predecessor, visited, config)
        node.forward_hook(config)
def find_topo_sort(node_list):
"""Given a list of nodes, return a topo ordering | |
v["data"] is None:
logger.warning(
f'Missing channel {v["color_space"]}, {v["channel_name"]}'
)
continue
seed_ = f'{v["color_space"]}_{c}'
hist = cv2.calcHist([v["data"]], [0], mask, [n], [0, (256 - 1)])
total_pixels = np.sum(hist)
for i, qtt in enumerate([hist_val[0] for hist_val in hist]):
self.csv_data_holder.update_csv_value(
f"quantile_color_{seed_}_{i + 1}_{n}_percent",
qtt / total_pixels * 100,
True,
)
self.csv_data_holder.data_list.pop(k, None)
return True
def fill_mask_holes(self, src_mask: Any, dbg_text: str = ""):
"""Fills holes inside mask using floodfill method
Arguments:
src_mask {numpy array} -- Source mask
dbg_text {str} -- debug string for storing step images
Returns:
numpy array -- Filled image
"""
im_floodfill = src_mask.copy()
# Mask used to flood filling.
# Notice the size needs to be 2 pixels than the image.
h, w = src_mask.shape[:2]
mask = np.zeros((h + 2, w + 2), np.uint8)
# Floodfill from point (0, 0)
cv2.floodFill(im_floodfill, mask, (0, 0), 255)
if dbg_text:
self.store_image(im_floodfill, "{}_floodfill".format(dbg_text))
# Invert floodfilled image
im_floodfill_inv = cv2.bitwise_not(im_floodfill)
if dbg_text:
self.store_image(im_floodfill_inv, "{}_floodfill_inv".format(dbg_text))
# Combine the two images to get the foreground.
mask_leafs = src_mask | im_floodfill_inv
if dbg_text:
self.store_image(mask_leafs, "{}_mask_filled".format(dbg_text))
return mask_leafs
@staticmethod
def get_distance_data(hull, origin, max_dist):
"""
Calculates distances from origin to contour barycenter,
also returns surface data
:param hull:
:param origin:
:param max_dist:
:return: dict
"""
m = cv2.moments(hull)
if m["m00"] != 0:
cx_ = int(m["m10"] / m["m00"])
cy_ = int(m["m01"] / m["m00"])
dist_ = math.sqrt(math.pow(cx_ - origin.x, 2) + math.pow(cy_ - origin.y, 2))
dist_scaled_inverted = 1 - dist_ / max_dist
res_ = dict(
dist=dist_,
cx=cx_,
cy=cy_,
dist_scaled_inverted=dist_scaled_inverted,
area=cv2.contourArea(hull),
scaled_area=cv2.contourArea(hull) * math.pow(dist_scaled_inverted, 2),
)
else:
res_ = dict(dist=0, cx=0, cy=0, dist_scaled_inverted=0, area=0, scaled_area=0)
return res_
@staticmethod
def contours_min_distance(cnt_a, cnt_b):
"""Returns minimal distance between 2 contours.
Possible returns
* 0: The two contours touch or intersect one another
* > 0: The two contours are separated
"""
min_dist = None
for pt in cnt_a:
cnt_point = Point(pt[0][0], pt[0][1])
cur_dist = cv2.pointPolygonTest(cnt_b, (cnt_point.x, cnt_point.y), True)
if cur_dist >= 0:
return 0
else:
if min_dist is None:
min_dist = abs(cur_dist)
elif abs(cur_dist) < min_dist:
min_dist = abs(cur_dist)
return min_dist
    def check_hull(
        self,
        mask,
        cmp_hull,
        master_hull,
        tolerance_area=None,
        tolerance_distance=None,
        dilation_iter=0,
        trusted_safe_zone=False,
        area_override_size=0,
    ):
        """Compare a hull against a master hull and classify their relation.

        Arguments:
            mask {numpy array} -- mask whose shape bounds the comparison
            cmp_hull {numpy array} -- hull to be compared
            master_hull {numpy array} -- master hull
            tolerance_area -- minimum accepted contour area (None disables the check)
            tolerance_distance -- maximum accepted distance to master (None disables)
            dilation_iter {int} -- dilation (>0) / erosion (<0) iterations applied upstream
            trusted_safe_zone {bool} -- accept any contour intersecting a "safe" ROI
            area_override_size {int} -- contours larger than this bypass distance checks
        Returns:
            int -- one of the KLC_* classification constants (KLC_FULLY_INSIDE,
            KLC_OVERLAPS, KLC_OK_TOLERANCE, KLC_OUTSIDE, ...)
        """
        def last_chance_(test_cnt):
            # Contour is fully outside the master: try size/distance
            # tolerances and the "safe" ROI escape hatches.
            ok_size = (tolerance_area is not None) and (
                (tolerance_area < 0) or cv2.contourArea(test_cnt) >= tolerance_area
            )
            # NOTE(review): min_dist is a closure over the enclosing scope; it
            # is only meaningful after the point-to-point loop below has run.
            ok_dist = (tolerance_distance is not None) and (
                tolerance_distance < 0 or min_dist <= tolerance_distance
            )
            if ok_size and ok_dist:
                return KLC_OK_TOLERANCE
            elif ok_size:
                if self.rois_intersects("safe", test_cnt):
                    return KLC_PROTECTED_SIZE_OK
                else:
                    res_ = KLC_NO_CLOSE_ENOUGH
            elif ok_dist:
                # Accept distance-only matches in the safe zone, but only if
                # larger than the square of the dilation footprint.
                if self.rois_intersects("safe", test_cnt) and cv2.contourArea(
                    test_cnt
                ) > pow(dilation_iter * 2, 2):
                    return KLC_PROTECTED_DIST_OK
                else:
                    res_ = KLC_NO_BIG_ENOUGH
            elif trusted_safe_zone and self.rois_intersects("safe", test_cnt):
                return KLC_OK_TOLERANCE
            else:
                res_ = KLC_OUTSIDE
            # Check area override limit
            if (area_override_size > 0) and (
                cv2.contourArea(test_cnt) > area_override_size
            ):
                return KLC_BIG_ENOUGH_TO_IGNORE_DISTANCE
            else:
                return res_
        # Check hull intersection
        # After erosion the compared hull may be larger than the master:
        # rasterize both hulls and compare pixel sets instead of point tests.
        if (dilation_iter < 0) and (
            cv2.contourArea(cmp_hull) > cv2.contourArea(master_hull)
        ):
            cmp_img = np.full(mask.shape, 0, np.uint8)
            cv2.drawContours(cmp_img, [cmp_hull], -1, 255, -1)
            master_img = np.full(mask.shape, 0, np.uint8)
            cv2.drawContours(master_img, [master_hull], -1, 255, -1)
            test_img = cv2.bitwise_and(cmp_img, cmp_img, mask=master_img)
            if np.array_equal(test_img, cmp_img):
                # Every pixel of cmp survives masking by master.
                return KLC_FULLY_INSIDE
            _, max_val, _, _ = cv2.minMaxLoc(test_img)
            nz_cmp_img = np.nonzero(cmp_img)
            nz_test_img = np.nonzero(test_img)
            if (
                (max_val > 0)
                and nz_cmp_img
                and nz_test_img
                and (len(nz_cmp_img[0]) > len(nz_test_img[0]))
            ):
                # Some but not all of cmp's pixels fall inside master.
                return KLC_OVERLAPS
        # Check point to point
        is_inside = False
        is_outside = False
        is_protected = False  # NOTE(review): never set True below; dead flag kept as-is
        # Start with an upper bound larger than any possible in-image distance.
        min_dist = mask.shape[0] * mask.shape[1]
        for pt in cmp_hull:
            cnt_point = Point(pt[0][0], pt[0][1])
            cur_dist = cv2.pointPolygonTest(master_hull, (cnt_point.x, cnt_point.y), True)
            if cur_dist >= 0:
                is_inside = True
            else:
                if abs(cur_dist) < min_dist:
                    min_dist = abs(cur_dist)
                is_outside = True
            if (is_inside and is_outside) or is_protected:
                # Mixed points already prove an overlap; stop early.
                break
        if is_inside and is_outside:
            return KLC_OVERLAPS
        elif is_inside:
            return KLC_FULLY_INSIDE
        else:
            return last_chance_(cmp_hull)
    @time_method
    def keep_biggest_contour(self, **kwargs):
        """
        Keep contours inside the biggest contour
        Keyword Arguments:
        * src_image: Source image, required=False, default source
        * src_mask: Mask to clean, required=False, default mask
        * dilation_iter: if positive number of dilations, if negative number of erosions, required=False, default=0
        * roi: initial ROI, required=False, default=full image
        * root_position: if initial ROI exists, position to start contour, required=False, default=BOTTOM_CENTER
        * trusted_safe_zone: if true all contours in zones tagged safe will be accepted, required=False, default=False
        :return: : Filtered mask
        """
        src_image = kwargs.get("src_image", self.current_image)
        src_mask = kwargs.get("src_mask", self.mask)
        if (src_image is None) or (src_mask is None):
            logger.error(
                f'Source & mask are mandatory for keep linked contours "{str(self)}'
            )
            return None
        dilation_iter = kwargs.get("dilation_iter", 0)
        roi = kwargs.get("roi", self.get_roi("main_roi"))
        root_position = kwargs.get("root_position", "BOTTOM_CENTER")
        trusted_safe_zone = kwargs.get("trusted_safe_zone", False)
        # Dilate (or erode, for negative values) to merge nearby blobs
        # before looking for the main contour.
        if dilation_iter > 0:
            dil_mask = self.dilate(src_mask, proc_times=dilation_iter)
        elif dilation_iter < 0:
            dil_mask = self.erode(src_mask, proc_times=abs(dilation_iter))
        else:
            dil_mask = src_mask.copy()
        self.store_image(dil_mask, "dil_mask")
        contours = ipc.get_contours(
            mask=dil_mask, retrieve_mode=cv2.RETR_LIST, method=cv2.CHAIN_APPROX_SIMPLE
        )
        self.store_image(
            cv2.drawContours(dil_mask.copy(), contours, -1, ipc.C_LIME, 2, 8),
            "img_dilated_cnt",
        )
        self.store_image(
            cv2.drawContours(src_image.copy(), contours, -1, ipc.C_GREEN, 2, 8),
            "src_img_with_cnt",
        )
        # Transform all contours into approximations
        hulls = []
        eps = 0.001
        for cnt in contours:
            hulls.append(cv2.approxPolyDP(cnt, eps * cv2.arcLength(cnt, True), True))
        hull_img = src_image.copy()
        cv2.drawContours(hull_img, hulls, -1, (0, 255, 0), 4)
        self.store_image(hull_img, "src_img_with_cnt_approx_{}".format(eps))
        # Find the largest hull
        # NOTE(review): raises IndexError when the mask has no contour at all.
        main_hull = hulls[0]
        if roi:  # There's a ROI, lets keep the biggest hull close to its root
            roi_root = roi.point_at_position(root_position, True)
            if root_position == "MIDDLE_CENTER":
                dist_max = roi.radius
            else:
                # Diagonal of the ROI bounds the possible distance.
                dist_max = math.sqrt(roi.width ** 2 + roi.height ** 2)
            hull_img = src_image.copy()
            max_area = 0
            for hull in hulls:
                morph_dict = self.get_distance_data(hull, roi_root, dist_max)
                # Color the hull by proximity to the root (green = close).
                cl_cmp = morph_dict["dist_scaled_inverted"] * 255
                cv2.drawContours(
                    hull_img, [hull], 0, (0, int(cl_cmp), int((1 - cl_cmp) * 255)), 2
                )
                # Pick the hull with the largest distance-weighted area.
                if morph_dict["scaled_area"] > max_area:
                    max_area = morph_dict["scaled_area"]
                    main_hull = hull
            self.store_image(hull_img, "src_img_with_cnt_distance_map")
        else:  # No ROI defined
            # Plain largest-area selection.
            max_area = cv2.contourArea(hulls[0])
            for i, hull in enumerate(hulls):
                cur_area = cv2.contourArea(hull)
                if cur_area > max_area:
                    max_area = cur_area
                    main_hull = hull
        # At this point we have the zone were the contours are allowed to be
        # Re-extract contours from the ORIGINAL (undilated) mask and keep
        # only those classified as belonging with the main hull.
        contours = ipc.get_contours(
            mask=src_mask, retrieve_mode=cv2.RETR_LIST, method=cv2.CHAIN_APPROX_SIMPLE
        )
        for cnt in contours:
            hull = cv2.approxPolyDP(cnt, eps * cv2.arcLength(cnt, True), True)
            res = self.check_hull(
                mask=src_mask,
                cmp_hull=hull,
                master_hull=main_hull,
                dilation_iter=dilation_iter,
                trusted_safe_zone=trusted_safe_zone,
            )
            if res in [
                KLC_FULLY_INSIDE,
                KLC_OVERLAPS,
                KLC_PROTECTED_DIST_OK,
                KLC_PROTECTED_SIZE_OK,
                KLC_OK_TOLERANCE,
                KLC_BIG_ENOUGH_TO_IGNORE_DISTANCE,
            ]:
                # Accepted: draw in green on the debug image.
                cv2.drawContours(src_image, [cnt], 0, (0, 255, 0), 2)
            else:
                # Rejected: draw in red and erase from the mask.
                cv2.drawContours(src_image, [cnt], 0, (0, 0, 255), 2)
                cv2.drawContours(src_mask, [cnt], 0, (0, 0, 0), -1)
        self.store_image(src_image, "img_wth_tagged_cnt", force_store=True)
        self.store_image(src_mask, "mask_lnk_cnts")
        return src_mask
@time_method
def keep_linked_contours(self, **kwargs) -> object:
"""
Keep contours only linked to the root position
Keyword Arguments:
* src_image: Source image, required=False, default source
* src_mask: Mask to clean, required=False, default mask
* dilation_iter: if positive number of dilations, if negative number of erosions, required=False, default=0
* tolerance_distance: max distance allowed between tested contour and current blob, required=False, default=0
* tolerance_area: min contour area accepted, required=False, default=0
* roi: initial ROI, required=False, default=full image
* root_position: if initial ROI exists, position to start contour, required=False, default=BOTTOM_CENTER
* trusted_safe_zone: if true all contours in zones tagged safe will be accepted, required=False, default=False
* area_override_size: over this area all contours will be accepted as long as they are in a safe-ish or better region
* delete_all_bellow: all contours smaller than value will be deleted
:return: : Filtered mask
"""
src_image = kwargs.get("src_image", self.current_image)
src_mask = kwargs.get("src_mask", self.mask)
if (src_image is None) or (src_mask is None):
logger.error(
f'Source & mask are mandatory for keep linked contours "{str(self)}',
)
return None
dilation_iter = kwargs.get("dilation_iter", 0)
tolerance_distance = kwargs.get("tolerance_distance", 0)
tolerance_area = kwargs.get("tolerance_area", 0)
roi: AbstractRegion = kwargs.get("roi", self.get_roi("main_roi"))
root_position = kwargs.get("root_position", "BOTTOM_CENTER")
trusted_safe_zone = kwargs.get("trusted_safe_zone", False)
area_override_size = kwargs.get("area_override_size", 0)
delete_all_bellow = kwargs.get("delete_all_bellow", 0)
if tolerance_distance != int(tolerance_distance) or tolerance_area != int(
tolerance_area
):
raise NotImplementedError("Only integers allowed")
self.store_image(src_mask, "raw__mask")
# Delete all small contours
if delete_all_bellow > 0:
fnt = (cv2.FONT_HERSHEY_SIMPLEX, 0.6)
contours = ipc.get_contours(
mask=src_mask, retrieve_mode=cv2.RETR_LIST, method=cv2.CHAIN_APPROX_SIMPLE
)
small_img = src_mask.copy()
small_img = np.dstack((small_img, small_img, small_img))
for cnt in contours:
area_ = cv2.contourArea(cnt)
if area_ < delete_all_bellow:
# Delete
cv2.drawContours(src_mask, [cnt], 0, | |
except ValueError:
warnings.append('Warning:HorizontalFlipSwitchShouldBeBool')
configs['augmentation']['horizontal_flip_switch'] = 'False'
try:
str2bool(configs['augmentation']['vertical_flip_switch'])
except ValueError:
warnings.append('Warning:VerticalFlipSwitchShouldBeBool')
configs['augmentation']['vertical_flip_switch'] = 'False'
if any(configs['augmentation']['zca_epsilon']):
try:
if type(literal_eval(configs['augmentation']['zca_epsilon'])) is float\
or literal_eval(configs['augmentation']['zca_epsilon']) is None:
pass
except ValueError:
warnings.append('Warning:ZCAWhiteningEpsilonShouldBeFloatorNone')
configs['augmentation']['zca_epsilon'] = 'None'
else:
configs['augmentation']['zca_epsilon'] = 'None'
if any(configs['augmentation']['random_seed']):
try:
int(configs['augmentation']['random_seed'])
except ValueError:
warnings.append('Warning:RandomSeedShouldBeInt')
configs['augmentation']['random_seed'] = '1'
else:
configs['augmentation']['random_seed'] = '1'
if any(configs['augmentation']['rounds']):
try:
int(configs['augmentation']['rounds'])
except ValueError:
warnings.append('Warning:RoundsShouldBeInt')
configs['augmentation']['rounds'] = '1'
else:
configs['augmentation']['rounds'] = '1'
if any(configs['loss_function']['loss'] in x for x in ['categorical_crossentropy',
'weighted_categorical_crossentropy',
'sparse_categorical_crossentropy', 'mean_squared_error',
'mean_absolute_error', 'tversky', 'pix2pix',
'cyclegan', 'ssd', 'jaccard', 'focal', 'soft_dice']):
pass
else:
errors.append('Level1Error:NonexistentLossFunction')
if any(configs['loss_function']['parameter1']):
try:
float(configs['loss_function']['parameter1'])
except ValueError:
warnings.append('Warning:Parameter1ShouldBeFloat')
else:
configs['loss_function']['parameter1'] = '0.0'
if any(configs['loss_function']['parameter2']):
try:
float(configs['loss_function']['parameter2'])
except ValueError:
warnings.append('Warning:Parameter2ShouldBeFloat')
else:
configs['loss_function']['parameter2'] = '0.0'
if any(configs['learning_rate_schedule']['learning_rate']):
try:
float(configs['learning_rate_schedule']['learning_rate'])
except ValueError:
warnings.append('Warning:LearningRateShouldBeFloat')
else:
configs['learning_rate_schedule']['learning_rate'] = '0.0001'
if any(configs['learning_rate_schedule']['learning_rate_decay_factor']):
try:
float(configs['learning_rate_schedule']['learning_rate_decay_factor'])
except ValueError:
warnings.append('Warning:LearningRateDecayFactorShouldBeFloat')
else:
configs['learning_rate_schedule']['learning_rate_decay_factor'] = '0.0'
try:
str2bool(configs['learning_rate_schedule']['decay_on_plateau_switch'])
except ValueError:
warnings.append('Warning:DecayOnPlateauSwitchShouldBeBool')
configs['learning_rate_schedule']['decay_on_plateau_switch'] = 'False'
if any(configs['learning_rate_schedule']['decay_on_plateau_factor']):
try:
float(configs['learning_rate_schedule']['decay_on_plateau_factor'])
except ValueError:
warnings.append('Warning:DecayOnPlateauFactorShouldBeFloat')
else:
configs['learning_rate_schedule']['decay_on_plateau_factor'] = '0.0'
if any(configs['learning_rate_schedule']['decay_on_plateau_patience']):
try:
int(configs['learning_rate_schedule']['decay_on_plateau_patience'])
except ValueError:
warnings.append('Warning:DecayOnPlateauPatienceShouldBeInt')
else:
configs['learning_rate_schedule']['decay_on_plateau_patience'] = '3'
try:
str2bool(configs['learning_rate_schedule']['step_decay_switch'])
except ValueError:
warnings.append('Warning:StepDecaySwitchShouldBeBool')
configs['learning_rate_schedule']['step_decay_switch'] = 'False'
if any(configs['learning_rate_schedule']['step_decay_factor']):
try:
float(configs['learning_rate_schedule']['step_decay_factor'])
except ValueError:
warnings.append('Warning:StepDecayFactorShouldBeFloat')
else:
configs['learning_rate_schedule']['step_decay_factor'] = '0.0'
if any(configs['learning_rate_schedule']['step_decay_period']):
try:
int(configs['learning_rate_schedule']['step_decay_period'])
except ValueError:
warnings.append('Warning:StepDecayPeriodShouldBeInt')
else:
configs['learning_rate_schedule']['step_decay_period'] = '3'
if any(configs['learning_rate_schedule']['discriminator_learning_rate']):
try:
values = configs['learning_rate_schedule']['discriminator_learning_rate'].split(':')
if type(literal_eval(values[0])) is float:
pass
else:
warnings.append('Warning:DiscriminatorLearningRateShouldBeFloat')
values[0] = '0.0001'
if type(literal_eval(values[1])) is float:
pass
else:
warnings.append('Warning:DiscriminatorLearningRateDecayShouldBeFloat')
values[1] = '0.0'
configs['learning_rate_schedule']['discriminator_learning_rate'] = ':'.join([values[0], values[1]])
except ValueError:
errors.append('Level1Error:CannotDetermineDiscriminatorLearningRateConfigurations')
else:
configs['learning_rate_schedule']['discriminator_learning_rate'] = '0.0001:0.0'
if any(configs['learning_rate_schedule']['gan_learning_rate']):
try:
values = configs['learning_rate_schedule']['gan_learning_rate'].split(':')
if type(literal_eval(values[0])) is float:
pass
else:
warnings.append('Warning:GANLearningRateShouldBeFloat')
values[0] = '0.0001'
if type(literal_eval(values[0])) is float:
pass
else:
warnings.append('Warning:GANLearningRateDecayShouldBeFloat')
values[1] = '0.0'
configs['learning_rate_schedule']['gan_learning_rate'] = ':'.join([values[0], values[1]])
except ValueError:
errors.append('Level1Error:CannotDetermineGANLearningRateConfigurations')
else:
configs['learning_rate_schedule']['gan_learning_rate'] = '0.0001:0.0'
if any(configs['optimizer']['optimizer'] in x for x in ['Adam', 'NAdam', 'SGD', 'RMSprop',
'Adagrad', 'Adadelta', 'Adamax']):
pass
else:
errors.append('Level1Error:NonexistentOptimizer')
if any(configs['optimizer']['beta1']):
try:
float(configs['optimizer']['beta1'])
except ValueError:
warnings.append('Warning:OptimizerBeta1ShouldBeFloat')
configs['optimizer']['beta1'] = '0.9'
else:
configs['optimizer']['beta1'] = '0.9'
if any(configs['optimizer']['beta2']):
try:
float(configs['optimizer']['beta2'])
except ValueError:
warnings.append('Warning:OptimizerBeta2ShouldBeFloat')
configs['optimizer']['beta2'] = '0.999'
else:
configs['optimizer']['beta2'] = '0.999'
if any(configs['optimizer']['rho']):
try:
float(configs['optimizer']['rho'])
except ValueError:
warnings.append('Warning:OptimizerRhoShouldBeFloat')
configs['optimizer']['rho'] = '0.9'
else:
configs['optimizer']['rho'] = '0.9'
if any(configs['optimizer']['momentum']):
try:
float(configs['optimizer']['momentum'])
except ValueError:
warnings.append('Warning:OptimizerMomentumShouldBeFloat')
configs['optimizer']['momentum'] = '0.0'
else:
configs['optimizer']['momentum'] = '0.0'
if any(configs['optimizer']['epsilon']):
try:
if type(literal_eval(configs['optimizer']['epsilon'])) is float\
or literal_eval(configs['optimizer']['epsilon']) is None:
pass
except ValueError:
warnings.append('Warning:OptimizerEpsilonShouldBeFloatorNone')
configs['optimizer']['epsilon'] = 'None'
else:
configs['optimizer']['epsilon'] = 'None'
if any(configs['optimizer']['discriminator_optimizer']):
try:
values = configs['optimizer']['discriminator_optimizer'].split(':')
if any(values[0] in x for x in ['Adam', 'NAdam', 'SGD', 'RMSprop', 'Adagrad', 'Adadelta', 'Adamax']):
pass
else:
errors.append('Level1Error:NonexistentDiscriminatorOptimizer')
if type(literal_eval(values[1])) is float:
pass
else:
warnings.append('Warning:DiscriminatorOptimizerBeta1ShouldBeFloat')
values[1] = '0.9'
if type(literal_eval(values[2])) is float:
pass
else:
warnings.append('Warning:DiscriminatorOptimizerBeta2ShouldBeFloat')
values[2] = '0.999'
if type(literal_eval(values[3])) is float:
pass
else:
warnings.append('Warning:DiscriminatorOptimizerRhoShouldBeFloat')
values[3] = '0.9'
if type(literal_eval(values[4])) is float:
pass
else:
warnings.append('Warning:DiscriminatorOptimizerMomentumShouldBeFloat')
values[4] = '0.0'
if type(literal_eval(values[5])) is float or literal_eval(values[5]) is None:
pass
else:
warnings.append('Warning:DiscriminatorOptimizerEpsilonShouldBeFloatorNone')
values[5] = 'None'
configs['optimizer']['discriminator_optimizer'] = ':'.join([values[0], values[1], values[2],
values[3], values[4], values[5]])
except ValueError:
errors.append('Level1Error:CannotDetermineDiscriminatorOptimizerConfigurations')
else:
configs['optimizer']['discriminator_optimizer'] = 'Adam:0.9:0.999:0.9:0.0:None'
if any(configs['optimizer']['gan_optimizer']):
try:
values = configs['optimizer']['gan_optimizer'].split(':')
if any(values[0] in x for x in ['Adam', 'NAdam', 'SGD', 'RMSprop', 'Adagrad', 'Adadelta', 'Adamax']):
pass
else:
errors.append('Level1Error:NonexistentGANOptimizer')
if type(literal_eval(values[1])) is float:
pass
else:
warnings.append('Warning:GANOptimizerBeta1ShouldBeFloat')
values[1] = '0.9'
if type(literal_eval(values[2])) is float:
pass
else:
warnings.append('Warning:GANOptimizerBeta2ShouldBeFloat')
values[2] = '0.999'
if type(literal_eval(values[3])) is float:
pass
else:
warnings.append('Warning:GANOptimizerRhoShouldBeFloat')
values[3] = '0.9'
if type(literal_eval(values[4])) is float:
pass
else:
warnings.append('Warning:GANOptimizerMomentumShouldBeFloat')
values[4] = '0.0'
if type(literal_eval(values[5])) is float or literal_eval(values[5]) is None:
pass
else:
warnings.append('Warning:GANOptimizerEpsilonShouldBeFloatorNone')
values[5] = 'None'
configs['optimizer']['gan_optimizer'] = ':'.join([values[0], values[1], values[2],
values[3], values[4], values[5]])
except ValueError:
errors.append('Level1Error:CannotDetermineGANOptimizerConfigurations')
else:
configs['optimizer']['gan_optimizer'] = 'Adam:0.9:0.999:0.9:0.0:None'
if any(configs['training_configurations']['hardware'] in x for x in ['gpu', 'multi-gpu', 'cpu']):
pass
else:
errors.append('Level1Error:NonexistentHardware')
if any(configs['training_configurations']['number_of_gpus']):
try:
int(configs['training_configurations']['number_of_gpus'])
except ValueError:
warnings.append('Warning:NumberOfGpusShouldBeInt')
configs['training_configurations']['number_of_gpus'] = '1'
else:
configs['training_configurations']['number_of_gpus'] = '1'
try:
str2bool(configs['training_configurations']['early_stop_switch'])
except ValueError:
warnings.append('Warning:EarlyStopSwitchShouldBeBool')
configs['training_configurations']['early_stop_switch'] = 'False'
if any(configs['training_configurations']['early_stop_patience']):
try:
int(configs['training_configurations']['early_stop_patience'])
except ValueError:
warnings.append('Warning:EarlyStopPatienceShouldBeInt')
configs['training_configurations']['early_stop_patience'] = '10'
else:
configs['training_configurations']['early_stop_patience'] = '10'
if any(configs['training_configurations']['batch_size']):
try:
int(configs['training_configurations']['batch_size'])
except ValueError:
warnings.append('Warning:BatchSizeShouldBeInt')
configs['training_configurations']['batch_size'] = '32'
else:
configs['training_configurations']['batch_size'] = '32'
if any(configs['training_configurations']['epochs']):
try:
int(configs['training_configurations']['epochs'])
except ValueError:
warnings.append('Warning:EpochsShouldBeInt')
configs['training_configurations']['epochs'] = '500'
else:
configs['training_configurations']['epochs'] = '500'
try:
str2bool(configs['training_configurations']['shuffle_data_switch'])
except ValueError:
warnings.append('Warning:ShuffleDataSwitchShouldBeBool')
configs['training_configurations']['shuffle_data_switch'] = 'True'
if any(configs['training_configurations']['validation_split']):
try:
float(configs['training_configurations']['validation_split'])
except ValueError:
warnings.append('Warning:ValidationSplitShouldBeFloat')
configs['training_configurations']['validation_split'] = '0.0'
else:
configs['training_configurations']['validation_split'] = '0.0'
try:
str2bool(configs['monitors']['mse_switch'])
except ValueError:
warnings.append('Warning:MSESwitchShouldBeBool')
configs['monitors']['mse_switch'] = 'False'
try:
str2bool(configs['monitors']['mae_switch'])
except ValueError:
warnings.append('Warning:MAESwitchShouldBeBool')
configs['monitors']['mae_switch'] = 'False'
try:
str2bool(configs['monitors']['accuracy_switch'])
except ValueError:
warnings.append('Warning:AccuracySwitchShouldBeBool')
configs['monitors']['accuracy_switch'] = 'True'
try:
str2bool(configs['save_configurations']['save_model_switch'])
except ValueError:
warnings.append('Warning:SaveModelSwitchShouldBeBool')
configs['save_configurations']['save_model_switch'] = 'False'
if any(configs['save_configurations']['save_model_path']):
if os.path.exists(os.path.dirname(configs['save_configurations']['save_model_path'])) is False:
errors.append('Level1Error:NonexistentSaveModelDirectory')
file, ext = os.path.splitext(configs['save_configurations']['save_model_path'])
if ext != '.h5':
warnings.append('Warning:SaveModelFileExtensionMustBeh5')
configs['save_configurations']['save_model_path'] = file + '.h5'
try:
str2bool(configs['save_configurations']['save_csv_switch'])
except ValueError:
warnings.append('Warning:SaveCSVSwitchShouldBeBool')
configs['save_configurations']['save_csv_switch'] = 'False'
if any(configs['save_configurations']['save_csv_path']):
if os.path.exists(os.path.dirname(configs['save_configurations']['save_csv_path'])) is False:
errors.append('Level1Error:NonexistentSaveCSVDirectory')
file, ext = os.path.splitext(configs['save_configurations']['save_csv_path'])
if ext != '.csv':
warnings.append('Warning:SaveCSVFileExtensionMustBecsv')
configs['save_configurations']['save_csv_path'] = file + '.csv'
try:
str2bool(configs['save_configurations']['save_checkpoints_switch'])
except ValueError:
warnings.append('Warning:SaveModelCheckpointsSwitchShouldBeBool')
configs['save_configurations']['save_checkpoints_switch'] = 'False'
if any(configs['save_configurations']['save_checkpoints_path']):
if os.path.exists(os.path.dirname(configs['save_configurations']['save_checkpoints_path'])) is False:
errors.append('Level1Error:NonexistentSaveModelCheckpointsDirectory')
file, ext = os.path.splitext(configs['save_configurations']['save_checkpoints_path'])
if ext != '.h5':
warnings.append('Warning:SaveModelCheckpointsFileExtensionMustBeh5')
configs['save_configurations']['save_checkpoints_path'] = file + '.h5'
if any(configs['save_configurations']['save_checkpoints_frequency']):
try:
int(configs['save_configurations']['save_checkpoints_frequency'])
except ValueError:
warnings.append('Warning:SaveCheckpointsFrequencyShouldBeInt')
try:
str2bool(configs['save_configurations']['save_tensorboard_switch'])
except ValueError:
warnings.append('Warning:SaveTensorboardSwitchShouldBeBool')
configs['save_configurations']['save_tensorboard_switch'] = 'False'
if any(configs['save_configurations']['save_tensorboard_path']):
if os.path.exists(os.path.dirname(configs['save_configurations']['save_tensorboard_path'])) is False:
errors.append('Level1Error:NonexistentSaveTensorboardDirectory')
if any(configs['save_configurations']['save_tensorboard_frequency']):
try:
int(configs['save_configurations']['save_tensorboard_frequency'])
except ValueError:
warnings.append('Warning:SaveTensorboardFrequencyShouldBeInt')
if any(configs['layers']['serial_layer_list']):
for layer in configs['layers']['serial_layer_list']:
if type(layer) is not str:
errors.append('Level1Error:SerialLayersListContainsInvalidLayer')
break
if any(configs['layers']['generator_layer_list']):
for layer in configs['layers']['generator_layer_list']:
if type(layer) is not str:
errors.append('Level1Error:GeneratorLayersListContainsInvalidLayer')
break
if any(configs['layers']['discriminator_layer_list']):
for layer in configs['layers']['discriminator_layer_list']:
if type(layer) is not str:
errors.append('Level1Error:DiscriminatorLayersListContainsInvalidLayer')
break
if any(configs['bbd_options']['scaling_type'] in x for x in ['global', 'per predictor layer']):
pass
else:
errors.append('Level1Error:NonexistentScalingType')
if any(configs['bbd_options']['scales']):
values = configs['bbd_options']['scales'].split(',')
if len(values) == 1:
try:
literal_eval(values)
except ValueError:
errors.append('Level1Error:ScalesMustBeNoneorFloatorMultipleFloatsSeparatedbyComma')
else:
try:
[float(value) for value in values]
except ValueError:
errors.append('Level1Error:ScalesMustBeNoneorFloatorMultipleFloatsSeparatedbyComma')
else:
warnings.append('Warning:NoBbdScalesSpecified')
configs['bbd_options']['scales'] = 'None'
if any(configs['bbd_options']['aspect_ratios_type'] in x for x in ['global', 'per predictor layer']):
pass
else:
errors.append('Level1Error:NonexistentAspectRatiosType')
if any(configs['bbd_options']['aspect_ratios']):
try:
ars = literal_eval(configs['bbd_options']['aspect_ratios'])
if type(ars) is tuple:
for ar in ars:
if type(ar) is tuple:
try:
[float(ar_val) for ar_val in ar]
except ValueError:
errors.append('Level1Error:AspectRatiosMustbeTupleofFloatsorTupleofTuplesofFloats')
else:
try:
float(ar)
except ValueError:
errors.append('Level1Error:AspectRatiosMustbeTupleofFloatsorTupleofTuplesofFloats')
break
else:
errors.append('Level1Error:AspectRatiosMustbeTupleofFloatsorTupleofTuplesofFloats')
except ValueError:
errors.append('Level1Error:AspectRatiosMustbeTupleofFloatsorTupleofTuplesofFloats')
else:
errors.append('Level1Error:AspectRatiosMustbeSpecified')
if any(configs['bbd_options']['number_classes']):
try:
int(configs['bbd_options']['number_classes'])
except ValueError:
errors.append('Level1Error:NoNumberofBbdClassesSpecified')
else:
errors.append('Level1Error:NoNumberofBbdClassesSpecified')
if any(configs['bbd_options']['steps']):
try:
steps = literal_eval(configs['bbd_options']['steps'])
if type(steps) is tuple:
for step in steps:
if type(step) is tuple:
try:
[float(step_val) for step_val in step]
except ValueError:
errors.append('Level1Error:StepsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
else:
try:
float(step)
except ValueError:
errors.append('Level1Error:StepsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
break
elif steps is None:
pass
else:
errors.append('Level1Error:StepsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
except ValueError:
errors.append('Level1Error:StepsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
else:
warnings.append('Warning:NoStepsSpecified')
configs['bbd_options']['steps'] = 'None'
if any(configs['bbd_options']['offsets']):
try:
offsets = literal_eval(configs['bbd_options']['offsets'])
if type(offsets) is tuple:
for offset in offsets:
if type(offset) is tuple:
try:
[float(offset_val) for offset_val in offset]
except ValueError:
errors.append('Level1Error:OffsetsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
else:
try:
float(offset)
except ValueError:
errors.append('Level1Error:OffsetsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
break
elif offsets is None:
pass
else:
errors.append('Level1Error:OffsetsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
except ValueError:
errors.append('Level1Error:OffsetsMustbeNoneorTupleofFloatsorTupleofTuplesofTwoFloats')
else:
warnings.append('Warning:NoOffsetsSpecified')
configs['bbd_options']['offsets'] = 'None'
if any(configs['bbd_options']['variances']):
try:
variances = literal_eval(configs['bbd_options']['variances'])
if type(variances) is tuple:
if len(variances) == 4:
try:
[float(variance) for variance in variances]
except ValueError:
errors.append('Level1Error:VariancesMustbeTupleofFourFloatsGreaterthanZero')
else:
errors.append('Level1Error:VariancesMustbeTupleofFourFloatsGreaterthanZero')
else:
errors.append('Level1Error:VariancesMustbeTupleofFourFloatsGreaterthanZero')
except ValueError:
errors.append('Level1Error:VariancesMustbeTupleofFourFloatsGreaterthanZero')
else:
warnings.append('Warning:NoOffsetsSpecified')
configs['bbd_options']['variances'] = '(1.0, 1.0, 1.0, 1.0)'
if any(configs['bbd_options']['confidence_threshold']):
try:
float(configs['bbd_options']['confidence_threshold'])
except ValueError:
warnings.append('Warning:ConfidenceThresholdShouldBeFloat')
configs['bbd_options']['confidence_threshold'] = '0.1'
else:
configs['bbd_options']['confidence_threshold'] = '0.1'
if any(configs['bbd_options']['iou_threshold']):
try:
float(configs['bbd_options']['iou_threshold'])
except ValueError:
warnings.append('Warning:IoUThresholdShouldBeFloat')
configs['bbd_options']['iou_threshold'] = '0.5'
else:
configs['bbd_options']['iou_threshold'] = '0.5'
if any(configs['bbd_options']['top_k']):
try:
int(configs['bbd_options']['top_k'])
except ValueError:
warnings.append('Warning:NoBbdTopKSpecified')
configs['bbd_options']['top_k'] = '200'
else:
warnings.append('Warning:NoBbdTopKSpecified')
configs['bbd_options']['top_k'] = '200'
if any(configs['bbd_options']['nms_maximum_output']):
try:
int(configs['bbd_options']['nms_maximum_output'])
except ValueError:
warnings.append('Warning:NoBbdNmsSpecified')
configs['bbd_options']['nms_maximum_output'] = '400'
else:
warnings.append('Warning:NoBbdNmsSpecified')
configs['bbd_options']['nms_maximum_output'] = '400'
if any(configs['bbd_options']['coordinates_type'] in x for x in ['centroids', 'minmax', 'corners']):
pass
else:
errors.append('Level1Error:NonexistentCoordinatesType')
try:
str2bool(configs['bbd_options']['two_boxes_for_AR1_switch'])
except ValueError:
warnings.append('Warning:TwoBoxesforAR1ShouldBeBool')
configs['bbd_options']['two_boxes_for_AR1_switch'] = 'False'
try:
str2bool(configs['bbd_options']['clip_boxes_switch'])
except ValueError:
warnings.append('Warning:ClipBoxesShouldBeBool')
configs['bbd_options']['clip_boxes_switch'] = 'False'
try:
str2bool(configs['bbd_options']['normalize_coordinates_switch'])
except ValueError:
warnings.append('Warning:NormalizeCoordinatesShouldBeBool')
configs['bbd_options']['normalize_coordinates_switch'] = 'False'
if any(configs['bbd_options']['positive_iou_threshold']):
try:
float(configs['bbd_options']['positive_iou_threshold'])
except ValueError:
warnings.append('Warning:PositiveIoUThresholdShouldBeFloat')
configs['bbd_options']['positive_iou_threshold'] = '0.5'
else:
configs['bbd_options']['positive_iou_threshold'] = '0.5'
if any(configs['bbd_options']['negative_iou_limit']):
try:
float(configs['bbd_options']['negative_iou_limit'])
except ValueError:
warnings.append('Warning:NegativeIoULimitShouldBeFloat')
configs['bbd_options']['negative_iou_limit'] = '0.3'
else:
configs['bbd_options']['negative_iou_limit'] = '0.3'
return configs, errors, warnings
def level_two_error_checking(configs):
    """Build the engine configuration and gather level-two issues.

    Instantiates EngineConfigurations from the (level-one checked) configs
    and flattens the errors/warnings reported by the train, validation and
    test data handlers plus the saver into two lists.

    Returns:
        tuple: (engine_configs, errors, warnings)
    """
    engine_configs = EngineConfigurations(configs)
    # Concatenate in a fixed order: train, val, test, then saver.
    train = engine_configs.train_data
    val = engine_configs.val_data
    test = engine_configs.test_data
    saver = engine_configs.saver
    errors = train.errors + val.errors + test.errors + saver.errors
    warnings = train.warnings + val.warnings + test.warnings + saver.warnings
    return engine_configs, errors, warnings
def get_io(layer_definitions):
inner_skip_starts = []
outer_skip_starts = []
bbd_hooks = []
errors = []
inputs = None
x = None
for i, layer_definition in enumerate(layer_definitions):
try:
layer = create_layer(layer_definition)
if i == 0:
if layer.type != 'Input':
errors.append('Level3Error:FirstLayerMustBeInput')
break
else:
inputs = layer.keras_layer
elif i == 1:
try:
if layer.type in ['Xception', 'VGG16', 'VGG19', 'ResNet50', 'ResNet101', 'ResNet152',
'ResNet50V2', 'ResNet101V2', 'ResNet152V2', 'ResNeXt50', 'ResNeXt101',
'InceptionV3', 'InceptionResNetV2', | |
0x7a00, Length = 128
0x7a00,0x80,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0x59,0x8b,0x10,0x3d,0x28,0xa5,0xde,0x3e,0x73,0x32,0x6e,0x3f,
0x35,0x5e,0x87,0x36,0x8b,0x93,0x54,0x3e,0xc3,0x49,0x20,0x2f,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xf2,0xb7,0xb3,0x3b,0x36,0xf5,0xb6,0x3e,0xc4,0xd2,0xcb,0x2f,
0x97,0x8f,0x75,0x3f,0xd7,0xe4,0x5e,0x39,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28],[
# Record 213: Address = 0x7a80, Length = 128
0x7a80,0x80,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xe0,0xaa,0x58,0x3d,0x53,0x5e,0x23,0x3e,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xa7,0xfa,0x41,0x3d,
0x62,0x78,0x59,0x3f,0x8d,0xe2,0x1a,0x33,0x6f,0x1,0xbd,0x3b,0xc,0xec,0xc5,0x3d,
0x5f,0x40,0xbf,0x3c,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x30,0x32,0x1,0x3d,
0x7f,0xb9,0x7d,0x3a,0x50,0x40,0x3f,0x3c,0x30,0x53,0x6a,0x3f,0xb2,0x6f,0xc5,0x31,
0x52,0x58,0xa7,0x3d,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x53,0x58,0xa7,0x3d,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x5f,0x40,0x3f,0x3c,0xdc,0x24,0x34,0x28],[
# Record 214: Address = 0x7b00, Length = 128
0x7b00,0x80,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x5f,0x40,0xbf,0x3c,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x5c,0x46,0xb9,0x3e,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0x7d,0x4,0x7b,0x3e,0xdc,0x24,0x34,0x28,0x71,0x6e,0xcf,0x39,0x3c,0x6a,0x75,0x3e,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xfe,0x67,0xb4,0x32,0xf7,0x82,0x6a,0x3e,0xf3,0xb3,0x2e,0x3f],[
# Record 215: Address = 0x7b80, Length = 128
0x7b80,0x80,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xcd,0x82,0x23,0x36,
0x11,0xf0,0xff,0x3d,0xdd,0xcf,0x23,0x37,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x6c,0xdf,0x4,0x3d,
0xdc,0x24,0x34,0x28,0xee,0x73,0xbe,0x2a,0x97,0x5,0x3a,0x3d,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0x84,0x19,0xa8,0x36,0xce,0x4,0x25,0x3f,0x7e,0x83,0x22,0x38,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x83,0xff,0x7f,0x3f,
0xd1,0x8,0x85,0x3a,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28],[
# Record 216: Address = 0x7c00, Length = 128
0x7c00,0x80,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xa3,0x6c,0xb0,0x30,0x6a,0x8e,0xb1,0x3d,
0xb5,0x7d,0x7c,0x33,0x9d,0xff,0x78,0x3e,0x32,0xce,0x69,0x3f,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x9,0x1d,0x53,0x3d,0x14,0x57,0x17,0x3f,
0x44,0x22,0xdf,0x32,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xa8,0xc4,0xa0,0x3c,0x93,0x98,0xc5,0x30,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0x5e,0x77,0xd,0x3a,0x15,0xf0,0x28,0x3e,0x73,0x10,0x0,0x32,0xdc,0x24,0x34,0x28],[
# Record 217: Address = 0x7c80, Length = 128
0x7c80,0x80,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xab,0xa4,0x6d,0x3f,0xcd,0xfb,0x33,0x3a,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0xbb,0xe3,0xde,0x3a,0xc2,0x98,0xfc,0x3d,0x67,0x90,0xfd,0x3b,
0x22,0xd9,0x96,0x3a,0x41,0xbf,0x64,0x3b,0x5e,0x50,0xc1,0x38,0x73,0x7b,0x22,0x3e,
0x68,0xcb,0xed,0x2c,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x1b,0xf4,0x62,0x3e,
0x4d,0xa2,0x24,0x3c,0xec,0xc4,0x8f,0x3a,0x94,0x1e,0xcf,0x33,0x37,0x93,0x3,0x3f],[
# Record 218: Address = 0x7d00, Length = 128
0x7d00,0x80,
0xa9,0x9a,0xb4,0x39,0xab,0x94,0x0,0x3e,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,
0x66,0x18,0xe1,0x2f,0x8b,0xf9,0x70,0x2e,0xd6,0x31,0xc,0x3b,0xe3,0xbf,0x81,0x3b,
0xd9,0x62,0xd6,0x3a,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xdf,0xb0,0x10,0x3d,
0xf0,0xfe,0xd5,0x3e,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0xb2,0xd0,0x34,0x3d,
0x61,0xab,0x8d,0x3d,0x43,0x43,0x72,0x3b,0xc9,0x14,0xe2,0x3e,0xdc,0x24,0x34,0x28,
0xdc,0x24,0x34,0x28,0x88,0x22,0x5e,0x2c,0x9d,0xb6,0x97,0x3e,0xdc,0x24,0x34,0x28,
0x9f,0xbe,0xae,0x3a,0xdc,0x24,0x34,0x28,0xbb,0x87,0x1a,0x36,0xdb,0xb7,0x6a,0x3b,
0xdc,0x24,0x34,0x28,0xf7,0x65,0x4a,0x3a,0x4f,0xe9,0xec,0x3d,0x2d,0x89,0x6f,0x3a],[
# Record 219: Address = 0x7d80, Length = 128
0x7d80,0x80,
0xc0,0x41,0x7b,0x3c,0x83,0x7e,0x42,0x3f,0x98,0xf1,0x4a,0x3b,0xe,0x81,0x92,0x3a,
0x1f,0xc2,0xd2,0x3a,0x30,0x66,0xab,0x3d,0x76,0x16,0x73,0x3c,0xdc,0x24,0x34,0x28,
0x4c,0x50,0xe,0x3b,0x92,0x14,0x4b,0x3a,0x4a,0xff,0x91,0x2c,0xe7,0x60,0xf0,0x3b,
0x39,0x46,0xcb,0x3e,0xe5,0x7a,0x9f,0x3c,0xcd,0x5a,0x40,0x3b,0xcf,0x1d,0xd3,0x3a,
0x6b,0x61,0xa8,0x3d,0xb3,0x63,0x85,0x3a,0x12,0x9f,0x93,0x3b,0x2a,0xa7,0x15,0x3e,
0xfc,0xbe,0x1d,0x3b,0xdc,0x24,0x34,0x28,0xfa,0xe1,0x86,0x3a,0xfb,0x8a,0x26,0x3f,
0xa1,0x31,0x74,0x3b,0x35,0x62,0xb,0x3b,0x48,0xf9,0x86,0x3e,0xa3,0xb7,0x8e,0x3b,
0x62,0xcb,0x1c,0x3b,0xe8,0xc8,0x81,0x3b,0xf5,0x60,0x35,0x3f,0x4f,0x12,0xbf,0x3c],[
# Record 220: Address = 0x7e00, Length = 128
0x7e00,0x80,
0x5,0xf,0xa2,0x3b,0x97,0x53,0xb0,0x3c,0x62,0xff,0x19,0x3a,0xbd,0x11,0xdc,0x3a,
0x52,0x1,0x53,0x3a,0xdc,0x24,0x34,0x28,0xdc,0x24,0x34,0x28,0x1,0x9d,0xd,0x3b,
0x6e,0x2a,0x13,0x3a,0xd,0xeb,0x60,0x3e,0x70,0x59,0x31,0x3c,0x42,0x30,0xab,0x3c,
0x2e,0x3a,0x45,0x3b,0xa6,0x15,0x73,0x3d,0xc5,0xef,0xcb,0x35,0xa9,0xf6,0x91,0x3c,
0xc9,0xec,0xa5,0x3e,0x71,0x65,0x7c,0x3d,0x84,0xd1,0x7b,0x3a,0xb7,0x2a,0x3a,0x3c,
0xb8,0xed,0xaa,0x3c,0xbb,0x3,0x27,0x39,0x73,0x5a,0x19,0x36,0x0,0xf3,0x7e,0x3d,
0x32,0x54,0x9f,0x3e,0x36,0x4,0xa5,0x3a,0x53,0xee,0x0,0x38,0x6f,0x9d,0x1c,0x3b,
0xd8,0xc8,0x22,0x3b,0x2c,0x85,0xb4,0x39,0xdb,0x6e,0x8f,0x39,0xa9,0x20,0xa5,0x3c],[
# Record 221: Address = 0x7e80, Length = 128
0x7e80,0x80,
0x2b,0x57,0x2e,0x3c,0xc6,0x22,0xb5,0x3e,0x26,0xa2,0x79,0x29,0x6c,0x6c,0x4a,0x3c,
0xca,0xa8,0x10,0x3c,0xcf,0xa1,0x9,0x2b,0xb6,0x88,0xf7,0x39,0x64,0x86,0x7d,0x3b,
0xfe,0x58,0x36,0x3e,0x68,0x2d,0x8c,0x3e,0x3e,0x38,0x1,0x3c,0xa,0xd7,0x27,0x3c,
0x20,0x33,0x52,0x3b,0x9e,0x5c,0xbb,0x3a,0x6c,0xb5,0xa9,0x3a,0x62,0x12,0x2b,0x3c,
0xd2,0x79,0xfb,0x3d,0x0,0x0,0x0,0x3f,0x0,0x0,0x0,0x3f,0x0,0x0,0x80,0x3f,
0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x80,0x3f,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0x0,0x0,0x80,0x3f,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,
0x0,0x0,0x80,0x3f,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x80,0x3e],[
# Record 222: Address = 0x7f00, Length = 124
0x7f00,0x7c,
0x0,0x0,0x80,0x3e,0x0,0x0,0x80,0x3e,0x0,0x0,0x80,0x3e,0x0,0x0,0x80,0x3e,
0x0,0x0,0x80,0x3e,0x0,0x0,0x80,0x3e,0x0,0x0,0x80,0x3e,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x4,0x4,0x4,0x4,
]]
# IV — 14 bytes; presumably the initialization vector used when decoding the
# loader update records that follow. TODO(review): confirm its role against
# the loader/decryption code.
LDR_IV = [0x42,0xff,0xff,0xbf,0xe6,0x6f,0xc5,0x1,0xc7,0x58,0x4b,0x46,0xec,0xf5]
# Firmware version block, stored as raw ASCII byte values (0x4c,0x4c,0x30,... is
# "LL0:..."); the decoded text is reproduced in the comment below. The block is
# zero-padded, with a trailing 0xfa,0xfc pair whose meaning is not evident here.
LDR_VERSION = [
# FwVersion: LL0:100.14;p:HillstarV01 ;;i:B;f:25000;s:Beta2r1059:MO;c:MKI;t:2013/11/13 11:48:30;
0x4c,0x4c,0x30,0x3a,0x31,0x30,0x30,0x2e,0x31,0x34,0x3b,0x70,0x3a,0x48,0x69,0x6c,
0x6c,0x73,0x74,0x61,0x72,0x56,0x30,0x31,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,
0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x3b,0x3b,0x69,
0x3a,0x42,0x3b,0x66,0x3a,0x32,0x35,0x30,0x30,0x30,0x3b,0x73,0x3a,0x42,0x65,0x74,
0x61,0x32,0x72,0x31,0x30,0x35,0x39,0x3a,0x4d,0x4f,0x3b,0x63,0x3a,0x4d,0x4b,0x49,
0x3b,0x74,0x3a,0x32,0x30,0x31,0x33,0x2f,0x31,0x31,0x2f,0x31,0x33,0x20,0x31,0x31,
0x3a,0x34,0x38,0x3a,0x33,0x30,0x3b,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,
0x0,0x0,0x0,0x0,0xfa,0xfc,0x0,0x0]
LDR_UPDATE_DATA = [[
# Record 0: Address = 0x1000, Length = 128
0x1000,0x80,
0x3d,0xf0,0x35,0x59,0x8b,0xfc,0xf8,0xef,0x24,0x67,0x54,0xd4,0x5a,0x99,0xd3,0x5d,
0x6d,0x5a,0x82,0xf0,0xb4,0x1f,0xcc,0x42,0x95,0xcc,0x60,0xd5,0x8,0xae,0xd7,0x65,
0xeb,0xc1,0x83,0xc5,0xf3,0xc7,0x37,0xc4,0xcd,0x13,0x7c,0xa5,0x29,0x58,0x99,0xec,
0x1f,0xc,0xdc,0xe9,0x57,0x50,0xdc,0xcf,0xcb,0xe6,0xe6,0x6a,0xe1,0xdb,0x73,0xcb,
0x56,0xeb,0x63,0x15,0x45,0xd9,0x29,0x16,0xcf,0x8e,0xf7,0xab,0x25,0x6e,0xc2,0x34,
0x45,0xe6,0x30,0xce,0x65,0x59,0x9d,0x6b,0x88,0x5a,0x94,0x71,0x27,0xd5,0x63,0x74,
0xc9,0x83,0x1,0xd9,0xdb,0x7,0x35,0x16,0x23,0x38,0x98,0xf7,0xaa,0xd5,0x81,0x6a,
0x5e,0xf3,0xa6,0x4a,0x74,0xea,0xb6,0xab,0xcf,0xa4,0xb8,0x38,0x74,0x8c,0x90,0xb7],[
# Record 1: Address = 0x1080, Length = 128
0x1080,0x80,
0x92,0xfb,0xec,0xee,0xf6,0x26,0xe5,0xb6,0x14,0xc,0xe4,0xf3,0x84,0x4f,0x51,0x94,
0xb6,0x2c,0x14,0xdb,0xe0,0x53,0xdb,0xa7,0xa4,0x86,0x58,0x9e,0xbb,0x26,0xe,0x3d,
0x29,0x76,0x10,0x73,0x72,0xc6,0x9f,0xb8,0xe8,0x8b,0x32,0x7b,0x8a,0x4c,0xf5,0x4,
0x8e,0x49,0x4a,0x9a,0xba,0x67,0xd6,0x25,0xe9,0x5,0xf9,0xd8,0xf3,0x25,0x4b,0xb1,
0x59,0x34,0x7f,0x86,0xff,0x1a,0x45,0x81,0x26,0xdd,0x3d,0xe8,0xe4,0xc5,0x44,0x2a,
0x7b,0x23,0x66,0xfa,0xc9,0x63,0xf1,0xbb,0x91,0xf1,0x7,0x96,0xf2,0xcb,0xd3,0xd1,
0xd8,0xe8,0x21,0x4b,0x76,0xdf,0xbb,0x3d,0xdc,0x17,0x45,0x6d,0xa2,0xd9,0x61,0x70,
0xf3,0x40,0x42,0x7,0x56,0x41,0xf5,0xc,0xe6,0xbb,0x2,0x6b,0x68,0x47,0x44,0xbf],[
# Record 2: Address = 0x1100, Length = 128
0x1100,0x80,
0x54,0x8d,0x61,0x64,0xb7,0xef,0xaf,0xa2,0x43,0x8e,0x2,0xbe,0x88,0xd5,0xeb,0xf3,
0xf1,0x12,0xbf,0xc7,0xcb,0x1,0x49,0x7,0xcf,0xc,0xf9,0x9d,0x35,0x93,0x83,0x29,
0xa9,0xce,0x38,0x21,0xa7,0x3a,0xd2,0xf,0x7a,0x60,0x13,0x6f,0x5e,0xab,0x5d,0xed,
0x72,0x56,0x2a,0x43,0xd4,0xea,0x37,0x31,0x4f,0xe0,0x17,0x4a,0x14,0x84,0xaf,0x40,
0x6e,0x98,0x54,0xf4,0x22,0x5a,0xe4,0x91,0x7d,0xa1,0xb,0x95,0xa3,0xe0,0xdc,0xd5,
0xff,0xfd,0x32,0xd0,0xeb,0xbb,0xaf,0x8d,0xa,0x42,0x58,0x30,0x2c,0xe9,0x7d,0x61,
0xc4,0x70,0xa4,0xfe,0xc1,0x7f,0xea,0xf5,0x9d,0xb3,0xe8,0x49,0xa,0x99,0xb,0x8b,
0xd,0x99,0x66,0xef,0x67,0xc6,0x6a,0xa6,0x40,0xba,0x3e,0xaf,0x16,0xc4,0x1f,0x53],[
# Record 3: Address = 0x1180, Length = 128
0x1180,0x80,
0x82,0x5d,0x3d,0xf5,0x13,0x98,0x43,0x6c,0xaa,0xd3,0x47,0x4f,0x49,0x81,0xc5,0x79,
0xd5,0x8e,0x3c,0x56,0xe5,0xfc,0xd3,0x94,0xb3,0x3b,0xfd,0xf7,0x2d,0x9b,0x75,0xc9,
0x94,0x5a,0x4b,0xe9,0x7c,0x72,0xc8,0xb5,0xc7,0xab,0xee,0x75,0xbe,0xdb,0x14,0xe7,
0xba,0x7a,0x8a,0x62,0x2c,0x30,0xc0,0x4b,0xab,0x42,0xdc,0x26,0xf4,0x4b,0xcf,0x3c,
0x44,0x4d,0xa9,0xc8,0x97,0x90,0xff,0x23,0x17,0x37,0x66,0xd,0x8a,0x70,0x72,0x99,
0x14,0x66,0xcb,0xec,0x97,0x82,0x15,0x7f,0x54,0x2c,0x34,0x81,0xbb,0xce,0xd7,0x8b,
0x6b,0xec,0x80,0xe2,0x6,0x4d,0x2e,0xe8,0xcf,0x87,0xe2,0x63,0x63,0x90,0xd5,0xe5,
0x65,0x74,0xc4,0x5c,0x2b,0xdc,0x91,0xd0,0x21,0x48,0x45,0x14,0xa7,0xcb,0x4e,0xce],[
# Record 4: Address = 0x1200, Length = 128
0x1200,0x80,
0x60,0x2a,0xfb,0x5f,0x93,0x26,0x43,0x9a,0xf4,0x7d,0x93,0xff,0x5d,0xcf,0xaa,0x19,
0xa,0x6c,0x90,0xd0,0x66,0x4f,0x89,0xcc,0x7,0x34,0x8b,0xc2,0xf9,0xde,0x8a,0x20,
0x25,0x56,0x3b,0xb,0xc0,0x7a,0x16,0x75,0x3d,0x5d,0x46,0xd7,0x78,0xd0,0x84,0x8b,
0x1,0xa7,0x62,0xf5,0x6,0xfe,0xeb,0x41,0x8e,0x6,0xa9,0x1,0x6,0xdc,0x87,0x49,
0xd1,0x8d,0xad,0xf5,0x53,0xf1,0xef,0xe3,0xfa,0x48,0x7a,0x9b,0xda,0xe5,0xda,0x42,
0x2b,0xef,0xd2,0xb4,0x32,0x7f,0x73,0x0,0xc6,0x7e,0x4b,0x8a,0x3e,0xde,0xb2,0x4e,
0xc1,0x6d,0xea,0xe9,0x99,0x9,0xbf,0xf6,0xa5,0xd2,0xb8,0x35,0x31,0xa6,0xfc,0x33,
0x91,0xce,0x99,0x5a,0x73,0x73,0xcb,0x18,0x6a,0xee,0x30,0x0,0xd,0xff,0x8b,0x6f],[
# Record 5: Address = 0x1280, Length = 128
0x1280,0x80,
0x9e,0xc3,0x48,0x4c,0x43,0x3c,0xed,0x42,0xc7,0x2c,0xbd,0x5d,0xd8,0x61,0xa4,0xf3,
0x13,0x2d,0xe4,0xb0,0xc,0x44,0x60,0x1e,0x23,0xbd,0xb3,0x2d,0x11,0xdf,0x9a,0x91,
0x2e,0x94,0x2b,0xa3,0x7e,0xee,0x25,0x2e,0xd4,0xd0,0x57,0xe5,0x71,0xe4,0x40,0xbd,
0x3a,0x37,0x30,0xb4,0x90,0x46,0x24,0xd6,0x3f,0x67,0x82,0xed,0x7a,0xbe,0xc9,0x9f,
0xf2,0xbd,0xc4,0xf5,0x7c,0x30,0x70,0xb8,0x2e,0xb8,0xc,0x79,0x7d,0xb8,0x40,0xa4,
0x27,0xa,0x26,0xf,0x1d,0xe1,0xc5,0xbd,0x11,0x80,0xaf,0xb2,0xb5,0x81,0x30,0x3b,
0xfe,0xdf,0xfe,0xee,0xbd,0xd0,0xbb,0x99,0x26,0xb,0x78,0x5f,0xbf,0xad,0xe2,0xdf,
0x9,0x2a,0xb5,0x9c,0x2a,0x90,0x2c,0xa3,0x2d,0xb5,0x8d,0x14,0x45,0xe3,0x7,0x5],[
# Record 6: Address = 0x1300, Length = 128
0x1300,0x80,
0x25,0x34,0x4f,0xf7,0x3d,0xb8,0x1c,0x98,0xae,0x28,0xfa,0x4c,0x49,0xc9,0x29,0x80,
0x6d,0xf2,0x69,0xcf,0x50,0x97,0xda,0xc2,0xbb,0x5e,0x75,0x15,0x3,0x76,0xd8,0x61,
0xf3,0x63,0x49,0xbf,0xf6,0x53,0x1b,0xd1,0xfe,0xd1,0x17,0x22,0xba,0x7,0xbc,0x99,
0x8f,0xd,0x2e,0x45,0x7b,0xb7,0x21,0x6,0xfe,0x43,0x25,0xd3,0xec,0x2a,0x5c,0xbd,
0x1,0xfe,0x2,0xb1,0x6a,0xeb,0xc9,0x9d,0xbc,0x6a,0x8d,0x34,0x83,0xba,0xb5,0xd6,
0xf3,0xa6,0xb5,0xf,0x20,0xc4,0xd5,0x26,0x36,0x5f,0x9a,0x6,0x31,0x87,0x61,0x67,
0x1d,0x1d,0x6a,0x9d,0x34,0xea,0xee,0x42,0xd,0xf,0xf1,0x10,0xe4,0x53,0x8a,0x7b,
0x53,0x90,0x84,0xfc,0x41,0x1b,0x21,0xa4,0xe2,0x58,0xc4,0xeb,0xab,0x19,0x71,0xac],[
# Record 7: Address = 0x1380, Length = 128
0x1380,0x80,
0x14,0x60,0x20,0xe0,0xeb,0x4e,0x82,0x7c,0x12,0x6b,0xd8,0x4f,0x16,0x18,0xbe,0xbf,
0x78,0x48,0x58,0xbe,0x67,0x23,0x12,0x41,0x8a,0xf7,0xf,0x56,0xf0,0x8f,0x1c,0x62,
0xf4,0x9e,0x66,0x66,0x94,0x2c,0xb1,0xba,0x97,0xe2,0xfd,0x1b,0xcb,0x65,0x87,0xf2,
0x32,0x11,0x6d,0x53,0x9c,0xe4,0x78,0x93,0xba,0xf8,0xbb,0x9a,0x2e,0x53,0xe6,0xa7,
0x64,0xeb,0x1,0x82,0x88,0xee,0xba,0xde,0x1d,0x9c,0x55,0x20,0x56,0xd3,0xd2,0xc9,
0x64,0x5,0x40,0x89,0xad,0x12,0xbb,0x94,0xd9,0xc9,0x30,0xd4,0xf3,0x21,0xf7,0xa4,
0x99,0x3b,0xad,0x49,0x76,0x20,0x1f,0xb1,0xf3,0x32,0xd9,0xc0,0x87,0x80,0xfb,0x19,
0x16,0x61,0x89,0xa4,0x45,0xba,0xab,0x20,0x63,0x9d,0xc0,0x7a,0xdd,0x23,0x73,0xf7],[
# Record 8: Address = 0x1400, Length = 128
0x1400,0x80,
0xd8,0xdb,0x73,0x6e,0xcf,0xe8,0xf7,0x7a,0x2,0xf2,0x67,0xa2,0x1a,0xe,0x57,0xe3,
0x72,0x82,0x58,0xc2,0x27,0x80,0x49,0xb6,0xbc,0xf6,0x58,0xf8,0x66,0xf6,0xd8,0x30,
0x3b,0xcd,0x7,0xc9,0x29,0x19,0x10,0xd9,0x7d,0x46,0xb1,0x31,0xf4,0x88,0x8d,0xa0,
0x9e,0x58,0xda,0x2e,0x5d,0x73,0x4f,0xe9,0x9,0x6f,0x39,0xc0,0xcd,0x7c,0x26,0x48,
0x9a,0xbd,0xde,0x27,0x9b,0x41,0xc,0x78,0x92,0xaf,0x4c,0x60,0xd9,0x8c,0x2e,0xbd,
0x70,0xb4,0x50,0xd1,0x10,0x40,0xb0,0xa9,0xf0,0xf2,0xbc,0xff,0xce,0xc,0xfe,0x4,
0xad,0xd6,0xb9,0x7d,0x4e,0xaf,0x57,0x80,0xf2,0xd1,0x6,0x72,0x5e,0x51,0x68,0xce,
0x2e,0x62,0x7c,0xf5,0x27,0x41,0x48,0x71,0x38,0xb9,0x8b,0x64,0x2c,0x2e,0x60,0x5d],[
# Record 9: Address = 0x1480, Length = 128
0x1480,0x80,
0x79,0x44,0x34,0xc4,0xc9,0xca,0x36,0xda,0xaf,0xde,0xf3,0x19,0x51,0x95,0xef,0xb6,
0x44,0xa8,0xe4,0x51,0x99,0x56,0xeb,0xe6,0xa4,0x81,0x42,0x51,0xc1,0x16,0xbe,0x1c,
0x48,0xb8,0x8a,0xef,0x48,0x72,0x9e,0x29,0xfe,0x25,0x8d,0xe3,0xe0,0xa9,0x81,0x8a,
0x4a,0x9d,0x9a,0x5d,0xdf,0x8a,0xac,0xc7,0xca,0x49,0x9f,0x29,0x99,0x50,0xa2,0xf3,
0x99,0xb5,0xe6,0x2c,0x59,0x21,0x35,0x6f,0xf3,0xb,0xea,0xd8,0x5,0xe1,0x18,0x73,
0xde,0x5d,0xeb,0x75,0xd6,0x90,0x13,0x4f,0x80,0xb4,0x5b,0xdc,0xda,0xa7,0x88,0xc3,
0x46,0xa5,0x2f,0x30,0x1d,0x85,0xf6,0x0,0xa2,0x55,0x91,0xe1,0xd5,0xd8,0x6f,0x82,
0x5e,0x15,0x5c,0x22,0xee,0xd2,0xd1,0x46,0x7b,0xa8,0x1c,0x76,0xe6,0x87,0x18,0x30],[
# Record 10: Address = 0x1500, Length = 128
0x1500,0x80,
0xfb,0x8e,0x34,0x95,0xa5,0xd4,0x6a,0x1d,0x62,0x39,0x24,0x4b,0x34,0x93,0x7f,0xa0,
0xb1,0x44,0x6f,0x21,0xf6,0xad,0x72,0x57,0x9f,0xee,0x26,0xfe,0xc,0xf3,0x69,0x37,
0xf2,0x63,0xc3,0xa8,0xc8,0xa4,0xd2,0x43,0x13,0x7,0xb3,0x79,0xc5,0x39,0x8b,0xe8,
0x0,0x8e,0x4f,0xc5,0xcf,0x58,0x38,0x2b,0x6,0x24,0x8,0xc4,0x12,0xc0,0xb3,0xd8,
0xb0,0x93,0x5b,0xfe,0x4d,0x47,0x10,0xa5,0xc3,0xf9,0xea,0xbd,0xa8,0x3c,0xfd,0x18,
0x8a,0x25,0xc3,0x1b,0x1e,0xee,0x8f,0x17,0x6e,0x1d,0x33,0xf4,0xe5,0x0,0x28,0x6d,
0x19,0x7b,0xf1,0x65,0xd,0xcb,0x7b,0xf2,0x20,0x9d,0x33,0xf9,0x64,0x3b,0x67,0xf6,
0xab,0x83,0x92,0x70,0x14,0xb0,0x4b,0xfa,0xee,0xe9,0x9f,0x3b,0x2a,0xbc,0x65,0x79],[
# Record 11: Address = 0x1580, Length = 128
0x1580,0x80,
0xf5,0x14,0xa7,0x23,0x9c,0x1e,0xf9,0xa,0xc7,0xc9,0x1c,0x2a,0xbb,0x8f,0x7b,0x55,
0xfa,0x6b,0x95,0x43,0x69,0x44,0x42,0xb7,0x7b,0x6,0xe0,0xdc,0xf8,0xf4,0x10,0x26,
0x36,0xb5,0x7b,0xe1,0x2b,0x0,0x4c,0xbb,0xed,0xc5,0xfd,0x9a,0xd2,0x7d,0x23,0x6f,
0xba,0x33,0xe1,0xc7,0xb8,0x25,0xe9,0xfe,0xe,0x3f,0x2c,0x39,0x93,0x73,0x9,0xc4,
0x86,0x6,0x3a,0x57,0x33,0x97,0x75,0x5a,0x81,0xdc,0x7,0x58,0x9b,0xfd,0x8b,0x5b,
0x46,0x23,0xe6,0x41,0x4c,0x9a,0xac,0x94,0xef,0x43,0x4f,0x8a,0xb6,0x52,0x15,0x38,
0x70,0x4c,0xb4,0xc,0xc0,0x18,0xc6,0x2f,0x20,0x7c,0xb9,0xda,0xaf,0x10,0xc9,0x20,
0x65,0x1f,0xb9,0x36,0x8a,0xa3,0xca,0xcf,0xd8,0x6c,0xb3,0x7a,0xf,0x51,0x5b,0x8d],[
# Record 12: Address = 0x1600, Length = 128
0x1600,0x80,
0xf8,0x41,0x3a,0xac,0x10,0x94,0x95,0x21,0x72,0xc6,0xd7,0xb3,0x53,0x8c,0x54,0xcc,
0xb8,0x0,0xaf,0x50,0xdf,0x87,0x7f,0x4d,0xa9,0xb5,0xec,0x87,0x6c,0x50,0xe,0xb9,
0x3e,0xb6,0xc,0x91,0x7b,0x7a,0x6,0xe9,0x7f,0xb9,0xb6,0xe1,0x36,0x50,0xaa,0x90,
0xa3,0x92,0x62,0x28,0x4b,0x73,0x4b,0x6e,0xea,0x54,0x3b,0xc,0xac,0xa1,0x67,0x92,
0xd2,0xd8,0x7c,0x16,0x14,0x9b,0x55,0xed,0x9d,0x38,0xc4,0xbf,0x7d,0x97,0xbd,0x6e,
0xcb,0x81,0x51,0xc8,0xb5,0x8,0xf2,0x8f,0xb2,0x70,0x9,0xef,0x8f,0x84,0x12,0xa1,
0x3e,0x27,0x80,0xe,0x6b,0xa8,0x6e,0xc8,0x9,0xdf,0x8e,0xb8,0xaa,0xa7,0x88,0xb1,
0x42,0xa6,0x99,0x4c,0x8d,0x87,0xc7,0xbf,0xf8,0x70,0xe,0xac,0x32,0x41,0x4,0x60],[
# Record 13: Address = 0x1680, Length = 128
0x1680,0x80,
0x1,0xde,0xaf,0x1,0x1b,0xd5,0x2e,0x43,0xb5,0xa3,0xd8,0xbb,0x63,0xd1,0xd7,0xec,
0x18,0x3e,0xc5,0xee,0xd0,0x13,0xb2,0x29,0x4,0x24,0xba,0x65,0x2c,0x72,0x60,0x5a,
0xe,0x62,0x4a,0x7b,0x8d,0x70,0xb,0x60,0x28,0x59,0xed,0xf9,0x8,0x9e,0x52,0x1a,
0x87,0x86,0x5c,0x65,0x93,0x6c,0x65,0x63,0xc8,0xdd,0xdb,0xfe,0x94,0xaf,0xd1,0x44,
0x22,0xe4,0xfc,0xf8,0x45,0x48,0xc4,0xfa,0x13,0xd4,0x97,0xe,0xbf,0xb8,0x5b,0x76,
0x61,0xbc,0x1c,0x7e,0xf8,0x2d,0x17,0x52,0xd6,0x5a,0x23,0x91,0xc,0xc,0xd2,0x89,
0xf7,0x34,0xc0,0xc4,0xc2,0xff,0xea,0x5f,0x82,0x2b,0xb0,0x47,0xc6,0x4,0x1b,0x62,
0x2e,0xff,0x51,0x94,0xfe,0x76,0x83,0x8d,0xef,0x2f,0xa,0xee,0xc7,0x23,0xd9,0xef],[
# Record 14: Address = 0x1700, Length = 128
0x1700,0x80,
0x42,0x30,0x88,0xaf,0xa4,0x3d,0x21,0xc8,0xff,0x26,0x5c,0xc2,0x74,0x2b,0x18,0x7c,
0xbc,0xe,0x49,0x4f,0x9d,0xd9,0x9b,0x90,0x36,0x11,0x59,0x24,0xa,0xf7,0x33,0x32,
0xdd,0x9d,0xc4,0x52,0x35,0x0,0xbc,0x36,0x48,0xba,0xb3,0xac,0x54,0x2f,0x9f,0xce,
0x33,0xb1,0xf6,0xf9,0x70,0xa3,0xaa,0x84,0xf5,0x59,0xcf,0x63,0x12,0xab,0x97,0x8a,
0x4a,0xcf,0x77,0xe,0x78,0x74,0x54,0xda,0x2d,0xde,0xfc,0x36,0xec,0x4d,0x2f,0x2c,
0x28,0xf4,0xc6,0xb5,0xd2,0xa3,0x5d,0x36,0xce,0x55,0xb8,0x52,0x8,0x54,0x74,0xb7,
0x2a,0x16,0x74,0x94,0x4a,0x32,0x97,0x61,0x0,0x6c,0x18,0xb7,0x69,0x20,0xa6,0xb,
0xf0,0xa,0x89,0x65,0x7c,0xed,0x71,0x7a,0x9f,0xff,0xdc,0x3a,0x1c,0xbb,0x26,0x39],[
# Record 15: Address = 0x1780, Length = 128
0x1780,0x80,
0xc2,0xba,0xdf,0x1,0x2b,0xf8,0x48,0xc3,0x7b,0xde,0xb3,0xc8,0x1b,0xe7,0xf4,0xbd,
0x8e,0xf1,0x58,0x8,0x5a,0xc,0xac,0xa7,0xe3,0x42,0xc0,0x9c,0x71,0x95,0x73,0xf7,
0xb9,0x9a,0xe3,0x76,0xad,0x3f,0x3e,0x82,0xb9,0xfe,0x75,0x33,0x73,0x58,0x6d,0x2c,
0xbd,0x6b,0x15,0xe3,0x7,0x2a,0x87,0x14,0x50,0x6e,0xb3,0xb9,0xba,0xc5,0xbf,0xf6,
0xe6,0xaa,0x2f,0x85,0x74,0xca,0x91,0xa5,0x89,0xad,0x6e,0x36,0xc5,0xeb,0xf,0xd7,
0x93,0x1,0x90,0xac,0x4c,0x68,0xd7,0x7d,0x7f,0xe1,0x71,0xa0,0xce,0x18,0xfc,0xc3,
0x60,0xa1,0x96,0x68,0xdc,0x91,0x3b,0x28,0x1a,0x2d,0x5b,0x57,0x2,0x6d,0xfc,0x2c,
0x7,0x4c,0xe1,0xb7,0xa7,0x84,0xe8,0xad,0x9c,0x6d,0xcb,0x5f,0xdc,0x1e,0x20,0x94],[
# Record 16: Address = 0x1800, Length = 128
0x1800,0x80,
0x81,0x62,0x74,0x8,0x90,0xe1,0x58,0x69,0xdc,0xe1,0x5c,0xa8,0xc3,0x64,0x63,0xfd,
0xfe,0x75,0xa2,0x18,0xcb,0xa6,0xa6,0xa6,0xcf,0x67,0x5f,0xbf,0xb8,0xff,0x1c,0x61,
0x7,0x1d,0x31,0x37,0xd0,0xa0,0x73,0xaf,0x7e,0x20,0x5e,0x28,0xf0,0x82,0xea,0x8c,
0xb8,0x5d,0x13,0x8,0xa6,0xe1,0x24,0xf8,0x24,0x54,0x30,0x48,0x75,0x8e,0x19,0x9a,
0x3a,0x51,0x4c,0x6d,0xa5,0x58,0xa3,0xa7,0x85,0x3f,0xf9,0x24,0xfc,0x4f,0xc8,0x41,
0x54,0xe9,0x30,0xe,0x23,0x9c,0x2c,0x1d,0x62,0x1e,0x80,0x8b,0x22,0x3f,0xd4,0x77,
0xf5,0x54,0x53,0xa0,0x34,0xba,0x42,0xd7,0x66,0x4a,0x73,0xba,0x64,0x69,0x50,0x65,
0xaf,0x53,0x63,0xb,0xde,0x54,0x5c,0x8,0x92,0x76,0x10,0x97,0x53,0x9d,0xab,0xd3],[
# Record 17: Address = 0x1880, Length = 128
0x1880,0x80,
0xd1,0x56,0xc9,0x61,0xe0,0x58,0x25,0x99,0x3a,0x25,0xfa,0xe1,0x65,0xee,0xa8,0xbb,
0x88,0xb5,0xeb,0x7a,0xdc,0x3d,0x31,0x78,0x5a,0x12,0x76,0x5d,0xed,0xdb,0x9f,0xbc,
0xe3,0x32,0x36,0xbe,0x8d,0xc,0xa1,0xb6,0x1e,0x6a,0x4f,0x9b,0x1a,0xef,0x38,0xd2,
0xa3,0xcf,0x21,0x2c,0x60,0x3c,0x8b,0x97,0x49,0xd0,0x51,0x83,0x6e,0xbc,0x0,0xbf,
0xc5,0x26,0xcc,0x5d,0xad,0xe7,0x8c,0x6f,0x19,0x9e,0x3a,0x4a,0x4a,0x24,0xd3,0x1d,
0xf7,0x3,0x25,0x59,0x57,0x56,0x62,0x44,0x7f,0x2e,0xd0,0x13,0xc7,0x70,0x5a,0xd4,
0x53,0x45,0x40,0xe5,0x72,0xa0,0xa6,0x6b,0x38,0x6,0x90,0x57,0x82,0x34,0x17,0x74,
0x20,0xef,0xca,0xf9,0x6,0xa3,0xa2,0x28,0x40,0xab,0xb,0xed,0x42,0x89,0xf1,0x76],[
# Record 18: Address = 0x1900, Length = 128
0x1900,0x80,
0x83,0x36,0x3b,0xe1,0x5,0xa4,0xef,0x23,0x58,0x51,0x8f,0x51,0x6f,0xbe,0x71,0x2b,
0xf5,0xe8,0xbf,0x77,0xb6,0xc8,0x28,0x5e,0xb0,0xa2,0x3,0x30,0x8c,0xa1,0xae,0xe1,
0xe8,0x80,0xcf,0xce,0x94,0x4c,0x6d,0xd2,0x45,0x2a,0x84,0x10,0x9c,0x94,0xec,0x1a,
0x45,0x0,0xa1,0xd4,0x14,0xab,0x2a,0x1e,0xc5,0xb4,0x66,0x25,0x38,0x1b,0xcf,0x36,
0xae,0x3c,0xca,0x33,0xe9,0xd3,0x95,0xe3,0xf0,0x5b,0x7c,0x53,0xa8,0x24,0xb2,0x7c,
0xdb,0x36,0x64,0x94,0xcd,0x32,0x13,0xf1,0xf1,0xca,0x13,0xfe,0x2b,0xd9,0xde,0x83,
0xd2,0x30,0xd1,0x80,0x9f,0x86,0xd,0xdf,0xfd,0x49,0x11,0xce,0xe2,0x2a,0x46,0xb5,
0xbe,0x46,0xce,0x4,0xd3,0xb3,0x5,0xd5,0x97,0x44,0x1d,0x94,0xc,0x8,0xba,0x98],[
# Record 19: Address = 0x1980, Length = 128
0x1980,0x80,
0x36,0x31,0xf5,0x46,0xd2,0x67,0x78,0x6e,0x32,0x73,0xe8,0xf,0x89,0x54,0x65,0xce,
0x98,0xd0,0x2d,0xb,0xe6,0x54,0x2d,0xc,0xe7,0xb0,0xf7,0x91,0xf1,0xdf,0xf7,0x69,
0x82,0xbc,0x40,0xf1,0xc,0x17,0x6b,0x3e,0x26,0xab,0x2d,0x63,0x7b,0x88,0x49,0x91,
0x6c,0x55,0x3b,0x7,0xab,0x49,0xca,0xa,0xf7,0x6b,0x6a,0xf3,0x13,0x6b,0xaa,0xd1,
0xbf,0xc5,0xe6,0x42,0x14,0xfc,0x97,0x92,0x22,0x77,0x9d,0x90,0xc5,0x19,0x2f,0xb3,
0xea,0xfc,0x50,0xe4,0x92,0x9,0xa3,0x55,0x2c,0xd5,0x45,0xc7,0xd9,0x23,0x8d,0x89,
0xbf,0x6a,0x3e,0x88,0x6e,0x82,0x8d,0x27,0x14,0xab,0xf8,0x69,0x94,0x8f,0x26,0x6,
0x50,0xea,0x27,0xee,0x11,0x62,0xd1,0x2f,0x2a,0x6a,0x7d,0x14,0xcd,0x47,0x59,0x24],[
# Record 20: Address = 0x1a00, Length = 128
0x1a00,0x80,
0xeb,0xb4,0xd6,0x1f,0x8,0x7b,0x8b,0x4a,0x68,0xb1,0xeb,0xad,0xcd,0x40,0x61,0xd5,
0xb0,0xf9,0x19,0x7b,0x37,0x55,0x44,0x5c,0xcf,0x27,0xf9,0xf1,0xf6,0x9a,0xdf,0x33,
0x5f,0xb7,0x9c,0x59,0x3f,0x6a,0x9f,0xe6,0xe,0x4,0x60,0xb0,0x1d,0xab,0xf3,0xab,
0x49,0x36,0x6d,0x9e,0xb3,0x68,0x4f,0xb8,0x69,0x6f,0x8b,0x90,0xf0,0x90,0x2,0x35,
0x75,0xe8,0x89,0xde,0x8a,0x9d,0xa1,0xc0,0x52,0x31,0xdf,0x4d,0x1c,0xe8,0xa4,0x91,
0x74,0x79,0x43,0x68,0xfc,0x97,0xfb,0xa3,0x20,0xe1,0x70,0x16,0xcb,0x93,0x6d,0xa5,
0xaf,0x1a,0x27,0x2,0x72,0x45,0x10,0xe2,0x83,0x8c,0xab,0x45,0xdd,0xe9,0xe5,0x26,
0xe6,0x67,0x90,0x3b,0x6a,0x11,0x2c,0x28,0x1a,0x40,0x1b,0x39,0x26,0xa2,0xf1,0x83],[
# Record 21: Address = 0x1a80, Length = 128
0x1a80,0x80,
0xa5,0x3f,0xfa,0x77,0x2f,0x0,0x1,0x4b,0xc7,0x7c,0x5e,0x0,0x22,0x5e,0x60,0xd7,
0x3b,0x8a,0xb8,0xf5,0xa4,0x68,0xb0,0x33,0xb0,0x7,0x8,0x61,0x9b,0x66,0x99,0xc5,
0x88,0xc5,0x6f,0xd6,0x63,0xd3,0x80,0x6c,0x1c,0x42,0xa0,0x32,0x25,0xca,0xf2,0xb0,
0x17,0xc3,0xbd,0xff,0x96,0x9,0x70,0x65,0xb6,0x6,0x2d,0xbc,0x99,0x77,0x2c,0xc9,
0x28,0xc4,0x0,0xe3,0xac,0xf4,0xd4,0x68,0xa2,0x21,0x3a,0xae,0x77,0xeb,0x96,0xd6,
0x83,0x2e,0x12,0x72,0x5,0x7,0x74,0xf7,0xbd,0x99,0x87,0xce,0x1e,0x52,0x17,0x4d,
0xd9,0xdc,0xc7,0x5d,0xb5,0xab,0xd2,0xbd,0xc1,0xf8,0x78,0x4f,0xa,0xe0,0x67,0x42,
0x8,0x38,0xb4,0x3,0xef,0xde,0x65,0x1e,0x16,0x8f,0xa1,0xc5,0x7a,0xf9,0x40,0x32],[
# Record 22: Address = 0x1b00, Length = 128
0x1b00,0x80,
0xc0,0xe9,0x1d,0x46,0xe7,0xe6,0x83,0x1a,0xe8,0x81,0x2b,0x9a,0xad,0x9a,0xea,0x4f,
0xb6,0xc2,0x7f,0x1c,0x2e,0x95,0x3e,0x86,0xf9,0x98,0x58,0xbf,0xdc,0xee,0x9d,0x9b,
0x41,0x18,0xa,0xca,0x61,0xd8,0x82,0xba,0xa2,0x1c,0x9f,0x64,0xaf,0xf5,0x34,0x2,
0xc9,0xfb,0x38,0x32,0xf4,0xd1,0xd1,0x37,0x43,0xfb,0x24,0x85,0xb8,0xc9,0x50,0x72,
0x15,0xfa,0x1f,0x30,0x42,0xa2,0x9e,0xa6,0x9,0x69,0xd4,0x76,0x9f,0x86,0x2,0xc3,
0x2d,0x8d,0x28,0x44,0x2e,0x40,0x2b,0xcd,0x1d,0x0,0x46,0xde,0xa8,0x89,0x23,0xe5,
0x76,0x7d,0x5f,0x2,0x9d,0x39,0x88,0xc1,0xa7,0x53,0x2b,0xd3,0x3d,0x26,0x98,0x8c,
0xc8,0x3b,0xc2,0x90,0xe1,0xe1,0xc9,0x3d,0xb9,0x33,0x12,0x8,0xbb,0xe9,0x28,0x75],[
# Record 23: Address = 0x1b80, Length = 128
0x1b80,0x80,
0x97,0x7e,0x33,0x1f,0x32,0xc0,0xcb,0x2b,0x1d,0x5e,0x9b,0x30,0xb,0x52,0x6b,0xb2,
0xa6,0x21,0x25,0xcd,0x21,0x60,0x66,0x4f,0xa0,0x91,0xb6,0x3d,0xc4,0x5d,0x6e,0xbf,
0x26,0x6d,0xa9,0x1c,0xe2,0x0,0xda,0xeb,0xc1,0x61,0xb3,0xb6,0x56,0x5,0xf9,0x9f,
0xd3,0xac,0xb3,0x7,0x8b,0x6a,0xa8,0xbd,0x7f,0x97,0xe1,0x35,0xa5,0x71,0xe4,0x82,
0xee,0x8a,0x3c,0x81,0x66,0xb3,0x54,0xf,0x63,0x88,0x3b,0x4,0xa1,0xa3,0x95,0x77,
0x2d,0x4a,0xa4,0x8b,0xa4,0xaf,0x46,0x7c,0xc3,0x7b,0xdd,0x51,0xd0,0xae,0x34,0x95,
0xf2,0x2a,0x24,0xee,0xb,0x20,0x4d,0xa7,0x4a,0x6d,0xda,0x54,0x68,0xd2,0x90,0x5d,
0x26,0xe8,0x1c,0x56,0x29,0x25,0x34,0x72,0x1e,0x2a,0x22,0x18,0x58,0xd2,0x63,0xd],[
# Record 24: Address = 0x1c00, Length = 128
0x1c00,0x80,
0x98,0x7a,0xaf,0xf0,0xce,0xf4,0x2e,0xd8,0x88,0xf4,0x1a,0x0,0xaa,0x3a,0x92,0x1c,
0xcd,0x39,0x82,0x29,0xb4,0xf6,0x89,0x2b,0xd,0xc6,0x2a,0x16,0x24,0x36,0x9f,0x39,
0x1b,0xb7,0xcf,0x9f,0x8b,0x92,0x4c,0x93,0xa8,0xcb,0x33,0x72,0xd4,0x11,0x39,0x48,
0x27,0x34,0x16,0x32,0xaf,0xc5,0x72,0xf5,0xa3,0x77,0xd9,0xe3,0x68,0x32,0x50,0x8f,
0x71,0x11,0xe0,0xe5,0x9b,0x63,0xc8,0x3c,0x90,0x59,0xdb,0x17,0x5c,0xc8,0xf8,0x85,
0x74,0x7a,0x4,0x40,0xe7,0x4b,0xf3,0xfe,0x54,0x6,0x6a,0xd5,0x21,0xd2,0x3b,0x85,
0x2a,0xc3,0x56,0xed,0xb8,0xca,0x3d,0x31,0xf5,0x3c,0xfe,0x84,0xa9,0x8c,0x53,0x65,
0xc4,0xcf,0x67,0xe0,0x1e,0x32,0xa6,0xb4,0x96,0x57,0x8f,0x81,0xd7,0x8a,0x6e,0xd],[
# Record 25: Address = 0x1c80, Length = 128
0x1c80,0x80,
0x42,0x1f,0x41,0xa0,0xa4,0x75,0x71,0xa0,0x56,0xe4,0x2c,0xa5,0x32,0x11,0x59,0x4a,
0xfa,0x77,0x15,0x4e,0xc5,0x6,0x6e,0x3,0x1c,0xe,0x41,0x45,0x87,0x73,0xdd,0xe9,
0x44,0x30,0xe4,0x22,0xe1,0x6b,0x59,0x11,0x98,0xf,0x68,0x40,0x55,0xe7,0x8a,0x77,
0x5d,0xcf,0xa,0x93,0x1,0x68,0xc0,0xa2,0x26,0x74,0x27,0xb4,0x7a,0x92,0xd,0xaa,
0x6a,0x6,0x1f,0xa5,0x82,0x70,0x4a,0x30,0x21,0x46,0x8b,0xcf,0x39,0x73,0xef,0xd1,
0x7,0x99,0xe,0x21,0x3a,0x1a,0xec,0xa4,0x9b,0xfd,0xd9,0x7f,0x8b,0xc7,0x16,0xf8,
0x4a,0xd6,0x9d,0x5a,0x19,0x60,0x8c,0xaa,0x1d,0x34,0xfe,0x87,0xca,0xf6,0xc9,0xb1,
0x93,0xca,0xa3,0x6f,0x4b,0x3e,0x50,0x7d,0x2,0x15,0xff,0x4d,0xa9,0xd5,0x83,0xf6],[
# Record 26: Address = 0x1d00, Length = 128
0x1d00,0x80,
0x56,0x33,0xac,0xa9,0xfd,0x86,0xae,0xee,0xbf,0x8e,0x44,0x59,0xff,0xe1,0x95,0x7b,
0xf1,0x37,0x61,0x89,0x4d,0x3b,0xfb,0x5e,0xb3,0x94,0x81,0x8e,0xeb,0xb,0x9d,0x72,
0xde,0xbf,0x1e,0xb6,0x34,0xc3,0x3b,0xa7,0x94,0x9,0x4a,0xcd,0x71,0xf7,0xd1,0x70,
0xa7,0x80,0xb,0xd0,0x98,0xd7,0xfe,0x4b,0x8f,0x6c,0x8c,0xa7,0x3d,0xca,0x5f,0x44,
0x41,0xab,0x4c,0xfb,0x63,0xfa,0xba,0xdd,0x2b,0x67,0x2e,0x36,0x34,0x23,0xaf,0x11,
0x56,0x6a,0xe8,0x18,0xb4,0xef,0x6f,0x14,0x11,0xa9,0x10,0x34,0x2,0xfa,0x68,0x1b,
0xce,0xc8,0x7d,0x99,0xd2,0x2f,0xa4,0xc0,0xb8,0x74,0xfb,0x26,0xb,0xef,0xd9,0x43,
0x20,0x36,0x33,0xf0,0xab,0x0,0xfb,0x83,0xb,0xba,0xec,0x4b,0xa2,0x6,0x44,0x91],[
# Record 27: Address = 0x1d80, Length = 128
0x1d80,0x80,
0xa2,0x40,0x30,0x8,0x5f,0xfd,0xed,0xa6,0xe5,0x70,0x2e,0x99,0x60,0x12,0x63,0xfe,
0xb8,0xd4,0x95,0xc1,0x6a,0xf9,0xb,0xb,0xb,0x96,0x43,0xf8,0x4b,0xc,0xfe,0x21,
0x6f,0x3,0x6,0x6,0x7,0xc6,0xb3,0x81,0x73,0x79,0xd2,0x3b,0xe8,0x10,0x69,0xaf,
0x43,0xab,0x1d,0xbf,0x9a,0x63,0x40,0xa7,0x12,0x17,0x6,0xd1,0x8c,0x39,0xbe,0xbc,
0xe0,0x1d,0x9a,0x5b,0x7c,0x7,0xb0,0x69,0xd1,0x12,0x99,0xf7,0x14,0x38,0xb1,0x87,
0x27,0x53,0x3b,0x8a,0x20,0x8e,0x73,0xe2,0xbb,0xc4,0x6c,0x33,0xe,0xb3,0xff,0x6a,
0x68,0xdd,0x12,0x57,0xbe,0x14,0x64,0xa2,0x6c,0xb4,0x80,0xa9,0x59,0xf8,0xb0,0xf1,
0x83,0x91,0x4e,0x88,0x4e,0xd0,0x2,0x61,0x2c,0x32,0x3,0x7c,0xf2,0x2b,0xd4,0x61],[
# Record 28: Address = 0x1e00, Length = 128
0x1e00,0x80,
0x2,0x17,0x26,0x9,0xbd,0x43,0x91,0x15,0x2d,0x83,0xfa,0x84,0xb7,0x73,0x2,0x5b,
0x7b,0xc7,0xe5,0xdb,0x58,0x28,0x1e,0x3b,0xc6,0xf4,0xad,0x70,0x0,0x7,0x57,0xd6,
0x93,0xab,0xa4,0xe1,0x7d,0x2f,0x1f,0x54,0xe9,0x4d,0xfe,0xad,0xa8,0xc5,0x49,0x2f,
0x11,0xe6,0xa8,0x98,0x5f,0xd6,0x67,0xc3,0xa5,0x5d,0x8e,0x70,0x2a,0x48,0x2c,0x2b,
0x6e,0xe,0x7f,0xd8,0xc0,0x7b,0x19,0x6,0x6a,0x26,0xa8,0xc3,0x86,0x1,0xd7,0x3,
0xc8,0xfc,0xe7,0xa3,0xfa,0x3b,0x19,0x4a,0x51,0xae,0x5f,0xaf,0x34,0xb8,0xfb,0x4b,
0x24,0xee,0xb4,0x28,0x1b,0xe,0x22,0xae,0x3e,0x9d,0x3a,0x31,0x44,0x8,0xa6,0x15,
0x75,0xd2,0x4,0x65,0x95,0xc9,0x2e,0x9e,0xa8,0xf7,0x31,0x8d,0xe9,0xb,0x99,0xd8],[
# Record 29: Address = 0x1e80, Length = 128
0x1e80,0x80,
0x9f,0x7e,0x4,0x6b,0xd2,0x66,0xae,0xb9,0xc3,0x66,0x62,0xcb,0xcf,0x8a,0x13,0xa7,
0x20,0x48,0x8,0x43,0x79,0x9c,0xef,0x97,0x17,0xf8,0x82,0x28,0x32,0x60,0xf,0x2,
0xad,0xd0,0x1b,0x90,0xed,0xae,0x9c,0xab,0x50,0xb0,0x2e,0xbf,0x3f,0xb9,0x26,0xb9,
0x24,0xf4,0xb,0xcc,0xe3,0x66,0xb0,0xd9,0x4,0x12,0xc9,0x44,0x34,0xd,0xd0,0xbf,
0x9e,0x87,0xec,0x9d,0xee,0x2a,0xa9,0xd,0x35,0xe,0x36,0xdb,0xa2,0x7b,0x2d,0xd1,
0xf3,0x44,0x87,0x88,0xee,0x53,0x4e,0xc1,0x7f,0x44,0x2,0xcc,0xde,0xb9,0x75,0x16,
0xe0,0xf5,0x35,0x94,0x33,0x8e,0x37,0x5c,0xd0,0xd1,0x2e,0x25,0x68,0x24,0x20,0x29,
0xba,0x4d,0x7d,0x5d,0xaa,0xff,0x41,0x3c,0x12,0x39,0x84,0x4b,0x5a,0xd0,0x51,0x5b],[
# Record 30: Address = 0x1f00, Length = 128
0x1f00,0x80,
0x81,0x6d,0xd2,0xb3,0x64,0xf7,0xa,0x6f,0xbc,0x5c,0xed,0x36,0x51,0x80,0x1c,0x9d,
0xae,0xd,0x6f,0xee,0x4c,0x2d,0xe2,0x5e,0x39,0x51,0x81,0xb2,0xe8,0xe9,0x54,0x9c,
0x1,0x1,0x1,0xcd,0x1,0xdb,0xb7,0xf1,0x4,0xc0,0x9,0x46,0x17,0xa6,0xb6,0x88,
0x65,0x4d,0xbd,0x5b,0x7c,0x80,0x3a,0x53,0xeb,0x60,0x32,0xb5,0xdd,0xd2,0x7b,0xce,
0xc7,0x36,0xcb,0x68,0x24,0x1c,0xba,0xed,0x2a,0x0,0x58,0xa9,0x52,0x3e,0x50,0x50,
0x4d,0x64,0xc4,0xef,0x98,0x1e,0x71,0x2a,0xa0,0x12,0x26,0xc7,0x92,0x57,0xc1,0xaf,
0xa3,0xba,0x64,0xd6,0xd6,0xef,0xc0,0x6f,0x98,0xde,0x4d,0x5f,0xd7,0xb8,0x26,0x79,
0xe6,0x48,0x6d,0x75,0xfc,0x6e,0xb4,0x64,0x3f,0xb3,0xd4,0x66,0x79,0xa,0xc8,0xb2],[
# Record 31: Address = 0x1f80, Length = 128
0x1f80,0x80,
0xaf,0x20,0xaf,0x9c,0x59,0x91,0x11,0xf,0x5a,0xc8,0x40,0x97,0x1d,0x33,0x7b,0xfd,
0x57,0x4b,0x5b,0xde,0x9b,0xc9,0xc,0x59,0x8b,0x3a,0x35,0x20,0x8b,0x9a,0xff,0xb2,
0x73,0xe0,0xa9,0xb1,0xe5,0x7b,0x9d,0xe0,0x9f,0xee,0xdc,0x5,0x1d,0xf8,0x6d,0xa2,
0x9c,0xdb,0x83,0xb1,0x35,0x73,0xc9,0x5f,0x70,0x3e,0x91,0x87,0x39,0x61,0x85,0xcb,
0xa7,0xb1,0x8e,0x5c,0x8e,0x80,0xb2,0x2f,0xb,0x8e,0x5c,0xe3,0xc2,0xf3,0xe0,0x19,
0x27,0x0,0x82,0x87,0x4c,0x5,0x10,0xf2,0x91,0x71,0x6c,0x96,0xab,0x1a,0x6e,0x9e,
0x56,0x58,0xfb,0xde,0xd4,0xbc,0x19,0x2d,0x77,0x37,0x5c,0x37,0xed,0x5f,0xa1,0xb8,
0x97,0x25,0xa5,0x60,0xfc,0x4e,0x50,0x3,0x5e,0xf1,0x4b,0x28,0x30,0x46,0x69,0xc4],[
# Record 32: Address = 0x2000, Length = 128
0x2000,0x80,
0x62,0x91,0xae,0xb9,0xe8,0x78,0xd6,0x46,0x23,0x3e,0x1a,0x20,0x35,0x29,0xae,0xc8,
0x6,0x8b,0x13,0x5b,0x5b,0x8,0xb3,0xd2,0xbf,0xe6,0x17,0xc0,0x73,0x35,0x18,0x56,
0x31,0x5b,0xf4,0xf7,0x5e,0x5b,0xf9,0xa4,0xf7,0x65,0x51,0x76,0x7f,0x35,0x52,0xc5,
0xdd,0x90,0x3b,0xcd,0x3f,0xbf,0xff,0xf3,0xa4,0xa1,0x9f,0xfb,0x21,0xb9,0xaf,0x63,
0xba,0xc9,0xd5,0x41,0xbb,0x12,0xf7,0x46,0x1d,0xa,0xc3,0x8f,0x73,0xcc,0x74,0xac,
0xdb,0xb2,0x54,0xab,0x5a,0x71,0xa6,0x87,0x94,0x35,0x70,0xa1,0xec,0xe6,0xd2,0x6b,
0x5c,0xfb,0xdd,0x65,0x46,0xfe,0xbc,0x0,0x1c,0xcc,0xa9,0x35,0x65,0x35,0xa4,0xbc,
0x67,0xf0,0xfe,0x8b,0x7e,0xda,0x39,0xcb,0x39,0x23,0x1a,0x17,0x34,0xd,0xe2,0xb6],[
# Record 33: Address = 0x2080, Length = 128
0x2080,0x80,
0x9b,0x61,0x80,0x6e,0x23,0x20,0xd8,0xb7,0xab,0x50,0xd5,0x28,0x78,0xec,0x91,0xd7,
0x3c,0x69,0x82,0x2d,0x43,0x37,0x2,0x87,0xff,0xe0,0xf9,0x7c,0x4f,0xb,0x4a,0x5f,
0xcf,0x6d,0x8a,0xa0,0xc2,0x29,0xf,0x71,0x70,0xd,0x73,0x88,0x22,0xf1,0x7,0x5d,
0x21,0x3d,0x69,0x37,0x4b,0x3d,0x37,0x33,0x21,0xd9,0x44,0xa6,0xbd,0xe,0x17,0x4a,
0x5e,0x8b,0x5e,0xbe,0x7c,0x0,0x4b,0xad,0x1c,0xf,0x7d,0x33,0x13,0x8e,0x2e,0xab,
0xa1,0xaa,0x60,0xf9,0xcf,0xa0,0x6d,0x3b,0xa7,0xbb,0xaf,0xb0,0xe4,0xf,0xd4,0x60,
0xea,0x45,0x9d,0x4a,0x43,0xb8,0x89,0xed,0x4e,0x9c,0x28,0x29,0xd6,0x7,0x4a,0x16,
0x87,0xd6,0x65,0x92,0x76,0xe3,0x21,0x86,0x4d,0x2f,0x35,0x13,0x10,0x97,0xe7,0xe0],[
# Record 34: Address = 0x2100, Length = 128
0x2100,0x80,
0x42,0x37,0x20,0xbc,0xb0,0x11,0xcd,0x4e,0xab,0x2e,0xc2,0x9,0x69,0xa2,0x6d,0x68,
0x8c,0x19,0xff,0xa2,0x27,0xaa,0x63,0xc4,0x3f,0x6,0xb8,0xee,0x3c,0x42,0xec,0x1b,
0x2a,0xe0,0x89,0xd4,0xf3,0x3b,0xf7,0x5c,0x50,0xe5,0x7e,0x6c,0xcb,0xa7,0xd6,0x6d,
0x32,0x36,0xcb,0x3,0x9f,0xb,0xec,0x5e,0x9a,0x41,0x76,0xe6,0x1c,0x1,0x8d,0xbb,
0x3b,0x49,0xac,0x83,0x30,0xc3,0xdd,0x44,0x1e,0xfc,0x44,0x35,0xe3,0x54,0x39,0x4b,
0xf3,0xcd,0x5b,0xc5,0x16,0x61,0xd9,0x92,0x81,0x7a,0xb5,0xbc,0x7d,0xe7,0x4,0xc9,
0x61,0x68,0x57,0xf0,0x53,0x73,0x1e,0x58,0xd1,0xc3,0x3b,0xa3,0xbe,0x55,0xa8,0xe1,
0x20,0xa6,0x5a,0x28,0x6d,0x59,0x6b,0x65,0x5d,0x70,0x12,0x33,0x5e,0x27,0xba,0xaa],[
# Record 35: Address = 0x2180, Length = 128
0x2180,0x80,
0xe1,0xe8,0x65,0xa7,0x49,0x7d,0x2e,0x16,0xbe,0xaa,0x58,0x5d,0x5f,0xee,0xbd,0xdb,
0xa5,0x75,0x2f,0x7b,0xfa,0xd1,0x51,0x4c,0x86,0x69,0x53,0x8d,0x67,0xbf,0xff,0x48,
0xd7,0x3f,0xdf,0x12,0x3f,0xc5,0x96,0x5,0xa0,0xc2,0x62,0xc6,0x6a,0x9c,0xb1,0x9a,
0xc,0xa,0x79,0x8,0x66,0xbc,0xf0,0x6d,0x3,0xa6,0xff,0xe,0xe8,0x3e,0x63,0x91,
0x37,0x66,0x26,0xdd,0xe8,0xaa,0x92,0x43,0xee,0xbb,0xbc,0x9,0x94,0xc,0x62,0x55,
0x50,0x22,0x43,0x75,0x91,0x3,0xfa,0xf0,0xd0,0xf0,0xa1,0x7f,0x7b,0xef,0x71,0x20,
0x15,0xb7,0xe7,0x1f,0xb9,0xab,0x46,0x9f,0x47,0x21,0xd3,0x33,0xf1,0x17,0xba,0xaa,
0xf2,0xcf,0x33,0xee,0xfa,0x11,0x36,0xd6,0xd,0xf7,0xea,0x13,0xc4,0xd3,0xc3,0xa0],[
# Record 36: Address = 0x2200, Length = 128
0x2200,0x80,
0x36,0xda,0x62,0x39,0xc3,0x99,0x77,0x2d,0x83,0x9b,0x64,0xbf,0xc5,0x61,0xcd,0xac,
0x61,0xd,0xe4,0x26,0x55,0xf9,0x9f,0xd7,0x93,0xbc,0xd9,0x2d,0x6d,0x6e,0x20,0xd3,
0x6a,0x10,0x46,0x16,0xdf,0xd7,0xcd,0xbf,0x37,0x79,0x5b,0xa2,0x2,0xd7,0x81,0x7e,
0x3,0xd6,0x9e,0xcd,0x82,0x1e,0x96,0x11,0xe7,0xfe,0xda,0x97,0x29,0xfc,0x61,0xeb,
0x3e,0x28,0xc8,0x7e,0x5a,0xd2,0xcc,0x96,0xd6,0x87,0x3,0xfd,0xb7,0x44,0xac,0xed,
0xbd,0xe4,0xed,0x9,0xa7,0x2,0x6e,0xd5,0x29,0x31,0xe6,0x84,0xa,0xd3,0xf3,0x4c,
0x2a,0x4e,0xa7,0x4c,0x2,0x50,0x3e,0x74,0xa3,0x3b,0xda,0x5d,0x8c,0x46,0xe8,0x95,
0x67,0x39,0xc2,0xf7,0x16,0x9f,0x6f,0xea,0xed,0xeb,0x8d,0x1b,0xf,0xa,0x81,0xea],[
# Record 37: Address = 0x2280, Length = 128
0x2280,0x80,
0xa6,0xe7,0x96,0xfc,0xe6,0xec,0x5b,0x92,0x8d,0x6,0x51,0xcd,0x1d,0x48,0x87,0xd,
0x91,0x4,0xb4,0x16,0xd2,0x1,0xdc,0x11,0x24,0xb4,0xe7,0x28,0x96,0x18,0xd9,0x34,
0x34,0xbd,0xd8,0xd5,0xe5,0xbe,0x5c,0x8d,0x9c,0xbf,0x87,0xde,0x34,0x49,0x2a,0x2e,
0xd2,0x63,0xe9,0x77,0xc9,0x8f,0xa7,0x57,0x69,0xe3,0x61,0xf1,0x86,0xc0,0xda,0x61,
0x4c,0xc,0x62,0x94,0x22,0xb4,0xf8,0x4f,0x9d,0xa6,0x48,0xc4,0xc1,0x5e,0xb9,0xee,
0x2b,0xbd,0x87,0xda,0xa2,0x5,0xa5,0x2b,0x1a,0xa2,0xf9,0x6c,0xbc,0xc,0xbe,0x89,
0x8d,0xee,0x14,0x77,0x73,0x67,0x46,0xb0,0x1e,0xb3,0xe5,0x5,0xd5,0x79,0x1c,0x83,
0xc3,0x50,0x64,0x19,0xf,0x8a,0xee,0x8a,0x18,0x25,0x93,0xac,0x8b,0x1a,0xf2,0x4f],[
# Record 38: Address = 0x2300, Length = 128
0x2300,0x80,
0x7a,0x29,0x99,0x8f,0x7a,0xa5,0xba,0x17,0x1e,0x73,0xf8,0xae,0x6d,0xc,0x2c,0x9a,
0xb0,0xa2,0x74,0x6a,0x30,0x97,0xe4,0x44,0xc5,0x53,0x6b,0xe,0x2b,0x4a,0xf0,0x4a,
0x31,0x36,0x9b,0x1b,0x79,0x2a,0x7d,0xc8,0xce,0x61,0x80,0xfc,0xca,0x9c,0x77,0xc5,
0x61,0x85,0x21,0x19,0x14,0x4b,0xaf,0x7c,0x9d,0x52,0x40,0x1e,0xf9,0xfe,0xa8,0xed,
0xd7,0x9c,0xe6,0xdb,0xe2,0xfe,0x2f,0x5f,0xda,0x17,0x9e,0x33,0xd9,0xcc,0xb3,0x5b,
0x98,0xaf,0xc2,0xec,0x91,0xc2,0xe5,0xed,0x93,0xad,0xcd,0x74,0xb0,0x94,0x9d,0xff,
0xf3,0xf3,0xfc,0x64,0x27,0xc1,0x40,0xa7,0x55,0x64,0xc6,0xc7,0xd,0xf5,0xbb,0xb,
0x75,0x2d,0x55,0x73,0x4d,0x9a,0x79,0x89,0x27,0x2b,0x4f,0x36,0x73,0xdb,0x4f,0xb5],[
# Record 39: Address = 0x2380, Length = 128
0x2380,0x80,
0x47,0xd4,0xb1,0xbc,0x95,0x81,0xfd,0x66,0x96,0x7d,0xea,0xc0,0x5f,0xb5,0xf1,0xd0,
0x5b,0x7,0xcb,0xa7,0x61,0x32,0x82,0x39,0x5,0x84,0xd4,0xdb,0x43,0x17,0x81,0xd9,
0xf0,0x5b,0xec,0x31,0x13,0x94,0xe8,0x3d,0x0,0xc9,0xa5,0x25,0x55,0xa5,0x2e,0x3e,
0x5d,0xc6,0x0,0xe0,0xac,0x8d,0xbf,0x6f,0x89,0x9e,0x82,0x59,0xc2,0xdd,0x5d,0xde,
0xff,0xc,0xf3,0x16,0xc0,0x2e,0x89,0xed,0x7e,0x3f,0x6f,0x40,0x87,0xe5,0x7c,0x1,
0xdf,0x92,0xe5,0x32,0x12,0x1d,0x54,0x51,0x22,0x56,0x4b,0x2d,0xb2,0x9,0x99,0xd9,
0x2,0xb8,0xf0,0x2b,0x84,0x2b,0x10,0xe6,0x2e,0xa4,0x47,0x11,0x46,0x4d,0x78,0xc6,
0xa8,0x37,0x8f,0xdb,0x4c,0xc6,0x53,0x89,0xa7,0x1e,0xd,0xdf,0xef,0x1d,0x4b,0x5],[
# Record 40: Address = 0x2400, Length = 128
0x2400,0x80,
0x1d,0x3f,0x8b,0x97,0xee,0xa8,0xad,0x6,0x14,0x77,0x7e,0x9b,0x4,0xa7,0xb3,0x1,
0x46,0xeb,0xa9,0xdf,0x51,0x46,0xa2,0x98,0xbf,0x3a,0x63,0x84,0x37,0x73,0xa1,0x5,
0x16,0x4,0x1e,0x30,0xee,0x18,0x95,0xe9,0x99,0xef,0x13,0x95,0xad,0xc2,0xb4,0x30,
0xf1,0x6b,0xe,0xac,0xa8,0x35,0x90,0xe1,0xe5,0x9d,0x93,0x7,0xcc,0xb1,0xf0,0x48,
0x7b,0x12,0x2d,0x39,0x27,0xdc,0x11,0x4c,0x75,0x3d,0x17,0xf,0x54,0xc7,0x59,0xf2,
0x96,0xa2,0x0,0xa5,0x59,0x85,0x53,0x91,0xb8,0x86,0xdd,0x28,0x98,0x6,0x53,0xfd,
0x63,0xe5,0x5a,0xc6,0xff,0x12,0x43,0x0,0xe5,0x57,0x42,0xd6,0xf8,0x5c,0x4c,0x68,
0xe2,0x6,0x1d,0x6,0xb8,0x72,0x23,0x39,0x79,0x1d,0x89,0xb7,0x25,0xdb,0xeb,0x46],[
# Record 41: Address = 0x2480, Length = 128
0x2480,0x80,
0x4f,0xc6,0x7e,0x3d,0x60,0x22,0xd3,0xcb,0x7d,0x30,0x44,0x37,0x8d,0xf1,0x51,0xea,
0x55,0x72,0xac,0xa3,0x90,0x80,0xf6,0x1b,0x5c,0x78,0xdb,0xe0,0xfe,0xfd,0x8d,0x6a,
0xd2,0xd2,0x7,0xa8,0xc8,0xac,0xb5,0x72,0x9d,0x26,0x42,0xe5,0x19,0x7f,0xef,0x58,
0xdb,0xfa,0x69,0xd3,0xf7,0xab,0x44,0x60,0x5e,0xec,0x2e,0x32,0x5b,0x93,0xb,0xb0,
0x7c,0xfc,0xa,0xb4,0x99,0x19,0xfa,0x96,0x62,0x7c,0x4d,0x39,0x7f,0x56,0xd4,0x67,
0x25,0x79,0x8e,0xfe,0x0,0xdf,0x56,0x93,0x1d,0x7,0x73,0x13,0x40,0xad,0x3b,0xd1,
0xaf,0xea,0xd5,0xf1,0x9d,0xa1,0x2d,0xde,0x8c,0xa3,0xe,0xe3,0x53,0xa0,0x8f,0xb1,
0x9d,0xe7,0x1d,0x2b,0xfa,0x5a,0x9e,0x91,0xb7,0x13,0xa8,0x6f,0xc4,0x4a,0xf5,0x17],[
# Record 42: Address = 0x2500, Length = 128
0x2500,0x80,
0xf2,0x5a,0x10,0x9d,0x53,0x54,0x45,0x7a,0xa2,0xf0,0xc3,0xa0,0xfd,0xa1,0xf8,0x3d,
0xd,0xe6,0x24,0x53,0x87,0x7d,0xab,0x81,0x4b,0x5a,0xce,0xd9,0xe1,0xdd,0x10,0x61,
0xb2,0xae,0x4b,0xa8,0xf4,0x6e,0xb8,0x71,0xdd,0x2,0xda,0x7b,0xd1,0xc2,0x7c,0x6b,
0x6e,0xbd,0xdc,0x2d,0xda,0x16,0xee,0xcc,0xc5,0x6d,0xce,0x6e,0x7d,0xb1,0xf9,0xc3,
0x8e,0x83,0x7c,0xd9,0xf5,0xbc,0x44,0xc3,0x7d,0xa,0x9e,0x3b,0xea,0xd0,0x40,0x41,
0xa2,0xef,0x80,0x1,0x55,0xf1,0xaf,0xe,0x77,0xae,0xa4,0x9d,0x7f,0xe9,0xc4,0xff,
0xd3,0x16,0x4e,0xc1,0x53,0x5e,0x47,0x8b,0x4c,0x4f,0x19,0x72,0x2a,0x8a,0x8c,0x7c,
0xb7,0x87,0xb2,0x22,0xfa,0xd,0x51,0x1b,0xb9,0xb9,0x33,0xfb,0xcd,0xbe,0x7a,0x34],[
# Record 43: Address = 0x2580, Length = 128
0x2580,0x80,
0x9b,0x7,0xa6,0xbe,0xe2,0x91,0xe5,0xe6,0xdf,0x15,0x24,0xcc,0x7d,0xff,0xc8,0x98,
0x1,0xd6,0x34,0x5b,0x61,0xa5,0xd1,0xaa,0x92,0x98,0x98,0xbc,0x39,0x15,0xca,0xdc,
0x7b,0x40,0xa,0xb3,0x8b,0x9d,0x80,0xe3,0x2f,0x45,0x17,0xf2,0xdf,0x54,0x75,0xd6,
0xfd,0xca,0x4b,0xa1,0x22,0xdf,0x5e,0x34,0x5e,0x42,0x6,0xf6,0xcb,0x2,0xa2,0x54,
0xf3,0x9,0x41,0xb5,0xa9,0x28,0xd4,0x26,0x1,0xa3,0x2b,0x7d,0x47,0xdb,0x25,0xbd,
0xc1,0xac,0x42,0x6d,0xb9,0x38,0x10,0x45,0xa5,0xe,0xf4,0x87,0xfa,0x4d,0x30,0xbe,
0xcf,0xe6,0xc5,0x24,0x51,0x51,0x20,0x17,0xf,0x7f,0x19,0x55,0xee,0xa,0x6,0x18,
0xdd,0x2d,0x8c,0x7d,0x98,0xdd,0xba,0x6f,0xf1,0x80,0x6a,0xd0,0xfc,0xe4,0xd,0xe3],[
# Record 44: Address = 0x2600, Length = 128
0x2600,0x80,
0xe9,0x11,0x4c,0x74,0xa0,0xca,0xa6,0x20,0x77,0xf3,0xba,0x69,0x3f,0xbf,0xbc,0xe6,
0x8e,0xf6,0x3e,0xc3,0x65,0x71,0x18,0x31,0x69,0xd8,0xa6,0x65,0x96,0x10,0xba,0x69,
0xd4,0xa2,0xdd,0x2f,0xf4,0x5,0xb4,0xdd,0x30,0x70,0xbe,0x41,0x4a,0xa8,0x7a,0xcd,
0xca,0xc8,0x87,0xd3,0x73,0x44,0x25,0xc9,0xd,0xde,0xe1,0x2d,0x9f,0xbc,0xc3,0xd1,
0xd,0x3f,0x43,0x47,0xc4,0xf,0xc0,0x2c,0x85,0x48,0x73,0x7d,0xb4,0x6d,0xae,0xb9,
0x1d,0x35,0x16,0x32,0xbe,0x60,0x40,0x1f,0xd0,0x7f,0xf3,0xcd,0x52,0xc2,0xaa,0xba,
0x2,0x9d,0xe2,0x4e,0x5f,0x2e,0x3a,0x4f,0x47,0x3a,0x3c,0x2d,0x6e,0x82,0x7b,0xb8,
0x92,0x54,0x74,0x14,0x5,0x81,0xa8,0x8d,0x3f,0x8c,0x86,0x37,0x62,0x9b,0x34,0x24],[
# Record 45: Address = 0x2680, Length = 128
0x2680,0x80,
0xe0,0xf1,0xfd,0xb3,0x85,0x36,0x49,0x2b,0x3d,0xfb,0xc2,0xe5,0xa4,0xa5,0x10,0x96,
0x86,0xda,0x95,0x65,0x40,0xff,0xd4,0x1e,0xb8,0xba,0x72,0x35,0xd3,0x7,0x36,0x89,
0x50,0x84,0x31,0x2b,0x1f,0xb5,0x41,0xba,0xea,0xcc,0x69,0x96,0x56,0x9a,0x2e,0x45,
0xc1,0xd7,0x3e,0x3b,0xde,0x2e,0xa2,0x3f,0x6,0x77,0xc,0x3d,0xfd,0x19,0x68,0xba,
0x3d,0xd,0x6b,0x6c,0x2b,0xe2,0xd9,0x96,0xf2,0xf5,0xcd,0xfc,0xdc,0x50,0xa5,0x15,
0xfc,0x24,0x64,0xf7,0xed,0xaf,0xaa,0xdd,0x46,0xaa,0xc1,0x41,0x1d,0xa4,0x42,0xe6,
0x14,0x6b,0x9c,0xed,0x93,0x46,0x2d,0x9,0x4d,0xac,0xbe,0xfe,0x64,0xad,0xe,0xe3,
0xca,0x5b,0x3d,0x46,0xf7,0x23,0x3a,0x48,0xf0,0x24,0x6e,0x0,0x6e,0xe8,0x8,0xa],[
# Record 46: Address = 0x2700, Length = 128
0x2700,0x80,
0xa5,0xed,0x58,0x29,0xd2,0xf4,0xeb,0x96,0x27,0x51,0x61,0x0,0x26,0xb1,0x64,0xc7,
0x75,0xfd,0x5,0xb6,0x3b,0xd7,0xd5,0x48,0x7d,0x42,0x30,0xa,0xaa,0x4a,0x44,0x87,
0xa7,0xd8,0x2d,0x7a,0x6a,0xc8,0x8,0x51,0x34,0x24,0x38,0x5e,0xd7,0x9,0x95,0x25,
0x79,0x16,0x95,0x3d,0xac,0xa9,0xb9,0x6e,0x40,0x17,0xb1,0xc7,0xd9,0xf8,0xd,0xeb,
0xfa,0x72,0x3,0xac,0xc2,0x79,0xce,0x98,0x82,0x16,0x47,0xeb,0x6b,0x3,0x29,0x12,
0xae,0x78,0xcd,0xe,0x1d,0x6e,0xd6,0x66,0xc8,0x8a,0x66,0xc1,0xc4,0xa2,0x5a,0x83,
0x8c,0x4d,0xbd,0x8b,0xfb,0x4e,0xaf,0x27,0xa1,0x94,0x1f,0xd8,0xa7,0xfa,0xe0,0x62,
0xbd,0xf0,0xa,0xda,0x56,0xd6,0xc5,0x83,0x5f,0xc1,0xec,0x6e,0x7c,0xc3,0x6d,0xe1],[
# Record 47: Address = 0x2780, Length = 128
0x2780,0x80,
0xca,0xf,0x12,0x4b,0xa7,0x2c,0x1e,0x50,0x1d,0xc2,0x60,0x92,0x8,0x16,0x21,0xfb,
0xf,0x45,0xf0,0xd4,0x82,0x92,0x2f,0xde,0xf,0xa3,0xce,0xd8,0x9f,0x52,0x97,0x8e,
0x19,0xc9,0x77,0x50,0x9a,0x62,0x3d,0x45,0xe2,0x15,0x45,0xbc,0xe8,0x48,0xef,0xfb,
0xed,0x94,0x85,0x71,0x2d,0x54,0xd2,0xf,0xe2,0xe3,0xd8,0x6a,0x4c,0x3c,0x1d,0xa6,
0x5f,0xfc,0x8,0x59,0xf7,0x5a,0x2d,0xb3,0x4b,0x32,0xb8,0x17,0x63,0x28,0x68,0xbb,
0x1d,0x66,0x73,0x41,0x54,0xcf,0x84,0xbd,0x70,0xaf,0xa6,0x3,0x80,0x4,0x26,0xde,
0xd6,0xf2,0x99,0x25,0x73,0x37,0xa9,0x85,0xe1,0xb2,0x19,0x89,0xbd,0x13,0xdb,0xce,
0xb4,0x7,0xe6,0xad,0x4c,0x8b,0x33,0x67,0xbb,0x9a,0x4c,0xef,0x93,0x50,0xbd,0x5b],[
# Record 48: Address = 0x2800, Length = 128
0x2800,0x80,
0xa2,0xb5,0xe6,0xc0,0x34,0x68,0x2e,0x98,0x67,0x7,0xb,0x45,0x86,0x8,0x10,0x60,
0xe5,0xdf,0x82,0x9a,0x88,0x30,0xe3,0x4e,0xcd,0x48,0x3d,0xf8,0x66,0x8a,0xa0,0xda,
0xfd,0x2e,0x33,0xb8,0x1,0xb4,0xbf,0x9c,0x7c,0x17,0xa6,0x35,0x3d,0x86,0x41,0xbc,
0x18,0x65,0x0,0xb1,0x4e,0xb7,0xd4,0xc6,0x6d,0xbb,0xd3,0x43,0xaf,0x35,0xd,0x1a,
0x51,0x34,0x6b,0x75,0x13,0x44,0xdc,0xbe,0x4a,0x59,0x58,0x35,0xa0,0xd8,0x19,0xb5,
0xe,0x66,0x78,0x87,0xa4,0xa,0xe0,0xe,0xa5,0xfe,0xf6,0xe2,0x21,0xa6,0xcd,0xe3,
0x5,0xf2,0x6d,0x8d,0x26,0x54,0xed,0xa,0xac,0x16,0x58,0x2a,0x62,0xe7,0x5b,0x62,
0x77,0x8c,0x47,0xe1,0xfa,0x84,0x96,0xa7,0x68,0x83,0xd,0x46,0x34,0xad,0xfd,0xef],[
# Record 49: Address = 0x2880, Length = 128
0x2880,0x80,
0x9f,0x2d,0x53,0x58,0x40,0x7a,0x5c,0xcc,0x5e,0xeb,0x37,0xa9,0x62,0xc9,0x50,0x76,
0xe6,0x5,0xb0,0x9b,0x9b,0xf,0xfe,0x74,0x22,0xd,0x42,0xc1,0x1f,0xa3,0x73,0x35,
0xaa,0x2a,0x77,0xc3,0xd3,0x58,0x48,0x38,0xdf,0x11,0xd3,0x23,0x1e,0x16,0xe7,0x9c,
0x5e,0xa8,0x72,0x62,0x7e,0xf3,0x6e,0xed,0xea,0xa9,0x56,0xc6,0x1,0x27,0x7d,0x36,
0xbe,0x37,0x18,0x32,0x3d,0xaf,0xac,0xdf,0xc4,0xa,0x70,0x1b,0xdd,0xe9,0x47,0xec,
0xbb,0x4f,0x4a,0x94,0x54,0x76,0x70,0xce,0x55,0xa,0xc,0xc9,0x59,0x39,0x1a,0x9b,
0x6f,0x9,0xcc,0x51,0x9d,0x59,0x7c,0x73,0xf7,0xea,0xe2,0x12,0xae,0x3f,0x35,0x8a,
0x6d,0x38,0x19,0xe3,0x1c,0x36,0x10,0xf2,0xdb,0x66,0x39,0x1c,0x78,0x24,0x9c,0xeb],[
# Record 50: Address = 0x2900, Length = 128
0x2900,0x80,
0x6d,0x24,0x72,0x35,0xf8,0xe7,0xbb,0x61,0x83,0xdf,0xf3,0x58,0xd5,0x58,0x4d,0xc8,
0xf4,0x41,0x8c,0xd4,0x82,0xd4,0x3a,0x41,0x38,0x41,0xe2,0xec,0x57,0x15,0x3a,0x97,
0x7f,0x65,0xe2,0x6c,0xf5,0x82,0x1f,0x9,0x74,0xa3,0xd9,0xdd,0x7e,0x6,0x5d,0xd2,
0x7b,0x67,0xdc,0xa7,0xd8,0x33,0x62,0xa,0x22,0x7f,0x87,0x44,0x1c,0x9,0x19,0xcb,
0x7b,0xa8,0x5a,0x6b,0xaf,0x8e,0xce,0x88,0xc8,0x4f,0x2d,0x33,0xbf,0xbc,0x8c,0x5a,
0x53,0x47,0x41,0xe0,0xba,0x4a,0x56,0xde,0x3f,0x58,0x65,0x30,0x26,0x0,0x92,0xb6,
0xf6,0x9,0x5d,0x79,0x66,0xf4,0xe0,0xd0,0x22,0x55,0xef,0xf8,0x29,0x14,0x66,0xda,
0x86,0xa3,0xf5,0x6f,0x46,0x74,0x72,0x7c,0x36,0x26,0xc7,0xaf,0xef,0x8f,0x16,0xb2],[
# Record 51: Address = 0x2980, Length = 128
0x2980,0x80,
0x9e,0x8f,0x30,0x36,0x59,0x4e,0x6e,0x78,0xca,0x16,0x4b,0x3c,0x22,0xdd,0x1f,0xb2,
0x34,0x16,0xd7,0x8a,0x10,0x51,0x2b,0xa5,0x32,0x6a,0x49,0x32,0xc5,0x6,0x9f,0x5c,
0xee,0x8e,0x99,0x35,0xd5,0x25,0x14,0xb3,0xa3,0x47,0x21,0x37,0xe6,0x38,0x19,0x88,
0xb8,0x98,0x76,0x9f,0xe8,0x27,0x88,0xf4,0x63,0x18,0xa,0xed,0xae,0x75,0x2e,0x5f,
0x68,0x2f,0x6e,0x0,0x92,0x1e,0x59,0x3f,0xe,0xf7,0x11,0xe7,0x6d,0x18,0x42,0xbc,
0xb,0x4c,0x75,0x3b,0xd9,0x5,0x31,0xfd,0x67,0x50,0x3a,0xe8,0xd6,0x1c,0x7,0x24,
0x13,0xd,0x90,0xe5,0xe0,0x4f,0xf1,0x96,0x16,0x78,0x66,0x36,0x32,0xd8,0x93,0x48,
0x84,0xa4,0x53,0xcb,0xf4,0x76,0x76,0x8d,0xb9,0xdc,0x5c,0xc4,0x87,0x4c,0x80,0xe1],[
# Record 52: Address = 0x2a00, Length = 128
0x2a00,0x80,
0x46,0xbe,0x6c,0x8b,0x6a,0x9e,0x54,0x19,0x4e,0x59,0x80,0xeb,0x5b,0x30,0x7c,0x44,
0x53,0x3e,0x52,0x53,0x51,0x4,0x92,0x33,0x97,0x59,0xd1,0xbc,0x94,0xca,0x28,0xb5,
0x2,0x53,0x67,0xd1,0x60,0xaa,0x2a,0xfc,0x14,0x15,0x4,0x5f,0x61,0x88,0xc9,0xf8,
0x6a,0x36,0xe8,0x1b,0xfa,0x87,0x5f,0xf6,0x5f,0xd8,0xa1,0x7c,0x49,0x4e,0x84,0x29,
0xd,0x13,0xe2,0x76,0x7,0x28,0x95,0x19,0x7d,0x84,0x93,0xbb,0xb0,0xce,0x16,0x48,
0xb4,0x3a,0x8c,0xd6,0xcd,0x50,0x60,0x22,0xf1,0x4d,0x1c,0xf4,0xd8,0x53,0x55,0xf2,
0x4c,0xb5,0x8,0xe6,0x5,0xd5,0x82,0xb2,0x6c,0x2a,0xfa,0x67,0x5f,0xa2,0x2c,0x9c,
0x42,0x27,0x90,0x2f,0xb3,0x65,0x76,0xa4,0xca,0x74,0x1b,0x34,0x45,0x37,0x49,0x59],[
# Record 53: Address = 0x2a80, Length = 128
0x2a80,0x80,
0x9,0x90,0x8,0x44,0xf,0xde,0x15,0x72,0x79,0xa6,0xb7,0x8,0xb5,0xf,0xc1,0xbd,
0xd1,0x95,0xa0,0xde,0xa9,0xc9,0x25,0x37,0x8f,0x90,0x92,0x19,0x2e,0x84,0xfa,0x70,
0x44,0x3d,0x91,0xc1,0xe,0xcd,0xe3,0x6c,0xba,0x65,0x7e,0x93,0x1f,0x8d,0xba,0xf4,
0xf7,0x1f,0x81,0x4b,0x36,0x27,0x69,0x9d,0x29,0xbb,0x21,0x79,0x92,0x7e,0x6b,0x23,
0x77,0x62,0x1a,0x35,0x31,0x9d,0x68,0x7b,0x51,0xbd,0x2a,0x3c,0x2,0xce,0xe6,0x51,
0x9b,0x5e,0xda,0xba,0xf1,0xf3,0x98,0x5e,0x9c,0x16,0xdc,0xf6,0xa5,0x39,0xe0,0x6,
0x19,0xf8,0x7b,0x55,0xc4,0x77,0xc7,0x5e,0xb6,0xdd,0x5e,0x1b,0x59,0xe1,0x12,0xd4,
0x8e,0xd8,0xf0,0xf1,0x3c,0xfa,0x51,0xc5,0x43,0xf0,0x96,0x9e,0xb6,0x7a,0x76,0x84],[
# Record 54: Address = 0x2b00, Length = 128
0x2b00,0x80,
0x54,0x39,0x13,0x3e,0xc6,0xea,0xec,0x56,0xa6,0x1d,0x6e,0x95,0x0,0x9,0xc5,0xbd,
0x32,0x60,0x8f,0xa6,0xed,0x87,0x31,0xd8,0xa0,0x69,0xc5,0xa7,0xd4,0x92,0x51,0x93,
0x33,0xa5,0x1d,0xc8,0x9c,0xe7,0xc,0xb4,0x38,0xca,0x60,0x11,0xa2,0x1,0x77,0x46,
0x6e,0x79,0x90,0x2e,0xe5,0x6a,0x16,0x6e,0xaf,0x18,0x8b,0xc5,0xde,0x76,0xf6,0xb6,
0x9e,0x5c,0x5b,0x78,0x2c,0xcb,0x2,0xaf,0xce,0xa8,0x34,0x78,0xda,0xd5,0x7d,0xd8,
0x56,0x4,0xf4,0x3a,0xce,0xcb,0x67,0x79,0x70,0x11,0x52,0x5f,0x83,0x66,0x66,0x19,
0x2f,0x69,0x7c,0x3b,0xfa,0xe2,0x76,0xb,0x1e,0xf1,0x43,0xda,0x6f,0x9d,0x4c,0x9a,
0x79,0x19,0xb,0xa7,0x73,0xc0,0x7b,0x94,0xcc,0x44,0x75,0x60,0xa3,0x3,0xcb,0x72],[
# Record 55: Address = 0x2b80, Length = 128
0x2b80,0x80,
0xd,0x59,0x1d,0x7b,0x61,0xe0,0xb8,0xe4,0x6,0x70,0x98,0xd8,0xb1,0xdf,0xa2,0xab,
0x1,0xd5,0xb2,0xc8,0x39,0xe9,0x13,0xc0,0xc8,0xb0,0xc4,0xf8,0x5c,0x2d,0x20,0x90,
0xe9,0x83,0xd3,0xaf,0x82,0x22,0x2,0x38,0x4d,0x96,0xfc,0x2e,0x40,0x1f,0xe4,0xf0,
0x10,0x9d,0xa,0xf3,0xc0,0x29,0x52,0xc7,0x97,0xc9,0xf7,0x3a,0x37,0x8d,0xce,0xb7,
0x67,0x81,0xc1,0x59,0xe9,0xcd,0x70,0x78,0x0,0xda,0x11,0x8a,0x3,0x51,0xf4,0x53,
0xfb,0xf4,0xe4,0xc8,0x40,0xa6,0xf5,0xa6,0xfe,0x9a,0xbb,0xe4,0x2f,0x3,0x6,0x18,
0xcb,0xad,0xbe,0x71,0x3d,0xf9,0x1c,0xf0,0x5f,0x48,0xa3,0x43,0x17,0x22,0xd9,0x66,
0xaa,0x43,0xa6,0x96,0x16,0xfa,0x9c,0xb,0x84,0xfc,0x9b,0x7a,0xf7,0x6a,0x81,0xa4],[
# Record 56: Address = 0x2c00, Length = 128
0x2c00,0x80,
0x91,0xc,0xbf,0xe7,0xd8,0x4b,0x3b,0x63,0xc6,0xfb,0xd3,0x4a,0xb5,0x6d,0x4b,0x8d,
0xc9,0x28,0xc0,0x5b,0x6,0x2c,0xd4,0x2d,0x60,0x91,0x25,0xf1,0x4,0x28,0x69,0x65,
0xa7,0x40,0xa4,0xa4,0x2a,0x3e,0x6d,0x21,0x8a,0x98,0xf6,0x7f,0xe8,0x4c,0xaf,0x6e,
0x95,0x51,0x20,0xd5,0x15,0xf,0x61,0x4e,0xcc,0x32,0x2b,0x8c,0x2e,0x17,0xb2,0xe1,
0xb7,0x5d,0x97,0x56,0x15,0x76,0xc8,0xfd,0xe8,0x86,0x2c,0x51,0x1c,0x32,0xf1,0x55,
0x6c,0xc,0x0,0x7a,0x29,0xa8,0xfd,0x8d,0x3c,0xaa,0x45,0x4d,0x4e,0x49,0x61,0x7c,
0x3c,0x97,0xbd,0xf9,0x22,0xe,0x23,0xf2,0x20,0x43,0x58,0x5b,0x3f,0xec,0xc7,0x5e,
0xd3,0x2e,0x58,0x49,0xac,0x80,0xcb,0xba,0xde,0x61,0x2c,0x26,0xf8,0xf7,0x8,0x1],[
# Record 57: Address = 0x2c80, Length = 128
0x2c80,0x80,
0x7a,0xed,0x73,0xfc,0xd6,0x12,0x57,0x77,0xf8,0xcc,0x4f,0x33,0xa2,0xcf,0x4c,0x34,
0xd6,0xef,0x10,0x23,0xa1,0x8,0x3f,0x26,0xb7,0xeb,0x1f,0x76,0x73,0xba,0x9,0x41,
0xd1,0xd6,0xfc,0xb6,0x52,0xb6,0x23,0x59,0x8a,0xb9,0x40,0x5d,0x63,0xf8,0xaf,0x82,
0x48,0x2,0x7c,0xf8,0x73,0x99,0x45,0x46,0xa9,0x78,0xc6,0x68,0x5e,0x59,0x9f,0x67,
0xf9,0x3a,0x7b,0x72,0xe1,0xa6,0x32,0x6,0x11,0xd2,0x36,0x8e,0x57,0xff,0xbd,0x2,
0x34,0x7d,0x95,0xa2,0x91,0xcb,0x85,0x87,0xd3,0xe6,0x25,0x38,0x32,0x82,0xb1,0x87,
0x59,0xee,0x5f,0x26,0x4,0x99,0xcb,0x4d,0xfc,0xcc,0x94,0x2f,0x69,0xf5,0x9a,0xbd,
0x85,0xb1,0x97,0x59,0xf,0xbf,0x88,0x42,0xf8,0x1d,0x8f,0xcd,0xe3,0xc1,0xa9,0xed],[
# Record 58: Address = 0x2d00, Length = 128
0x2d00,0x80,
0xab,0x78,0x6,0xc8,0x1a,0x6f,0xd0,0xbd,0xa6,0xb3,0x60,0xfa,0xec,0x94,0x16,0x53,
0xef,0x9f,0xc,0xe8,0x7c,0x4a,0x88,0xbe,0x59,0xda,0x2,0x2f,0x18,0x72,0x49,0xc5,
0xc,0xbc,0xa0,0xdc,0x16,0x36,0x3c,0x71,0x3c,0x4c,0xf5,0xe3,0x3c,0x4f,0x29,0x17,
0x34,0xd3,0xf5,0x7f,0x94,0x7a,0xaf,0x76,0x6d,0x47,0x3b,0x91,0x92,0xf6,0x7e,0xdc,
0xd9,0x20,0x66,0x1f,0xc8,0x78,0x2c,0x1c,0xad,0xb4,0x8a,0xe0,0xdd,0x8c,0x28,0xb5,
0x17,0x42,0x89,0xe5,0x69,0xf3,0x41,0xcf,0x23,0xd5,0x3e,0xcb,0xe6,0x71,0xe,0x24,
0xd3,0xc1,0x6c,0x9b,0x98,0xa8,0x50,0xed,0x57,0xda,0x33,0x3c,0xa,0xc6,0x8e,0x7e,
0xa2,0x54,0xb4,0xb1,0xd0,0x51,0xc1,0x32,0xfa,0xc3,0xf5,0x52,0xc0,0xd8,0x67,0xca],[
# Record 59: Address | |
# coding=utf-8
# Copyright 2021 The Deeplab2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements Axial-Blocks proposed in Axial-DeepLab [1].
Axial-Blocks are based on residual bottleneck blocks, but with the 3x3
convolution replaced with two axial-attention layers, one on the height-axis,
followed by the other on the width-axis.
[1] Axial-Deeplab: Stand-Alone Axial-Attention for Panoptic Segmentation,
ECCV 2020 Spotlight.
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>.
"""
import tensorflow as tf
from deeplab2.model import utils
from deeplab2.model.layers import activations
from deeplab2.model.layers import axial_layers
from deeplab2.model.layers import convolutions
from deeplab2.model.layers import squeeze_and_excite
class AxialBlock(tf.keras.layers.Layer):
"""An AxialBlock as a building block for an Axial-ResNet model.
We implement the Axial-Block proposed in [1] in a general way that also
includes convolutional residual blocks, such as the basic block and the
bottleneck block (w/ and w/o Switchable Atrous Convolution).
A basic block consists of two 3x3 convolutions and a residual connection. It
is the main building block for wide-resnet variants.
A bottleneck block consists of consecutive 1x1, 3x3, 1x1 convolutions and a
residual connection. It is the main building block for standard resnet
variants.
An axial block consists of a 1x1 input convolution, a self-attention layer
(either axial-attention or global attention), a 1x1 output convolution, and a
residual connection. It is the main building block for axial-resnet variants.
Note: We apply the striding in the first spatial operation (i.e. 3x3
convolution or self-attention layer).
"""
def __init__(self,
             filters_list,
             kernel_size=3,
             strides=1,
             atrous_rate=1,
             use_squeeze_and_excite=False,
             use_sac=False,
             bn_layer=tf.keras.layers.BatchNormalization,
             activation='relu',
             name=None,
             conv_kernel_weight_decay=0.0,
             basic_block_second_conv_atrous_rate=None,
             attention_type=None,
             axial_layer_config=None):
  """Initializes an AxialBlock.

  Args:
    filters_list: A list of filter numbers in the residual block. We currently
      support filters_list with two or three elements. Two elements specify
      the filters for two consecutive 3x3 convolutions, while three elements
      specify the filters for three convolutions (1x1, 3x3, and 1x1).
    kernel_size: The size of the convolution kernels (default: 3).
    strides: The strides of the block (default: 1).
    atrous_rate: The atrous rate of the 3x3 convolutions (default: 1). If this
      residual block is a basic block, it is recommendeded to specify correct
      basic_block_second_conv_atrous_rate for the second 3x3 convolution.
      Otherwise, the second conv will also use atrous rate, which might cause
      atrous inconsistency with different output strides, as tested in
      axial_block_groups_test.test_atrous_consistency_basic_block.
    use_squeeze_and_excite: A boolean flag indicating whether
      squeeze-and-excite (SE) is used.
    use_sac: A boolean, using the Switchable Atrous Convolution (SAC) or not.
    bn_layer: A tf.keras.layers.Layer that computes the normalization
      (default: tf.keras.layers.BatchNormalization).
    activation: A string specifying the activation function to apply.
    name: An string specifying the name of the layer (default: None).
    conv_kernel_weight_decay: A float, the weight decay for convolution
      kernels.
    basic_block_second_conv_atrous_rate: An integer, the atrous rate for the
      second convolution of basic block. This is necessary to ensure atrous
      consistency with different output_strides. Defaults to atrous_rate.
    attention_type: A string, type of attention to apply. Support 'axial' and
      'global'.
    axial_layer_config: A dict, an argument dictionary for the axial layer.

  Raises:
    ValueError: If filters_list does not have two or three elements.
    ValueError: If attention_type is not supported.
    ValueError: If double_global_attention is True in axial_layer_config.
  """
  super(AxialBlock, self).__init__(name=name)
  self._filters_list = filters_list
  self._strides = strides
  self._use_squeeze_and_excite = use_squeeze_and_excite
  self._bn_layer = bn_layer
  self._activate_fn = activations.get_activation(activation)
  self._attention_type = attention_type
  if axial_layer_config is None:
    axial_layer_config = {}
  if basic_block_second_conv_atrous_rate is None:
    # When not explicitly overridden, the second basic-block conv reuses the
    # first conv's atrous rate (see the atrous_rate docstring caveat above).
    basic_block_second_conv_atrous_rate = atrous_rate
  if len(filters_list) == 3:
    # Three consecutive convolutions: 1x1, 3x3, and 1x1. The middle 3x3 is
    # replaced by a self-attention layer when attention_type is set.
    self._conv1_bn_act = convolutions.Conv2DSame(
        filters_list[0], 1, 'conv1_bn_act',
        use_bias=False,
        use_bn=True,
        bn_layer=bn_layer,
        activation=activation,
        conv_kernel_weight_decay=conv_kernel_weight_decay)
    if attention_type is None or attention_type.lower() == 'none':
      # Pure convolutional bottleneck: the spatial (strided) operation is the
      # middle 3x3 convolution.
      self._conv2_bn_act = convolutions.Conv2DSame(
          filters_list[1], kernel_size, 'conv2_bn_act',
          strides=strides,
          atrous_rate=atrous_rate,
          use_bias=False,
          use_bn=True,
          bn_layer=bn_layer,
          activation=activation,
          use_switchable_atrous_conv=use_sac,
          # We default to use global context in SAC if use_sac is True. This
          # setting is experimentally found effective.
          use_global_context_in_sac=use_sac,
          conv_kernel_weight_decay=conv_kernel_weight_decay)
    elif attention_type == 'axial':
      if 'double_global_attention' in axial_layer_config:
        # double_global_attention only applies to global attention; reject it
        # here rather than silently ignoring a misconfiguration.
        if axial_layer_config['double_global_attention']:
          raise ValueError('Double_global_attention takes no effect in '
                           'AxialAttention2D.')
        del axial_layer_config['double_global_attention']
      self._attention = axial_layers.AxialAttention2D(
          strides=strides,
          filters=filters_list[1],
          name='attention',
          bn_layer=bn_layer,
          conv_kernel_weight_decay=conv_kernel_weight_decay,
          **axial_layer_config)
    elif attention_type == 'global':
      self._attention = axial_layers.GlobalAttention2D(
          strides=strides,
          filters=filters_list[1],
          name='attention',
          bn_layer=bn_layer,
          conv_kernel_weight_decay=conv_kernel_weight_decay,
          **axial_layer_config)
    else:
      raise ValueError(attention_type + ' is not supported.')
    # Here we apply a batch norm with gamma initialized at zero. This ensures
    # that at random initialization of the model, the skip connections
    # dominate all residual blocks. In this way, all the skip connections
    # construct an identity mapping that passes the gradients (without any
    # distortion from the randomly initialized blocks) to all residual blocks.
    # This trick helps training at early epochs.
    # Reference: "Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour".
    # https://arxiv.org/abs/1706.02677
    self._conv3_bn = convolutions.Conv2DSame(
        filters_list[2], 1, 'conv3_bn',
        use_bias=False,
        use_bn=True,
        bn_layer=bn_layer,
        bn_gamma_initializer='zeros',
        activation='none',
        conv_kernel_weight_decay=conv_kernel_weight_decay)
  elif len(filters_list) == 2:
    # Two consecutive convolutions: 3x3 and 3x3 (basic block). The stride is
    # applied in the first conv; the second conv always uses stride 1.
    self._conv1_bn_act = convolutions.Conv2DSame(
        filters_list[0], kernel_size, 'conv1_bn_act',
        strides=strides,
        atrous_rate=atrous_rate,
        use_bias=False,
        use_bn=True,
        bn_layer=bn_layer,
        activation=activation,
        use_switchable_atrous_conv=use_sac,
        use_global_context_in_sac=use_sac,
        conv_kernel_weight_decay=conv_kernel_weight_decay)
    # Here we apply a batch norm with gamma initialized at zero. This ensures
    # that at random initialization of the model, the skip connections
    # dominate all residual blocks. In this way, all the skip connections
    # construct an identity mapping that passes the gradients (without any
    # distortion from the randomly initialized blocks) to all residual blocks.
    # This trick helps training at early epochs.
    # Reference: "Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour".
    # https://arxiv.org/abs/1706.02677
    self._conv2_bn = convolutions.Conv2DSame(
        filters_list[1], kernel_size, 'conv2_bn',
        strides=1,
        atrous_rate=basic_block_second_conv_atrous_rate,
        use_bias=False,
        use_bn=True,
        bn_layer=bn_layer,
        bn_gamma_initializer='zeros',
        activation='none',
        use_switchable_atrous_conv=use_sac,
        use_global_context_in_sac=use_sac,
        conv_kernel_weight_decay=conv_kernel_weight_decay)
  else:
    raise ValueError('Expect filters_list to have length 2 or 3; got %d' %
                     len(filters_list))
  if self._use_squeeze_and_excite:
    # SE operates on the block output, hence filters_list[-1] channels.
    self._squeeze_and_excite = squeeze_and_excite.SimplifiedSqueezeAndExcite(
        filters_list[-1])
  self._conv_kernel_weight_decay = conv_kernel_weight_decay
def build(self, input_shape_list):
  """Creates the projection shortcut when channel counts differ.

  Args:
    input_shape_list: A list of input shapes; the first entry is the shape of
      the main input tensor, with channels at index 3.
  """
  feature_shape = input_shape_list[0]
  output_channels = self._filters_list[-1]
  if feature_shape[3] == output_channels:
    # Channels already match, so call() can use an identity (or strided
    # slice) shortcut; no 1x1 projection conv is needed.
    self._shortcut = None
  else:
    self._shortcut = convolutions.Conv2DSame(
        output_channels, 1, 'shortcut',
        strides=self._strides,
        use_bias=False,
        use_bn=True,
        bn_layer=self._bn_layer,
        activation='none',
        conv_kernel_weight_decay=self._conv_kernel_weight_decay)
def call(self, inputs):
"""Performs a forward pass.
We have to define drop_path_random_mask outside the layer call and pass it
into the layer, because recompute_grad (gradient checkpointing) does not
allow any randomness within the function call. In addition, recompute_grad
only supports float tensors as inputs. For this reason, the training flag
should be also passed as a float tensor. For the same reason, we cannot
support passing drop_path_random_mask as None. Instead, we ask the users to
pass only the first two tensors when drop path is not used.
Args:
inputs: A tuple of 2 or 3 tensors, containing
input_tensor should be an input tensor of type tf.Tensor with shape
[batch, height, width, channels].
float_tensor_training should be a float tensor of 0.0 or 1.0, whether
the model is in training mode.
(optional) drop_path_random_mask is a drop path random mask of type
tf.Tensor with shape [batch, 1, 1, 1].
Returns:
outputs: two tensors. The first tensor does not use the last activation
function. The second tensor uses the activation. We return non-activated
output to support MaX-DeepLab which uses non-activated feature for the
stacked decoders.
Raises:
ValueError: If the length of inputs is not 2 or 3.
"""
if len(inputs) not in (2, 3):
raise ValueError('The length of inputs should be either 2 or 3.')
# Unpack the inputs.
input_tensor, float_tensor_training, drop_path_random_mask = (
utils.pad_sequence_with_none(inputs, target_length=3))
# Recompute_grad takes only float tensors as inputs. It does not allow
# bools or boolean tensors. For this reason, we cast training to a float
# tensor outside this call, and now we cast it back to a boolean tensor.
training = tf.cast(float_tensor_training, tf.bool)
shortcut = input_tensor
if self._shortcut is not None:
shortcut = self._shortcut(shortcut, training=training)
elif self._strides != 1:
shortcut = shortcut[:, ::self._strides, ::self._strides, :]
if len(self._filters_list) == 3:
x = self._conv1_bn_act(input_tensor, training=training)
if (self._attention_type is None or
self._attention_type.lower() == 'none'):
x = self._conv2_bn_act(x, training=training)
else:
x = self._attention(x, training=training)
x = self._activate_fn(x)
x = | |
'Breed1', 'Age', 'Fee']].groupby([ 'State', 'Breed1', 'Age'])['Fee'].transform('mean')
alldata['#Feature_state_breed2_age_freq'] = alldata[[ 'State', 'Breed2', 'Age']].groupby([ 'State', 'Breed2'])['Age'].transform('mean')
alldata['#Feature_state_breed2_age_fee_freq'] = alldata[[ 'State', 'Breed2', 'Age', 'Fee']].groupby([ 'State', 'Breed2', 'Age'])['Fee'].transform('mean')
alldata['#Feature_avg_type_age_breed1_fee'] = alldata[['Type','Age', 'Breed1', 'Fee']].groupby(['Type','Age', 'Breed1'])['Fee'].transform('mean')
alldata['#Feature_avg_type_age_breed2_fee'] = alldata[['Type','Age', 'Breed2', 'Fee']].groupby(['Type','Age', 'Breed2'])['Fee'].transform('mean')
alldata['#Feature_age_type_breed1_maturity_sz'] = alldata[['Type', 'Age', 'Breed1', 'MaturitySize']].groupby(['Type', 'Age', 'Breed1'])['MaturitySize'].transform('count') / alldata.shape[0]
alldata['#Feature_age_type_breed2_maturity_sz'] = alldata[['Type', 'Age', 'Breed2', 'MaturitySize']].groupby(['Type', 'Age', 'Breed2'])['MaturitySize'].transform('count') / alldata.shape[0]
alldata['#Feature_age_type_breed1_fur'] = alldata[['Type', 'Age', 'Breed1', 'FurLength']].groupby(['Type', 'Age', 'Breed1'])['FurLength'].transform('count') / alldata.shape[0]
alldata['#Feature_age_type_breed2_fur'] = alldata[['Type', 'Age', 'Breed2', 'FurLength']].groupby(['Type', 'Age', 'Breed2'])['FurLength'].transform('count') / alldata.shape[0]
alldata['#Feature_age_type_breed1_fee'] = alldata[['Type', 'Age', 'Breed1', 'Fee']].groupby(['Type', 'Age', 'Breed1'])['Fee'].transform('count') / alldata.shape[0]
alldata['#Feature_age_type_breed2_fee'] = alldata[['Type', 'Age', 'Breed2', 'Fee']].groupby(['Type', 'Age', 'Breed2'])['Fee'].transform('count') / alldata.shape[0]
alldata['#Feature_state_type_breed1_age_freq'] = alldata[['Type', 'State', 'Breed1', 'Age']].groupby(['Type', 'State', 'Breed1'])['Age'].transform('mean')
alldata['#Feature_state_type_breed1_age_fee_freq'] = alldata[['Type', 'State', 'Breed1', 'Age', 'Fee']].groupby(['Type', 'State', 'Breed1', 'Age'])['Fee'].transform('mean')
alldata['#Feature_state_type_breed2_age_freq'] = alldata[['Type', 'State', 'Breed2', 'Age']].groupby(['Type', 'State', 'Breed2'])['Age'].transform('mean')
alldata['#Feature_state_type_breed2_age_fee_freq'] = alldata[['Type', 'State', 'Breed2', 'Age', 'Fee']].groupby(['Type', 'State', 'Breed2', 'Age'])['Fee'].transform('mean')
###########################################################################################################
alldata['RelAge'] = alldata[['Type', 'Age']].apply(relative_age, axis=1)
alldata['IsNameGenuine'] = alldata[['Name', 'Quantity']].apply(genuine_name, axis=1)
alldata['InstaFeature'] = alldata[['PhotoAmt', 'VideoAmt']].apply(seo_value, axis=1)
alldata['ShowsMore'] = alldata['PhotoAmt'].apply(VerifibalePhotoAmy)
alldata["Vaccinated_Deworked_Mutation"] = alldata['Vaccinated'].apply(str) + "_" + alldata['Dewormed'].apply(str)
alldata["Vaccinated_Deworked_Mutation"] = alldata['Vaccinated'].apply(str) + "_" + alldata['Dewormed'].apply(str)
alldata = pd.get_dummies(alldata, columns=['Vaccinated_Deworked_Mutation'], prefix="Vaccinated_Dewormed")
alldata['GlobalInstaRank'] = alldata['InstaFeature'].rank(method='max')
print(">> Ranking Features By State")
alldata = rankbyG(alldata, "State")
print(">> Ranking Features By Animal")
alldata = rankbyG(alldata, "Type")
print(">> Ranking Features By Breed1")
alldata = rankbyG(alldata, "Breed1")
print(">> Ranking Features By Gender")
alldata = rankbyG(alldata, "Gender")
top_dogs = [179, 205, 195, 178, 206, 109, 189, 103]
top_cats = [276, 268, 285, 252, 243, 251, 288, 247, 280, 290]
alldata['#Feature_SecondaryColors'] = alldata['Color2'] + alldata['Color3']
alldata['#Feature_MonoColor'] = np.where(alldata['#Feature_SecondaryColors'], 1, 0)
alldata['top_breeds'] = 0
alldata.loc[alldata['Breed1'].isin(top_dogs + top_cats) == True, 'top_breeds'] = 1
alldata['top_breed_free'] = 0
alldata.loc[alldata[(alldata['Fee'] == 0) & (alldata['top_breeds'] == 1)].index, 'top_breed_free'] = 1
alldata['free_pet'] = 0
alldata.loc[alldata[alldata['Fee'] == 0].index, 'free_pet'] = 1
alldata['free_pet_age_1'] = 0
alldata.loc[alldata[(alldata['Fee'] == 0) & (alldata['Age'] == 1)].index, 'free_pet_age_1'] = 1
alldata['year'] = alldata['Age'] / 12.
alldata['#Feature_less_a_year'] = np.where(alldata['Age'] < 12, 1, 0)
alldata['#Feature_top_2_states'] = 0
alldata.loc[alldata['State'].isin([41326, 41401]) == True, '#Feature_top_2_states'] = 1
alldata['#Feature_age_exact'] = 0
alldata.loc[alldata['Age'].isin([12, 24, 36, 48, 60, 72, 84, 96, 108]) == True, '#Feature_age_exact'] = 1
alldata['#Feature_isLonely'] = np.where(alldata['Quantity'] > 1, 1, 0)
alldata['total_img_video'] = alldata['PhotoAmt'] + alldata['VideoAmt']
# alldata['#Feature_avg_age_breed1_fee'] = alldata[['Age', 'Breed1', 'Fee']].groupby(['Age', 'Breed1'])[
# 'Fee'].transform('mean')
# alldata['#Feature_avg_age_breed2_fee'] = alldata[['Age', 'Breed2', 'Fee']].groupby(['Age', 'Breed2'])[
# 'Fee'].transform('mean')
# alldata['#Feature_age_breed1_maturity_sz'] = alldata[['Age', 'Breed1', 'MaturitySize']].groupby(['Age', 'Breed1'])[
# 'MaturitySize'].transform('count') / alldata.shape[0]
# alldata['#Feature_age_breed2_maturity_sz'] = alldata[['Age', 'Breed2', 'MaturitySize']].groupby(['Age', 'Breed2'])[
# 'MaturitySize'].transform('count') / alldata.shape[0]
# alldata['#Feature_age_breed1_fur'] = alldata[['Age', 'Breed1', 'FurLength']].groupby(['Age', 'Breed1'])[
# 'FurLength'].transform('count') / alldata.shape[0]
# alldata['#Feature_age_breed2_fur'] = alldata[['Age', 'Breed2', 'FurLength']].groupby(['Age', 'Breed2'])[
# 'FurLength'].transform('count') / alldata.shape[0]
# alldata['#Feature_age_breed1_fee'] = alldata[['Age', 'Breed1', 'Fee']].groupby(['Age', 'Breed1'])['Fee'].transform(
# 'count') / alldata.shape[0]
# alldata['#Feature_age_breed2_fee'] = alldata[['Age', 'Breed2', 'Fee']].groupby(['Age', 'Breed2'])['Fee'].transform(
# 'count') / alldata.shape[0]
# alldata['#Feature_state_breed1_age_freq'] = alldata[['State', 'Breed1', 'Age']].groupby(['State', 'Breed1'])[
# 'Age'].transform('mean')
# alldata['#Feature_state_breed1_age_fee_freq'] = \
# alldata[['State', 'Breed1', 'Age', 'Fee']].groupby(['State', 'Breed1', 'Age'])['Fee'].transform('mean')
# alldata['#Feature_state_breed2_age_freq'] = alldata[['State', 'Breed2', 'Age']].groupby(['State', 'Breed2'])[
# 'Age'].transform('mean')
# alldata['#Feature_state_breed2_age_fee_freq'] = \
# alldata[['State', 'Breed2', 'Age', 'Fee']].groupby(['State', 'Breed2', 'Age'])['Fee'].transform('mean')
# Clean the name
# alldata['Name'] = alldata['Name'].apply(lambda x: clean_name(x))
# alldata['Name'] = alldata['Name'].fillna("No Name")
rescuer_count = alldata.groupby(['RescuerID'])['PetID'].count().reset_index()
rescuer_count.columns = ['RescuerID', 'RescuerID_COUNT']
alldata = alldata.merge(rescuer_count, how='left', on='RescuerID')
Description_count = alldata.groupby(['Description'])['PetID'].count().reset_index()
Description_count.columns = ['Description', 'Description_COUNT']
alldata = alldata.merge(Description_count, how='left', on='Description')
Name_count = alldata.groupby(['Name'])['PetID'].count().reset_index()
Name_count.columns = ['Name', 'Name_COUNT']
alldata = alldata.merge(Name_count, how='left', on='Name')
agg = {}
agg['Quantity'] = ['mean', 'var', 'max', 'min', 'skew', 'median']
agg['Fee'] = ['mean', 'var', 'max', 'min', 'skew', 'median']
agg['Age'] = ['mean', 'sum', 'var', 'max', 'min', 'skew', 'median']
agg['Breed1'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Breed2'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Type'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Gender'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Color1'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Color2'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Color3'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['MaturitySize'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['FurLength'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Vaccinated'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Sterilized'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg['Health'] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg["PhotoAmt"] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
agg["RelAge"] = ['nunique', 'var', 'max', 'min', 'skew', 'median']
# RescuerID
grouby = 'RescuerID'
agg_df = agg_features(alldata, grouby, agg, grouby)
alldata = alldata.merge(agg_df, on=grouby, how='left')
agg_kurt_df = alldata.groupby(grouby)[list(agg.keys())].apply(pd.DataFrame.kurt)
agg_kurt_df.columns = [f"{key}_kurt" for key in list(agg.keys())]
alldata = alldata.merge(agg_kurt_df, on=grouby, how='left')
agg_perc_df = alldata.groupby(grouby)[list(agg.keys())].quantile(.25)
agg_perc_df.columns = [f"{key}_perc_25" for key in list(agg.keys())]
alldata = alldata.merge(agg_perc_df, on=grouby, how='left')
agg_perc_df = alldata.groupby(grouby)[list(agg.keys())].quantile(.75)
agg_perc_df.columns = [f"{key}_perc_75" for key in list(agg.keys())]
alldata = alldata.merge(agg_perc_df, on=grouby, how='left')
# State
################################################CREATING MULTIPLE COLUMNS WITH_X NEED TO BE FIXED
grouby = 'State'
agg_df = agg_features(alldata, grouby, agg, grouby)
alldata = alldata.merge(agg_df, on=grouby, how='left')
agg_kurt_df = alldata.groupby(grouby)[list(agg.keys())].apply(pd.DataFrame.kurt)
agg_kurt_df.columns = [f"{key}_kurt" for key in list(agg.keys())]
alldata = alldata.merge(agg_kurt_df, on=grouby, how='left')
agg_perc_df = alldata.groupby(grouby)[list(agg.keys())].quantile(.25)
agg_perc_df.columns = [f"{key}_perc_25" for key in list(agg.keys())]
alldata = alldata.merge(agg_perc_df, on=grouby, how='left')
agg_perc_df = alldata.groupby(grouby)[list(agg.keys())].quantile(.75)
agg_perc_df.columns = [f"{key}_perc_75" for key in list(agg.keys())]
alldata = alldata.merge(agg_perc_df, on=grouby, how='left')
train = alldata[:len(train)]
test = alldata[len(train):]
return train, test
def image_dim_features(train, test):
    """Aggregates on-disk image statistics (file size, width, height) per pet.

    Image files are discovered by globbing the Kaggle competition input
    directories; each filename looks like '<PetID>-<n>.jpg', so the PetID is
    recovered from the filename prefix before the first '-'.

    Fixes over the previous version: removes dead locals (split_char,
    train_df_ids, test_df_ids) and deduplicates the identical train/test
    per-file-stats + aggregation code into one helper.

    Args:
        train: Train DataFrame. Unused; kept for interface compatibility.
        test: Test DataFrame. Unused; kept for interface compatibility.

    Returns:
        A DataFrame with one row per PetID (train rows followed by test rows)
        and sum/mean/var aggregates of image_size, width, and height.
    """
    aggs = {
        'image_size': ['sum', 'mean', 'var'],
        'width': ['sum', 'mean', 'var'],
        'height': ['sum', 'mean', 'var'],
    }
    # Flattened column names, computed once instead of once per split.
    new_columns = [k + '_' + agg for k in aggs.keys() for agg in aggs[k]]

    def _aggregate_image_stats(image_files):
        # One row per image file; also robust to an empty file list.
        df_imgs = pd.DataFrame({'image_filename': image_files})
        df_imgs['PetID'] = df_imgs['image_filename'].apply(
            lambda x: x.split('/')[-1].split('-')[0])
        df_imgs['image_size'] = df_imgs['image_filename'].apply(getSize)
        dims = df_imgs['image_filename'].apply(getDimensions)
        df_imgs['width'] = dims.apply(lambda x: x[0])
        df_imgs['height'] = dims.apply(lambda x: x[1])
        agg_imgs = df_imgs.groupby('PetID').agg(aggs)
        agg_imgs.columns = new_columns
        return agg_imgs.reset_index()

    train_image_files = sorted(
        glob.glob('../input/petfinder-adoption-prediction/train_images/*.jpg'))
    test_image_files = sorted(
        glob.glob('../input/petfinder-adoption-prediction/test_images/*.jpg'))
    agg_train_imgs = _aggregate_image_stats(train_image_files)
    agg_test_imgs = _aggregate_image_stats(test_image_files)
    return pd.concat([agg_train_imgs, agg_test_imgs],
                     axis=0).reset_index(drop=True)
def metadata_features(train, test):
if not preload:
train_pet_ids = train.PetID.unique()
test_pet_ids = test.PetID.unique()
# Train Feature Extractions
# ===============================
dfs_train = Parallel(n_jobs=12, verbose=1)(
delayed(extract_additional_features)(i, mode='train') for i in train_pet_ids)
train_dfs_sentiment = [x[0] for x in dfs_train if isinstance(x[0], pd.DataFrame)]
train_dfs_metadata = [x[1] for x in dfs_train if isinstance(x[1], pd.DataFrame)]
train_dfs_sentiment = pd.concat(train_dfs_sentiment, ignore_index=True, sort=False)
train_dfs_metadata = pd.concat(train_dfs_metadata, ignore_index=True, sort=False)
# Test Feature Extractions
# ===============================
dfs_test = Parallel(n_jobs=6, verbose=1)(delayed(extract_additional_features)(i, mode='test') for i in test_pet_ids)
test_dfs_sentiment = [x[0] for x in dfs_test if isinstance(x[0], pd.DataFrame)]
test_dfs_metadata = [x[1] for x in dfs_test if isinstance(x[1], pd.DataFrame)]
test_dfs_sentiment = pd.concat(test_dfs_sentiment, ignore_index=True, sort=False)
test_dfs_metadata = pd.concat(test_dfs_metadata, ignore_index=True, sort=False)
else:
train_dfs_sentiment = pd.read_csv("./processed_data/train_dfs_sentiment.csv")
train_dfs_metadata = pd.read_csv("./processed_data/train_dfs_metadata.csv")
test_dfs_sentiment = pd.read_csv("./processed_data/test_dfs_sentiment.csv")
test_dfs_metadata = pd.read_csv("./processed_data/test_dfs_metadata.csv")
train_dfs_sentiment['sentiment_entities'].fillna('', inplace=True)
train_dfs_metadata['metadata_annots_top_desc'].fillna('', inplace=True)
test_dfs_sentiment['sentiment_entities'].fillna('', inplace=True)
test_dfs_metadata['metadata_annots_top_desc'].fillna('', inplace=True)
# Meta data Aggregates
# ===============================
aggregates = ['mean', 'sum', 'var']
# Train Aggregates
# ---------------------------
train_metadata_desc = train_dfs_metadata.groupby(['PetID'])['metadata_annots_top_desc'].unique()
train_metadata_desc = train_metadata_desc.reset_index()
train_metadata_desc['metadata_annots_top_desc'] = train_metadata_desc['metadata_annots_top_desc'].apply(
lambda x: ' '.join(x.tolist()))
prefix = 'metadata'
train_metadata_gr = train_dfs_metadata.drop(['metadata_annots_top_desc'], axis=1)
for i in train_metadata_gr.columns:
if 'PetID' not in i:
train_metadata_gr[i] = train_metadata_gr[i].astype(float)
train_metadata_gr = train_metadata_gr.groupby(['PetID']).agg(aggregates)
train_metadata_gr.columns = pd.Index(['{}_{}_{}'.format(
prefix, c[0], c[1].upper()) for c in train_metadata_gr.columns.tolist()])
train_metadata_gr = train_metadata_gr.reset_index()
train_sentiment_desc = train_dfs_sentiment.groupby(['PetID'])['sentiment_entities'].unique()
train_sentiment_desc = train_sentiment_desc.reset_index()
train_sentiment_desc['sentiment_entities'] = train_sentiment_desc['sentiment_entities'].apply(
lambda x: ' '.join(x.tolist()))
prefix = 'sentiment'
train_sentiment_gr = train_dfs_sentiment.drop(['sentiment_entities'], axis=1)
for i in train_sentiment_gr.columns:
if 'PetID' not in i:
train_sentiment_gr[i] = train_sentiment_gr[i].astype(float)
train_sentiment_gr = train_sentiment_gr.groupby(['PetID']).agg(aggregates)
train_sentiment_gr.columns = pd.Index(['{}_{}_{}'.format(
prefix, c[0], c[1].upper()) for c in train_sentiment_gr.columns.tolist()])
train_sentiment_gr = train_sentiment_gr.reset_index()
# Test data Aggregates
# ---------------------------
test_metadata_desc = test_dfs_metadata.groupby(['PetID'])['metadata_annots_top_desc'].unique()
test_metadata_desc = test_metadata_desc.reset_index()
test_metadata_desc[
'metadata_annots_top_desc'] = test_metadata_desc[
'metadata_annots_top_desc'].apply(lambda x: ' '.join(x.tolist()))
prefix = 'metadata'
test_metadata_gr = test_dfs_metadata.drop(['metadata_annots_top_desc'], axis=1)
for i in test_metadata_gr.columns:
if 'PetID' not in i:
test_metadata_gr[i] = test_metadata_gr[i].astype(float)
test_metadata_gr = test_metadata_gr.groupby(['PetID']).agg(aggregates)
test_metadata_gr.columns = pd.Index(['{}_{}_{}'.format(
prefix, c[0], c[1].upper()) for c in test_metadata_gr.columns.tolist()])
test_metadata_gr = test_metadata_gr.reset_index()
test_sentiment_desc = test_dfs_sentiment.groupby(['PetID'])['sentiment_entities'].unique()
test_sentiment_desc = test_sentiment_desc.reset_index()
test_sentiment_desc[
'sentiment_entities'] = test_sentiment_desc[
'sentiment_entities'].apply(lambda x: ' | |
return _map
result = dict()
if self.success is not None:
result['success'] = self.success
if self.result is not None:
result['result'] = self.result.to_map()
return result
def from_map(self, m: dict = None):
    """Populates this response body from a plain dict and returns self."""
    source = m or {}
    if source.get('success') is not None:
        self.success = source.get('success')
    result_map = source.get('result')
    if result_map is not None:
        # 'result' is a nested structure; deserialize it into its model type.
        self.result = SendInteractiveCardResponseBodyResult().from_map(result_map)
    return self
class SendInteractiveCardResponse(TeaModel):
    """Full API response: transport headers plus the deserialized body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: SendInteractiveCardResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; the nested body validates itself when set.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('body') is not None:
            self.body = SendInteractiveCardResponseBody().from_map(source['body'])
        return self
class GetSceneGroupInfoHeaders(TeaModel):
    """Request headers for the GetSceneGroupInfo API (access token carrier)."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        mapped = {}
        if self.common_headers is not None:
            mapped['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            mapped['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return mapped

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('commonHeaders') is not None:
            self.common_headers = source.get('commonHeaders')
        if source.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = source.get('x-acs-dingtalk-access-token')
        return self
class GetSceneGroupInfoRequest(TeaModel):
    """Request payload for querying scene-group information."""

    def __init__(
        self,
        open_conversation_id: str = None,
        cool_app_code: str = None,
        ding_token_grant_type: int = None,
        ding_org_id: int = None,
        ding_isv_org_id: int = None,
        ding_suite_key: str = None,
        ding_client_id: str = None,
        ding_oauth_app_id: int = None,
    ):
        # Open conversation id of the group.
        self.open_conversation_id = open_conversation_id
        # Cool-app code.
        self.cool_app_code = cool_app_code
        self.ding_token_grant_type = ding_token_grant_type
        self.ding_org_id = ding_org_id
        self.ding_isv_org_id = ding_isv_org_id
        self.ding_suite_key = ding_suite_key
        self.ding_client_id = ding_client_id
        self.ding_oauth_app_id = ding_oauth_app_id

    # Wire-key -> attribute pairs, in serialization order.
    def _field_pairs(self):
        return (
            ('openConversationId', 'open_conversation_id'),
            ('coolAppCode', 'cool_app_code'),
            ('dingTokenGrantType', 'ding_token_grant_type'),
            ('dingOrgId', 'ding_org_id'),
            ('dingIsvOrgId', 'ding_isv_org_id'),
            ('dingSuiteKey', 'ding_suite_key'),
            ('dingClientId', 'ding_client_id'),
            ('dingOauthAppId', 'ding_oauth_app_id'),
        )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        # Only non-None fields are serialized.
        return {
            key: getattr(self, attr)
            for key, attr in self._field_pairs()
            if getattr(self, attr) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key, attr in self._field_pairs():
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class GetSceneGroupInfoResponseBody(TeaModel):
    """Response body describing a scene group."""

    def __init__(
        self,
        success: bool = None,
        open_conversation_id: str = None,
        template_id: str = None,
        title: str = None,
        owner_user_id: str = None,
        icon: str = None,
        group_url: str = None,
    ):
        # Result flag.
        self.success = success
        # Open conversation id of the group.
        self.open_conversation_id = open_conversation_id
        # Scene-group template id.
        self.template_id = template_id
        # Group name.
        self.title = title
        # Employee id of the group owner.
        self.owner_user_id = owner_user_id
        # mediaId of the group avatar.
        self.icon = icon
        # Group URL.
        self.group_url = group_url

    # Wire-key -> attribute pairs, in serialization order.
    def _field_pairs(self):
        return (
            ('success', 'success'),
            ('openConversationId', 'open_conversation_id'),
            ('templateId', 'template_id'),
            ('title', 'title'),
            ('ownerUserId', 'owner_user_id'),
            ('icon', 'icon'),
            ('groupUrl', 'group_url'),
        )

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        # Only non-None fields are serialized.
        return {
            key: getattr(self, attr)
            for key, attr in self._field_pairs()
            if getattr(self, attr) is not None
        }

    def from_map(self, m: dict = None):
        source = m or {}
        for key, attr in self._field_pairs():
            value = source.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class GetSceneGroupInfoResponse(TeaModel):
    """Full API response: transport headers plus the deserialized body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: GetSceneGroupInfoResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both fields are mandatory; the nested body validates itself when set.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped
        mapped = {}
        if self.headers is not None:
            mapped['headers'] = self.headers
        if self.body is not None:
            mapped['body'] = self.body.to_map()
        return mapped

    def from_map(self, m: dict = None):
        source = m or {}
        if source.get('headers') is not None:
            self.headers = source.get('headers')
        if source.get('body') is not None:
            self.body = GetSceneGroupInfoResponseBody().from_map(source['body'])
        return self
class InteractiveCardCreateInstanceHeaders(TeaModel):
    """Request headers for InteractiveCardCreateInstance."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class InteractiveCardCreateInstanceRequestCardData(TeaModel):
    """Template substitution parameters for an interactive card."""

    def __init__(
        self,
        card_param_map: Dict[str, str] = None,
        card_media_id_param_map: Dict[str, str] = None,
    ):
        # template substitution parameters - plain text values
        self.card_param_map = card_param_map
        # template substitution parameters - media (mediaId) values
        self.card_media_id_param_map = card_media_id_param_map

    def validate(self):
        pass

    def to_map(self):
        serialized = super().to_map()
        if serialized is not None:
            return serialized
        result = dict()
        if self.card_param_map is not None:
            result['cardParamMap'] = self.card_param_map
        if self.card_media_id_param_map is not None:
            result['cardMediaIdParamMap'] = self.card_media_id_param_map
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('cardParamMap') is not None:
            self.card_param_map = m.get('cardParamMap')
        if m.get('cardMediaIdParamMap') is not None:
            self.card_media_id_param_map = m.get('cardMediaIdParamMap')
        return self
class InteractiveCardCreateInstanceRequest(TeaModel):
    def __init__(
        self,
        ding_isv_org_id: int = None,
        card_template_id: str = None,
        open_conversation_id: str = None,
        receiver_user_id_list: List[str] = None,
        ding_token_grant_type: int = None,
        out_track_id: str = None,
        ding_suite_key: str = None,
        robot_code: str = None,
        ding_org_id: int = None,
        conversation_type: int = None,
        callback_route_key: str = None,
        card_data: InteractiveCardCreateInstanceRequestCardData = None,
        private_data: Dict[str, PrivateDataValue] = None,
        ding_oauth_app_id: int = None,
        chat_bot_id: str = None,
        user_id_type: int = None,
    ):
        """Request model for creating an interactive card instance."""
        self.ding_isv_org_id = ding_isv_org_id
        # Card template ID
        self.card_template_id = card_template_id
        # openConversationId of the group that receives the card
        self.open_conversation_id = open_conversation_id
        # List of receiver userIds
        self.receiver_user_id_list = receiver_user_id_list
        self.ding_token_grant_type = ding_token_grant_type
        # External ID that uniquely identifies a card (idempotency ID; can be used
        # to update the card, or to send the same card to multiple conversations)
        self.out_track_id = out_track_id
        self.ding_suite_key = ding_suite_key
        # Robot code (group-template robot). Exactly one of robotCode / chatBotId is required.
        self.robot_code = robot_code
        self.ding_org_id = ding_org_id
        # Conversation type: 0 = single chat, 1 = group chat. For single chat,
        # openConversationId is not needed and receiverUserIdList must contain
        # exactly one employee ID.
        self.conversation_type = conversation_type
        # Routing key for card callbacks, used to select a specific callbackUrl
        # (optional; defaults to the corporation's configured callback address)
        self.callback_route_key = callback_route_key
        self.card_data = card_data
        # Per-user visible button lists (key: userId; value: that user's data)
        self.private_data = private_data
        self.ding_oauth_app_id = ding_oauth_app_id
        # Robot ID (corporate robot). Exactly one of robotCode / chatBotId is required.
        self.chat_bot_id = chat_bot_id
        # User ID type: 1 = staffId mode (default); 2 = unionId mode. Governs how
        # user IDs are given in receiverUserIdList and privateData.
        self.user_id_type = user_id_type
def validate(self):
if self.card_data:
self.card_data.validate()
if self.private_data:
for v in self.private_data.values():
if v:
v.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.ding_isv_org_id is not None:
result['dingIsvOrgId'] = self.ding_isv_org_id
if self.card_template_id is not None:
result['cardTemplateId'] = self.card_template_id
if self.open_conversation_id is not None:
result['openConversationId'] = self.open_conversation_id
if self.receiver_user_id_list is not None:
result['receiverUserIdList'] = self.receiver_user_id_list
if self.ding_token_grant_type is not None:
result['dingTokenGrantType'] = self.ding_token_grant_type
if self.out_track_id is not None:
result['outTrackId'] = self.out_track_id
if self.ding_suite_key is not None:
result['dingSuiteKey'] = self.ding_suite_key
if self.robot_code is not None:
result['robotCode'] = self.robot_code
if self.ding_org_id is not None:
result['dingOrgId'] = self.ding_org_id
if self.conversation_type is not None:
result['conversationType'] = self.conversation_type
if self.callback_route_key is not None:
result['callbackRouteKey'] = self.callback_route_key
if self.card_data is not None:
result['cardData'] = self.card_data.to_map()
result['privateData'] = {}
if self.private_data is not None:
for k, v in self.private_data.items():
result['privateData'][k] = v.to_map()
if self.ding_oauth_app_id is not None:
result['dingOauthAppId'] = self.ding_oauth_app_id
if self.chat_bot_id is not None:
result['chatBotId'] = self.chat_bot_id
if self.user_id_type is not None:
result['userIdType'] = self.user_id_type
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('dingIsvOrgId') is not None:
self.ding_isv_org_id = m.get('dingIsvOrgId')
if m.get('cardTemplateId') is not None:
self.card_template_id = m.get('cardTemplateId')
if | |
in self._entity_data:
return float(self._entity_data.get('freepass_timetotrigger'))
return float(0)
@property
def freepass_duration(self):
    # Float keyvalue 'freepass_duration'; defaults to 0 when unset.
    return float(self._entity_data.get('freepass_duration', 0))
@property
def freepass_movetolerance(self):
    # Float keyvalue 'freepass_movetolerance'; defaults to 120 when unset.
    return float(self._entity_data.get('freepass_movetolerance', 120))
@property
def freepass_refillrate(self):
    # Float keyvalue 'freepass_refillrate'; defaults to 0.5 when unset.
    return float(self._entity_data.get('freepass_refillrate', 0.5))
@property
def freepass_peektime(self):
    # Float keyvalue 'freepass_peektime'; defaults to 0 when unset.
    return float(self._entity_data.get('freepass_peektime', 0))
@property
def StartOn(self):
    # Boolean keyvalue 'StartOn'; defaults to True when unset.
    return bool(self._entity_data.get('StartOn', 1))
class ai_goal_operator(Targetname, EnableDisable):
    """Keyvalue accessors for the ai_goal_operator entity."""

    @property
    def actor(self):
        return self._entity_data.get('actor', "")

    @property
    def target(self):
        return self._entity_data.get('target', "")

    @property
    def contexttarget(self):
        return self._entity_data.get('contexttarget', "")

    @property
    def state(self):
        return self._entity_data.get('state', "0")

    @property
    def moveto(self):
        return self._entity_data.get('moveto', "1")
class monster_generic(BaseNPC):
    """Keyvalue accessors for the monster_generic entity."""

    @property
    def spawnflags(self):
        # Decode the spawnflags bitmask into the names of the flags that are set.
        if "spawnflags" not in self._entity_data:
            return []
        value = self._entity_data.get("spawnflags", None)
        return [name for name, (bit, _) in {'Not solid': (65536, 0)}.items()
                if value & bit > 0]

    @property
    def model(self):
        return self._entity_data.get('model')

    @property
    def body(self):
        return int(self._entity_data.get('body', 0))
class generic_actor(BaseNPC, Parentname, Studiomodel):
    """Keyvalue accessors for the generic_actor entity."""

    @property
    def hull_name(self):
        return self._entity_data.get('hull_name', "Human")

    @property
    def footstep_script(self):
        return self._entity_data.get('footstep_script', "")

    @property
    def act_as_flyer(self):
        return self._entity_data.get('act_as_flyer', "0")

    @property
    def is_friendly_npc(self):
        return bool(self._entity_data.get('is_friendly_npc', 0))
class cycler_actor(BaseNPC):
    """Keyvalue accessors for the cycler_actor entity."""

    @property
    def model(self):
        return self._entity_data.get('model')

    @property
    def Sentence(self):
        return self._entity_data.get('Sentence', "")
class npc_maker(BaseNPCMaker):
    """Keyvalue accessors for the npc_maker entity."""

    icon_sprite = "editor/npc_maker.vmat"

    @property
    def spawnflags(self):
        # Decode the spawnflags bitmask into the names of the flags that are set.
        if "spawnflags" not in self._entity_data:
            return []
        value = self._entity_data.get("spawnflags", None)
        return [name for name, (bit, _) in {'Fade Corpse': (16, 0)}.items()
                if value & bit > 0]

    @property
    def NPCType(self):
        return self._entity_data.get('NPCType')

    @property
    def NPCTargetname(self):
        return self._entity_data.get('NPCTargetname')

    @property
    def NPCSquadname(self):
        return self._entity_data.get('NPCSquadname')

    @property
    def NPCHintGroup(self):
        return self._entity_data.get('NPCHintGroup')

    @property
    def additionalequipment(self):
        return self._entity_data.get('additionalequipment', "0")
class BaseScripted(Targetname, Parentname):
    """Keyvalue accessors shared by scripted-sequence entities."""

    @property
    def m_iszEntity(self):
        return self._entity_data.get('m_iszEntity')

    @property
    def m_iszIdle(self):
        return self._entity_data.get('m_iszIdle', "")

    @property
    def m_iszEntry(self):
        return self._entity_data.get('m_iszEntry', "")

    @property
    def m_iszPlay(self):
        return self._entity_data.get('m_iszPlay', "")

    @property
    def m_iszPostIdle(self):
        return self._entity_data.get('m_iszPostIdle', "")

    @property
    def m_iszCustomMove(self):
        return self._entity_data.get('m_iszCustomMove', "")

    @property
    def sync_group(self):
        return self._entity_data.get('sync_group')

    @property
    def m_bLoopActionSequence(self):
        return bool(self._entity_data.get('m_bLoopActionSequence', 0))

    @property
    def m_bSynchPostIdles(self):
        return bool(self._entity_data.get('m_bSynchPostIdles', 0))

    @property
    def m_bAllowCustomInterruptConditions(self):
        return bool(self._entity_data.get('m_bAllowCustomInterruptConditions', 0))

    @property
    def conflict_response(self):
        return self._entity_data.get('conflict_response', "0")

    @property
    def m_nGroundIKPreference(self):
        return self._entity_data.get('m_nGroundIKPreference', "0")

    @property
    def m_flRadius(self):
        return int(self._entity_data.get('m_flRadius', 0))

    @property
    def m_flRepeat(self):
        return int(self._entity_data.get('m_flRepeat', 0))

    @property
    def m_fMoveTo(self):
        return self._entity_data.get('m_fMoveTo', "1")

    @property
    def m_iszNextScript(self):
        return self._entity_data.get('m_iszNextScript')

    @property
    def m_bIgnoreGravity(self):
        return bool(self._entity_data.get('m_bIgnoreGravity', 0))

    @property
    def m_bDisableNPCCollisions(self):
        return bool(self._entity_data.get('m_bDisableNPCCollisions', 0))

    @property
    def m_bKeepAnimgraphLockedPost(self):
        return bool(self._entity_data.get('m_bKeepAnimgraphLockedPost', 0))
class scripted_sentence(Targetname):
    """Keyvalue accessors for the scripted_sentence entity."""

    icon_sprite = "editor/scripted_sentence.vmat"

    @property
    def spawnflags(self):
        # Decode the spawnflags bitmask into the names of the flags that are set.
        if "spawnflags" not in self._entity_data:
            return []
        value = self._entity_data.get("spawnflags", None)
        named_bits = {'Fire Once': (1, 1), 'Followers Only': (2, 0),
                      'Interrupt Speech': (4, 1), 'Concurrent': (8, 0),
                      'Speak to Activator': (16, 1)}
        return [name for name, (bit, _) in named_bits.items() if value & bit > 0]

    @property
    def sentence(self):
        return self._entity_data.get('sentence', "")

    @property
    def entity(self):
        return self._entity_data.get('entity')

    @property
    def delay(self):
        return self._entity_data.get('delay', "0")

    @property
    def radius(self):
        return int(self._entity_data.get('radius', 512))

    @property
    def refire(self):
        return self._entity_data.get('refire', "3")

    @property
    def listener(self):
        return self._entity_data.get('listener')

    @property
    def volume(self):
        return self._entity_data.get('volume', "10")

    @property
    def attenuation(self):
        return self._entity_data.get('attenuation', "0")
class scripted_target(Targetname, Parentname):
    """Keyvalue accessors for the scripted_target entity."""

    icon_sprite = "editor/info_target.vmat"

    @property
    def StartDisabled(self):
        return bool(self._entity_data.get('StartDisabled', 1))

    @property
    def m_iszEntity(self):
        return self._entity_data.get('m_iszEntity')

    @property
    def m_flRadius(self):
        return int(self._entity_data.get('m_flRadius', 0))

    @property
    def MoveSpeed(self):
        return int(self._entity_data.get('MoveSpeed', 5))

    @property
    def PauseDuration(self):
        return int(self._entity_data.get('PauseDuration', 0))

    @property
    def EffectDuration(self):
        return int(self._entity_data.get('EffectDuration', 2))

    @property
    def target(self):
        return self._entity_data.get('target')
class base_ai_relationship(Targetname):
    """Keyvalue accessors shared by ai_relationship entities."""

    icon_sprite = "editor/ai_relationship.vmat"

    @property
    def disposition(self):
        return self._entity_data.get('disposition', "3")

    @property
    def radius(self):
        return float(self._entity_data.get('radius', 0))

    @property
    def rank(self):
        return int(self._entity_data.get('rank', 0))

    @property
    def StartActive(self):
        return bool(self._entity_data.get('StartActive', 0))

    @property
    def Reciprocal(self):
        return bool(self._entity_data.get('Reciprocal', 0))

    @property
    def spawnflags(self):
        # Decode the spawnflags bitmask into the names of the flags that are set.
        if "spawnflags" not in self._entity_data:
            return []
        value = self._entity_data.get("spawnflags", None)
        named_bits = {"Notify subject of target's location": (1, 0),
                      "Notify target of subject's location": (2, 0)}
        return [name for name, (bit, _) in named_bits.items() if value & bit > 0]
class ai_relationship(base_ai_relationship):
    """Keyvalue accessors for the ai_relationship entity."""

    icon_sprite = "editor/ai_relationship.vmat"

    @property
    def subject(self):
        return self._entity_data.get('subject', "")

    @property
    def target(self):
        return self._entity_data.get('target', "")
class LeadGoalBase(Targetname):
    """Keyvalue accessors shared by lead-goal entities."""

    @property
    def actor(self):
        return self._entity_data.get('actor')

    @property
    def goal(self):
        return self._entity_data.get('goal')

    @property
    def WaitPointName(self):
        return self._entity_data.get('WaitPointName')

    @property
    def WaitDistance(self):
        return float(self._entity_data.get('WaitDistance', 0))

    @property
    def LeadDistance(self):
        return float(self._entity_data.get('LeadDistance', 64))

    @property
    def RetrieveDistance(self):
        return float(self._entity_data.get('RetrieveDistance', 96))

    @property
    def SuccessDistance(self):
        return float(self._entity_data.get('SuccessDistance', 0))

    @property
    def Run(self):
        return bool(self._entity_data.get('Run', 0))

    @property
    def Retrieve(self):
        return self._entity_data.get('Retrieve', "1")

    @property
    def ComingBackWaitForSpeak(self):
        return self._entity_data.get('ComingBackWaitForSpeak', "1")

    @property
    def RetrieveWaitForSpeak(self):
        return self._entity_data.get('RetrieveWaitForSpeak', "1")

    @property
    def DontSpeakStart(self):
        return self._entity_data.get('DontSpeakStart', "0")

    @property
    def LeadDuringCombat(self):
        return self._entity_data.get('LeadDuringCombat', "0")

    @property
    def GagLeader(self):
        return self._entity_data.get('GagLeader', "0")

    @property
    def AttractPlayerConceptModifier(self):
        return self._entity_data.get('AttractPlayerConceptModifier', "")

    @property
    def WaitOverConceptModifier(self):
        return self._entity_data.get('WaitOverConceptModifier', "")

    @property
    def ArrivalConceptModifier(self):
        return self._entity_data.get('ArrivalConceptModifier', "")

    @property
    def PostArrivalConceptModifier(self):
        return self._entity_data.get('PostArrivalConceptModifier')

    @property
    def SuccessConceptModifier(self):
        return self._entity_data.get('SuccessConceptModifier', "")

    @property
    def FailureConceptModifier(self):
        return self._entity_data.get('FailureConceptModifier', "")

    @property
    def ComingBackConceptModifier(self):
        return self._entity_data.get('ComingBackConceptModifier', "")

    @property
    def RetrieveConceptModifier(self):
        return self._entity_data.get('RetrieveConceptModifier', "")

    @property
    def spawnflags(self):
        # Decode the spawnflags bitmask into the names of the flags that are set.
        if "spawnflags" not in self._entity_data:
            return []
        value = self._entity_data.get("spawnflags", None)
        named_bits = {'No def success': (1, 0), 'No def failure': (2, 0),
                      'Use goal facing': (4, 1)}
        return [name for name, (bit, _) in named_bits.items() if value & bit > 0]
class ai_goal_lead(LeadGoalBase):
    """Keyvalue accessors for the ai_goal_lead entity."""

    icon_sprite = "editor/ai_goal_lead.vmat"

    @property
    def SearchType(self):
        return self._entity_data.get('SearchType', "0")
class FollowGoal(Targetname):
    """Keyvalue accessors shared by follow-goal entities."""

    @property
    def actor(self):
        return self._entity_data.get('actor')

    @property
    def goal(self):
        return self._entity_data.get('goal')

    @property
    def SearchType(self):
        return self._entity_data.get('SearchType', "0")

    @property
    def StartActive(self):
        return bool(self._entity_data.get('StartActive', 0))

    @property
    def MaximumState(self):
        return self._entity_data.get('MaximumState', "1")

    @property
    def Formation(self):
        return self._entity_data.get('Formation', "0")
class ai_goal_follow(FollowGoal):
    """Keyvalue accessors for the ai_goal_follow entity."""

    icon_sprite = "editor/ai_goal_follow.vmat"
class | |
# -*- coding: utf-8 -*-
"""Package where the MLPipeline class is defined."""
import json
import logging
import os
import re
import warnings
from collections import Counter, OrderedDict, defaultdict
from copy import deepcopy
from datetime import datetime
import numpy as np
import psutil
from graphviz import Digraph
from mlblocks.discovery import load_pipeline
from mlblocks.mlblock import MLBlock
LOGGER = logging.getLogger(__name__)
class MLPipeline():
"""MLPipeline Class.
The **MLPipeline** class represents a Machine Learning Pipeline, which
is an ordered collection of Machine Learning tools or Primitives,
represented by **MLBlock instances**, that will be fitted and then used
sequentially in order to produce results.
The MLPipeline has two working modes or phases: **fitting** and
**predicting**.
During the **fitting** phase, each MLBlock instance, or **block** will be
fitted and immediately after used to produce results on the same
fitting data.
These results will then be passed to the next block of the sequence
as its fitting data, and this process will be repeated until the last
block is fitted.
During the **predicting** phase, each block will be used to produce results
on the output of the previous one, until the last one has produced its
results, which will be returned as the prediction of the pipeline.
Attributes:
primitives (list):
List of the names of the primitives that compose this pipeline.
blocks (list):
OrderedDict of the block names and the corresponding MLBlock instances.
init_params (dict):
init_params dictionary, as given when the instance was created.
input_names (dict):
input_names dictionary, as given when the instance was created.
output_names (dict):
output_names dictionary, as given when the instance was created.
Args:
pipeline (str, list, dict or MLPipeline):
The pipeline argument accepts four different types with different interpretations:
* `str`: the name of the pipeline to search and load.
* `list`: the primitives list.
* `dict`: a complete pipeline specification.
* `MLPipeline`: another pipeline to be cloned.
primitives (list):
List with the names of the primitives that will compose this pipeline.
init_params (dict):
dictionary containing initialization arguments to be passed when creating the
MLBlocks instances. The dictionary keys must be the corresponding primitive names
and the values must be another dictionary that will be passed as ``**kargs`` to the
MLBlock instance.
input_names (dict):
dictionary that maps input variable names with the actual names expected by each
primitive. This allows reusing the same input argument for multiple primitives that
name it differently, as well as passing different values to primitives that expect
arguments named similarly.
output_names (dict):
dictionary that maps output variable names with the name these variables will be
given when stored in the context dictionary. This allows storing the output of
different primitives in different variables, even if the primitive output name is
the same one.
outputs (dict):
dictionary containing lists of output variables associated to a name.
verbose (bool):
whether to log the exceptions that occur when running the pipeline before
raising them or not.
"""
def _get_tunable_hyperparameters(self):
"""Get the tunable hyperperparameters from all the blocks in this pipeline."""
tunable = {}
for block_name, block in self.blocks.items():
tunable[block_name] = block.get_tunable_hyperparameters()
return tunable
    def _build_blocks(self):
        """Instantiate an MLBlock for every primitive in ``self.primitives``.

        Returns:
            tuple: ``(blocks, last_fit_block)`` where ``blocks`` is an
            ``OrderedDict`` mapping generated block names to ``MLBlock``
            instances and ``last_fit_block`` is the name of the last block
            whose ``_fit`` attribute is truthy, or ``None`` if none is.
        """
        blocks = OrderedDict()
        last_fit_block = None
        block_names_count = Counter()
        for primitive in self.primitives:
            # Primitives may be given as names (str) or as specification dicts
            # carrying a 'name' entry.
            if isinstance(primitive, str):
                primitive_name = primitive
            else:
                primitive_name = primitive['name']
            try:
                # Block names are made unique by appending an occurrence
                # counter: "primitive.name#1", "primitive.name#2", ...
                block_names_count.update([primitive_name])
                block_count = block_names_count[primitive_name]
                block_name = '{}#{}'.format(primitive_name, block_count)
                # Numbered init_params (keyed by block name) take precedence
                # over params keyed by the bare primitive name.
                block_params = self.init_params.get(block_name, dict())
                if not block_params:
                    block_params = self.init_params.get(primitive_name, dict())
                    if block_params and block_count > 1:
                        # Same un-numbered params reused for repeated primitives.
                        LOGGER.warning(('Non-numbered init_params are being used '
                                        'for more than one block %s.'), primitive_name)
                block = MLBlock(primitive, **block_params)
                blocks[block_name] = block
                if bool(block._fit):
                    last_fit_block = block_name
            except Exception:
                # Log which primitive failed before propagating the error.
                LOGGER.exception('Exception caught building MLBlock %s', primitive)
                raise
        return blocks, last_fit_block
@staticmethod
def _get_pipeline_dict(pipeline, primitives):
if isinstance(pipeline, dict):
return pipeline
elif isinstance(pipeline, str):
return load_pipeline(pipeline)
elif isinstance(pipeline, MLPipeline):
return pipeline.to_dict()
elif isinstance(pipeline, list):
if primitives is not None:
raise ValueError('if `pipeline` is a `list`, `primitives` must be `None`')
return {'primitives': pipeline}
elif pipeline is None:
if primitives is None:
raise ValueError('Either `pipeline` or `primitives` must be not `None`.')
return dict()
def _get_block_outputs(self, block_name):
"""Get the list of output variables for the given block."""
outputs = self._get_block_variables(
block_name,
'produce_output',
self.output_names.get(block_name, dict())
)
for context_name, output in outputs.items():
output['variable'] = '{}.{}'.format(block_name, context_name)
return list(outputs.values())
def _get_block_variables(self, block_name, variables_attr, names):
"""Get dictionary of variable names to the variable for a given block
Args:
block_name (str):
Name of the block for which to get the specification
variables_attr (str):
Name of the attribute that has the variables list. It can be
`fit_args`, `produce_args` or `produce_output`.
names (dict):
Dictionary used to translate the variable names.
"""
block = self.blocks[block_name]
variables = deepcopy(getattr(block, variables_attr))
if isinstance(variables, str):
variables = getattr(block.instance, variables)()
variable_dict = {}
for variable in variables:
name = variable['name']
context_name = names.get(name, name)
variable_dict[context_name] = variable
return variable_dict
def _get_outputs(self, pipeline, outputs):
"""Get the output definitions from the pipeline dictionary.
If the ``"default"`` entry does not exist, it is built using the
outputs from the last block in the pipeline.
"""
outputs = outputs or pipeline.get('outputs')
if outputs is None:
outputs = dict()
if 'default' not in outputs:
outputs['default'] = self._get_block_outputs(self._last_block_name)
return outputs
def _get_block_name(self, index):
"""Get the name of the block in the ``index`` position."""
return list(self.blocks.keys())[index]
    def __init__(self, pipeline=None, primitives=None, init_params=None,
                 input_names=None, output_names=None, outputs=None, verbose=True):
        # Normalize whatever was given as `pipeline` into a plain dict spec.
        pipeline = self._get_pipeline_dict(pipeline, primitives)
        # Explicit arguments take precedence over the pipeline dict entries.
        self.primitives = primitives or pipeline['primitives']
        self.init_params = init_params or pipeline.get('init_params', dict())
        self.blocks, self._last_fit_block = self._build_blocks()
        self._last_block_name = self._get_block_name(-1)
        self.input_names = input_names or pipeline.get('input_names', dict())
        self.output_names = output_names or pipeline.get('output_names', dict())
        self.outputs = self._get_outputs(pipeline, outputs)
        self.verbose = verbose
        tunable = pipeline.get('tunable_hyperparameters')
        if tunable is not None:
            self._tunable_hyperparameters = tunable
        else:
            # Not provided in the spec: collect them from the blocks themselves.
            self._tunable_hyperparameters = self._get_tunable_hyperparameters()
        hyperparameters = pipeline.get('hyperparameters')
        if hyperparameters:
            self.set_hyperparameters(hyperparameters)
        # Matches "block.name#N" with an optional ".variable" suffix.
        self._re_block_name = re.compile(r'(^[^#]+#\d+)(\..*)?')
def _get_str_output(self, output):
"""Get the outputs that correspond to the str specification."""
if output in self.outputs:
return self.outputs[output]
elif output in self.blocks:
return [{'name': output, 'variable': output}]
# return self._get_block_outputs(output)
elif '.' in output:
block_name, variable_name = output.rsplit('.', 1)
block = self.blocks.get(block_name)
if not block:
raise ValueError('Invalid block name: {}'.format(block_name))
for variable in block.produce_output:
if variable['name'] == variable_name:
output_variable = deepcopy(variable)
output_variable['variable'] = output
return [output_variable]
raise ValueError('Block {} has no output {}'.format(block_name, variable_name))
raise ValueError('Invalid Output Specification: {}'.format(output))
    def get_inputs(self, fit=True):
        """Get a relation of all the input variables required by this pipeline.

        The result is a dictionary that maps each required input variable name
        to its variable specification. Optionally include the fit arguments.

        Args:
            fit (bool):
                Optional argument to include fit arguments or not. Defaults to ``True``.

        Returns:
            dict:
                Dictionary specifying all the input variables.
                Each value contains the entry ``name``, as well as any other
                metadata that may have been included in the pipeline inputs
                specification.
        """
        inputs = dict()
        for block_name in reversed(self.blocks.keys()):  # iterates through pipeline backwards
            produce_outputs = self._get_block_variables(
                block_name,
                'produce_output',
                self.output_names.get(block_name, dict())
            )
            # Variables produced by this block are satisfied internally, so
            # they are no longer required as pipeline inputs.
            for produce_output_name in produce_outputs.keys():
                inputs.pop(produce_output_name, None)
            produce_inputs = self._get_block_variables(
                block_name,
                'produce_args',
                self.input_names.get(block_name, dict())
            )
            inputs.update(produce_inputs)
            if fit:
                fit_inputs = self._get_block_variables(
                    block_name,
                    'fit_args',
                    self.input_names.get(block_name, dict())
                )
                inputs.update(fit_inputs)
        return inputs
def get_fit_args(self):
return list(self.get_inputs(fit=True).values())
def get_predict_args(self):
return list(self.get_inputs(fit=False).values())
def get_outputs(self, outputs='default'):
"""Get the list of output variables that correspond to the specified outputs.
Outputs specification can either be a single string, a single integer, or a
list of strings and integers.
If strings are given, they can either be one of the named outputs that have
been specified on the pipeline definition or the name of a block, including the
counter number at the end, or a full variable specification following the format
``{block-name}.{variable-name}``.
Alternatively, integers can be passed as indexes of the blocks from which to get
the outputs.
If output specifications that resolve to multiple output variables are given,
such as the named outputs or block names, all the variables are concatenated
together, in order, in a single variable list.
Args:
outputs (str, int or list[str or int]):
Single or list of output specifications.
Returns:
list:
List of dictionaries specifying all the output variables. Each
dictionary contains the entries ``name`` and ``variable``, as
well as any other metadata that may have been included in the
pipeline outputs | |
= code.co_varnames[i]
if name != 'self':
arg = locals_.get(name, '*undefined*')
if arg:
if isinstance(arg, (list, tuple)):
val = '[%s]' % ','.join([self.show(z) for z in arg if self.show(z)])
else:
val = self.show(arg)
if val:
result.append('%s=%s' % (name, val))
return ','.join(result)
#@+node:ekr.20140402060647.16845: *4* do_line (Sherlock)
    def do_line(self, frame, arg):
        '''Print the source line about to be executed, for enabled functions.'''
        code = frame.f_code
        fn = code.co_filename
        locals_ = frame.f_locals
        name = code.co_name
        full_name = self.get_full_name(locals_, name)
        if self.is_enabled(fn, full_name, self.patterns):
            n = frame.f_lineno - 1 # Apparently, the first line is line 1.
            # Cache each traced file's lines in self.contents_d, keyed by path.
            d = self.contents_d
            lines = d.get(fn)
            if not lines:
                with open(fn) as f:
                    s = f.read()
                lines = g.splitLines(s)
                d[fn] = lines
            # Guard against stale caches / generated frames past EOF.
            line = lines[n].rstrip() if n < len(lines) else '<EOF>'
            if 1:
                # Compact format: bare function name plus the source line.
                # i = full_name.find('::')
                # name = full_name if i == -1 else full_name[i+2:]
                print('%3s %s' % (name, line))
            else:
                # Verbose alternative, kept for debugging.
                print('%s %s %s %s' % (g.shortFileName(fn), n, full_name, line))
#@+node:ekr.20130109154743.10172: *4* do_return & helper
    def do_return(self, frame, arg): # Arg *is* used below.
        '''Trace a return statement.'''
        import os
        code = frame.f_code
        fn = code.co_filename
        locals_ = frame.f_locals
        name = code.co_name
        full_name = self.get_full_name(locals_, name)
        if self.is_enabled(fn, full_name, self.patterns):
            # Walk the stack to compute depth; self.n frames are ignored.
            n = 0
            while frame:
                frame = frame.f_back
                n += 1
            dots = '.' * max(0, n - self.n) if self.dots else ''
            path = '%-20s' % (os.path.basename(fn)) if self.verbose else ''
            if name and name == '__init__':
                try:
                    # __init__ returns None; show the constructed object instead.
                    ret1 = locals_ and locals_.get('self', None)
                    ret = self.format_ret(ret1)
                except NameError:
                    ret = '<%s>' % ret1.__class__.__name__
            else:
                ret = self.format_ret(arg)
            print('%s%s-%s%s' % (path, dots, full_name, ret))
#@+node:ekr.20130111120935.10192: *5* format_ret
    def format_ret(self, arg):
        '''Format arg, the value returned by a "return" statement.'''
        try:
            if isinstance(arg, types.GeneratorType):
                ret = '<generator>'
            elif isinstance(arg, (tuple, list)):
                ret = '[%s]' % ','.join([self.show(z) for z in arg])
                if len(ret) > 40:
                    # Too long: one element per line.
                    ret = '[\n%s]' % ('\n,'.join([self.show(z) for z in arg]))
            elif arg:
                ret = self.show(arg)
                if len(ret) > 40:
                    ret = '\n %s' % ret
            else:
                # Falsy values: '' for None, repr otherwise (0, '', False, ...).
                ret = '' if arg is None else repr(arg)
        except Exception:
            # Never let formatting break the trace: report the exception inline.
            exctype, value = sys.exc_info()[: 2]
            s = '<**exception: %s,%s arg: %r**>' % (exctype.__name__, value, arg)
            ret = ' ->\n %s' % (s) if len(s) > 40 else ' -> %s' % (s)
        return ' -> %s' % ret # if ret else ''
#@+node:ekr.20121128111829.12185: *4* fn_is_enabled
def fn_is_enabled(self, fn, patterns):
'''
Return True if tracing for fn is enabled.
Used only to enable *statistics* for fn.
'''
import re
try:
enabled, pattern = False, None
for pattern in patterns:
if pattern.startswith('+:'):
if re.match(pattern[2:], fn):
enabled = True
elif pattern.startswith('-:'):
if re.match(pattern[2:], fn):
enabled = False
return enabled
except Exception:
self.bad_pattern(pattern)
return False
#@+node:ekr.20130112093655.10195: *4* get_full_name
def get_full_name(self, locals_, name):
'''Return class_name::name if possible.'''
full_name = name
try:
user_self = locals_ and locals_.get('self', None)
if user_self:
full_name = user_self.__class__.__name__ + '::' + name
except Exception:
pass
return full_name
#@+node:ekr.20121128111829.12183: *4* is_enabled
def is_enabled(self, fn, name, patterns=None):
'''Return True if tracing for name in fn is enabled.'''
import re
enabled = False
if patterns is None: patterns = self.patterns
for pattern in patterns:
try:
if pattern.startswith('+:'):
if re.match(pattern[2:], fn):
enabled = True
elif pattern.startswith('-:'):
if re.match(pattern[2:], fn):
enabled = False
elif pattern.startswith('+'):
if re.match(pattern[1:], name):
enabled = True
elif pattern.startswith('-'):
if re.match(pattern[1:], name):
enabled = False
else:
self.bad_pattern(pattern)
except Exception:
self.bad_pattern(pattern)
return enabled
#@+node:ekr.20121128111829.12182: *4* print_stats
def print_stats(self, patterns=None):
'''Print all accumulated statisitics.'''
print('\nSherlock statistics...')
if not patterns: patterns = ['+.*', '+:.*',]
for fn in sorted(self.stats.keys()):
d = self.stats.get(fn)
if self.fn_is_enabled(fn, patterns):
result = sorted(d.keys())
else:
result = [key for key in sorted(d.keys()) if self.is_enabled(fn, key, patterns)]
if result:
print('')
fn = fn.replace('\\', '/')
parts = fn.split('/')
print('/'.join(parts[-2:]))
for key in result:
print('%4s %s' % (d.get(key), key))
#@+node:ekr.20121128031949.12614: *4* run
# Modified from pdb.Pdb.set_trace.
def run(self, frame=None):
'''Trace from the given frame or the caller's frame.'''
import sys
print('SherlockTracer.run:patterns:\n%s' % '\n'.join(self.patterns))
if frame is None:
frame = sys._getframe().f_back
# Compute self.n, the number of frames to ignore.
self.n = 0
while frame:
frame = frame.f_back
self.n += 1
# Pass self to sys.settrace to give easy access to all methods.
sys.settrace(self)
#@+node:ekr.20140322090829.16834: *4* push & pop
def push(self, patterns):
'''Push the old patterns and set the new.'''
self.pattern_stack.append(self.patterns)
self.set_patterns(patterns)
print('SherlockTracer.push: %s' % self.patterns)
def pop(self):
'''Restore the pushed patterns.'''
if self.pattern_stack:
self.patterns = self.pattern_stack.pop()
print('SherlockTracer.pop: %s' % self.patterns)
else:
print('SherlockTracer.pop: pattern stack underflow')
#@+node:ekr.20140326100337.16845: *4* set_patterns
def set_patterns(self, patterns):
'''Set the patterns in effect.'''
self.patterns = [z for z in patterns if self.check_pattern(z)]
#@+node:ekr.20140322090829.16831: *4* show
def show(self, item):
'''return the best representation of item.'''
if not item:
return repr(item)
if isinstance(item, dict):
return 'dict'
if isinstance(item, str):
s = repr(item)
if len(s) <= 20:
return s
return s[: 17] + '...'
return repr(item)
#@+node:ekr.20121128093229.12616: *4* stop
    def stop(self):
        '''Stop all tracing.'''
        import sys
        # Remove the global trace function installed by run().
        sys.settrace(None)
#@-others
#@+node:ekr.20080531075119.1: *3* class g.Tracer
class Tracer:
    '''A "debugger" that computes a call graph.
    To trace a function and its callers, put the following at the function's start:
    g.startTracer()
    '''
    #@+others
    #@+node:ekr.20080531075119.2: *4* __init__ (Tracer)
    def __init__(self, limit=0, trace=False, verbose=False):
        '''Ctor for the Tracer class.

        limit:   0 means no limit; otherwise limit the trace to n entries deep.
        trace:   True: print each call as it happens.
        verbose: True: print returns as well as calls.
        '''
        self.callDict = {}
            # Keys are function names.
            # Values are the number of times the function was called by the caller.
        self.calledDict = {}
            # Keys are function names.
            # Values are the total number of times the function was called.
        self.count = 0
        self.inited = False
        self.limit = limit # 0: no limit, otherwise, limit trace to n entries deep.
        self.stack = []
        self.trace = trace
        self.verbose = verbose # True: print returns as well as calls.
    #@+node:ekr.20080531075119.3: *4* computeName
    def computeName(self, frame):
        '''Return a dotted name for frame: [module.][Class.]function.'''
        if not frame: return ''
        code = frame.f_code; result = []
        module = inspect.getmodule(code)
        if module:
            module_name = module.__name__
            if module_name == 'leo.core.leoGlobals':
                # Abbreviate the leoGlobals module as 'g'.
                result.append('g')
            else:
                tag = 'leo.core.'
                if module_name.startswith(tag):
                    module_name = module_name[len(tag):]
                result.append(module_name)
        try:
            # This can fail during startup.
            self_obj = frame.f_locals.get('self')
            if self_obj: result.append(self_obj.__class__.__name__)
        except Exception:
            pass
        result.append(code.co_name)
        return '.'.join(result)
    #@+node:ekr.20080531075119.4: *4* report
    def report(self):
        '''Print the call graph accumulated in callDict and calledDict.'''
        if 0:
            # Debug toggle: dump the raw call stack.
            g.pr('\nstack')
            for z in self.stack:
                g.pr(z)
        g.pr('\ncallDict...')
        for key in sorted(self.callDict):
            # Print the calling function.
            g.pr('%d' % (self.calledDict.get(key, 0)), key)
            # Print the called functions.
            d = self.callDict.get(key)
            for key2 in sorted(d):
                g.pr('%8d' % (d.get(key2)), key2)
    #@+node:ekr.20080531075119.5: *4* stop
    def stop(self):
        '''Uninstall the trace function, then print the report.'''
        sys.settrace(None)
        self.report()
    #@+node:ekr.20080531075119.6: *4* tracer
    def tracer(self, frame, event, arg):
        '''A function to be passed to sys.settrace.'''
        n = len(self.stack)
        if event == 'return':
            n = max(0, n - 1)
        pad = '.' * n
        if event == 'call':
            if not self.inited:
                # Add an extra stack element for the routine containing the call to startTracer.
                self.inited = True
                name = self.computeName(frame.f_back)
                self.updateStats(name)
                self.stack.append(name)
            name = self.computeName(frame)
            if self.trace and (self.limit == 0 or len(self.stack) < self.limit):
                g.trace('%scall' % (pad), name)
            self.updateStats(name)
            self.stack.append(name)
            return self.tracer
        if event == 'return':
            if self.stack:
                name = self.stack.pop()
                if (
                    self.trace and
                    self.verbose and
                    (self.limit == 0 or len(self.stack) < self.limit)
                ):
                    g.trace('%sret ' % (pad), name)
            else:
                # More returns than calls: stop tracing entirely.
                g.trace('return underflow')
                self.stop()
                return None
            if self.stack:
                return self.tracer
            self.stop()
            return None
        return self.tracer
    #@+node:ekr.20080531075119.7: *4* updateStats
    def updateStats(self, name):
        '''Record one call edge (caller -> name) and bump name's total count.'''
        if not self.stack:
            return
        caller = self.stack[-1]
        d = self.callDict.get(caller, {})
            # d is a dict representing the called functions.
            # Keys are called functions, values are counts.
        d[name] = 1 + d.get(name, 0)
        self.callDict[caller] = d
        # Update the total counts.
        self.calledDict[name] = 1 + self.calledDict.get(name, 0)
    #@-others
def startTracer(limit=0, trace=False, verbose=False):
    '''Install a g.Tracer as the global trace function and return it.'''
    import sys
    tracer = g.Tracer(limit=limit, trace=trace, verbose=verbose)
    sys.settrace(tracer.tracer)
    return tracer
#@+node:ekr.20031219074948.1: *3* class g.Tracing/NullObject & helpers
#@@nobeautify
class NullObject:
"""An object that does nothing, and does it very well."""
def __init__(self, *args, **keys): pass
def __call__(self, *args, **keys): return self
def __repr__(self): return "NullObject"
def __str__(self): return "NullObject"
# Attribute access...
def __delattr__(self, attr): return self
def __getattr__(self, attr): return self
def __setattr__(self, attr, val): return self
# Container methods..
def __bool__(self): return False
def __contains__(self, item): return False
def __getitem__(self, key): raise KeyError
def __iter__(self): return self
def __len__(self): return | |
the scheduled_time of this Task.
:param scheduled_time: The scheduled_time of this Task. # noqa: E501
:type: int
"""
self._scheduled_time = scheduled_time
@property
def start_time(self):
"""Gets the start_time of this Task. # noqa: E501
:return: The start_time of this Task. # noqa: E501
:rtype: int
"""
return self._start_time
@start_time.setter
def start_time(self, start_time):
"""Sets the start_time of this Task.
:param start_time: The start_time of this Task. # noqa: E501
:type: int
"""
self._start_time = start_time
@property
def end_time(self):
"""Gets the end_time of this Task. # noqa: E501
:return: The end_time of this Task. # noqa: E501
:rtype: int
"""
return self._end_time
@end_time.setter
def end_time(self, end_time):
"""Sets the end_time of this Task.
:param end_time: The end_time of this Task. # noqa: E501
:type: int
"""
self._end_time = end_time
@property
def update_time(self):
"""Gets the update_time of this Task. # noqa: E501
:return: The update_time of this Task. # noqa: E501
:rtype: int
"""
return self._update_time
@update_time.setter
def update_time(self, update_time):
"""Sets the update_time of this Task.
:param update_time: The update_time of this Task. # noqa: E501
:type: int
"""
self._update_time = update_time
@property
def start_delay_in_seconds(self):
"""Gets the start_delay_in_seconds of this Task. # noqa: E501
:return: The start_delay_in_seconds of this Task. # noqa: E501
:rtype: int
"""
return self._start_delay_in_seconds
@start_delay_in_seconds.setter
def start_delay_in_seconds(self, start_delay_in_seconds):
"""Sets the start_delay_in_seconds of this Task.
:param start_delay_in_seconds: The start_delay_in_seconds of this Task. # noqa: E501
:type: int
"""
self._start_delay_in_seconds = start_delay_in_seconds
@property
def retried_task_id(self):
"""Gets the retried_task_id of this Task. # noqa: E501
:return: The retried_task_id of this Task. # noqa: E501
:rtype: str
"""
return self._retried_task_id
@retried_task_id.setter
def retried_task_id(self, retried_task_id):
"""Sets the retried_task_id of this Task.
:param retried_task_id: The retried_task_id of this Task. # noqa: E501
:type: str
"""
self._retried_task_id = retried_task_id
@property
def retried(self):
"""Gets the retried of this Task. # noqa: E501
:return: The retried of this Task. # noqa: E501
:rtype: bool
"""
return self._retried
@retried.setter
def retried(self, retried):
"""Sets the retried of this Task.
:param retried: The retried of this Task. # noqa: E501
:type: bool
"""
self._retried = retried
@property
def executed(self):
"""Gets the executed of this Task. # noqa: E501
:return: The executed of this Task. # noqa: E501
:rtype: bool
"""
return self._executed
@executed.setter
def executed(self, executed):
"""Sets the executed of this Task.
:param executed: The executed of this Task. # noqa: E501
:type: bool
"""
self._executed = executed
@property
def callback_from_worker(self):
"""Gets the callback_from_worker of this Task. # noqa: E501
:return: The callback_from_worker of this Task. # noqa: E501
:rtype: bool
"""
return self._callback_from_worker
@callback_from_worker.setter
def callback_from_worker(self, callback_from_worker):
"""Sets the callback_from_worker of this Task.
:param callback_from_worker: The callback_from_worker of this Task. # noqa: E501
:type: bool
"""
self._callback_from_worker = callback_from_worker
@property
def response_timeout_seconds(self):
"""Gets the response_timeout_seconds of this Task. # noqa: E501
:return: The response_timeout_seconds of this Task. # noqa: E501
:rtype: int
"""
return self._response_timeout_seconds
@response_timeout_seconds.setter
def response_timeout_seconds(self, response_timeout_seconds):
"""Sets the response_timeout_seconds of this Task.
:param response_timeout_seconds: The response_timeout_seconds of this Task. # noqa: E501
:type: int
"""
self._response_timeout_seconds = response_timeout_seconds
@property
def workflow_instance_id(self):
"""Gets the workflow_instance_id of this Task. # noqa: E501
:return: The workflow_instance_id of this Task. # noqa: E501
:rtype: str
"""
return self._workflow_instance_id
@workflow_instance_id.setter
def workflow_instance_id(self, workflow_instance_id):
"""Sets the workflow_instance_id of this Task.
:param workflow_instance_id: The workflow_instance_id of this Task. # noqa: E501
:type: str
"""
self._workflow_instance_id = workflow_instance_id
@property
def workflow_type(self):
"""Gets the workflow_type of this Task. # noqa: E501
:return: The workflow_type of this Task. # noqa: E501
:rtype: str
"""
return self._workflow_type
@workflow_type.setter
def workflow_type(self, workflow_type):
"""Sets the workflow_type of this Task.
:param workflow_type: The workflow_type of this Task. # noqa: E501
:type: str
"""
self._workflow_type = workflow_type
@property
def task_id(self):
"""Gets the task_id of this Task. # noqa: E501
:return: The task_id of this Task. # noqa: E501
:rtype: str
"""
return self._task_id
@task_id.setter
def task_id(self, task_id):
"""Sets the task_id of this Task.
:param task_id: The task_id of this Task. # noqa: E501
:type: str
"""
self._task_id = task_id
@property
def reason_for_incompletion(self):
"""Gets the reason_for_incompletion of this Task. # noqa: E501
:return: The reason_for_incompletion of this Task. # noqa: E501
:rtype: str
"""
return self._reason_for_incompletion
@reason_for_incompletion.setter
def reason_for_incompletion(self, reason_for_incompletion):
"""Sets the reason_for_incompletion of this Task.
:param reason_for_incompletion: The reason_for_incompletion of this Task. # noqa: E501
:type: str
"""
self._reason_for_incompletion = reason_for_incompletion
@property
def callback_after_seconds(self):
"""Gets the callback_after_seconds of this Task. # noqa: E501
:return: The callback_after_seconds of this Task. # noqa: E501
:rtype: int
"""
return self._callback_after_seconds
@callback_after_seconds.setter
def callback_after_seconds(self, callback_after_seconds):
"""Sets the callback_after_seconds of this Task.
:param callback_after_seconds: The callback_after_seconds of this Task. # noqa: E501
:type: int
"""
self._callback_after_seconds = callback_after_seconds
@property
def worker_id(self):
"""Gets the worker_id of this Task. # noqa: E501
:return: The worker_id of this Task. # noqa: E501
:rtype: str
"""
return self._worker_id
@worker_id.setter
def worker_id(self, worker_id):
"""Sets the worker_id of this Task.
:param worker_id: The worker_id of this Task. # noqa: E501
:type: str
"""
self._worker_id = worker_id
@property
def output_data(self):
"""Gets the output_data of this Task. # noqa: E501
:return: The output_data of this Task. # noqa: E501
:rtype: dict(str, object)
"""
return self._output_data
@output_data.setter
def output_data(self, output_data):
"""Sets the output_data of this Task.
:param output_data: The output_data of this Task. # noqa: E501
:type: dict(str, object)
"""
self._output_data = output_data
@property
def workflow_task(self):
"""Gets the workflow_task of this Task. # noqa: E501
:return: The workflow_task of this Task. # noqa: E501
:rtype: WorkflowTask
"""
return self._workflow_task
@workflow_task.setter
def workflow_task(self, workflow_task):
"""Sets the workflow_task of this Task.
:param workflow_task: The workflow_task of this Task. # noqa: E501
:type: WorkflowTask
"""
self._workflow_task = workflow_task
@property
def domain(self):
"""Gets the domain of this Task. # noqa: E501
:return: The domain of this Task. # noqa: E501
:rtype: str
"""
return self._domain
@domain.setter
def domain(self, domain):
"""Sets the domain of this Task.
:param domain: The domain of this Task. # noqa: E501
:type: str
"""
self._domain = domain
@property
def rate_limit_per_frequency(self):
"""Gets the rate_limit_per_frequency of this Task. # noqa: E501
:return: The rate_limit_per_frequency of this Task. # noqa: E501
:rtype: int
"""
return self._rate_limit_per_frequency
@rate_limit_per_frequency.setter
def rate_limit_per_frequency(self, rate_limit_per_frequency):
"""Sets the rate_limit_per_frequency of this Task.
:param rate_limit_per_frequency: The rate_limit_per_frequency of this Task. # noqa: E501
:type: int
"""
self._rate_limit_per_frequency = rate_limit_per_frequency
@property
def rate_limit_frequency_in_seconds(self):
"""Gets the rate_limit_frequency_in_seconds of this Task. # noqa: E501
:return: The rate_limit_frequency_in_seconds of this Task. # noqa: E501
:rtype: int
"""
return self._rate_limit_frequency_in_seconds
@rate_limit_frequency_in_seconds.setter
def rate_limit_frequency_in_seconds(self, rate_limit_frequency_in_seconds):
"""Sets the rate_limit_frequency_in_seconds of this Task.
:param rate_limit_frequency_in_seconds: The rate_limit_frequency_in_seconds of this Task. # noqa: E501
:type: int
"""
self._rate_limit_frequency_in_seconds = rate_limit_frequency_in_seconds
@property
def external_input_payload_storage_path(self):
"""Gets the external_input_payload_storage_path of this Task. # noqa: E501
:return: The external_input_payload_storage_path of this Task. # noqa: E501
:rtype: str
"""
return self._external_input_payload_storage_path
@external_input_payload_storage_path.setter
def external_input_payload_storage_path(self, external_input_payload_storage_path):
"""Sets the external_input_payload_storage_path of this Task.
:param external_input_payload_storage_path: The external_input_payload_storage_path of this Task. # noqa: E501
:type: str
"""
self._external_input_payload_storage_path = external_input_payload_storage_path
@property
def external_output_payload_storage_path(self):
"""Gets the external_output_payload_storage_path of this Task. # noqa: E501
:return: The external_output_payload_storage_path of this Task. # noqa: E501
:rtype: str
"""
return self._external_output_payload_storage_path
@external_output_payload_storage_path.setter
def external_output_payload_storage_path(self, external_output_payload_storage_path):
"""Sets the external_output_payload_storage_path of this Task.
:param external_output_payload_storage_path: The external_output_payload_storage_path of this Task. # noqa: E501
:type: str
"""
self._external_output_payload_storage_path = external_output_payload_storage_path
@property
def workflow_priority(self):
"""Gets the workflow_priority of this Task. # noqa: E501
:return: The workflow_priority of this Task. # noqa: E501
:rtype: int
"""
return self._workflow_priority
@workflow_priority.setter
def workflow_priority(self, workflow_priority):
"""Sets the workflow_priority of this Task.
:param workflow_priority: The workflow_priority of this Task. # noqa: E501
:type: int
"""
self._workflow_priority = workflow_priority
@property
def execution_name_space(self):
"""Gets the execution_name_space of this Task. # noqa: E501
:return: The execution_name_space of this Task. # noqa: E501
:rtype: str
"""
return self._execution_name_space
@execution_name_space.setter
def execution_name_space(self, execution_name_space):
"""Sets the execution_name_space of this Task.
:param execution_name_space: The execution_name_space of this Task. # noqa: E501
:type: str
"""
self._execution_name_space = execution_name_space
@property
def isolation_group_id(self):
"""Gets the isolation_group_id of this Task. | |
<filename>elasticmagic/compiler.py
import operator
from collections import OrderedDict
from collections import namedtuple
from functools import partial
from elasticsearch import ElasticsearchException
from elasticmagic.attribute import AttributedField
from .compat import Iterable
from .compat import Mapping
from .compat import string_types
from .document import DOC_TYPE_JOIN_FIELD
from .document import DOC_TYPE_FIELD
from .document import DOC_TYPE_NAME_FIELD
from .document import DOC_TYPE_PARENT_FIELD
from .document import Document
from .document import DynamicDocument
from .document import get_doc_type_for_hit
from .document import mk_uid
from .expression import Bool
from .expression import Exists
from .expression import Filtered
from .expression import FunctionScore
from .expression import HighlightedField
from .expression import Ids
from .expression import MatchPhrase
from .expression import MatchPhrasePrefix
from .expression import Params
from .expression import Terms
from .result import BulkResult
from .result import CountResult
from .result import DeleteByQueryResult
from .result import DeleteResult
from .result import ExistsResult
from .result import ExplainResult
from .result import PutMappingResult
from .result import SearchResult
from .search import BaseSearchQuery
from .search import SearchQueryContext
from .types import ValidationError
from .util import collect_doc_classes
# Map a boolean operator to its legacy (pre-2.x) query-DSL keyword.
BOOL_OPERATOR_NAMES = {
    operator.and_: 'and',
    operator.or_: 'or',
}
# Map a boolean operator to the corresponding Bool compound-query factory.
BOOL_OPERATORS_MAP = {
    operator.and_: Bool.must,
    operator.or_: Bool.should,
}
DEFAULT_DOC_TYPE = '_doc'
# Parsed Elasticsearch server version, e.g. ESVersion(7, 10, 2).
ESVersion = namedtuple('ESVersion', ['major', 'minor', 'patch'])
# Feature flags describing what a particular Elasticsearch version supports.
# BUG FIX: the typename passed to namedtuple() previously read
# 'ExpressionFeatures', which did not match the bound name; that breaks
# pickling and produces a misleading repr. It must equal the bound name.
ElasticsearchFeatures = namedtuple(
    'ElasticsearchFeatures',
    [
        'supports_old_boolean_queries',
        'supports_missing_query',
        'supports_parent_id_query',
        'supports_bool_filter',
        'supports_search_exists_api',
        'supports_match_type',
        'supports_mapping_types',
        'supports_doc_type',
        'stored_fields_param',
        'script_source_field_name',
        'script_id_field_name',
        'source_include_param',
        'source_exclude_param',
        'patch_source_include_param',
        'patch_source_exclude_param',
        'supports_script_file',
        'supports_nested_script',
    ]
)
class CompilationError(Exception):
    """Raised when an expression cannot be compiled for the target
    Elasticsearch version / feature set."""
    pass
class MultiSearchError(ElasticsearchException):
    """Error for a failed multi-search (msearch) request item."""
    # NOTE(review): raised by multi-search result processing — confirm usage
    # sites elsewhere in the package.
    pass
def _is_emulate_doc_types_mode(features, doc_cls):
return (
not features.supports_mapping_types and
doc_cls and
doc_cls.get_doc_type() and
doc_cls.has_parent_doc_cls()
)
def _add_doc_type_fields_into_stored_fields(
        stored_fields, add_source
):
    """Append the internal doc-type name/parent fields (and optionally
    '_source') to a stored_fields specification.

    Raises:
        ValueError: when stored_fields is neither falsy, a string, nor a list.
    """
    extra = ['_source'] if add_source else []
    extra += [DOC_TYPE_NAME_FIELD, DOC_TYPE_PARENT_FIELD]
    if not stored_fields:
        return extra
    if isinstance(stored_fields, string_types):
        return [stored_fields] + extra
    if isinstance(stored_fields, list):
        return stored_fields + extra
    raise ValueError(
        'Unsupported stored_fields type: {}'.format(type(stored_fields))
    )
def _patch_stored_fields_in_params(features, params, add_source_field):
    """Rewrite the version-specific stored-fields request parameter so it
    also carries the internal doc-type fields; returns the mutated params."""
    param_name = features.stored_fields_param
    current = params.get(param_name)
    # The URL parameter form is a comma-separated string.
    if isinstance(current, string_types):
        current = current.split(',')
    patched = _add_doc_type_fields_into_stored_fields(
        current, add_source_field
    )
    params[param_name] = ','.join(patched)
    return params
def _has_custom_source(params):
source = params.get('_source')
return bool(
(
source not in (False, 'false') and
source != [False] and
source != ['false']
) or
params.get('_source_include') or
params.get('_source_exclude')
)
def _mk_doc_type(doc_types):
return ','.join(doc_types)
def _mk_doc_cls_map(doc_classes, supports_doc_type):
    """Build a mapping of doc_type name -> document class.

    doc_classes may be None, a single class, or an iterable of classes.
    On versions without doc types, a lone class is also registered under
    the synthetic '_doc' key.
    """
    if doc_classes is None:
        normalized = ()
    elif isinstance(doc_classes, Iterable):
        normalized = list(doc_classes)
    else:
        normalized = (doc_classes,)
    mapping = {cls.__doc_type__: cls for cls in normalized}
    if not supports_doc_type and len(normalized) == 1:
        mapping['_doc'] = normalized[0]
    return mapping
class Compiled(object):
    """Base compiler visitor: walks an expression tree and produces the
    corresponding request body (a plain dict/list/scalar structure).

    Concrete subclasses bind ``compiler`` and ``features`` for a
    particular Elasticsearch version.
    """
    compiler = None
    features = None
    def __init__(self, expression, params=None):
        self.expression = expression
        # Compile eagerly: body is the fully-visited expression tree.
        self.body = self.visit(expression)
        self.params = self.prepare_params(params or {})
    def prepare_params(self, params):
        # Hook for subclasses; the default is a pass-through.
        return params
    def visit(self, expr, **kwargs):
        """Dispatch on expr.__visit_name__ ('x' -> visit_x), then fall back
        to container handling; scalars are returned unchanged."""
        visit_name = None
        if hasattr(expr, '__visit_name__'):
            visit_name = expr.__visit_name__
        if visit_name:
            visit_func = getattr(self, 'visit_{}'.format(visit_name))
            return visit_func(expr, **kwargs)
        if isinstance(expr, dict):
            return self.visit_dict(expr)
        if isinstance(expr, (list, tuple)):
            return self.visit_list(expr)
        return expr
    def visit_params(self, params):
        # Visit both keys and values of a Params mapping.
        res = {}
        for k, v in params.items():
            res[self.visit(k)] = self.visit(v)
        return res
    def visit_dict(self, dct):
        return {self.visit(k): self.visit(v) for k, v in dct.items()}
    def visit_list(self, lst):
        return [self.visit(v) for v in lst]
    def visit_script(self, script):
        """Compile a Script, using version-specific field names.

        Raises CompilationError on unsupported versions or when no script
        body/id/file is provided.
        """
        if not self.features.supports_nested_script:
            raise CompilationError(
                'Elasticsearch v0.x and v1.x does not support Script')
        res = dict()
        if script.lang:
            res['lang'] = script.lang
        if script.script_params:
            res['params'] = script.script_params
        if script.inline:
            res[self.features.script_source_field_name] = script.inline
        elif script.id:
            res[self.features.script_id_field_name] = script.id
        elif self.features.supports_script_file and script.file:
            res['file'] = script.file
        else:
            raise CompilationError('Invalid arguments for Script')
        return self.visit_dict(res)
class CompiledEndpoint(Compiled):
    """Compiled request bound to a concrete API endpoint.

    Subclasses must implement ``process_result`` to turn the raw
    Elasticsearch response into a result object.
    """
    def process_result(self, raw_result):
        raise NotImplementedError
class CompiledExpression(Compiled):
    def __init__(self, expr, params=None, doc_classes=None):
        # NOTE: doc_classes must be assigned before the base __init__ runs,
        # because the base __init__ calls visit(), which reads
        # self.doc_classes in several visitors.
        self.doc_classes = doc_classes
        super(CompiledExpression, self).__init__(expr, params)
def visit_literal(self, expr):
return expr.obj
def visit_field(self, field):
return field._name
def visit_mapping_field(self, field):
return field._name
def visit_attributed_field(self, field):
return field._field._name
def visit_boost_expression(self, expr):
return '{}^{}'.format(self.visit(expr.expr), self.visit(expr.weight))
def visit_query_expression(self, expr):
return {
expr.__query_name__: self.visit(expr.params)
}
def visit_field_query(self, expr, **kwargs):
expr_params = Params(expr.params, **kwargs)
if expr_params:
params = {expr.__query_key__: self.visit(expr.query)}
params.update(expr_params)
return {
expr.__query_name__: {
self.visit(expr.field): params
}
}
else:
return {
expr.__query_name__: {
self.visit(expr.field): self.visit(expr.query)
}
}
def visit_match(self, expr):
if not self.features.supports_match_type and expr.type:
if expr.type == 'phrase':
return self.visit(
MatchPhrase(expr.field, expr.query, **expr.params)
)
elif expr.type == 'phrase_prefix':
return self.visit(
MatchPhrasePrefix(expr.field, expr.query, *expr.params)
)
else:
raise ValueError(
'Match query type is not supported: [{}]'.format(expr.type)
)
params = self.visit_field_query(expr, type=expr.type)
return params
def visit_range(self, expr):
field_params = {
self.visit(expr.field): self.visit(expr.params)
}
return {
'range': dict(self.visit(expr.range_params), **field_params)
}
@staticmethod
def _get_field_doc_cls(field):
if isinstance(field, AttributedField):
return field.get_parent()
def visit_term(self, term):
field_name = self.visit(term.field)
if field_name == '_id':
doc_cls = self._get_field_doc_cls(term.field)
if _is_emulate_doc_types_mode(self.features, doc_cls):
return self.visit(Ids([term.query], doc_cls))
elif (
self.doc_classes and
any(map(
partial(_is_emulate_doc_types_mode, self.features),
self.doc_classes
))
):
return self.visit(Ids([term.query], self.doc_classes))
return self.visit_field_query(term)
def visit_terms(self, expr):
field_name = self.visit(expr.field)
if field_name == '_id':
doc_cls = self._get_field_doc_cls(expr.field)
if _is_emulate_doc_types_mode(self.features, doc_cls):
return self.visit(Ids(expr.terms, doc_cls))
elif (
self.doc_classes and
any(map(
partial(_is_emulate_doc_types_mode, self.features),
self.doc_classes
))
):
return self.visit(Ids(expr.terms, self.doc_classes))
params = {self.visit(expr.field): self.visit(expr.terms)}
params.update(self.visit(expr.params))
return {
'terms': params
}
def visit_missing(self, expr):
if self.features.supports_missing_query:
return {
'missing': self.visit(expr.params)
}
return self.visit(
Bool.must_not(Exists(**expr.params))
)
def visit_multi_match(self, expr):
params = {
'query': self.visit(expr.query),
'fields': [self.visit(f) for f in expr.fields],
}
params.update(self.visit(expr.params))
return {
'multi_match': params
}
def visit_match_all(self, expr):
return {'match_all': self.visit(expr.params)}
def visit_query(self, expr):
params = {
'query': self.visit(expr.query)
}
if expr.params:
params.update(self.visit(expr.params))
return {
'fquery': params
}
return params
def visit_boolean_expression(self, expr):
if not self.features.supports_old_boolean_queries:
return self.visit(
BOOL_OPERATORS_MAP[expr.operator](*expr.expressions)
)
if expr.params:
params = {
'filters': [self.visit(e) for e in expr.expressions]
}
params.update(self.visit(expr.params))
else:
params = [self.visit(e) for e in expr.expressions]
return {
BOOL_OPERATOR_NAMES[expr.operator]: params
}
def visit_not(self, expr):
if not self.features.supports_old_boolean_queries:
return self.visit(Bool.must_not(expr))
if expr.params:
params = {
'filter': self.visit(expr.expr)
}
params.update(self.visit(expr.params))
else:
params = self.visit(expr.expr)
return {
'not': params
}
def visit_sort(self, expr):
if expr.params:
params = {'order': self.visit(expr.order)}
params.update(self.visit(expr.params))
return {
self.visit(expr.expr): params
}
elif expr.order:
return {
self.visit(expr.expr): self.visit(expr.order)
}
else:
return self.visit(expr.expr)
def visit_sort_script(self, sort_script):
res = dict(script=dict())
if sort_script.script_type:
res['type'] = sort_script.script_type
if sort_script.order:
res['order'] = sort_script.order
if sort_script.script:
res['script'] = self.visit(sort_script.script)
else:
raise CompilationError('Invalid arguments for ScriptSort')
return self.visit_dict({'_script': res})
def visit_agg(self, agg):
return {
agg.__agg_name__: self.visit(agg.params)
}
def visit_bucket_agg(self, agg):
params = {
agg.__agg_name__: self.visit(agg.params)
}
if agg._aggregations:
params['aggregations'] = self.visit(agg._aggregations)
return params
def visit_filter_agg(self, agg):
params = self.visit_bucket_agg(agg)
params[agg.__agg_name__] = self.visit(agg.filter)
return params
def visit_top_hits_agg(self, agg):
params = self.visit(agg.params)
if not self.features.supports_mapping_types:
self._patch_stored_fields(params, self.doc_classes)
return {
agg.__agg_name__: params
}
def visit_source(self, expr):
if expr.include or expr.exclude:
params = {}
if expr.include:
params['include'] = self.visit(expr.include)
if expr.exclude:
params['exclude'] = self.visit(expr.exclude)
return params
if isinstance(expr.fields, bool):
return expr.fields
return [self.visit(f) for f in expr.fields]
def visit_query_rescorer(self, rescorer):
    """Compile a query rescorer: ``{"query": <compiled params>}``."""
    compiled = self.visit(rescorer.params)
    return {'query': compiled}
def visit_rescore(self, rescore):
    """Compile a rescore clause, attaching ``window_size`` when given."""
    clause = self.visit(rescore.rescorer)
    if rescore.window_size is not None:
        clause['window_size'] = rescore.window_size
    return clause
def visit_highlighted_field(self, hf):
    """Compile one highlighted field into ``{field: options}``."""
    field_name = self.visit(hf.field)
    return {field_name: self.visit(hf.params)}
def visit_highlight(self, highlight):
    """Compile a ``highlight`` section.

    ``highlight.fields`` may be a mapping of field -> options, or an
    iterable mixing plain field names with pre-configured
    ``HighlightedField``/mapping entries; both forms are normalized here.
    """
    compiled = self.visit(highlight.params)
    fields = highlight.fields
    if fields:
        if isinstance(fields, Mapping):
            compiled['fields'] = {
                self.visit(name): self.visit(options)
                for name, options in fields.items()
            }
        elif isinstance(fields, Iterable):
            compiled['fields'] = [
                self.visit(f)
                if isinstance(f, (HighlightedField, Mapping))
                else {self.visit(f): {}}
                for f in fields
            ]
    return compiled
def visit_ids(self, expr):
    """Compile an ``ids`` query.

    In doc-type emulation mode the document ids are rewritten into the
    prefixed uid form produced by ``mk_uid``; otherwise the raw values
    are passed through unchanged.
    """
    params = self.visit(expr.params)
    if (
        isinstance(expr.type, type) and
        issubclass(expr.type, Document) and
        _is_emulate_doc_types_mode(self.features, expr.type)
    ):
        # An explicit document class in emulation mode: prefix every id
        # with that class's doc type.
        params['values'] = [
            mk_uid(expr.type.get_doc_type(), v)
            for v in expr.values
        ]
    elif (
        self.doc_classes and
        any(map(
            partial(_is_emulate_doc_types_mode, self.features),
            self.doc_classes
        ))
    ):
        # No explicit type, but the search targets at least one emulated
        # doc class: expand the ids once per emulated class.
        ids = []
        for doc_cls in self.doc_classes:
            if _is_emulate_doc_types_mode(self.features, doc_cls):
                ids.extend(
                    mk_uid(doc_cls.__doc_type__, v)
                    for v in expr.values
                )
        params['values'] = ids
    else:
        params['values'] = expr.values
    if expr.type:
        # A document class contributes its __doc_type__; anything else
        # is compiled like any other expression.
        doc_type = getattr(expr.type, '__doc_type__', None)
        if doc_type:
            params['type'] = doc_type
        else:
            params['type'] = self.visit(expr.type)
    return {
        'ids': params
    }
def visit_parent_id(self, expr):
    """Compile a ``parent_id`` query.

    In doc-type emulation mode the parent id is prefixed (via
    ``mk_uid``) with the parent document type to match the emulated
    ``_id`` layout.

    Raises:
        CompilationError: on pre-5.x clusters, or when the child type
            cannot be determined.
    """
    if not self.features.supports_parent_id_query:
        raise CompilationError(
            'Elasticsearch before 5.x does not have support for '
            'parent_id query'
        )
    if _is_emulate_doc_types_mode(self.features, expr.child_type):
        parent_doc_type = expr.child_type.__parent__.__doc_type__
        parent_id = mk_uid(parent_doc_type, expr.parent_id)
    else:
        parent_id = expr.parent_id
    # A document class contributes its __doc_type__; a plain string is
    # used as-is.
    child_type = getattr(expr.child_type, '__doc_type__', expr.child_type)
    if not child_type:
        raise CompilationError(
            "Cannot detect child type, specify 'child_type' argument"
        )
    return {'parent_id': {'type': child_type, 'id': parent_id}}
def visit_has_parent(self, expr):
    """Compile a ``has_parent`` query.

    The parent type is taken from ``expr.parent_type`` (a name or a
    document class); when absent it is inferred from the document
    classes referenced by the query body.

    Raises:
        CompilationError: when zero or more than one candidate parent
            type is found.
    """
    compiled = self.visit(expr.params)
    parent_type = getattr(expr.parent_type, '__doc_type__', expr.parent_type)
    if not parent_type:
        candidates = collect_doc_classes(expr.params)
        if len(candidates) > 1:
            raise CompilationError(
                'Too many candidates for parent type, '
                'should be only one'
            )
        if not candidates:
            raise CompilationError(
                'Cannot detect parent type, '
                'specify \'parent_type\' argument'
            )
        parent_type = next(iter(candidates)).__doc_type__
    compiled['parent_type'] = parent_type
    return {'has_parent': compiled}
def visit_has_child(self, expr):
params = self.visit(expr.params)
child_type = expr.type
if hasattr(child_type, '__doc_type__'):
child_type = child_type.__doc_type__
if not child_type:
child_doc_classes = expr.params._collect_doc_classes()
if len(child_doc_classes) == 1:
child_type = next(iter(child_doc_classes)).__doc_type__
elif len(child_doc_classes) > 1:
raise CompilationError(
'Too many candidates for child type, '
'should be only one'
)
| |
present in transitions line but not in reaction: '+self.name+':\n'+self.stoichLine())
for prod in prods:
if not self.exchange: # product are not included for exchange reactions in sbml
if not prod.name in prodsSBMLDict:
print prodsSBMLDict
raise Exception('Product '+prod.name+' present in transitions line but not in reaction: '+self.name+':\n'+self.stoichLine())
# Reversibilities
if prioritySBML:
if self.reversible:
self.transitionLine.conv2reversible()
else:
self.transitionLine.conv2irreversible()
else:
self.reversible = self.transitionLine.reversible
# Upper and Lower bounds
if self.fluxBounds:
measured,flux = fluxBounds(self.fluxBounds.net.lo,self.fluxBounds.net.hi,self.reversible,self.exchange)
self.fluxBounds = flux
def checkTransitionLineCompleteness(self,coremets):
    """
    Checks that transition line products and reactants are complete.

    coremets: collection of core metabolite names.

    If a metabolite appears both in the core metabolites and in the
    stoichiometry (SBML reactants/products of this reaction) it must
    also appear on the transition line; otherwise diagnostics are
    printed and an Exception is raised.  Products of exchange reactions
    are skipped, since they are not included in the SBML.
    """
    # SBML-side (stoichiometry) species and their name lookup dicts.
    reactsSBML = self.reactants
    prodsSBML = self.products
    reactsSBMLDict = self.reactDict
    prodsSBMLDict = self.prodDict
    # Transition-line species; deep-copied so the lookups below cannot
    # mutate the stored transition line.
    reacts = copy.deepcopy(self.transitionLine.reactants)
    prods = copy.deepcopy(self.transitionLine.products)
    reactsDict = reacts.metDict
    prodsDict = prods.metDict
    for react in reactsSBML:
        if (react.name in reactsSBMLDict) and (react.name in coremets) and not(react.name in reactsDict):
            print react.name
            print reactsDict
            print self.stoichLine()
            print self.transitionLine
            raise Exception('Reactant '+react.name+' present in core metabolites and in stochiometry but not in transitions for reaction: '+self.name)
    for prod in prodsSBML:
        if not self.exchange: # product are not included for exchange reactions in sbml
            if (prod.name in prodsSBMLDict) and (prod.name in coremets) and not(prod.name in prodsDict):
                print prod.name
                print prodsDict
                print self.stoichLine()
                print self.transitionLine
                raise Exception('Product '+prod.name+' present in core metabolites and in stochiometry but not in transitions for reaction: '+self.name)
def setGenes(self, geneSets=None):
    """
    Assign the GeneSet objects that control this Reaction.

    geneSets: optional iterable of GeneSet objects.  Each set is recast
    through a consolidated master GeneSet (self.genes) so that a gene
    named in several sets is represented by one shared Gene object; the
    recast copies are stored in self.geneSets.

    The sets combine as alternatives: with sets A and B the reaction is
    controlled by "(all genes of A) OR (all genes of B)".  Because each
    Gene (a NamedRangedNumber) is shared, setting its measurement value
    (typically RPKMs) in one set changes it in every set, including the
    master set.
    """
    # Consolidated master set holding one Gene object per gene name.
    self.genes = Genes.GeneSet()
    # Alternative sets of genes; any one complete set enables the reaction.
    self.geneSets = []
    if geneSets is None:
        return
    # recastSet() copies each incoming set while re-pointing its members
    # at the shared Gene objects kept in the master set.
    self.geneSets = [self.genes.recastSet(subSet) for subSet in geneSets]
def setProteins(self, proteinSets=None):
    """
    Assign the ProteinSet objects that act for this Reaction.

    proteinSets: optional iterable of ProteinSet objects.  Each set is
    recast through a consolidated master ProteinSet (self.proteins) so
    that a protein named in several sets is represented by one shared
    Protein object; the recast copies are stored in self.proteinSets.

    The sets combine as alternatives: with sets A and B the reaction is
    enabled by "(all proteins of A) OR (all proteins of B)".  Because
    each Protein (a NamedRangedNumber) is shared, setting its
    measurement value in one set changes it in every set, including the
    master set.
    """
    # Consolidated master set holding one Protein object per name.
    self.proteins = Proteins.ProteinSet()
    # Alternative sets of proteins; any one complete set suffices.
    self.proteinSets = []
    if proteinSets is None:
        return
    # recastSet() copies each incoming set while re-pointing its members
    # at the shared Protein objects kept in the master set.
    self.proteinSets = [self.proteins.recastSet(subSet) for subSet in proteinSets]
def getSignificantTranscriptomicsValue(self):
    """
    Test the transcription measurements of every GeneSet for significance.
    Returns True as soon as one set passes testSignificance() against
    TRANSCRIPTOMICS_SIGNIFICANCE_THRESHOLD, False otherwise.
    """
    return any(
        geneSet.testSignificance(TRANSCRIPTOMICS_SIGNIFICANCE_THRESHOLD)
        for geneSet in self.geneSets
    )
def getSignificantProteomicsValue(self):
    """
    Test the measurements of every ProteinSet for significance.
    Returns True as soon as one set passes testSignificance() against
    PROTEOMICS_SIGNIFICANCE_THRESHOLD (equality is not enough), False
    otherwise.
    """
    return any(
        proteinSet.testSignificance(
            PROTEOMICS_SIGNIFICANCE_THRESHOLD, equalIsntEnough=True)
        for proteinSet in self.proteinSets
    )
class MetaboliteName(str):
    """Subclass of str for metabolite names, that converts to BiGG Universal format, and provides other formats.
    For example, m = MetaboliteName("ala-L[c]") will show as "ala_L_c" when printed, and
        m.std will contain "ala-L[c]",
        m.sbml will contain "ala_L_c" (same as when printed),
        m.sbmlb will contain "ala_DASH_L_c",
        m.compact will contain "alaL",
        m.no_compartment will contain "ala_L",
        and m.compartment will contain "c".
    Note that some conversions will render nonsensical results for some inputs, for example
    m = MetaboliteName("12ppd__S_e") is a BiGG Universal name and will print as such,
    but m.std will then contain the nonsensical "12ppd--S[e]".
    """

    def __new__(cls, name):
        # Find out what type of name it is and normalize to SBML format.
        # `compartments` is a module-level collection of valid compartment letters.
        defaultCompartment = 'c'
        endingA = name[-3:]
        endingB = name[-2:]
        if (endingA[0] == '[' and endingA[2] == ']') and endingA[1] in compartments:
            # standard name (ala-L[c])
            newName = re.sub(r'\[(\w)\]', r'_\g<1>', name).replace('-', '_')
        elif (endingB[0] == '_') and endingB[1] in compartments and '_DASH_' not in name:
            # SBML (ala_L_c)
            newName = name.replace('-', '_')
        elif (endingB[0] == '_') and endingB[1] in compartments and '_DASH_' in name:
            # SBMLb (ala_DASH_L_c): going directly to underscores, for SBML name.
            newName = name.replace('_DASH_', '_')
        else:
            # Compact (alaL): append the default compartment.
            newName = name
            if (name[-1] == 'L' or name[-1] == 'D') and (name[-2] != '-'):
                newName = name[0:-1] + '_' + name[-1]
            newName = newName.replace('-L', '_L').replace('-D', '_D').replace('-', '_')
            newName = newName + '_' + str(defaultCompartment)
        return str.__new__(cls, newName)

    def __init__(self, name):
        # BUG FIX: __init__ receives the *original* constructor argument,
        # not the normalized value produced by __new__, so the derived
        # attributes were wrong for standard/SBMLb/compact inputs.
        # Derive everything from the normalized SBML name instead.
        name = str(self)
        # TODO: .std can be improved. It turns BiGG's metabolite
        # "12ppd__S_e" into the nonsensical "12ppd--S[e]".
        # http://bigg.ucsd.edu/models/iY75_1357/metabolites/12ppd_<EMAIL>
        self.compartment = name[-1]
        nameNoComp = name[0:-2]
        self.no_compartment = nameNoComp
        self.std = nameNoComp.replace('_', '-') + '[' + self.compartment + ']'
        # SBML name, a.k.a. BiGG Universal Identifier, stored in .sbml
        self.sbml = name
        # SBMLb name, used in some old SBML documents.
        # BUG FIX: replace '_D' before '_L' — the old order produced
        # 'ala_DASH_DASH_L_c' because '_DASH_L' itself contains '_D'.
        self.sbmlb = name.replace('_D', '_DASH_D').replace('_L', '_DASH_L')
        # Compact name, stored in .compact
        newName = name[0:-2]
        self.compact = newName[0:-2] + newName[-2:].replace('_L', 'L').replace('_D', 'D')

    def changeCompartment(self, newComp):
        "Returns another MetaboliteName instance with different compartment information"
        return MetaboliteName(self.sbml[0:-1] + newComp)
class ReactionName(str):
    """Class for reaction names, with methods to change the name to different formats.

    The constructor normalizes any of the known spellings to SBML format,
    e.g. ReactionName('EX_glc(e)') == 'EX_glc_e_'.
    """

    def __new__(cls, name):
        # Move name to SBML format (EX_glc_e_).
        newName = name.replace('_LPAREN_e_RPAREN_', '(e)').replace('_LPAREN(e)RPAREN_', '(e)').replace('(e)', '_e_').replace('_DASH_', '_')
        # BUG FIX: the previous code returned the plain str `newName`, so
        # the constructor never produced a ReactionName instance and
        # getNameIn() was unreachable.
        return str.__new__(cls, newName)

    def getNameIn(self, nameType):
        "Produces name in desired format: std (standard, EX_glc(e)) or SBML (EX_glc_e_)"
        # BUG FIX: a str subclass has no `.name` attribute; the stored
        # value is the string itself.
        if nameType == 'std':
            newName = self.replace('_LPAREN_e_RPAREN_', '(e)').replace('_LPAREN(e)RPAREN_', '(e)').replace('_e_', '(e)').replace('_DASH_', '-')
        elif nameType == 'SBML':
            newName = str(self)
        else:
            raise Exception('nameType unknown: ' + str(nameType))
        return newName
class EMU(): # TODO(Hector): redo, holding the Metabolite instance?
"""
Class for Elementary Metabolite Units (EMUs) as defined in
Antoniewicz MR, <NAME>, Stephanopoulos G: Elementary metabolite units (EMU): a novel framework for modeling isotopic distributions.
Metab Eng 2007, 9:68-86.
"""
def __init__(self,name,equivalent='default'):
    """
    name is the EMU name (e.g. cit_1_2_3_4_5_6)
    equivalent is an equivalent EMU name; when left at the 'default'
    sentinel, no self.equivalent attribute is set at all.
    """
    self.name = name
    # getMetName() is defined later in this class; presumably it derives
    # and caches the metabolite part of the EMU name — confirm there.
    self.getMetName()
    if equivalent != 'default':
        self.equivalent = equivalent
| |
# faker/providers/person/de_AT/__init__.py
from .. import Provider as PersonProvider
class Provider(PersonProvider):
formats_male = (
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}-{{last_name}}",
"{{prefix_male}} {{first_name_male}} {{last_name}}",
)
formats_female = (
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}-{{last_name}}",
"{{prefix_female}} {{first_name_female}} {{last_name}}",
)
formats = formats_male + formats_female
# source: https://www.data.gv.at/katalog/dataset/87fc82a0-0042-49c8-b6f9-2602cd3dc17a
first_names_male = (
"Aaron",
"Adam",
"Adrian",
"Adriano",
"Alan",
"Aleksander",
"Alex",
"Alexandar",
"Alexander",
"Andreas",
"Andrej",
"Angelo",
"Anton",
"Antonio",
"Antonius",
"Arda",
"Armin",
"Aron",
"Arthur",
"Aurelio",
"Axel",
"Bastian",
"Ben",
"Benedict",
"Benedikt",
"Beni",
"Benjamin",
"Bernhard",
"Boris",
"Bruno",
"Calvin",
"Carl",
"Carlo",
"Chris",
"Christian",
"Christoph",
"Christopher",
"Clemens",
"Constantin",
"Cornelius",
"Cristiano",
"Damian",
"Daniel",
"Danilo",
"Dario",
"Darius",
"Darko",
"David",
"Dennis",
"Dominik",
"Eduard",
"Elias",
"Elyas",
"Emanuel",
"Emil",
"Emilian",
"Emmanuel",
"Eric",
"Erik",
"Erwin",
"Fabian",
"Fabio",
"Felix",
"Ferdinand",
"Fernando",
"Filip",
"Finn",
"Florentin",
"Florian",
"Florin",
"Franz",
"Frederik",
"Fridolin",
"Friedrich",
"Gabriel",
"Georg",
"Gregor",
"Gustav",
"Heinrich",
"Henri",
"Henrik",
"Henry",
"Hubert",
"Hugo",
"Igor",
"Ilias",
"Isa",
"Ismail",
"Jacob",
"Jakob",
"James",
"Jamie",
"Jan",
"Jannik",
"Jannis",
"Jasper",
"Joel",
"Johann",
"Johannes",
"John",
"Jonas",
"Jonathan",
"Josef",
"Joseph",
"Joshua",
"Julian",
"Julius",
"Justin",
"Justus",
"Kai",
"Karim",
"Karl",
"Kevin",
"Kilian",
"Konrad",
"Konstantin",
"Kristian",
"Lars",
"Laurenz",
"Laurin",
"Lean",
"Leander",
"Lennard",
"Lennart",
"Leo",
"Leon",
"Leonard",
"Leonardo",
"Leonhard",
"Leopold",
"Levi",
"Liam",
"Lino",
"Linus",
"Lionel",
"Lorenz",
"Lorenzo",
"Louis",
"Luca",
"Lucas",
"Luis",
"Luka",
"Lukas",
"Maksim",
"Manuel",
"Marc",
"Marcel",
"Marco",
"Marcus",
"Mario",
"Marius",
"Mark",
"Marko",
"Markus",
"Martin",
"Marvin",
"Mateo",
"Matheo",
"Mathias",
"Matteo",
"Matthias",
"Maurice",
"Max",
"Maximilian",
"Merlin",
"Mert",
"Michael",
"Mika",
"Mike",
"Milan",
"Milo",
"Moritz",
"Natan",
"Nathan",
"Nicholas",
"Nick",
"Nico",
"Nicolai",
"Nicolas",
"Niklas",
"Niko",
"Nikola",
"Nikolai",
"Nikolas",
"Nikolaus",
"Nils",
"Nino",
"Noah",
"Noel",
"Oliver",
"Oscar",
"Oskar",
"Pascal",
"Patrick",
"Patrik",
"Paul",
"Peter",
"Philip",
"Philipp",
"Phillip",
"Raffael",
"Ralph",
"Raphael",
"Rene",
"Ricardo",
"Richard",
"Robert",
"Robin",
"Roman",
"Ruben",
"Sam",
"Samuel",
"Sandro",
"Sascha",
"Sebastian",
"Severin",
"Simon",
"Stefan",
"Stephan",
"Steven",
"Sven",
"Teodor",
"Theo",
"Theodor",
"Thomas",
"Tim",
"Timo",
"Timon",
"Tobias",
"Tom",
"Tristan",
"Valentin",
"Valentino",
"Victor",
"Viktor",
"Vincent",
"Vito",
"William",
"Xavier",
)
# source: https://www.data.gv.at/katalog/dataset/87fc82a0-0042-49c8-b6f9-2602cd3dc17a
first_names_female = (
"Adelina",
"Adriana",
"Ajna",
"Alara",
"Aleksandra",
"Alena",
"Alexa",
"Alexandra",
"Alexia",
"Alice",
"Alma",
"Amanda",
"Amelia",
"Amelie",
"Anabella",
"Anastasia",
"Andjela",
"Andjelina",
"Andrea",
"Angela",
"Angelika",
"Angelina",
"Anika",
"Anita",
"Anja",
"Anna",
"Anna-Lena",
"Anna-Maria",
"Annabell",
"Annabella",
"Annabelle",
"Annalena",
"Anne",
"Annika",
"Antonella",
"Antonia",
"Ariana",
"Ariane",
"Aurelia",
"Aurora",
"Ava",
"Aylin",
"Barbara",
"Beatrice",
"Bernadette",
"Berra",
"Bianca",
"Carina",
"Carla",
"Carlotta",
"Carolina",
"Caroline",
"Catharina",
"Cecilia",
"Charlotte",
"Christina",
"Christine",
"Claire",
"Clara",
"Clarissa",
"Claudia",
"Constanze",
"Cristina",
"Dana",
"Daniela",
"Denise",
"Diana",
"Dilara",
"Domenica",
"Dora",
"Eda",
"Edda",
"Ela",
"Elena",
"Eleonora",
"Elina",
"Elisa",
"Elisabeth",
"Ella",
"Ellie",
"Elma",
"Elona",
"Elsa",
"Elvira",
"Emanuela",
"Emely",
"Emilia",
"Emilie",
"Emilija",
"Emma",
"Erina",
"Estelle",
"Esther",
"Eva",
"Evelyn",
"Felicitas",
"Fiona",
"Florentina",
"Francesca",
"Franziska",
"Frida",
"Gabriela",
"Gloria",
"Hanna",
"Hannah",
"Heidi",
"Helena",
"Helene",
"Ina",
"Ines",
"Irina",
"Iris",
"Irma",
"Isabel",
"Isabell",
"Isabella",
"Isabelle",
"Jana",
"Janine",
"Jasmina",
"Jasmine",
"Jennifer",
"Jessica",
"Johanna",
"Josefine",
"Jovana",
"Julia",
"Juliana",
"Juliane",
"Julijana",
"Juna",
"Kalina",
"Karina",
"Karla",
"Karolina",
"Karoline",
"Katarina",
"Katharina",
"Katja",
"Kerstin",
"Klara",
"Kristina",
"Kyra",
"Laetitia",
"Laila",
"Lana",
"Lara",
"Lara-Sophie",
"Larissa",
"Laura",
"Laureen",
"Lea",
"Lea-Sophie",
"Leah",
"Leandra",
"Lena",
"Leni",
"Leona",
"Leoni",
"Leonie",
"Leonora",
"Leontina",
"Leticia",
"Leyla",
"Lia",
"Lilia",
"Lilian",
"Liliana",
"Liliane",
"Lilli",
"Lilly",
"Lily",
"Lina",
"Linda",
"Linnea",
"Lisa",
"Lisa-Marie",
"Lola",
"Lora",
"Lorena",
"Lotta",
"Lotte",
"Louisa",
"Louise",
"Luana",
"Lucia",
"Lucie",
"Luisa",
"Luise",
"Luna",
"Lydia",
"Madeleine",
"Magdalena",
"Maida",
"Maja",
"Malena",
"Manuela",
"Mara",
"Maria",
"Mariam",
"Mariana",
"Marie",
"Marie-Louise",
"Marie-Sophie",
"Mariella",
"Marijana",
"Marina",
"Marissa",
"Marlene",
"Marta",
"Martha",
"Martina",
"Maryam",
"Mathilda",
"Matilda",
"Maya",
"Melanie",
"Melek",
"Melina",
"Melisa",
"Melissa",
"Mia",
"Michaela",
"Michelle",
"Mila",
"Milica",
"Mina",
"Mira",
"Miriam",
"Mona",
"Nadia",
"Nadin",
"Nadine",
"Nadja",
"Naomi",
"Natalia",
"Natalie",
"Natascha",
"Nathalie",
"Nela",
"Nele",
"Nelly",
"Nicola",
"Nicole",
"Nika",
"Nikita",
"Nikola",
"Nikolina",
"Nina",
"Nisa",
"Nora",
"Norah",
"Olivia",
"Patricia",
"Paula",
"Paulina",
"Pauline",
"Petra",
"Philippa",
"Pia",
"Rachel",
"Raffaela",
"Rana",
"Rayana",
"Rebecca",
"Rita",
"Romy",
"Ronja",
"Ronya",
"Rosa",
"Rosalie",
"Ruth",
"Sabine",
"Sabrina",
"Sahra",
"Salma",
"Sandra",
"Sara",
"Sarah",
"Selena",
"Selin",
"Selina",
"Selma",
"Sena",
"Siena",
"Sigrid",
"Sofia",
"Sofie",
"Sofija",
"Sonja",
"Sophia",
"Sophie",
"Sophie-Marie",
"Soraya",
"Stefanie",
"Stella",
"Stephanie",
"Tamara",
"Tanja",
"Tea",
"Theodora",
"Theresa",
"Therese",
"Tiffany",
"Tina",
"Valentina",
"Vanessa",
"Vera",
"Verena",
"Veronika",
"Victoria",
"Viktoria",
"Viola",
"Violetta",
"Vivian",
"Yasmina",
"Ylvie",
"Yvonne",
"Zara",
"Zoe",
"Zoey",
)
first_names = first_names_male + first_names_female
# about 1000 of the most popular Austrian surnames
# https://de.wiktionary.org/wiki/Verzeichnis:Deutsch/Namen/die_h%C3%A4ufigsten_Nachnamen_%C3%96sterreichs
last_names = (
"Achleitner",
"Ackerl",
"Adam",
"Adler",
"Aichholzer",
"Aichinger",
"Aigner",
"Albrecht",
"Altmann",
"Amann",
"Amon",
"Anderl",
"Angerer",
"Arnold",
"Artner",
"Aschauer",
"Auer",
"Augustin",
"Auinger",
"Bacher",
"Bachinger",
"Bachler",
"Bachmann",
"Bader",
"Baier",
"Baldauf",
"Barth",
"Bartl",
"Bauer",
"Baumann",
"Baumgartner",
"Bayer",
"Beck",
"Becker",
"Beer",
"Berchtold",
"Berger",
"Bergmann",
"Berner",
"Bernhard",
"Berthold",
"Bichler",
"Biedermann",
"Binder",
"Bischof",
"Bitschnau",
"Bittner",
"Blauensteiner",
"Blum",
"Blümel",
"Bock",
"Bodner",
"Bogner",
"Brandl",
"Brandner",
"Brandstetter",
"Brandstätter",
"Brandtner",
"Braun",
"Brenner",
"Breuer",
"Bruckner",
"Brugger",
"Brunner",
"Bräuer",
"Buchberger",
"Buchegger",
"Bucher",
"Buchinger",
"Buchner",
"Burger",
"Burgstaller",
"Burtscher",
"Bäck",
"Böck",
"Böhler",
"Böhm",
"Bösch",
"Bürger",
"Dallinger",
"Dangl",
"Danner",
"Danninger",
"Decker",
"Dengg",
"Denk",
"Deutschmann",
"Dietl",
"Dietrich",
"Dirnberger",
"Dittrich",
"Dobler",
"Doppler",
"Dorfer",
"Dorn",
"Dorner",
"Draxler",
"Dünser",
"Eberhard",
"Eberharter",
"Eberl",
"Ebner",
"Ecker",
"Eder",
"Edlinger",
"Egger",
"Eibl",
"Eichberger",
"Eichhorn",
"Eichinger",
"Eisl",
"Eisner",
"Eller",
"Ender",
"Engel",
"Engl",
"Enzinger",
"Erber",
"Erhart",
"Erlacher",
"Erler",
"Ernst",
"Ertl",
"Fabian",
"Falkner",
"Fankhauser",
"Farkas",
"Fasching",
"Fehringer",
"Feichtenschlager",
"Feichter",
"Feichtinger",
"Feichtner",
"Feigl",
"Felber",
"Felder",
"Fellinger",
"Fellner",
"Fercher",
"Ferstl",
"Fichtinger",
"Fiedler",
"Fink",
"Fischer",
"Fitz",
"Fleck",
"Fleischhacker",
"Fleischmann",
"Foidl",
"Forster",
"Forstner",
"Frank",
"Franz",
"Freitag",
"Freudenthaler",
"Frey",
"Frick",
"Friedl",
"Friedrich",
"Frisch",
"Fritsch",
"Fritz",
"Froschauer",
"Fröhlich",
"Fröschl",
"Frühwirth",
"Fuchs",
"Fuhrmann",
"Füreder",
"Fürst",
"Gabriel",
"Gahleitner",
"Galler",
"Gamsjäger",
"Gangl",
"Gartner",
"Gasser",
"Gassner",
"Gattringer",
"Geier",
"Geiger",
"Geisler",
"Geyer",
"Gindl",
"Glaser",
"Glatz",
"Glück",
"Gmeiner",
"Gollner",
"Gosch",
"Grabher",
"Grabner",
"Graf",
"Grasser",
"Grassl",
"Gratz",
"Gratzer",
"Gratzl",
"Greiner",
"Griesser",
"Grill",
"Gritsch",
"Gross",
"Groß",
"Gruber",
"Grundner",
"Grünberger",
"Grüner",
"Grünwald",
"Gschaider",
"Gschwandtner",
"Gstrein",
"Guggenberger",
"Gutmann",
"Gärtner",
"Göschl",
"Götz",
"Günther",
"Haas",
"Haberl",
"Hacker",
"Hackl",
"Haderer",
"Hafner",
"Hagen",
"Hager",
"Hahn",
"Haid",
"Haiden",
"Haider",
"Haidinger",
"Haindl",
"Hainzl",
"Haller",
"Hammer",
"Hammerer",
"Hammerl",
"Handl",
"Handler",
"Haring",
"Harrer",
"Hartl",
"Hartmann",
"Haslauer",
"Haslinger",
"Hattinger",
"Hauer",
"Haumer",
"Hausberger",
"Hauser",
"Hebenstreit",
"Hechenberger",
"Heger",
"Heigl",
"Heim",
"Heindl",
"Heinrich",
"Heinz",
"Heinzl",
"Heiss",
"Heissenberger",
"Held",
"Hell",
"Heller",
"Helm",
"Hemetsberger",
"Herbst",
"Hermann",
"Herrmann",
"Herzog",
"Himmelbauer",
"Hinterberger",
"Hinteregger",
"Hinterleitner",
"Hirsch",
"Hirschmann",
"Hochleitner",
"Hochreiter",
"Hofbauer",
"Hofer",
"Hoffmann",
"Hofinger",
"Hofmann",
"Hofmeister",
"Hofstetter",
"Hofstätter",
"Holl",
"Hollaus",
"Holler",
"Holzer",
"Holzinger",
"Holzknecht",
"Holzmann",
"Horak",
"Horn",
"Hosp",
"Huber",
"Hubmann",
"Huemer",
"Hufnagl",
"Humer",
"Hummel",
"Hummer",
"Huter",
"Hutter",
"Hutterer",
"Hämmerle",
"Häusler",
"Hödl",
"Höfer",
"Höfler",
"Höglinger",
"Höller",
"Hölzl",
"Hörl",
"Hörmann",
"Hübner",
"Hütter",
"Jahn",
"Jandl",
"Janisch",
"Jank",
"Jauk",
"Jenewein",
"Jost",
"Jovanovic",
"Juen",
"Jung",
"Jungwirth",
"Jäger",
"Jöbstl",
"Kager",
"Kahr",
"Kain",
"Kaindl",
"Kainz",
"Kaiser",
"Kalcher",
"Kaltenbrunner",
"Kaltenböck",
"Kaltenegger",
"Kammerer",
"Kapeller",
"Kappel",
"Kargl",
"Karl",
"Karner",
"Karrer",
"Kaspar",
"Kasper",
"Kastner",
"Kaufmann",
"Keller",
"Kellner",
"Keplinger",
"Kern",
"Kerschbaum",
"Kerschbaumer",
"Kessler",
"Kirchmair",
"Kirchner",
"Kirschner",
"Kiss",
"Kitzler",
"Klammer",
"Klaus",
"Klausner",
"Klein",
"Klement",
"Klinger",
"Klingler",
"Klocker",
"Kloiber",
"Klotz",
"Klug",
"Knapp",
"Knaus",
"Knoll",
"Kober",
"Koch",
"Kocher",
"Kofler",
"Kogler",
"Kohl",
"Kohler",
"Kolar",
"Kolb",
"Koller",
"Kollmann",
"Kolm",
"Konrad",
"Kopf",
"Kopp",
"Koppensteiner",
"Kraft",
"Krainer",
"Krainz",
"Kral",
"Krall",
"Kramer",
"Krammer",
"Kratzer",
"Kraus",
"Kraxner",
"Kreidl",
"Kreiner",
"Kremser",
"Krenn",
"Kreuzer",
"Kriegl",
"Kronberger",
"Kronsteiner",
"Krug",
"Kröll",
"Kucera",
"Kugler",
"Kuhn",
"Kummer",
"Kunz",
"Kurz",
"Kurzmann",
"Käfer",
"Köberl",
"Köck",
"Köhler",
"Kölbl",
"Köll",
"König",
"Kössler",
"Lackner",
"Ladner",
"Lagler",
"Laimer",
"Lammer",
"Lampert",
"Lampl",
"Lamprecht",
"Landl",
"Lang",
"Langer",
"Larcher",
"Lassnig",
"Leber",
"Lechner",
"Lederer",
"Leeb",
"Lehner",
"Leibetseder",
"Leitgeb",
"Leithner",
"Leitner",
"Lengauer",
"Lenz",
"Leonhartsberger",
"Leopold",
"Lerch",
"Lercher",
"Lettner",
"Leutgeb",
"Lichtenegger",
"Linder",
"Lindinger",
"Lindner",
"Lindorfer",
"Lintner",
"Lipp",
"List",
"Loibl",
"Loidl",
"Lorenz",
"Ludwig",
"Luger",
"Luttenberger",
"Lutz",
"Löffler",
"Macher",
"Mader",
"Maier",
"Maierhofer",
"Mair",
"Mairhofer",
"Mandl",
"Mann",
"Margreiter",
"Maringer",
"Mark",
"Markl",
"Marte",
"Martin",
"Marx",
"Mathis",
"Maurer",
"Mayer",
"Mayerhofer",
"Mayr",
"Mayrhofer",
"Meier",
"Meindl",
"Meister",
"Meixner",
"Messner",
"Metzler",
"Meusburger",
"Meyer",
"Mitter",
"Mitteregger",
"Mitterer",
"Mitterlehner",
"Mittermayr",
"Mohr",
"Moosbrugger",
"Moritz",
"Moser",
"Muhr",
"Mörth",
"Mühlbacher",
"Mühlberger",
"Mühlböck",
"Müller",
"Müllner",
"Nagel",
"Nagele",
"Nagl",
"Nemeth",
"Neubacher",
"Neubauer",
"Neugebauer",
"Neuhauser",
"Neuhold",
"Neulinger",
"Neumann",
"Neumayer",
"Neumayr",
"Neumeister",
"Neumüller",
"Neuner",
"Neureiter",
"Neuwirth",
"Niederl",
"Nowak",
"Nussbaumer",
"Nußbaumer",
"Nöbauer",
"Oberhauser",
"Oberhofer",
"Oberleitner",
"Obermayr",
"Obermüller",
"Oberndorfer",
"Ofner",
"Ortner",
"Ostermann",
"Oswald",
"Ott",
"Pacher",
"Pachler",
"Paier",
"Pammer",
"Parzer",
"Pauer",
"Paul",
"Paulitsch",
"Payer",
"Peer",
"Peham",
"Pendl",
"Penz",
"Perner",
"Pertl",
"Pesendorfer",
"Peter",
"Petz",
"Pfeffer",
"Pfeifer",
"Pfeiffer",
"Pfister",
"Pfleger",
"Philipp",
"Pichler",
"Pieber",
"Pilz",
"Pinter",
"Pircher",
"Pirker",
"Plank",
"Plattner",
"Platzer",
"Pock",
"Pohl",
"Pointner",
"Pokorny",
"Pollak",
"Polzer",
"Posch",
"Postl",
"Prager",
"Prantl",
"Praxmarer",
"Prem",
"Prenner",
"Prinz",
"Probst",
"Prohaska",
"Pröll",
"Pucher",
"Puchner",
"Puntigam",
"Punz",
"Putz",
"Pöll",
"Pölzl",
"Pöschl",
"Pühringer",
"Raab",
"Rabitsch",
"Rabl",
"Radl",
"Rainer",
"Ramsauer",
"Rath",
"Rauch",
"Rausch",
"Rauscher",
"Rauter",
"Rechberger",
"Redl",
"Reich",
"Reichel",
"Reicher",
"Reichl",
"Reichmann",
"Reif",
"Reinbacher",
"Reindl",
"Reiner",
"Reinisch",
"Reinprecht",
"Reinthaler",
"Reischl",
"Reisinger",
"Reisner",
| |
# Source repository: backwardn/ccs-calendarserver
#
# Copyright (c) 2005-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from pycalendar.datetime import DateTime
from pycalendar.duration import Duration
from pycalendar.period import Period
from pycalendar.timezone import Timezone
from twext.python.log import Logger
from txweb2.http import HTTPError
from twisted.internet.defer import inlineCallbacks, returnValue
from twistedcaldav import customxml
from twistedcaldav.accounting import emitAccounting, accountingEnabled
from twistedcaldav.config import config
from twistedcaldav.ical import Property, DTSTAMP_PARAM
from twistedcaldav.instance import InvalidOverriddenInstanceError
from txdav.caldav.datastore.scheduling.freebusy import FreebusyQuery
from txdav.caldav.datastore.scheduling.itip import iTipProcessing, iTIPRequestStatus, \
iTipGenerator
from txdav.caldav.datastore.scheduling.utils import getCalendarObjectForRecord
from txdav.caldav.datastore.scheduling.utils import normalizeCUAddr
from txdav.caldav.datastore.scheduling.work import ScheduleRefreshWork, \
ScheduleAutoReplyWork
from txdav.caldav.icalendarstore import ComponentUpdateState, \
ComponentRemoveState, QueryMaxResources
from txdav.who.idirectory import AutoScheduleMode
import collections
import hashlib
import json
import uuid
"""
CalDAV implicit processing.
This module handles the processing of scheduling messages being delivered to a calendar user's inbox.
It determines who is scheduling (organizer or attendee) and applies the scheduling message changes
to the recipient's calendar data as well as depositing the scheduling message in the inbox. For users
who have an auto-accept option on, it will also handle the automatic response. Also, refreshes of other
attendees (when one attendee replies) are triggered from here.
"""
# Public API of this module.
__all__ = [
    "ImplicitProcessor",
    "ImplicitProcessorException",
]

# Module-level logger (twext.python.log).
log = Logger()
class ImplicitProcessorException(Exception):
    """
    Raised when implicit iTIP processing fails; carries the schedule
    status string (e.g. "5.1;Service unavailable") to report back.
    """

    def __init__(self, msg):
        # Pass the message to Exception so str(exc) and tracebacks show
        # it, while keeping the .msg attribute existing callers read.
        super(ImplicitProcessorException, self).__init__(msg)
        self.msg = msg
class ImplicitProcessor(object):
def __init__(self):
    # Stateless at construction: all per-message state (txn, message,
    # originator, recipient, ...) is assigned in doImplicitProcessing().
    pass
@inlineCallbacks
def doImplicitProcessing(self, txn, message, originator, recipient, noAttendeeRefresh=False):
    """
    Do implicit processing of a scheduling message, and possibly also auto-process it
    if the recipient has auto-accept on.

    @param txn: transaction used for all store operations
    @param message: the iTIP message
    @type message: L{twistedcaldav.ical.Component}
    @param originator: calendar user sending the message
    @type originator: C{str}
    @param recipient: calendar user receiving the message
    @type recipient: C{str}
    @param noAttendeeRefresh: if true, suppress refreshing other attendees
    @type noAttendeeRefresh: C{bool}

    @return: a C{tuple} of (C{bool}, C{bool}, C{bool}, C{bool}) indicating whether the message was processed,
    and if it was whether auto-processing has taken place, whether it needs to be stored in the inbox, and
    the changes property for the inbox item.
    """

    # Stash per-message state on the processor instance.
    self.txn = txn
    self.message = message
    self.originator = originator
    self.recipient = recipient
    self.noAttendeeRefresh = noAttendeeRefresh

    # TODO: for now going to assume that the originator is local - i.e. the scheduling message sent
    # represents the actual organizer's view.

    # First see whether this is the organizer or attendee sending the message
    self.extractCalendarData()

    if self.isOrganizerReceivingMessage():
        result = (yield self.doImplicitOrganizer())
    elif self.isAttendeeReceivingMessage():
        try:
            result = (yield self.doImplicitAttendee())
        except ImplicitProcessorException:
            # These we always pass up
            raise
        except Exception, e:
            # We attempt to recover from this. That involves trying to re-write the attendee data
            # to match that of the organizer assuming we have the organizer's full data available, then
            # we try the processing operation again.
            log.failure("{processor}.doImplicitAttendee()", processor=self)
            log.error("ImplicitProcessing - originator '{orig}' to recipient '{recip}' with UID: '{uid}' - exception raised will try to fix: {ex}", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid, ex=e)
            result = (yield self.doImplicitAttendeeEventFix(e))
            if result:
                # The attendee data was restored from the organizer's copy;
                # retry the processing once, giving up on a second failure.
                log.error("ImplicitProcessing - originator '{orig}' to recipient '{recip}' with UID: '{uid}' - restored organizer's copy", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
                try:
                    result = (yield self.doImplicitAttendee())
                except Exception, e:
                    log.failure("{processor}.doImplicitAttendee()", processor=self)
                    log.error("ImplicitProcessing - originator '{orig}' to recipient '{recip}' with UID: '{uid}' - exception raised after fix: {ex}", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid, ex=e)
                    raise ImplicitProcessorException("5.1;Service unavailable")
            else:
                log.error("ImplicitProcessing - originator '{orig}' to recipient '{recip}' with UID: '{uid}' - could not fix", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
                raise ImplicitProcessorException("5.1;Service unavailable")
    else:
        # METHOD is neither organizer- nor attendee-bound.
        log.error("METHOD:{method} not supported for implicit scheduling.", method=self.method)
        raise ImplicitProcessorException("3.14;Unsupported capability")

    returnValue(result)
def extractCalendarData(self):
    """
    Cache commonly used fields of the scheduling message on the processor:
    the iTIP METHOD property and the event UID.
    """
    message = self.message
    self.method = message.propertyValue("METHOD")
    self.uid = message.resourceUID()
def isOrganizerReceivingMessage(self):
    """
    Return whether the current iTIP METHOD is one that is delivered to an
    organizer (an attendee reply, a refresh request, or an internal restore).
    """
    organizer_methods = {"REPLY", "REFRESH", "X-RESTORE"}
    return self.method in organizer_methods
def isAttendeeReceivingMessage(self):
    """
    Return whether the current iTIP METHOD is one that is delivered to an
    attendee (an organizer-originated request, add, cancel, or poll status).
    """
    attendee_methods = {"REQUEST", "ADD", "CANCEL", "POLLSTATUS"}
    return self.method in attendee_methods
@inlineCallbacks
def getRecipientsCopy(self):
    """
    Get the Recipient's copy of the event being processed.

    Side effects: sets C{self.recipient_calendar} to a duplicate of the stored
    component (or C{None} when the recipient has no copy) and
    C{self.recipient_calendar_resource} to the matching store object.
    """
    self.recipient_calendar = None
    self.recipient_calendar_resource = None
    calendar_resource = (yield getCalendarObjectForRecord(self.txn, self.recipient.record, self.uid))
    if calendar_resource:
        # Duplicate the component so later in-place merges cannot mutate the
        # cached/stored copy.
        self.recipient_calendar = (yield calendar_resource.componentForUser(self.recipient.record.uid)).duplicate()
        self.recipient_calendar_resource = calendar_resource
        # NOTE(review): recipient_in_trash is only assigned when a resource was
        # found; callers check "recipient_calendar is None" first, which
        # short-circuits before this attribute is read. Confirm no other code
        # path reads it when no resource exists.
        self.recipient_in_trash = self.recipient_calendar_resource.isInTrash()
@inlineCallbacks
def doImplicitOrganizer(self):
    """
    Process an iTIP message sent to the organizer.

    @return: the 4-tuple produced by the processing step — presumably
        (handled, ignored, store-result, schedule-changes); verify element
        meanings against the caller.
    """
    # Locate the organizer's copy of the event.
    yield self.getRecipientsCopy()
    if self.recipient_calendar is None or self.recipient_in_trash:
        # No live copy to update - ignore the message.
        log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' ignoring UID: '{uid}' - organizer has no copy", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
        returnValue((True, True, False, None,))

    # Handle new items differently than existing ones.
    if self.method in ("REPLY", "X-RESTORE"):
        result = (yield self.doImplicitOrganizerUpdate())
    elif self.method == "REFRESH":
        # With implicit we ignore refreshes.
        # TODO: for iMIP etc we do need to handle them
        result = (True, True, False, None,)
    # NOTE(review): isOrganizerReceivingMessage() restricts self.method to the
    # three values handled above, so 'result' is always bound here.
    returnValue(result)
@inlineCallbacks
def doImplicitOrganizerUpdate(self):
    """
    An iTIP REPLY has been sent by an attendee to an organizer and the attendee state needs to be sync'd
    to the organizer's copy of the event.

    @return: a 4-tuple of processing results; the last element is the
        schedule-changes XML element (or C{None} for X-RESTORE / invalid
        replies).
    """
    # Check to see if this is a valid reply - this will also merge the changes to the organizer's copy
    result, processed = iTipProcessing.processReply(self.message, self.recipient_calendar)
    if result:
        # Let the store know that no time-range info has changed
        self.recipient_calendar.noInstanceIndexing = True

        # Update the organizer's copy of the event
        log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' processing METHOD:REPLY, UID: '{uid}' - updating event", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
        self.organizer_calendar_resource = (yield self.writeCalendarResource(None, self.recipient_calendar_resource, self.recipient_calendar))
        self.organizer_uid = self.organizer_calendar_resource.parentCollection().ownerHome().uid()
        self.organizer_calendar_resource_id = self.organizer_calendar_resource.id()

        organizer = self.recipient_calendar.getOrganizer()

        # 'processed' is (replying attendee address, per-recurrence change map)
        # as produced by iTipProcessing.processReply - TODO confirm shape.
        attendeeReplying, rids = processed
        if self.method == "X-RESTORE":
            # Restores refresh only the replying attendee, immediately.
            changes = None
            refreshNeeded = True
            yield self._doRefresh(self.organizer_calendar_resource, only_attendees=(attendeeReplying,))
        else:
            # Build the schedule-changes XML element
            refreshNeeded = False
            reply_details = (customxml.Attendee.fromString(attendeeReplying),)

            for rid, reply_changes in sorted(rids):
                recurrence = []
                if rid == "":
                    # An empty recurrence-id denotes the master instance.
                    recurrence.append(customxml.Master())
                else:
                    recurrence.append(customxml.RecurrenceID.fromString(rid))
                changes = []
                for param in reply_changes.params:
                    changes.append(customxml.ChangedProperty(customxml.ChangedParameter(name=param), name="ATTENDEE"))
                    # A changed ATTENDEE parameter (e.g. PARTSTAT) means other
                    # attendees need to see the update.
                    refreshNeeded = True
                for prop in reply_changes.props:
                    changes.append(customxml.ChangedProperty(name=prop))
                recurrence.append(customxml.Changes(*changes))
                reply_details += (customxml.Recurrence(*recurrence),)

            changes = customxml.ScheduleChanges(
                customxml.DTStamp(),
                customxml.Action(
                    customxml.Reply(*reply_details),
                ),
            )

            # Only update other attendees when the partstat was changed by the reply,
            # and only if the request does not indicate we should skip attendee refresh
            # (e.g. inbox item processing during migration from non-implicit server)
            if refreshNeeded and not self.noAttendeeRefresh:
                # Check limit of attendees
                if config.Scheduling.Options.AttendeeRefreshCountLimit == 0 or len(self.recipient_calendar.getAllUniqueAttendees()) <= config.Scheduling.Options.AttendeeRefreshCountLimit:
                    yield self.queueAttendeeUpdate((attendeeReplying, organizer,))

        result = (True, False, True, changes,)
    else:
        # Ignore scheduling message
        result = (True, True, False, None,)

    returnValue(result)
@inlineCallbacks
def queueAttendeeUpdate(self, exclude_attendees):
    """
    Queue up a background update to attendees.

    @param exclude_attendees: list of attendees who should not be refreshed (e.g., the one that triggered the refresh)
    @type exclude_attendees: C{list}
    """
    # Re-derive the UID from the (just updated) organizer copy.
    self.uid = self.recipient_calendar.resourceUID()

    # Check for batched refreshes
    if config.Scheduling.Options.AttendeeRefreshBatch:

        # Batch refresh those attendees that need it.
        allAttendees = sorted(list(self.recipient_calendar.getAllUniqueAttendees()))
        # NOTE(review): filter() returns a lazy iterator on Python 3; the
        # truthiness test below assumes Python 2 list semantics - confirm the
        # runtime, or wrap in list() when porting.
        allAttendees = filter(lambda x: x not in exclude_attendees, allAttendees)

        if allAttendees:
            yield self._enqueueBatchRefresh(allAttendees)
    else:
        # No batching: refresh everyone except the excluded attendees now.
        yield self._doRefresh(self.organizer_calendar_resource, exclude_attendees)
def _enqueueBatchRefresh(self, attendees):
    """
    Create a batch refresh work item for the supplied attendees. Kept as a
    separate method to allow for easy unit testing (tests can stub it out).

    @param attendees: the list of attendees to refresh
    @type attendees: C{list}
    """
    work = ScheduleRefreshWork.refreshAttendees(
        self.txn,
        self.recipient_calendar_resource,
        self.recipient_calendar,
        attendees,
    )
    return work
@inlineCallbacks
def _doRefresh(self, organizer_resource, exclude_attendees=(), only_attendees=None):
    """
    Do a refresh of attendees.

    @param organizer_resource: the resource for the organizer's calendar data
    @type organizer_resource: L{DAVResource}
    @param exclude_attendees: list of attendees to not refresh
    @type exclude_attendees: C{tuple}
    @param only_attendees: list of attendees to refresh (C{None} - refresh all)
    @type only_attendees: C{tuple}
    """
    log.debug("ImplicitProcessing - refreshing UID: '{uid}', Attendees: {att}", uid=self.uid, att=", ".join(only_attendees) if only_attendees else "all")
    # Imported locally rather than at module level - presumably to avoid a
    # circular import with the implicit scheduler module; verify before moving.
    from txdav.caldav.datastore.scheduling.implicit import ImplicitScheduler
    scheduler = ImplicitScheduler()
    yield scheduler.refreshAllAttendeesExceptSome(
        self.txn,
        organizer_resource,
        exclude_attendees,
        only_attendees=only_attendees,
    )
@inlineCallbacks
def doImplicitAttendee(self):
    """
    Process an iTIP message sent to an attendee.

    @return: the 4-tuple produced by doImplicitAttendeeUpdate(), or the
        ignore tuple when a CANCEL arrives for an event the attendee never had.
    """
    # Locate the attendee's copy of the event if it exists.
    yield self.getRecipientsCopy()
    # Track whether this message would create a brand new resource.
    self.new_resource = self.recipient_calendar is None

    # If we get a CANCEL and we don't have a matching resource already stored, simply
    # ignore the CANCEL.
    if self.new_resource and self.method == "CANCEL":
        log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' ignoring METHOD:CANCEL, UID: '{uid}' - attendee has no copy", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
        result = (True, True, False, None)
    else:
        result = (yield self.doImplicitAttendeeUpdate())
    returnValue(result)
@inlineCallbacks
def doImplicitAttendeeUpdate(self):
"""
| |
configurations)
if custom_ami_id is not None:
pulumi.set(__self__, "custom_ami_id", custom_ami_id)
if ebs_configuration is not None:
pulumi.set(__self__, "ebs_configuration", ebs_configuration)
if market is not None:
pulumi.set(__self__, "market", market)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="instanceCount")
def instance_count(self) -> pulumi.Input[int]:
    """``instanceCount``: number of instances configured for this group."""
    return pulumi.get(self, "instance_count")

@instance_count.setter
def instance_count(self, value: pulumi.Input[int]):
    pulumi.set(self, "instance_count", value)
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> pulumi.Input[str]:
    """``instanceType``: the EC2 instance type for this group."""
    return pulumi.get(self, "instance_type")

@instance_type.setter
def instance_type(self, value: pulumi.Input[str]):
    pulumi.set(self, "instance_type", value)
@property
@pulumi.getter(name="autoScalingPolicy")
def auto_scaling_policy(self) -> Optional[pulumi.Input['ClusterAutoScalingPolicyArgs']]:
    """``autoScalingPolicy``: optional auto-scaling policy for the group."""
    return pulumi.get(self, "auto_scaling_policy")

@auto_scaling_policy.setter
def auto_scaling_policy(self, value: Optional[pulumi.Input['ClusterAutoScalingPolicyArgs']]):
    pulumi.set(self, "auto_scaling_policy", value)
@property
@pulumi.getter(name="bidPrice")
def bid_price(self) -> Optional[pulumi.Input[str]]:
    """``bidPrice``: optional bid price, if set on the constructor."""
    return pulumi.get(self, "bid_price")

@bid_price.setter
def bid_price(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "bid_price", value)
@property
@pulumi.getter
def configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterConfigurationArgs']]]]:
    """``configurations``: optional list of cluster configurations."""
    return pulumi.get(self, "configurations")

@configurations.setter
def configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterConfigurationArgs']]]]):
    pulumi.set(self, "configurations", value)
@property
@pulumi.getter(name="customAmiId")
def custom_ami_id(self) -> Optional[pulumi.Input[str]]:
    """``customAmiId``: optional custom AMI id, if set on the constructor."""
    return pulumi.get(self, "custom_ami_id")

@custom_ami_id.setter
def custom_ami_id(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "custom_ami_id", value)
@property
@pulumi.getter(name="ebsConfiguration")
def ebs_configuration(self) -> Optional[pulumi.Input['ClusterEbsConfigurationArgs']]:
    """``ebsConfiguration``: optional EBS configuration for the group."""
    return pulumi.get(self, "ebs_configuration")

@ebs_configuration.setter
def ebs_configuration(self, value: Optional[pulumi.Input['ClusterEbsConfigurationArgs']]):
    pulumi.set(self, "ebs_configuration", value)
@property
@pulumi.getter
def market(self) -> Optional[pulumi.Input[str]]:
    """``market``: optional market type, if set on the constructor."""
    return pulumi.get(self, "market")

@market.setter
def market(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "market", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """``name``: optional display name, if set on the constructor."""
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "name", value)
@pulumi.input_type
class ClusterInstanceTypeConfigArgs:
    """Input arguments describing one instance-type entry of an EMR cluster fleet."""

    def __init__(__self__, *,
                 instance_type: pulumi.Input[str],
                 bid_price: Optional[pulumi.Input[str]] = None,
                 bid_price_as_percentage_of_on_demand_price: Optional[pulumi.Input[float]] = None,
                 configurations: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterConfigurationArgs']]]] = None,
                 custom_ami_id: Optional[pulumi.Input[str]] = None,
                 ebs_configuration: Optional[pulumi.Input['ClusterEbsConfigurationArgs']] = None,
                 weighted_capacity: Optional[pulumi.Input[int]] = None):
        # The single required argument is always stored.
        pulumi.set(__self__, "instance_type", instance_type)
        # Optional arguments are stored only when supplied, so unset values
        # stay absent rather than becoming explicit None entries.
        optional_args = (
            ("bid_price", bid_price),
            ("bid_price_as_percentage_of_on_demand_price", bid_price_as_percentage_of_on_demand_price),
            ("configurations", configurations),
            ("custom_ami_id", custom_ami_id),
            ("ebs_configuration", ebs_configuration),
            ("weighted_capacity", weighted_capacity),
        )
        for arg_name, arg_value in optional_args:
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> pulumi.Input[str]:
        """``instanceType`` constructor argument (required)."""
        return pulumi.get(self, "instance_type")

    @instance_type.setter
    def instance_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_type", value)

    @property
    @pulumi.getter(name="bidPrice")
    def bid_price(self) -> Optional[pulumi.Input[str]]:
        """``bidPrice`` constructor argument, if set."""
        return pulumi.get(self, "bid_price")

    @bid_price.setter
    def bid_price(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bid_price", value)

    @property
    @pulumi.getter(name="bidPriceAsPercentageOfOnDemandPrice")
    def bid_price_as_percentage_of_on_demand_price(self) -> Optional[pulumi.Input[float]]:
        """``bidPriceAsPercentageOfOnDemandPrice`` constructor argument, if set."""
        return pulumi.get(self, "bid_price_as_percentage_of_on_demand_price")

    @bid_price_as_percentage_of_on_demand_price.setter
    def bid_price_as_percentage_of_on_demand_price(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "bid_price_as_percentage_of_on_demand_price", value)

    @property
    @pulumi.getter
    def configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterConfigurationArgs']]]]:
        """``configurations`` constructor argument, if set."""
        return pulumi.get(self, "configurations")

    @configurations.setter
    def configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterConfigurationArgs']]]]):
        pulumi.set(self, "configurations", value)

    @property
    @pulumi.getter(name="customAmiId")
    def custom_ami_id(self) -> Optional[pulumi.Input[str]]:
        """``customAmiId`` constructor argument, if set."""
        return pulumi.get(self, "custom_ami_id")

    @custom_ami_id.setter
    def custom_ami_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "custom_ami_id", value)

    @property
    @pulumi.getter(name="ebsConfiguration")
    def ebs_configuration(self) -> Optional[pulumi.Input['ClusterEbsConfigurationArgs']]:
        """``ebsConfiguration`` constructor argument, if set."""
        return pulumi.get(self, "ebs_configuration")

    @ebs_configuration.setter
    def ebs_configuration(self, value: Optional[pulumi.Input['ClusterEbsConfigurationArgs']]):
        pulumi.set(self, "ebs_configuration", value)

    @property
    @pulumi.getter(name="weightedCapacity")
    def weighted_capacity(self) -> Optional[pulumi.Input[int]]:
        """``weightedCapacity`` constructor argument, if set."""
        return pulumi.get(self, "weighted_capacity")

    @weighted_capacity.setter
    def weighted_capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weighted_capacity", value)
@pulumi.input_type
class ClusterJobFlowInstancesConfigArgs:
    """Input arguments for the EC2/instance topology of an EMR cluster job flow."""

    def __init__(__self__, *,
                 additional_master_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 additional_slave_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 core_instance_fleet: Optional[pulumi.Input['ClusterInstanceFleetConfigArgs']] = None,
                 core_instance_group: Optional[pulumi.Input['ClusterInstanceGroupConfigArgs']] = None,
                 ec2_key_name: Optional[pulumi.Input[str]] = None,
                 ec2_subnet_id: Optional[pulumi.Input[str]] = None,
                 ec2_subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 emr_managed_master_security_group: Optional[pulumi.Input[str]] = None,
                 emr_managed_slave_security_group: Optional[pulumi.Input[str]] = None,
                 hadoop_version: Optional[pulumi.Input[str]] = None,
                 keep_job_flow_alive_when_no_steps: Optional[pulumi.Input[bool]] = None,
                 master_instance_fleet: Optional[pulumi.Input['ClusterInstanceFleetConfigArgs']] = None,
                 master_instance_group: Optional[pulumi.Input['ClusterInstanceGroupConfigArgs']] = None,
                 placement: Optional[pulumi.Input['ClusterPlacementTypeArgs']] = None,
                 service_access_security_group: Optional[pulumi.Input[str]] = None,
                 termination_protected: Optional[pulumi.Input[bool]] = None):
        # Every argument is optional: store only the ones actually supplied so
        # unset values remain absent rather than explicit None entries.
        supplied = (
            ("additional_master_security_groups", additional_master_security_groups),
            ("additional_slave_security_groups", additional_slave_security_groups),
            ("core_instance_fleet", core_instance_fleet),
            ("core_instance_group", core_instance_group),
            ("ec2_key_name", ec2_key_name),
            ("ec2_subnet_id", ec2_subnet_id),
            ("ec2_subnet_ids", ec2_subnet_ids),
            ("emr_managed_master_security_group", emr_managed_master_security_group),
            ("emr_managed_slave_security_group", emr_managed_slave_security_group),
            ("hadoop_version", hadoop_version),
            ("keep_job_flow_alive_when_no_steps", keep_job_flow_alive_when_no_steps),
            ("master_instance_fleet", master_instance_fleet),
            ("master_instance_group", master_instance_group),
            ("placement", placement),
            ("service_access_security_group", service_access_security_group),
            ("termination_protected", termination_protected),
        )
        for field_name, field_value in supplied:
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="additionalMasterSecurityGroups")
    def additional_master_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """``additionalMasterSecurityGroups`` constructor argument, if set."""
        return pulumi.get(self, "additional_master_security_groups")

    @additional_master_security_groups.setter
    def additional_master_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "additional_master_security_groups", value)

    @property
    @pulumi.getter(name="additionalSlaveSecurityGroups")
    def additional_slave_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """``additionalSlaveSecurityGroups`` constructor argument, if set."""
        return pulumi.get(self, "additional_slave_security_groups")

    @additional_slave_security_groups.setter
    def additional_slave_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "additional_slave_security_groups", value)

    @property
    @pulumi.getter(name="coreInstanceFleet")
    def core_instance_fleet(self) -> Optional[pulumi.Input['ClusterInstanceFleetConfigArgs']]:
        """``coreInstanceFleet`` constructor argument, if set."""
        return pulumi.get(self, "core_instance_fleet")

    @core_instance_fleet.setter
    def core_instance_fleet(self, value: Optional[pulumi.Input['ClusterInstanceFleetConfigArgs']]):
        pulumi.set(self, "core_instance_fleet", value)

    @property
    @pulumi.getter(name="coreInstanceGroup")
    def core_instance_group(self) -> Optional[pulumi.Input['ClusterInstanceGroupConfigArgs']]:
        """``coreInstanceGroup`` constructor argument, if set."""
        return pulumi.get(self, "core_instance_group")

    @core_instance_group.setter
    def core_instance_group(self, value: Optional[pulumi.Input['ClusterInstanceGroupConfigArgs']]):
        pulumi.set(self, "core_instance_group", value)

    @property
    @pulumi.getter(name="ec2KeyName")
    def ec2_key_name(self) -> Optional[pulumi.Input[str]]:
        """``ec2KeyName`` constructor argument, if set."""
        return pulumi.get(self, "ec2_key_name")

    @ec2_key_name.setter
    def ec2_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ec2_key_name", value)

    @property
    @pulumi.getter(name="ec2SubnetId")
    def ec2_subnet_id(self) -> Optional[pulumi.Input[str]]:
        """``ec2SubnetId`` constructor argument, if set."""
        return pulumi.get(self, "ec2_subnet_id")

    @ec2_subnet_id.setter
    def ec2_subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ec2_subnet_id", value)

    @property
    @pulumi.getter(name="ec2SubnetIds")
    def ec2_subnet_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """``ec2SubnetIds`` constructor argument, if set."""
        return pulumi.get(self, "ec2_subnet_ids")

    @ec2_subnet_ids.setter
    def ec2_subnet_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "ec2_subnet_ids", value)

    @property
    @pulumi.getter(name="emrManagedMasterSecurityGroup")
    def emr_managed_master_security_group(self) -> Optional[pulumi.Input[str]]:
        """``emrManagedMasterSecurityGroup`` constructor argument, if set."""
        return pulumi.get(self, "emr_managed_master_security_group")

    @emr_managed_master_security_group.setter
    def emr_managed_master_security_group(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "emr_managed_master_security_group", value)

    @property
    @pulumi.getter(name="emrManagedSlaveSecurityGroup")
    def emr_managed_slave_security_group(self) -> Optional[pulumi.Input[str]]:
        """``emrManagedSlaveSecurityGroup`` constructor argument, if set."""
        return pulumi.get(self, "emr_managed_slave_security_group")

    @emr_managed_slave_security_group.setter
    def emr_managed_slave_security_group(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "emr_managed_slave_security_group", value)

    @property
    @pulumi.getter(name="hadoopVersion")
    def hadoop_version(self) -> Optional[pulumi.Input[str]]:
        """``hadoopVersion`` constructor argument, if set."""
        return pulumi.get(self, "hadoop_version")

    @hadoop_version.setter
    def hadoop_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hadoop_version", value)

    @property
    @pulumi.getter(name="keepJobFlowAliveWhenNoSteps")
    def keep_job_flow_alive_when_no_steps(self) -> Optional[pulumi.Input[bool]]:
        """``keepJobFlowAliveWhenNoSteps`` constructor argument, if set."""
        return pulumi.get(self, "keep_job_flow_alive_when_no_steps")

    @keep_job_flow_alive_when_no_steps.setter
    def keep_job_flow_alive_when_no_steps(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "keep_job_flow_alive_when_no_steps", value)

    @property
    @pulumi.getter(name="masterInstanceFleet")
    def master_instance_fleet(self) -> Optional[pulumi.Input['ClusterInstanceFleetConfigArgs']]:
        """``masterInstanceFleet`` constructor argument, if set."""
        return pulumi.get(self, "master_instance_fleet")

    @master_instance_fleet.setter
    def master_instance_fleet(self, value: Optional[pulumi.Input['ClusterInstanceFleetConfigArgs']]):
        pulumi.set(self, "master_instance_fleet", value)

    @property
    @pulumi.getter(name="masterInstanceGroup")
    def master_instance_group(self) -> Optional[pulumi.Input['ClusterInstanceGroupConfigArgs']]:
        """``masterInstanceGroup`` constructor argument, if set."""
        return pulumi.get(self, "master_instance_group")

    @master_instance_group.setter
    def master_instance_group(self, value: Optional[pulumi.Input['ClusterInstanceGroupConfigArgs']]):
        pulumi.set(self, "master_instance_group", value)

    @property
    @pulumi.getter
    def placement(self) -> Optional[pulumi.Input['ClusterPlacementTypeArgs']]:
        """``placement`` constructor argument, if set."""
        return pulumi.get(self, "placement")

    @placement.setter
    def placement(self, value: Optional[pulumi.Input['ClusterPlacementTypeArgs']]):
        pulumi.set(self, "placement", value)

    @property
    @pulumi.getter(name="serviceAccessSecurityGroup")
    def service_access_security_group(self) -> Optional[pulumi.Input[str]]:
        """``serviceAccessSecurityGroup`` constructor argument, if set."""
        return pulumi.get(self, "service_access_security_group")

    @service_access_security_group.setter
    def service_access_security_group(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_access_security_group", value)

    @property
    @pulumi.getter(name="terminationProtected")
    def termination_protected(self) -> Optional[pulumi.Input[bool]]:
        """``terminationProtected`` constructor argument, if set."""
        return pulumi.get(self, "termination_protected")

    @termination_protected.setter
    def termination_protected(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "termination_protected", value)
@pulumi.input_type
class ClusterKerberosAttributesArgs:
    """Kerberos-related input arguments for an EMR cluster."""

    def __init__(__self__, *,
                 kdc_admin_password: pulumi.Input[str],
                 realm: pulumi.Input[str],
                 a_d_domain_join_password: Optional[pulumi.Input[str]] = None,
                 a_d_domain_join_user: Optional[pulumi.Input[str]] = None,
                 cross_realm_trust_principal_password: Optional[pulumi.Input[str]] = None):
        # Required attributes are always recorded.
        pulumi.set(__self__, "kdc_admin_password", kdc_admin_password)
        pulumi.set(__self__, "realm", realm)
        # Optional attributes are recorded only when provided.
        for attr_name, attr_value in (
            ("a_d_domain_join_password", a_d_domain_join_password),
            ("a_d_domain_join_user", a_d_domain_join_user),
            ("cross_realm_trust_principal_password", cross_realm_trust_principal_password),
        ):
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="kdcAdminPassword")
    def kdc_admin_password(self) -> pulumi.Input[str]:
        """``kdcAdminPassword`` constructor argument (required)."""
        return pulumi.get(self, "kdc_admin_password")

    @kdc_admin_password.setter
    def kdc_admin_password(self, value: pulumi.Input[str]):
        pulumi.set(self, "kdc_admin_password", value)

    @property
    @pulumi.getter
    def realm(self) -> pulumi.Input[str]:
        """``realm`` constructor argument (required)."""
        return pulumi.get(self, "realm")

    @realm.setter
    def realm(self, value: pulumi.Input[str]):
        pulumi.set(self, "realm", value)

    @property
    @pulumi.getter(name="aDDomainJoinPassword")
    def a_d_domain_join_password(self) -> Optional[pulumi.Input[str]]:
        """``aDDomainJoinPassword`` constructor argument, if set."""
        return pulumi.get(self, "a_d_domain_join_password")

    @a_d_domain_join_password.setter
    def a_d_domain_join_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "a_d_domain_join_password", value)

    @property
    @pulumi.getter(name="aDDomainJoinUser")
    def a_d_domain_join_user(self) -> Optional[pulumi.Input[str]]:
        """``aDDomainJoinUser`` constructor argument, if set."""
        return pulumi.get(self, "a_d_domain_join_user")

    @a_d_domain_join_user.setter
    def a_d_domain_join_user(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "a_d_domain_join_user", value)

    @property
    @pulumi.getter(name="crossRealmTrustPrincipalPassword")
    def cross_realm_trust_principal_password(self) -> Optional[pulumi.Input[str]]:
        """``crossRealmTrustPrincipalPassword`` constructor argument, if set."""
        return pulumi.get(self, "cross_realm_trust_principal_password")

    @cross_realm_trust_principal_password.setter
    def cross_realm_trust_principal_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cross_realm_trust_principal_password", value)
@pulumi.input_type
class ClusterKeyValueArgs:
    """An optional key/value pair input."""

    def __init__(__self__, *,
                 key: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[str]] = None):
        # Store each field only when it was actually supplied.
        for pair_name, pair_value in (("key", key), ("value", value)):
            if pair_value is not None:
                pulumi.set(__self__, pair_name, pair_value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """``key`` constructor argument, if set."""
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter
    def value(self) -> Optional[pulumi.Input[str]]:
        """``value`` constructor argument, if set."""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class ClusterManagedScalingPolicyArgs:
    """Managed scaling policy input holding optional compute limits."""

    def __init__(__self__, *,
                 compute_limits: Optional[pulumi.Input['ClusterComputeLimitsArgs']] = None):
        # Guard clause: an absent value is simply not stored.
        if compute_limits is None:
            return
        pulumi.set(__self__, "compute_limits", compute_limits)

    @property
    @pulumi.getter(name="computeLimits")
    def compute_limits(self) -> Optional[pulumi.Input['ClusterComputeLimitsArgs']]:
        """``computeLimits`` constructor argument, if set."""
        return pulumi.get(self, "compute_limits")

    @compute_limits.setter
    def compute_limits(self, value: Optional[pulumi.Input['ClusterComputeLimitsArgs']]):
        pulumi.set(self, "compute_limits", value)
@pulumi.input_type
class ClusterMetricDimensionArgs:
    """A metric dimension input: a required key/value pair."""

    def __init__(__self__, *,
                 key: pulumi.Input[str],
                 value: pulumi.Input[str]):
        # Both fields are mandatory, so store them unconditionally.
        for dim_name, dim_value in (("key", key), ("value", value)):
            pulumi.set(__self__, dim_name, dim_value)

    @property
    @pulumi.getter
    def key(self) -> pulumi.Input[str]:
        """``key`` constructor argument (required)."""
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: pulumi.Input[str]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """``value`` constructor argument (required)."""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class ClusterOnDemandProvisioningSpecificationArgs:
    """On-demand provisioning specification with a single required allocation strategy."""

    def __init__(__self__, *,
                 allocation_strategy: pulumi.Input[str]):
        pulumi.set(__self__, "allocation_strategy", allocation_strategy)

    @property
    @pulumi.getter(name="allocationStrategy")
    def allocation_strategy(self) -> pulumi.Input[str]:
        """``allocationStrategy`` constructor argument (required)."""
        return pulumi.get(self, "allocation_strategy")

    @allocation_strategy.setter
    def allocation_strategy(self, value: pulumi.Input[str]):
        pulumi.set(self, "allocation_strategy", value)
@pulumi.input_type
class ClusterPlacementTypeArgs:
    """Placement input consisting of a single required availability zone."""

    def __init__(__self__, *,
                 availability_zone: pulumi.Input[str]):
        pulumi.set(__self__, "availability_zone", availability_zone)

    @property
    @pulumi.getter(name="availabilityZone")
    def availability_zone(self) -> pulumi.Input[str]:
        """``availabilityZone`` constructor argument (required)."""
        return pulumi.get(self, "availability_zone")

    @availability_zone.setter
    def availability_zone(self, value: pulumi.Input[str]):
        pulumi.set(self, "availability_zone", value)
@pulumi.input_type
class ClusterScalingActionArgs:
    """Scaling action input: a required simple-scaling configuration plus optional market."""

    def __init__(__self__, *,
                 simple_scaling_policy_configuration: pulumi.Input['ClusterSimpleScalingPolicyConfigurationArgs'],
                 market: Optional[pulumi.Input[str]] = None):
        # The scaling policy configuration is mandatory.
        pulumi.set(__self__, "simple_scaling_policy_configuration", simple_scaling_policy_configuration)
        # Guard clause: skip the optional market when not supplied.
        if market is None:
            return
        pulumi.set(__self__, "market", market)

    @property
    @pulumi.getter(name="simpleScalingPolicyConfiguration")
    def simple_scaling_policy_configuration(self) -> pulumi.Input['ClusterSimpleScalingPolicyConfigurationArgs']:
        """``simpleScalingPolicyConfiguration`` constructor argument (required)."""
        return pulumi.get(self, "simple_scaling_policy_configuration")

    @simple_scaling_policy_configuration.setter
    def simple_scaling_policy_configuration(self, value: pulumi.Input['ClusterSimpleScalingPolicyConfigurationArgs']):
        pulumi.set(self, "simple_scaling_policy_configuration", value)

    @property
    @pulumi.getter
    def market(self) -> Optional[pulumi.Input[str]]:
        """``market`` constructor argument, if set."""
        return pulumi.get(self, "market")

    @market.setter
    def market(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "market", value)
@pulumi.input_type
class ClusterScalingConstraintsArgs:
def __init__(__self__, *,
max_capacity: pulumi.Input[int],
min_capacity: pulumi.Input[int]):
pulumi.set(__self__, "max_capacity", max_capacity)
pulumi.set(__self__, "min_capacity", min_capacity)
@property
@pulumi.getter(name="maxCapacity")
def max_capacity(self) -> pulumi.Input[int]:
return pulumi.get(self, "max_capacity")
@max_capacity.setter
def max_capacity(self, value: pulumi.Input[int]):
pulumi.set(self, "max_capacity", value)
@property
@pulumi.getter(name="minCapacity")
def min_capacity(self) -> pulumi.Input[int]:
return pulumi.get(self, "min_capacity")
@min_capacity.setter
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.