code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
# -*- coding: utf-8 -*-
# Copyright 2013-2014, <NAME>, <EMAIL>
#
# Part of 'hiss' the asynchronous notification library
import os
import sys
sys.path.insert(0,
os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import pytest
from hiss.encryption import EncryptionInfo
def test_EncryptionInfo_Init_BadInit():
    """Constructing an EncryptionInfo with no arguments must raise TypeError."""
    with pytest.raises(TypeError):
        _unused = EncryptionInfo()
def test_EncryptionInfo_Init_GoodInit():
    """An algorithm name plus an IV as bytes is enough to construct one."""
    _unused = EncryptionInfo('AES', b'1111111111')
def test_EncryptionInfo_Init_BadAlgorithm():
    """An unknown algorithm name must be rejected with ValueError."""
    with pytest.raises(ValueError):
        _unused = EncryptionInfo('WALLY', b'1111111111')
def test_EncryptionInfo_Init_Algorithm_IsUpperCase():
    """An already upper-case algorithm name is stored unchanged."""
    info = EncryptionInfo('AES', b'1111111111')
    assert info.algorithm == 'AES'
def test_EncryptionInfo_Init_Algorithm_AnyCase():
    """A mixed-case algorithm name is normalised to upper case."""
    info = EncryptionInfo('aEs', b'1111111111')
    assert info.algorithm == 'AES'
def test_EncryptionInfo_Init_ivBytes():
    """An IV supplied as bytes is stored as-is."""
    info = EncryptionInfo('aEs', b'1111111111')
    assert info.iv == b'1111111111'
def test_EncryptionInfo_Init_ivStr():
    """An IV supplied as a str is converted to bytes (apparently hex-decoded)."""
    info = EncryptionInfo('aEs', '1111111111')
    assert info.iv == b'\x11\x11\x11\x11\x11'
|
[
"pytest.raises",
"os.path.dirname",
"hiss.encryption.EncryptionInfo"
] |
[((460, 496), 'hiss.encryption.EncryptionInfo', 'EncryptionInfo', (['"""AES"""', "b'1111111111'"], {}), "('AES', b'1111111111')\n", (474, 496), False, 'from hiss.encryption import EncryptionInfo\n'), ((698, 734), 'hiss.encryption.EncryptionInfo', 'EncryptionInfo', (['"""AES"""', "b'1111111111'"], {}), "('AES', b'1111111111')\n", (712, 734), False, 'from hiss.encryption import EncryptionInfo\n'), ((829, 865), 'hiss.encryption.EncryptionInfo', 'EncryptionInfo', (['"""aEs"""', "b'1111111111'"], {}), "('aEs', b'1111111111')\n", (843, 865), False, 'from hiss.encryption import EncryptionInfo\n'), ((950, 986), 'hiss.encryption.EncryptionInfo', 'EncryptionInfo', (['"""aEs"""', "b'1111111111'"], {}), "('aEs', b'1111111111')\n", (964, 986), False, 'from hiss.encryption import EncryptionInfo\n'), ((1070, 1105), 'hiss.encryption.EncryptionInfo', 'EncryptionInfo', (['"""aEs"""', '"""1111111111"""'], {}), "('aEs', '1111111111')\n", (1084, 1105), False, 'from hiss.encryption import EncryptionInfo\n'), ((350, 374), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (363, 374), False, 'import pytest\n'), ((390, 406), 'hiss.encryption.EncryptionInfo', 'EncryptionInfo', ([], {}), '()\n', (404, 406), False, 'from hiss.encryption import EncryptionInfo\n'), ((553, 578), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (566, 578), False, 'import pytest\n'), ((594, 632), 'hiss.encryption.EncryptionInfo', 'EncryptionInfo', (['"""WALLY"""', "b'1111111111'"], {}), "('WALLY', b'1111111111')\n", (608, 632), False, 'from hiss.encryption import EncryptionInfo\n'), ((206, 231), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (221, 231), False, 'import os\n')]
|
import sys
from collections import namedtuple
# Immutable 2-D coordinate used by the grid-based A* node classes below.
Point = namedtuple('Point', 'x,y')
# When True, main() replays the hard-coded `actions` list instead of
# reading interactive keyboard input.
ai = False
class SokobanMap:
    """
    Instance of a Sokoban game map. You may use this class and its functions
    directly or duplicate and modify it in your solution. You should avoid
    modifying this file directly.
    COMP3702 2019 Assignment 1 Support Code
    Last updated by njc 11/08/19

    All positions are stored as (row, column) tuples, i.e. (y, x).
    """
    # input file symbols
    BOX_SYMBOL = 'B'
    TGT_SYMBOL = 'T'
    PLAYER_SYMBOL = 'P'
    OBSTACLE_SYMBOL = '#'
    FREE_SPACE_SYMBOL = ' '
    BOX_ON_TGT_SYMBOL = 'b'
    PLAYER_ON_TGT_SYMBOL = 'p'
    # move symbols (i.e. output file symbols)
    LEFT = 'l'
    RIGHT = 'r'
    UP = 'u'
    DOWN = 'd'
    # render characters
    # NOTE(review): FREE_GLYPH is narrower than the 3-char glyphs below, which
    # would misalign render() output -- confirm the intended width.
    FREE_GLYPH = ' '
    OBST_GLYPH = 'XXX'
    BOX_GLYPH = '[B]'
    TGT_GLYPH = '(T)'
    PLAYER_GLYPH = '<P>'

    def __init__(self, filename):
        """
        Build a Sokoban map instance from the given file name.
        Parses the character grid, strips box/target/player markers out of the
        grid (replacing them with free space), records their positions, and
        pre-computes corner "dead zone" cells from which a box can never be
        pushed to a target.
        :param filename: path of the map file to parse
        """
        f = open(filename, 'r')
        rows = []
        for line in f:
            # skip blank lines; each remaining line becomes a list of chars
            if len(line.strip()) > 0:
                rows.append(list(line.strip()))
        f.close()
        row_len = len(rows[0])
        for row in rows:
            assert len(row) == row_len, "Mismatch in row length"
        num_rows = len(rows)
        box_positions = []
        tgt_positions = []
        dead_positions = []
        # All obstacle positions
        obstacle_positions = []
        # Obstacle positions by row and column
        obstacle_positions_x = []
        obstacle_positions_y = []
        player_position = None
        for i in range(num_rows):
            # Trying to get the deadzones on walls
            # Positions of obstacles by row and column
            obstacle_positions_row = []
            obstacle_positions_column = []
            obstacle_positions_x.append(obstacle_positions_row)
            obstacle_positions_y.append(obstacle_positions_column)
            for j in range(row_len):
                if rows[i][j] == self.BOX_SYMBOL:
                    box_positions.append((i, j))
                    rows[i][j] = self.FREE_SPACE_SYMBOL
                elif rows[i][j] == self.TGT_SYMBOL:
                    tgt_positions.append((i, j))
                    rows[i][j] = self.FREE_SPACE_SYMBOL
                elif rows[i][j] == self.PLAYER_SYMBOL:
                    player_position = (i, j)
                    # Check if player start is possible dead zone
                    # Corner deadzone: a wall on a horizontal side AND a wall
                    # on a vertical side means a box here could never move.
                    if rows[i][j - 1] == self.OBSTACLE_SYMBOL or rows[i][j + 1] == self.OBSTACLE_SYMBOL:
                        if rows[i - 1][j] == self.OBSTACLE_SYMBOL or rows[i + 1][j] == self.OBSTACLE_SYMBOL:
                            dead_positions.append((i, j))
                    rows[i][j] = self.FREE_SPACE_SYMBOL
                elif rows[i][j] == self.BOX_ON_TGT_SYMBOL:
                    # box already sitting on a target: record both
                    box_positions.append((i, j))
                    tgt_positions.append((i, j))
                    rows[i][j] = self.FREE_SPACE_SYMBOL
                elif rows[i][j] == self.PLAYER_ON_TGT_SYMBOL:
                    player_position = (i, j)
                    tgt_positions.append((i, j))
                    rows[i][j] = self.FREE_SPACE_SYMBOL
                # Check for "deadzones" from map layout and add to list
                # Corner deadzones
                elif rows[i][j] == self.FREE_SPACE_SYMBOL:
                    if rows[i][j - 1] == self.OBSTACLE_SYMBOL or rows[i][j + 1] == self.OBSTACLE_SYMBOL:
                        if rows[i - 1][j] == self.OBSTACLE_SYMBOL or rows[i + 1][j] == self.OBSTACLE_SYMBOL:
                            dead_positions.append((i, j))
                elif rows[i][j] == self.OBSTACLE_SYMBOL:
                    obstacle_positions.append((i, j))
                    obstacle_positions_row.append((i, j))
                    # column list stores (col, row) -- note the swapped order
                    obstacle_positions_column.append((j, i))
        print(dead_positions)  # debug output
        assert len(box_positions) == len(tgt_positions), "Number of boxes does not match number of targets"
        self.x_size = row_len
        self.y_size = num_rows
        self.box_positions = box_positions
        self.tgt_positions = tgt_positions
        self.player_position = player_position
        self.player_x = player_position[1]
        self.player_y = player_position[0]
        self.obstacle_map = rows
        self.obstacle_positions = obstacle_positions
        self.obstacle_positions_x = obstacle_positions_x
        self.obstacle_positions_y = obstacle_positions_y
        self.dead_positions = dead_positions
        print(self.dead_positions)  # debug output
        print(self.tgt_positions)  # debug output

    def search(self, obstacle_map, player_position, dead_positions, search):
        # Placeholder search hook; always reports failure.
        return False

    def apply_move(self, move):
        """
        Apply a player move to the map.
        :param move: 'L', 'R', 'U' or 'D'
        :return: True if move was successful, false if move could not be completed
        """
        # basic obstacle check: compute the player's destination cell, refusing
        # the move if a wall is in the way
        if move == self.LEFT:
            if self.obstacle_map[self.player_y][self.player_x - 1] == self.OBSTACLE_SYMBOL:
                return False
            else:
                new_x = self.player_x - 1
                new_y = self.player_y
        elif move == self.RIGHT:
            if self.obstacle_map[self.player_y][self.player_x + 1] == self.OBSTACLE_SYMBOL:
                return False
            else:
                new_x = self.player_x + 1
                new_y = self.player_y
        elif move == self.UP:
            if self.obstacle_map[self.player_y - 1][self.player_x] == self.OBSTACLE_SYMBOL:
                return False
            else:
                new_x = self.player_x
                new_y = self.player_y - 1
        else:
            if self.obstacle_map[self.player_y + 1][self.player_x] == self.OBSTACLE_SYMBOL:
                return False
            else:
                new_x = self.player_x
                new_y = self.player_y + 1
        # pushed box collision check: if the destination holds a box, the box
        # must be able to move one further cell in the same direction
        if (new_y, new_x) in self.box_positions:
            if move == self.LEFT:
                if self.obstacle_map[new_y][new_x - 1] == self.OBSTACLE_SYMBOL or (new_y, new_x - 1) in self.box_positions:
                    return False
                else:
                    new_box_x = new_x - 1
                    new_box_y = new_y
            elif move == self.RIGHT:
                if self.obstacle_map[new_y][new_x + 1] == self.OBSTACLE_SYMBOL or (new_y, new_x + 1) in self.box_positions:
                    return False
                else:
                    new_box_x = new_x + 1
                    new_box_y = new_y
            elif move == self.UP:
                if self.obstacle_map[new_y - 1][new_x] == self.OBSTACLE_SYMBOL or (new_y - 1, new_x) in self.box_positions:
                    return False
                else:
                    new_box_x = new_x
                    new_box_y = new_y - 1
            else:
                if self.obstacle_map[new_y + 1][new_x] == self.OBSTACLE_SYMBOL or (new_y + 1, new_x) in self.box_positions:
                    return False
                else:
                    new_box_x = new_x
                    new_box_y = new_y + 1
            # update box position
            self.box_positions.remove((new_y, new_x))
            self.box_positions.append((new_box_y, new_box_x))
        # update player position
        self.player_x = new_x
        self.player_y = new_y
        return True

    """
    Check if the boxes have created a 'dead-zone' and the game is over
    @:return True if the game is over
    """
    def check_box_dead_zone(self):
        # If 2 boxes are next to each other and against the wall then no move can be made
        #####
        # NOTE(review): falls through and returns None (falsy) when no dead
        # pair is found -- callers treat that the same as False.
        for y, x in self.box_positions:
            # vertically adjacent box pair, walled on a horizontal side
            if self.box_positions.__contains__((y-1, x)) or \
                    self.box_positions.__contains__((y + 1, x)):
                if self.obstacle_positions.__contains__((y, x - 1)) or \
                        self.obstacle_positions.__contains__((y, x + 1)):
                    if self.tgt_positions.__contains__((y, x)):
                        return False
                    else:
                        return True
            # horizontally adjacent box pair, walled on a vertical side
            if self.box_positions.__contains__((y, x - 1)) or \
                    self.box_positions.__contains__((y, x + 1)):
                if self.obstacle_positions.__contains__((y - 1, x)) or \
                        self.obstacle_positions.__contains__((y + 1, x)):
                    if self.tgt_positions.__contains__((y, x)):
                        return False
                    else:
                        return True

    def wall_dead_zone(self):
        # Trying to implement wall dead zones: if a border wall row/column has
        # no target adjacent to it, every cell along that wall is marked dead.
        for y, x in self.tgt_positions:
            if not self.obstacle_positions_x[-1].__contains__((y + 1, x)):
                self.add_to_dead_zone(self.obstacle_positions_x[-1])
            if not self.obstacle_positions_x[0].__contains__((y - 1, x)):
                self.add_to_dead_zone(self.obstacle_positions_x[0])
            if not self.obstacle_positions_y[0].__contains__((y, x - 1)):
                self.add_to_dead_zone(self.obstacle_positions_y[0])
            if not self.obstacle_positions_y[-1].__contains__((y, x + 1)):
                self.add_to_dead_zone(self.obstacle_positions_y[-1])

    """
    Check if the box is in a 'dead-zone' from the map positioning
    @:return True if the game is over
    """
    def check_map_dead_zone(self):
        # Corner check: True when any box sits on a pre-computed dead cell.
        # NOTE(review): returns None (falsy) instead of False when no box is
        # in a dead zone.
        for i in self.box_positions:
            for j in self.dead_positions:
                if i == j:
                    return True

    def add_to_dead_zone(self, coordinates):
        # Adding coordinates to deadzones
        for y, x in coordinates:
            self.dead_positions.append((y, x))

    def render(self):
        """
        Render the map's current state to terminal
        """
        for r in range(self.y_size):
            line = ''
            for c in range(self.x_size):
                symbol = self.FREE_GLYPH
                if self.obstacle_map[r][c] == self.OBSTACLE_SYMBOL:
                    symbol = self.OBST_GLYPH
                if (r, c) in self.tgt_positions:
                    symbol = self.TGT_GLYPH
                # box or player overwrites tgt
                if (r, c) in self.box_positions:
                    symbol = self.BOX_GLYPH
                if self.player_x == c and self.player_y == r:
                    symbol = self.PLAYER_GLYPH
                line += symbol
            print(line)
        print('\n\n')

    def is_finished(self):
        # The puzzle is solved when every box sits on some target cell.
        finished = True
        for i in self.box_positions:
            if i not in self.tgt_positions:
                finished = False
        return finished
def main(arglist):
    """
    Run a playable game of Sokoban using the given filename as the map file.
    :param arglist: command line args; expects exactly one element, the map file name
    """
    # Prefer the Windows single-keypress reader; fall back to plain stdin
    # elsewhere.
    try:
        import msvcrt
        getchar = msvcrt.getch
    except ImportError:
        # BUG FIX: the original assigned sys.stdin.read(1) -- the *result* of
        # reading one character (a str) -- to getchar, which then crashed with
        # "'str' object is not callable" at getchar(). Wrap the read in a
        # function so getchar is a callable on every platform.
        def getchar():
            return sys.stdin.read(1)
    if len(arglist) != 1:
        print("Running this file directly launches a playable game of Sokoban based on the given map file.")
        print("Usage: sokoban_map.py [map_file_name]")
        return
    print("Use the arrow keys to move. Press 'q' to quit. Press 'r' to restart the map.")
    map_inst = SokobanMap(arglist[0])
    map_inst.render()
    actions = ['d', 'l', 'l', 'u', 'l', 'd', 'd']
    # map_inst.wall_dead_zone()
    # print(dead_positions)
    steps = 0
    if ai:
        # Scripted mode: replay the hard-coded action list instead of
        # reading the keyboard.
        for a in actions:
            map_inst.apply_move(a)
            map_inst.render()
            map_inst.check_box_dead_zone()
            # check if the box is in a map dead zone
            if map_inst.check_map_dead_zone():
                print("can not complete/fail")
                return
            if map_inst.check_box_dead_zone():
                print("can not complete/fail")
                return
            steps += 1
            if map_inst.is_finished():
                print("Puzzle solved in " + str(steps) + " steps!")
                return
    else:
        while True:
            char = getchar()
            # NOTE(review): these byte comparisons match msvcrt.getch() output;
            # on non-Windows platforms getchar() returns str, so 'q'/'r' and
            # the arrow-key prefix never match -- interactive play is
            # effectively Windows-only, as in the original code.
            if char == b'q':
                break
            if char == b'r':
                map_inst = SokobanMap(arglist[0])
                map_inst.render()
                steps = 0
            if char == b'\xe0':
                # got arrow - read direction (renamed from `dir`, which
                # shadowed the builtin)
                direction = getchar()
                if direction == b'H':
                    a = SokobanMap.UP
                elif direction == b'P':
                    a = SokobanMap.DOWN
                elif direction == b'K':
                    a = SokobanMap.LEFT
                elif direction == b'M':
                    a = SokobanMap.RIGHT
                else:
                    print("!!!error")
                    a = SokobanMap.UP
                map_inst.apply_move(a)
                map_inst.render()
                if map_inst.check_map_dead_zone():
                    print("can not complete/fail")
                    return
                steps += 1
                if map_inst.is_finished():
                    print("Puzzle solved in " + str(steps) + " steps!")
                    return
                if map_inst.check_box_dead_zone():
                    print("can not complete/fail")
                    return
class Astar(object):
    """
    Abstract interface for an A* search.

    Concrete searches subclass this and implement every method; here each
    method simply raises NotImplementedError.
    """

    def heuristic(self, node):
        """
        Return the heuristic cost estimate of *node*.
        :param node: node to evaluate
        :return: the node's h value
        """
        raise NotImplementedError

    def findPath(self, start, end):
        """
        Search for a path from *start* to *end*; raise if none exists.
        Implementations are expected to maintain open/closed collections of
        nodes with a membership check between them.
        :param start: starting node
        :param end: ending node
        :return: the path if one exists, else raise an error
        """
        raise NotImplementedError

    # --- helpers used while building the path ---

    def isEnd(self, node, end):
        """
        Tell whether *node* is the goal node.
        :param node: node to test
        :param end: goal node
        :return: True when the goal has been reached
        """
        raise NotImplementedError

    def retrace_path(self, start, end, closed_set):
        """
        Reconstruct the path from the closed (already searched) set.
        :param start: starting node
        :param end: ending node
        :param closed_set: nodes already expanded by the search
        :return: the reconstructed path
        """
        raise NotImplementedError

    def reparent(self, node):
        """
        Re-attach *node* to a cheaper parent when one is found.
        :param node: node to reparent
        """
        raise NotImplementedError

    def neighbours(self, node):
        """
        Return every neighbour of the supplied node.
        :param node: node whose neighbours are wanted
        :return: iterable of neighbouring nodes
        """
        raise NotImplementedError
class AStarNode(object):
    """
    Base node type for A* search.

    Holds the cost bookkeeping (g, h, f) and the parent link; the cost and
    equivalence hooks are abstract and must be supplied by subclasses.
    """

    def __init__(self):
        """Initialise h, g and f to zero and the parent link to None."""
        self.h = 0
        self.g = 0
        self.f = 0
        self.parent = None

    def calculate_move_cost(self, node):
        """
        Cost of moving from this node to *node*: one more than the parent's
        move cost.
        :param node: target node
        :return: move_cost
        """
        raise NotImplementedError

    def calculate_heuristic(self, goal_node):
        """
        Heuristic estimate from this node to *goal_node*.
        :param goal_node: the target node
        :return: heuristic value (typically assigned to node.h)
        """
        raise NotImplementedError

    def __hash__(self):
        """Hash hook used to detect already-visited nodes."""
        raise NotImplementedError

    def __eq__(self, node):
        """
        Equivalence test against another node.
        :param node: other node
        :return: True if equal, False otherwise
        """
        raise NotImplementedError
class AstarGridNode(AStarNode):
    """Concrete A* node on a 2-D grid, addressed by a Point(x, y)."""

    def __init__(self, xyPoint, parent, h, g, f):
        """
        Concrete constructor of an AStarNode for use within AStarGrid
        :param xyPoint: a namedTuple 'Point' containing the x,y co-ords of the node
        :param parent: parent AstarGridNode (or None for the start node)
        :param h: heuristic cost estimate for this node
        :param g: path cost from the start node to this node
        :param f: total estimated cost (g + h)
        """
        self.xy = xyPoint
        # Candidate unit moves on the grid: up, right, down, left.
        self.directions = [Point(0, +1), Point(+1, 0), Point(0, -1), Point(-1, 0)]
        # NOTE(review): this instance attribute shadows the parent() method
        # defined below -- node.parent resolves to the attribute, so the
        # method is unreachable on instances.
        self.parent = parent
        self.h = h
        self.g = g
        self.f = f

    def move_cost(self, to_node):
        """
        Cost of reaching a neighbour from this node: one step more than the
        parent's path cost.
        BUG FIX: the original defined a second move_cost(direction, to_node)
        overload above this one; Python keeps only the last definition, so
        that dead, silently-shadowed overload has been removed.
        :param to_node: the node to be moved to (currently unused)
        :return: int move cost
        """
        return 1 + self.parent.g

    def parent(self, node):
        """
        Make the supplied node the parent of this node.
        :param node:
        :return:
        """

    def get_parent(self):
        """
        Return the parent node of this node, return None if not exists
        :return:
        """

    def __hash__(self):
        pass

    def __eq__(self, node):
        pass
class AStarSoko(Astar):
    # visited list
    # unvisited list ==> literally everyone
    # ???
    def reparent(self, node):
        # Intentionally a no-op stub; reparenting is not implemented here.
        pass

    def __init__(self, sokoban_map):
        # Keep a handle on the SokobanMap instance to be searched.
        self.smap = sokoban_map

    def heuristic(self, node):
        """
        Manhattan distance from the search's target/end node.
        :param node: target node
        :return: the manhattan distance from the search's target/end node to the provided node.
        """
        # manhattan = sum abs difference X + abs difference Y
        # NOTE(review): self.end is never assigned anywhere in this class, and
        # Point is a plain namedtuple with no __sub__ -- this subtraction
        # would raise at runtime unless node.xy supplies '-'. Verify before use.
        diff = node.xy - self.end.xy  # (x1-x2 , y1-y2) --> (x3,y3)
        return abs(diff.x) + abs(diff.y)  # for node in sokoMap --> node.h = AStarSoko.heuristic(

    def neighbours(self, node):
        """
        Returns
        :param node:
        :return:
        """
        # NOTE(review): self.directions and self.grid are never set on this
        # class (directions lives on AstarGridNode instances) -- confirm the
        # intended source before using this method.
        neighbours = []
        for direction in self.directions:  # node holds the possible directions u,r,d,l
            xy = node.xy + direction
            if xy in self.grid:
                neighbours.append(AstarGridNode(xy))
        return neighbours
        pass  # unreachable (after return)

    def findPath(self, start, end):
        """
        A* Search the map. If no path return None else return path.
        :param start: initial node
        :param end: end node
        :return: an iterable of movement commands specifying the u,d,r,l path from start to end.
        """
        # need open set/list/tree --> BST probably best
        # closed set/list/tree
        # start
        pass

    def isEnd(self, node, end):  # goal reached
        pass

    def buildPath(self, start, end, closeList):
        pass
if __name__ == '__main__':
    # Launch the interactive game with the map file given on the command line.
    main(sys.argv[1:])
|
[
"sys.stdin.read",
"collections.namedtuple"
] |
[((55, 81), 'collections.namedtuple', 'namedtuple', (['"""Point"""', '"""x,y"""'], {}), "('Point', 'x,y')\n", (65, 81), False, 'from collections import namedtuple\n'), ((11129, 11146), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (11143, 11146), False, 'import sys\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 26 21:00:01 2020
@author: Meet
"""
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
import models.MobileNet_v2_config as MobileNet_v2_config
from models.MobileNet_v2_config import g
class MobileNet_v2:
    """
    MobileNet-v2 classifier built with the TF1 layers/contrib API.

    Builds inverted-residual blocks (1x1 expand -> 3x3 depthwise -> linear
    1x1 pointwise) and exposes __call__ to construct the full graph; g(...)
    from MobileNet_v2_config is invoked on intermediate tensors at each stage.
    """
    def __init__(self, input_dims=(64, 64), num_classes=200):
        # NOTE(review): input_dims is accepted but never stored or used in
        # this class -- confirm whether it is needed.
        self.model_name = 'MobileNet_v2'
        self.num_classes = num_classes
        self.k_init = tf.contrib.layers.variance_scaling_initializer()  # as per paper
        # variance scaling is good for models with relu activation
        # whereas xavier initialization is good for models with sigmoid activation
        self.k_reg = tf.contrib.layers.l2_regularizer(scale=MobileNet_v2_config.weight_decay)

    def _depthwise_conv(self, ip, training, k_size=(3, 3), stride=1, padding='SAME', depth_mul=1.0, use_batch_norm=True):
        """Depthwise conv -> optional batch-norm -> ReLU6."""
        dw_weights_shape = [k_size[0], k_size[1], ip.get_shape().as_list()[-1], depth_mul]
        w = tf.get_variable('dw_weight', shape=dw_weights_shape, dtype=tf.float32, initializer=self.k_init)
        # no need of using regularizer in depthwise convo as there are very less number of parameters. -- as per paper
        depthwise = tf.nn.depthwise_conv2d(ip, w, (1, stride, stride, 1), padding)
        if use_batch_norm:
            depthwise = tf.layers.batch_normalization(depthwise, momentum=0.9, training=training)
        depthwise = tf.nn.relu6(depthwise)
        return depthwise

    def _pointwise_conv(self, ip, training, op_filters, use_batch_norm=True):
        """1x1 projection conv -> optional batch-norm (linear: no activation)."""
        pointwise = tf.layers.conv2d(ip, op_filters, (1, 1), 1, 'SAME', use_bias=not use_batch_norm, kernel_initializer=self.k_init, kernel_regularizer=self.k_reg)
        # we can use kernel regularizer in pointwise convolution
        if use_batch_norm:
            pointwise = tf.layers.batch_normalization(pointwise, momentum=0.9, training=training)
        # No activation to be applied here as this is only a linear layer. Only batchnorm can be applied here.
        return pointwise

    def _inv_res_block(self, ip, training, op_filters, k_size=(3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=1.0, use_batch_norm=True, scope_name='inv_res_block'):
        """
        Inverted residual block: 1x1 expansion -> depthwise -> linear 1x1
        pointwise. A residual connection is added only when stride == 1,
        projecting the input with a 1x1 conv when channel counts differ.
        """
        with tf.variable_scope(scope_name):
            expanded_filters = ip.get_shape().as_list()[-1] * expansion_factor
            bottleneck_conv = tf.layers.conv2d(ip, expanded_filters, (1, 1), 1, 'SAME', activation=None, use_bias=False, kernel_initializer=self.k_init, kernel_regularizer=self.k_reg)
            bottleneck_conv = tf.layers.batch_normalization(bottleneck_conv, momentum=0.9, training=training)
            bottleneck_conv = tf.nn.relu6(bottleneck_conv)
            depth_wise_conv = self._depthwise_conv(bottleneck_conv, training, k_size, stride, padding, depth_mul, use_batch_norm)
            pointwise_conv = self._pointwise_conv(depth_wise_conv, training, op_filters, use_batch_norm)
            if stride == 2:
                # spatial downsampling: no residual connection possible
                return pointwise_conv
            elif stride == 1:
                if ip.get_shape().as_list()[-1] != op_filters:
                    # channel mismatch: project the input before adding
                    strided_ip = tf.layers.conv2d(ip, op_filters, (1, 1), 1, 'SAME', activation=None, use_bias=False, kernel_initializer=self.k_init, kernel_regularizer=self.k_reg)
                    return strided_ip + pointwise_conv
                else:
                    return ip + pointwise_conv

    def __call__(self, x, is_training):
        """
        Build the full network graph over input batch x.
        :param x: input image tensor; comments below track the expected shapes
        :param is_training: batch-norm training flag
        :return: (logits, softmax outputs)
        """
        # x : [None x 64 x 64 x 3]
        with tf.variable_scope(self.model_name):
            with tf.variable_scope("conv1"):
                conv1 = tf.layers.conv2d(x, 32, (3, 3), 2, 'SAME', activation=None, use_bias=False, kernel_initializer=self.k_init, kernel_regularizer=self.k_reg)
                conv1 = tf.layers.batch_normalization(conv1, momentum=0.9, training=is_training)
                conv1 = tf.nn.relu6(conv1)
                g(conv1)
            # [None x 32 x 32 x 32]
            inv_res_block_1_1 = self._inv_res_block(conv1, is_training, 16, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=1.0, use_batch_norm=True, scope_name='inv_res_block_1_1')
            g(inv_res_block_1_1)
            # [None x 32 x 32 x 16]
            inv_res_block_2_1 = self._inv_res_block(inv_res_block_1_1, is_training, 24, (3, 3), stride=2, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_2_1')
            inv_res_block_2_2 = self._inv_res_block(inv_res_block_2_1, is_training, 24, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_2_2')
            g(inv_res_block_2_2)
            # [None x 16 x 16 x 24]
            inv_res_block_3_1 = self._inv_res_block(inv_res_block_2_2, is_training, 32, (3, 3), stride=2, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_3_1')
            inv_res_block_3_2 = self._inv_res_block(inv_res_block_3_1, is_training, 32, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_3_2')
            inv_res_block_3_3 = self._inv_res_block(inv_res_block_3_2, is_training, 32, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_3_3')
            g(inv_res_block_3_3)
            # [None x 8 x 8 x 32]
            inv_res_block_4_1 = self._inv_res_block(inv_res_block_3_3, is_training, 64, (3, 3), stride=2, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_4_1')
            inv_res_block_4_2 = self._inv_res_block(inv_res_block_4_1, is_training, 64, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_4_2')
            inv_res_block_4_3 = self._inv_res_block(inv_res_block_4_2, is_training, 64, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_4_3')
            inv_res_block_4_4 = self._inv_res_block(inv_res_block_4_3, is_training, 64, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_4_4')
            g(inv_res_block_4_4)
            # [None x 4 x 4 x 64]
            inv_res_block_5_1 = self._inv_res_block(inv_res_block_4_4, is_training, 96, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_5_1')
            inv_res_block_5_2 = self._inv_res_block(inv_res_block_5_1, is_training, 96, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_5_2')
            inv_res_block_5_3 = self._inv_res_block(inv_res_block_5_2, is_training, 96, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_5_3')
            g(inv_res_block_5_3)
            # [None x 4 x 4 x 96]
            inv_res_block_6_1 = self._inv_res_block(inv_res_block_5_3, is_training, 160, (3, 3), stride=2, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_6_1')
            inv_res_block_6_2 = self._inv_res_block(inv_res_block_6_1, is_training, 160, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_6_2')
            inv_res_block_6_3 = self._inv_res_block(inv_res_block_6_2, is_training, 160, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_6_3')
            g(inv_res_block_6_3)
            # [None x 2 x 2 x 160]
            inv_res_block_7_1 = self._inv_res_block(inv_res_block_6_3, is_training, 320, (3, 3), stride=1, padding='SAME', depth_mul=1.0, expansion_factor=6.0, use_batch_norm=True, scope_name='inv_res_block_7_1')
            g(inv_res_block_7_1)
            # [None x 2 x 2 x 320]
            with tf.variable_scope("conv2"):
                conv2 = tf.layers.conv2d(inv_res_block_7_1, 1280, (1, 1), 1, 'SAME', activation=None, use_bias=False, kernel_initializer=self.k_init, kernel_regularizer=self.k_reg)
                conv2 = tf.layers.batch_normalization(conv2, momentum=0.9, training=is_training)
                conv2 = tf.nn.relu6(conv2)
                g(conv2)
            # [None x 2 x 2 x 1280]
            with tf.variable_scope("tail"):
                gap = tf.reduce_mean(conv2, axis=[1, 2], name='global_avg_pool', keepdims=True)
                g(gap)
                # [None x 1 x 1 x 1280]
                conv3 = tf.layers.conv2d(gap, self.num_classes, (1, 1), 1, 'SAME', activation=None, use_bias=True, kernel_initializer=self.k_init, kernel_regularizer=self.k_reg)
                g(conv3)
                # [None x 1 x 1 x 200]
                logits = tf.squeeze(conv3, axis=[1, 2], name='logits')
                g(logits)
                # [None x 200]
                outputs = tf.nn.softmax(logits, name='softmax_op')
                g(outputs)
                # [B x 200]
                return logits, outputs
|
[
"tensorflow.nn.softmax",
"tensorflow.contrib.layers.l2_regularizer",
"tensorflow.reduce_mean",
"tensorflow.nn.depthwise_conv2d",
"tensorflow.variable_scope",
"models.MobileNet_v2_config.g",
"tensorflow.layers.conv2d",
"tensorflow.squeeze",
"tensorflow.nn.relu6",
"tensorflow.layers.batch_normalization",
"tensorflow.contrib.layers.variance_scaling_initializer",
"tensorflow.get_variable"
] |
[((442, 490), 'tensorflow.contrib.layers.variance_scaling_initializer', 'tf.contrib.layers.variance_scaling_initializer', ([], {}), '()\n', (488, 490), True, 'import tensorflow as tf\n'), ((678, 750), 'tensorflow.contrib.layers.l2_regularizer', 'tf.contrib.layers.l2_regularizer', ([], {'scale': 'MobileNet_v2_config.weight_decay'}), '(scale=MobileNet_v2_config.weight_decay)\n', (710, 750), True, 'import tensorflow as tf\n'), ((990, 1089), 'tensorflow.get_variable', 'tf.get_variable', (['"""dw_weight"""'], {'shape': 'dw_weights_shape', 'dtype': 'tf.float32', 'initializer': 'self.k_init'}), "('dw_weight', shape=dw_weights_shape, dtype=tf.float32,\n initializer=self.k_init)\n", (1005, 1089), True, 'import tensorflow as tf\n'), ((1234, 1296), 'tensorflow.nn.depthwise_conv2d', 'tf.nn.depthwise_conv2d', (['ip', 'w', '(1, stride, stride, 1)', 'padding'], {}), '(ip, w, (1, stride, stride, 1), padding)\n', (1256, 1296), True, 'import tensorflow as tf\n'), ((1442, 1464), 'tensorflow.nn.relu6', 'tf.nn.relu6', (['depthwise'], {}), '(depthwise)\n', (1453, 1464), True, 'import tensorflow as tf\n'), ((1594, 1746), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', (['ip', 'op_filters', '(1, 1)', '(1)', '"""SAME"""'], {'use_bias': '(not use_batch_norm)', 'kernel_initializer': 'self.k_init', 'kernel_regularizer': 'self.k_reg'}), "(ip, op_filters, (1, 1), 1, 'SAME', use_bias=not\n use_batch_norm, kernel_initializer=self.k_init, kernel_regularizer=self\n .k_reg)\n", (1610, 1746), True, 'import tensorflow as tf\n'), ((1348, 1421), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['depthwise'], {'momentum': '(0.9)', 'training': 'training'}), '(depthwise, momentum=0.9, training=training)\n', (1377, 1421), True, 'import tensorflow as tf\n'), ((1854, 1927), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['pointwise'], {'momentum': '(0.9)', 'training': 'training'}), '(pointwise, momentum=0.9, training=training)\n', (1883, 1927), True, 
'import tensorflow as tf\n'), ((2270, 2299), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope_name'], {}), '(scope_name)\n', (2287, 2299), True, 'import tensorflow as tf\n'), ((2410, 2572), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', (['ip', 'expanded_filters', '(1, 1)', '(1)', '"""SAME"""'], {'activation': 'None', 'use_bias': '(False)', 'kernel_initializer': 'self.k_init', 'kernel_regularizer': 'self.k_reg'}), "(ip, expanded_filters, (1, 1), 1, 'SAME', activation=None,\n use_bias=False, kernel_initializer=self.k_init, kernel_regularizer=self\n .k_reg)\n", (2426, 2572), True, 'import tensorflow as tf\n'), ((2594, 2673), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['bottleneck_conv'], {'momentum': '(0.9)', 'training': 'training'}), '(bottleneck_conv, momentum=0.9, training=training)\n', (2623, 2673), True, 'import tensorflow as tf\n'), ((2704, 2732), 'tensorflow.nn.relu6', 'tf.nn.relu6', (['bottleneck_conv'], {}), '(bottleneck_conv)\n', (2715, 2732), True, 'import tensorflow as tf\n'), ((3542, 3576), 'tensorflow.variable_scope', 'tf.variable_scope', (['self.model_name'], {}), '(self.model_name)\n', (3559, 3576), True, 'import tensorflow as tf\n'), ((4220, 4240), 'models.MobileNet_v2_config.g', 'g', (['inv_res_block_1_1'], {}), '(inv_res_block_1_1)\n', (4221, 4240), False, 'from models.MobileNet_v2_config import g\n'), ((4726, 4746), 'models.MobileNet_v2_config.g', 'g', (['inv_res_block_2_2'], {}), '(inv_res_block_2_2)\n', (4727, 4746), False, 'from models.MobileNet_v2_config import g\n'), ((5432, 5452), 'models.MobileNet_v2_config.g', 'g', (['inv_res_block_3_3'], {}), '(inv_res_block_3_3)\n', (5433, 5452), False, 'from models.MobileNet_v2_config import g\n'), ((6348, 6368), 'models.MobileNet_v2_config.g', 'g', (['inv_res_block_4_4'], {}), '(inv_res_block_4_4)\n', (6349, 6368), False, 'from models.MobileNet_v2_config import g\n'), ((7052, 7072), 'models.MobileNet_v2_config.g', 'g', (['inv_res_block_5_3'], {}), 
'(inv_res_block_5_3)\n', (7053, 7072), False, 'from models.MobileNet_v2_config import g\n'), ((7759, 7779), 'models.MobileNet_v2_config.g', 'g', (['inv_res_block_6_3'], {}), '(inv_res_block_6_3)\n', (7760, 7779), False, 'from models.MobileNet_v2_config import g\n'), ((8041, 8061), 'models.MobileNet_v2_config.g', 'g', (['inv_res_block_7_1'], {}), '(inv_res_block_7_1)\n', (8042, 8061), False, 'from models.MobileNet_v2_config import g\n'), ((3595, 3621), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv1"""'], {}), "('conv1')\n", (3612, 3621), True, 'import tensorflow as tf\n'), ((3647, 3789), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', (['x', '(32)', '(3, 3)', '(2)', '"""SAME"""'], {'activation': 'None', 'use_bias': '(False)', 'kernel_initializer': 'self.k_init', 'kernel_regularizer': 'self.k_reg'}), "(x, 32, (3, 3), 2, 'SAME', activation=None, use_bias=False,\n kernel_initializer=self.k_init, kernel_regularizer=self.k_reg)\n", (3663, 3789), True, 'import tensorflow as tf\n'), ((3810, 3882), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['conv1'], {'momentum': '(0.9)', 'training': 'is_training'}), '(conv1, momentum=0.9, training=is_training)\n', (3839, 3882), True, 'import tensorflow as tf\n'), ((3907, 3925), 'tensorflow.nn.relu6', 'tf.nn.relu6', (['conv1'], {}), '(conv1)\n', (3918, 3925), True, 'import tensorflow as tf\n'), ((3942, 3950), 'models.MobileNet_v2_config.g', 'g', (['conv1'], {}), '(conv1)\n', (3943, 3950), False, 'from models.MobileNet_v2_config import g\n'), ((8115, 8141), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""conv2"""'], {}), "('conv2')\n", (8132, 8141), True, 'import tensorflow as tf\n'), ((8171, 8336), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', (['inv_res_block_7_1', '(1280)', '(1, 1)', '(1)', '"""SAME"""'], {'activation': 'None', 'use_bias': '(False)', 'kernel_initializer': 'self.k_init', 'kernel_regularizer': 'self.k_reg'}), "(inv_res_block_7_1, 1280, (1, 1), 1, 'SAME', 
activation=\n None, use_bias=False, kernel_initializer=self.k_init,\n kernel_regularizer=self.k_reg)\n", (8187, 8336), True, 'import tensorflow as tf\n'), ((8352, 8424), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['conv2'], {'momentum': '(0.9)', 'training': 'is_training'}), '(conv2, momentum=0.9, training=is_training)\n', (8381, 8424), True, 'import tensorflow as tf\n'), ((8449, 8467), 'tensorflow.nn.relu6', 'tf.nn.relu6', (['conv2'], {}), '(conv2)\n', (8460, 8467), True, 'import tensorflow as tf\n'), ((8484, 8492), 'models.MobileNet_v2_config.g', 'g', (['conv2'], {}), '(conv2)\n', (8485, 8492), False, 'from models.MobileNet_v2_config import g\n'), ((8564, 8589), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""tail"""'], {}), "('tail')\n", (8581, 8589), True, 'import tensorflow as tf\n'), ((8629, 8702), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['conv2'], {'axis': '[1, 2]', 'name': '"""global_avg_pool"""', 'keepdims': '(True)'}), "(conv2, axis=[1, 2], name='global_avg_pool', keepdims=True)\n", (8643, 8702), True, 'import tensorflow as tf\n'), ((8719, 8725), 'models.MobileNet_v2_config.g', 'g', (['gap'], {}), '(gap)\n', (8720, 8725), False, 'from models.MobileNet_v2_config import g\n'), ((8810, 8972), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', (['gap', 'self.num_classes', '(1, 1)', '(1)', '"""SAME"""'], {'activation': 'None', 'use_bias': '(True)', 'kernel_initializer': 'self.k_init', 'kernel_regularizer': 'self.k_reg'}), "(gap, self.num_classes, (1, 1), 1, 'SAME', activation=None,\n use_bias=True, kernel_initializer=self.k_init, kernel_regularizer=self.\n k_reg)\n", (8826, 8972), True, 'import tensorflow as tf\n'), ((8980, 8988), 'models.MobileNet_v2_config.g', 'g', (['conv3'], {}), '(conv3)\n', (8981, 8988), False, 'from models.MobileNet_v2_config import g\n'), ((9053, 9098), 'tensorflow.squeeze', 'tf.squeeze', (['conv3'], {'axis': '[1, 2]', 'name': '"""logits"""'}), "(conv3, axis=[1, 2], name='logits')\n", (9063, 
9098), True, 'import tensorflow as tf\n'), ((9115, 9124), 'models.MobileNet_v2_config.g', 'g', (['logits'], {}), '(logits)\n', (9116, 9124), False, 'from models.MobileNet_v2_config import g\n'), ((9199, 9239), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {'name': '"""softmax_op"""'}), "(logits, name='softmax_op')\n", (9212, 9239), True, 'import tensorflow as tf\n'), ((9256, 9266), 'models.MobileNet_v2_config.g', 'g', (['outputs'], {}), '(outputs)\n', (9257, 9266), False, 'from models.MobileNet_v2_config import g\n'), ((3175, 3331), 'tensorflow.layers.conv2d', 'tf.layers.conv2d', (['ip', 'op_filters', '(1, 1)', '(1)', '"""SAME"""'], {'activation': 'None', 'use_bias': '(False)', 'kernel_initializer': 'self.k_init', 'kernel_regularizer': 'self.k_reg'}), "(ip, op_filters, (1, 1), 1, 'SAME', activation=None,\n use_bias=False, kernel_initializer=self.k_init, kernel_regularizer=self\n .k_reg)\n", (3191, 3331), True, 'import tensorflow as tf\n')]
|
'''It's a collection of useful/repeated functions. It has traditional functions as well as lambdas.'''
# =================LIST OPERATIONS===================#
def findDiff(first, second):
second = set(second)
return [item for item in first if item not in second]
# findDiff = lambda first, second: [item for item in first if item not in second]
def findDuplicates(x):
import collections
return [item for item, count in collections.Counter(x).items() if count > 1]
# findDuplicates = lambda source_list: [item for item, count in collections.Counter(source_list).items() if count > 1]
# =========================String========================#
def concateString(source_list, separator):
return separator.join(source_list)
# concateString = lambda source_list,separator: str(separator).join(source_list)
print(concateString(["Heloo", "World!"], " "))
#====================OTHERS==============================#
#Extracting domain information from URL
import tldextract
def extractDomain(url):
if "http" in str(url) or "www" in str(url):
parsed = tldextract.extract(url)
parsed = ".".join([i for i in parsed if i])
return parsed
else: return "NA"
print(extractDomain("https://google.co.uk?link=something"))
|
[
"collections.Counter",
"tldextract.extract"
] |
[((1076, 1099), 'tldextract.extract', 'tldextract.extract', (['url'], {}), '(url)\n', (1094, 1099), False, 'import tldextract\n'), ((438, 460), 'collections.Counter', 'collections.Counter', (['x'], {}), '(x)\n', (457, 460), False, 'import collections\n')]
|
"""Transform metrics stored in SQuaSH into InfluxDB format.
See sqr-009.lsst.io for a description on how metrics are stored in SQuaSH and
the resulting InfluxDB data model.
"""
__all__ = ["Transformer"]
import logging
import math
import pathlib
import urllib.parse
import requests
import yaml
from requests.exceptions import ConnectionError, HTTPError
from squash.tasks.utils.format import Formatter
logger = logging.getLogger("squash")
class Transformer(Formatter):
"""Transform metrics stored in SQuaSH into InfluxDB format.
Parameters
----------
squash_api_url : `str`
SQuaSH API URL.
data : `str`
SQuaSH job data in JSON.
"""
def __init__(self, squash_api_url, data):
super().__init__(squash_api_url=squash_api_url)
self.squash_api_url = squash_api_url
self.data = data
self.mapping = self.load_mapping()
def load_mapping(self):
"""Load the SQuaSH to InfluxDB mapping.
Returns
-------
mapping : `dict`
Dictionary with the SQuaSH to InfluxDB mapping.
"""
filename = pathlib.Path(__file__).parent / "mapping.yaml"
with open(filename) as f:
mapping = yaml.load(f, Loader=yaml.FullLoader)
return mapping
def run_mapping(self, key):
"""Return schema, key, and transformation from the mapping.
Parameters
----------
key : `str`
The key to look for in the mapping.
Returns
-------
schema : `str` or `None`
The InfluxDB schema to write or `None` if it should not
be added to InfluxDB.
mapped_key : `str` or `None`
The mapped key or `None` if it should not be added to InfluxDB.
transformation : `str` or `None`
The transformation that should be applied to the value if any.
"""
# By default, if the key is not found in the mapping, it should be
# added to InfluxDB as a tag and preserving the original name.
schema = "tag"
mapped_key = key
transformation = None
if key in self.mapping:
item = self.mapping[key]
schema = item["schema"]
mapped_key = item["key"]
transformation = item["transformation"]
return schema, mapped_key, transformation
def get_timestamp(self):
"""Get the timestamp to use in InfluxDB.
Use the timestamp when the verification job is recorded. If it runs
in Jenkins uses the pipeline runtime instead.
Returns
-------
timestamp : `int`
Formatted timestamp.
"""
timestamp = Formatter.format_timestamp(self.data["date_created"])
if self.data["meta"]["env"]["env_name"] == "jenkins":
ci_id = self.data["meta"]["env"]["ci_id"]
ci_name = self.data["meta"]["env"]["ci_name"]
# Get timestamp from Jenkins
jenkins_url = (
f"{self.squash_api_url}/jenkins/{ci_id}?ci_name={ci_name}"
)
try:
r = requests.get(jenkins_url)
r.raise_for_status()
except HTTPError:
message = "Could not get timestamp from Jenkins."
logger.error(message)
except ConnectionError:
message = (
f"Failed to establish connection with Jenkins "
f"{jenkins_url}."
)
logger.error(message)
date_created = r.json()["date_created"]
timestamp = Formatter.format_timestamp(date_created)
return timestamp
def update_metadata(self):
"""Add/remove metadata before the trandformation step."""
# Add extra metadata
id = self.data["id"]
self.data["meta"]["id"] = id
self.data["meta"]["url"] = urllib.parse.urljoin(
self.squash_api_url, f"/job/{id}"
)
self.data["meta"]["date_created"] = self.data["date_created"]
self.data["meta"]["env"]["ci_dataset"] = self.data["ci_dataset"]
# Fix dataset_repo_url duplication
if "dataset_repo_url" in self.data["meta"].keys():
del self.data["meta"]["dataset_repo_url"]
# Fix use of ci_dataset key in environments other than jenkins
if self.data["meta"]["env"]["env_name"] != "jenkins":
if "ci_dataset" in self.data["meta"]["env"]:
del self.data["meta"]["env"]["ci_dataset"]
# Add code changes metadata keys
if self.data["meta"]["env"]["env_name"] == "jenkins":
self.data["meta"]["env"]["code_changes"] = ""
self.data["meta"]["env"]["code_changes_counts"] = ""
# Add ci_name until DM-18599 is not implemented
if "ci_url" in self.data["meta"]["env"].keys():
if "validate_drp_gen3" in self.data["meta"]["env"]["ci_url"]:
self.data["meta"]["env"]["ci_name"] = "validate_drp_gen3"
elif "validate_drp" in self.data["meta"]["env"]["ci_url"]:
self.data["meta"]["env"]["ci_name"] = "validate_drp"
elif "ap_verify" in self.data["meta"]["env"]["ci_url"]:
self.data["meta"]["env"]["ci_name"] = "ap_verify"
def process_metadata(self, data):
"""Process SQuaSH metadata using a pre-configured mapping to InfluxDB.
Parameters
----------
data : `dict`
A dictionary with SQuaSH metadata.
Return
------
tags : `<list>`
List of tags to be written to InfluxDB.
fields : `<list>`
List of fields to be written to InfluxDB.
"""
tags = []
fields = []
for key, value in data.items():
# process nested dict
if isinstance(value, dict):
tmp_tags, tmp_fields = self.process_metadata(value)
tags.extend(tmp_tags)
fields.extend(tmp_fields)
else:
schema, mapped_key, transformation = self.run_mapping(key)
if transformation:
value = eval(transformation)
if mapped_key and schema == "tag":
tags.append(
"{}={}".format(
Formatter.sanitize(mapped_key),
Formatter.sanitize(value),
)
)
elif mapped_key and schema == "field":
if isinstance(value, str):
fields.append(
'{}="{}"'.format(
Formatter.sanitize(mapped_key), value
)
)
else:
fields.append(
"{}={}".format(
Formatter.sanitize(mapped_key), value
)
)
# Make sure tags and fields are unique
tags = list(set(tags))
fields = list(set(fields))
return tags, fields
def get_meas_by_package(self):
"""Group verify measurements by package.
By grouping verify measurements by package we can send them to InfluxDB
in batch. A package is mapped to an InfluxDB measurement.
"""
meas_by_package = {}
for meas in self.data["measurements"]:
# DM-18360 - SQuaSH API/measurements should return the verification
# package
# a metric fqn is <package>.<metric>, extract package name from the
# metric fqn
package = None
if "." in meas["metric"]:
package = meas["metric"].split(".")[0]
if package:
# No need to carry the package name prefix in the metric name.
if meas["metric"].startswith(package):
metric = meas["metric"][len(package) + 1 :]
value = meas["value"]
# InfluxDB does not store NaNs and it is safe to just skip
# values that are NaN.
# https://github.com/influxdata/influxdb/issues/4089
if not math.isnan(value):
if package not in meas_by_package:
meas_by_package[package] = []
meas_by_package[package].append(f"{metric}={value}")
return meas_by_package
def to_influxdb_line(self):
"""Process job data and make the InfluxDB lines.
Returns
-------
influxdb_lines : `list`
A list with strings representing each InfluxDB line.
"""
timestamp = self.get_timestamp()
self.update_metadata()
tags, extra_fields = self.process_metadata(self.data["meta"])
meas_by_package = self.get_meas_by_package()
influxdb_lines = []
for meas in meas_by_package:
fields = meas_by_package[meas] + extra_fields
influxdb_lines.append(
Formatter.format_influxdb_line(meas, tags, fields, timestamp)
)
return influxdb_lines
|
[
"squash.tasks.utils.format.Formatter.format_influxdb_line",
"yaml.load",
"math.isnan",
"squash.tasks.utils.format.Formatter.format_timestamp",
"pathlib.Path",
"squash.tasks.utils.format.Formatter.sanitize",
"requests.get",
"logging.getLogger"
] |
[((415, 442), 'logging.getLogger', 'logging.getLogger', (['"""squash"""'], {}), "('squash')\n", (432, 442), False, 'import logging\n'), ((2711, 2764), 'squash.tasks.utils.format.Formatter.format_timestamp', 'Formatter.format_timestamp', (["self.data['date_created']"], {}), "(self.data['date_created'])\n", (2737, 2764), False, 'from squash.tasks.utils.format import Formatter\n'), ((1227, 1263), 'yaml.load', 'yaml.load', (['f'], {'Loader': 'yaml.FullLoader'}), '(f, Loader=yaml.FullLoader)\n', (1236, 1263), False, 'import yaml\n'), ((3637, 3677), 'squash.tasks.utils.format.Formatter.format_timestamp', 'Formatter.format_timestamp', (['date_created'], {}), '(date_created)\n', (3663, 3677), False, 'from squash.tasks.utils.format import Formatter\n'), ((1123, 1145), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1135, 1145), False, 'import pathlib\n'), ((3137, 3162), 'requests.get', 'requests.get', (['jenkins_url'], {}), '(jenkins_url)\n', (3149, 3162), False, 'import requests\n'), ((9157, 9218), 'squash.tasks.utils.format.Formatter.format_influxdb_line', 'Formatter.format_influxdb_line', (['meas', 'tags', 'fields', 'timestamp'], {}), '(meas, tags, fields, timestamp)\n', (9187, 9218), False, 'from squash.tasks.utils.format import Formatter\n'), ((8319, 8336), 'math.isnan', 'math.isnan', (['value'], {}), '(value)\n', (8329, 8336), False, 'import math\n'), ((6378, 6408), 'squash.tasks.utils.format.Formatter.sanitize', 'Formatter.sanitize', (['mapped_key'], {}), '(mapped_key)\n', (6396, 6408), False, 'from squash.tasks.utils.format import Formatter\n'), ((6438, 6463), 'squash.tasks.utils.format.Formatter.sanitize', 'Formatter.sanitize', (['value'], {}), '(value)\n', (6456, 6463), False, 'from squash.tasks.utils.format import Formatter\n'), ((6732, 6762), 'squash.tasks.utils.format.Formatter.sanitize', 'Formatter.sanitize', (['mapped_key'], {}), '(mapped_key)\n', (6750, 6762), False, 'from squash.tasks.utils.format import Formatter\n'), ((6967, 6997), 
'squash.tasks.utils.format.Formatter.sanitize', 'Formatter.sanitize', (['mapped_key'], {}), '(mapped_key)\n', (6985, 6997), False, 'from squash.tasks.utils.format import Formatter\n')]
|
#!/Library/Frameworks/Python.framework/Versions/3.7/bin/python3
#
# Copyright (C) 2019 <NAME> (<EMAIL>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from Crypto.Cipher import Blowfish
from struct import pack
bs = Blowfish.block_size
key = b'<KEY>'
ciphertext = b'MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAM/NIqP7cvUpBnT67AsbEdInIF9KlFiklgzEF4UP1vN1wTSnHuVzQD/DNYBtYRvQOg6sr+usGV2DrnsAn1lgatwlNV3ethTOSPsLfv8HA//LofTW2ZGZ0D4CsQZBWgjmHRppVYb+2DUiVZG2IPo4SWAhtfmwNuVMDWHq5oKFxauVAgMBAAECgYBSbhA4tk0ivRRnoQWzXhiSoZyw0DfKdfdjtwGRcfgVeXebuFOEN1ScNoZpnHnx+4acPZpHRWyGcO7sshGD9cBNPqP2hvp9d+YvH3JOczO+D3xnSlfnMii0XR7eTaF32+T73rB4G/cQ8+Gp9IeoZwrj60sa4WZUrOuvUeH4NQEIIQJBAOgi0iM973ZntKbeJBoEeIRX0nYIz5qGytXyeZJPFegUhX0Ljf9wQD9x8Zwm+8AhHmGyFasb1Cw/u4j7ATOnl90CQQDlKeRg0KOZ9W6h+4o2XlDcL5aUJcEZulWGvIbUXcKUWBdQbrwMbCb/6bPpjScQFpTR6tZla4S9IULKkHJGPUMZAkEA42sBra8Gw1xUGkp02dxZaWZUdHirUnsNik6TlafPEV/RazD/uylwd/ecOVvjtVV82z9JhSmtUnBZvJgTlFRzLQJBALej2HWU/GWV/nAkCOAEuLuaDwrtLk8VuQ/d6BYqhJEn/pbgBiXWTXJqr1gLWzBTSDLoA6MGhDqjesik9E5BLZECQFDVDPjE10MbqVvkFMRPcPJvECBn44TFeg2MseEAkQHVgbfuvVgZ3eX2nc3uzqbflCfgi1F1lINBeoJQIb4eexQ='
iv = ciphertext[:bs]
ciphertext = ciphertext[bs:]
cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv)
msg = cipher.decrypt(ciphertext)
last_byte = msg[-1]
msg = msg[:- (last_byte if type(last_byte) is int else ord(last_byte))]
print(repr(msg))
|
[
"Crypto.Cipher.Blowfish.new"
] |
[((1683, 1723), 'Crypto.Cipher.Blowfish.new', 'Blowfish.new', (['key', 'Blowfish.MODE_CBC', 'iv'], {}), '(key, Blowfish.MODE_CBC, iv)\n', (1695, 1723), False, 'from Crypto.Cipher import Blowfish\n')]
|
import hashlib
from time import time
from aiohttp import web
from aiohttp_session import get_session
from app.core.models.participant import Participant
routes = web.RouteTableDef()
def set_session(session, user_id, request):
session['email'] = str(user_id)
session['last_visit'] = time()
@routes.post('/login')
async def login(request):
login_data = await request.json()
email = login_data.get('email')
password = login_data.get('password')
participant = await Participant.get(request.app, email, password)
if participant:
session = await get_session(request)
set_session(session, str(participant), request)
response = web.json_response({'access': True}, status=200)
else:
response = web.json_response({'access': False}, status=403)
return response
@routes.post('/participant')
async def registration(request):
""" Register new participant """
session = await get_session(request)
if session.get('email') == 'admin':
data = await request.json()
result = await Participant.create(request.app, data)
if not result:
response = web.json_response({'result': 'success'}, status=200)
else:
response = web.json_response({'result': result}, status=400)
else:
response = web.json_response({'result': 'Недостаточно полномочий!'})
return response
@routes.post('/transfers')
async def transfer(request):
""" Get participants transactions """
session = await get_session(request)
participant_id = session.get("email")
if participant_id:
data = await request.json()
result = await Participant.get_transactions(request.app, participant_id, data)
response = web.json_response(result, status=200)
else:
response = web.json_response({'result': 'Левая сессия!'}, status=403)
return response
@routes.post('/transfer')
async def transfer(request):
""" Make participant transaction """
session = await get_session(request)
participant_id = session.get("email")
if participant_id:
data = await request.json()
result = await Participant.make_transaction(request.app, participant_id, data)
response = web.json_response(result, status=200)
else:
response = web.json_response({'result': 'Левая сессия!'}, status=403)
return response
|
[
"aiohttp.web.RouteTableDef",
"aiohttp_session.get_session",
"app.core.models.participant.Participant.make_transaction",
"aiohttp.web.json_response",
"time.time",
"app.core.models.participant.Participant.get_transactions",
"app.core.models.participant.Participant.create",
"app.core.models.participant.Participant.get"
] |
[((164, 183), 'aiohttp.web.RouteTableDef', 'web.RouteTableDef', ([], {}), '()\n', (181, 183), False, 'from aiohttp import web\n'), ((294, 300), 'time.time', 'time', ([], {}), '()\n', (298, 300), False, 'from time import time\n'), ((492, 537), 'app.core.models.participant.Participant.get', 'Participant.get', (['request.app', 'email', 'password'], {}), '(request.app, email, password)\n', (507, 537), False, 'from app.core.models.participant import Participant\n'), ((678, 725), 'aiohttp.web.json_response', 'web.json_response', (["{'access': True}"], {'status': '(200)'}), "({'access': True}, status=200)\n", (695, 725), False, 'from aiohttp import web\n'), ((755, 803), 'aiohttp.web.json_response', 'web.json_response', (["{'access': False}"], {'status': '(403)'}), "({'access': False}, status=403)\n", (772, 803), False, 'from aiohttp import web\n'), ((945, 965), 'aiohttp_session.get_session', 'get_session', (['request'], {}), '(request)\n', (956, 965), False, 'from aiohttp_session import get_session\n'), ((1318, 1375), 'aiohttp.web.json_response', 'web.json_response', (["{'result': 'Недостаточно полномочий!'}"], {}), "({'result': 'Недостаточно полномочий!'})\n", (1335, 1375), False, 'from aiohttp import web\n'), ((1516, 1536), 'aiohttp_session.get_session', 'get_session', (['request'], {}), '(request)\n', (1527, 1536), False, 'from aiohttp_session import get_session\n'), ((1744, 1781), 'aiohttp.web.json_response', 'web.json_response', (['result'], {'status': '(200)'}), '(result, status=200)\n', (1761, 1781), False, 'from aiohttp import web\n'), ((1811, 1869), 'aiohttp.web.json_response', 'web.json_response', (["{'result': 'Левая сессия!'}"], {'status': '(403)'}), "({'result': 'Левая сессия!'}, status=403)\n", (1828, 1869), False, 'from aiohttp import web\n'), ((2008, 2028), 'aiohttp_session.get_session', 'get_session', (['request'], {}), '(request)\n', (2019, 2028), False, 'from aiohttp_session import get_session\n'), ((2236, 2273), 'aiohttp.web.json_response', 
'web.json_response', (['result'], {'status': '(200)'}), '(result, status=200)\n', (2253, 2273), False, 'from aiohttp import web\n'), ((2303, 2361), 'aiohttp.web.json_response', 'web.json_response', (["{'result': 'Левая сессия!'}"], {'status': '(403)'}), "({'result': 'Левая сессия!'}, status=403)\n", (2320, 2361), False, 'from aiohttp import web\n'), ((582, 602), 'aiohttp_session.get_session', 'get_session', (['request'], {}), '(request)\n', (593, 602), False, 'from aiohttp_session import get_session\n'), ((1065, 1102), 'app.core.models.participant.Participant.create', 'Participant.create', (['request.app', 'data'], {}), '(request.app, data)\n', (1083, 1102), False, 'from app.core.models.participant import Participant\n'), ((1149, 1201), 'aiohttp.web.json_response', 'web.json_response', (["{'result': 'success'}"], {'status': '(200)'}), "({'result': 'success'}, status=200)\n", (1166, 1201), False, 'from aiohttp import web\n'), ((1239, 1288), 'aiohttp.web.json_response', 'web.json_response', (["{'result': result}"], {'status': '(400)'}), "({'result': result}, status=400)\n", (1256, 1288), False, 'from aiohttp import web\n'), ((1661, 1724), 'app.core.models.participant.Participant.get_transactions', 'Participant.get_transactions', (['request.app', 'participant_id', 'data'], {}), '(request.app, participant_id, data)\n', (1689, 1724), False, 'from app.core.models.participant import Participant\n'), ((2153, 2216), 'app.core.models.participant.Participant.make_transaction', 'Participant.make_transaction', (['request.app', 'participant_id', 'data'], {}), '(request.app, participant_id, data)\n', (2181, 2216), False, 'from app.core.models.participant import Participant\n')]
|
# Generated by Django 2.1.2 on 2019-08-29 07:41
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('money', models.IntegerField(default=100)),
('round_money', models.IntegerField(default=0)),
('cards', models.CharField(max_length=100, null=True)),
('state', models.CharField(default='out', max_length=100)),
('name', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='player', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Table',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pool', models.IntegerField(default=0)),
('deck', models.CharField(max_length=500, null=True)),
('cards_on_table', models.CharField(max_length=100, null=True)),
('game_state', models.CharField(default='ready', max_length=200, null=True)),
('big_blind', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='big_blind', to='game.Player')),
('dealer', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dealer', to='game.Player')),
('decission', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='decission', to='game.Player')),
('player1', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='player1', to='game.Player')),
('player2', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='player2', to='game.Player')),
('player3', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='player3', to='game.Player')),
('player4', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='player4', to='game.Player')),
('small_blind', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='small_blind', to='game.Player')),
],
),
migrations.AddField(
model_name='player',
name='table',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='tablee', to='game.Table'),
),
]
|
[
"django.db.models.OneToOneField",
"django.db.migrations.swappable_dependency",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.AutoField",
"django.db.models.IntegerField"
] |
[((247, 304), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (278, 304), False, 'from django.db import migrations, models\n'), ((2816, 2934), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""tablee"""', 'to': '"""game.Table"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n related_name='tablee', to='game.Table')\n", (2833, 2934), False, 'from django.db import migrations, models\n'), ((435, 528), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (451, 528), False, 'from django.db import migrations, models\n'), ((553, 585), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(100)'}), '(default=100)\n', (572, 585), False, 'from django.db import migrations, models\n'), ((620, 650), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (639, 650), False, 'from django.db import migrations, models\n'), ((679, 722), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)'}), '(max_length=100, null=True)\n', (695, 722), False, 'from django.db import migrations, models\n'), ((751, 798), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""out"""', 'max_length': '(100)'}), "(default='out', max_length=100)\n", (767, 798), False, 'from django.db import migrations, models\n'), ((826, 947), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""player"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE,\n related_name='player', 
to=settings.AUTH_USER_MODEL)\n", (846, 947), False, 'from django.db import migrations, models\n'), ((1074, 1167), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1090, 1167), False, 'from django.db import migrations, models\n'), ((1191, 1221), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1210, 1221), False, 'from django.db import migrations, models\n'), ((1249, 1292), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)', 'null': '(True)'}), '(max_length=500, null=True)\n', (1265, 1292), False, 'from django.db import migrations, models\n'), ((1330, 1373), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)'}), '(max_length=100, null=True)\n', (1346, 1373), False, 'from django.db import migrations, models\n'), ((1407, 1467), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""ready"""', 'max_length': '(200)', 'null': '(True)'}), "(default='ready', max_length=200, null=True)\n", (1423, 1467), False, 'from django.db import migrations, models\n'), ((1500, 1626), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""big_blind"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='big_blind', to='game.Player')\n", (1520, 1626), False, 'from django.db import migrations, models\n'), ((1651, 1774), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""dealer"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='dealer', to='game.Player')\n", 
(1671, 1774), False, 'from django.db import migrations, models\n'), ((1802, 1928), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""decission"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='decission', to='game.Player')\n", (1822, 1928), False, 'from django.db import migrations, models\n'), ((1954, 2078), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""player1"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='player1', to='game.Player')\n", (1974, 2078), False, 'from django.db import migrations, models\n'), ((2104, 2228), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""player2"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='player2', to='game.Player')\n", (2124, 2228), False, 'from django.db import migrations, models\n'), ((2254, 2378), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""player3"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='player3', to='game.Player')\n", (2274, 2378), False, 'from django.db import migrations, models\n'), ((2404, 2528), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""player4"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='player4', to='game.Player')\n", (2424, 2528), False, 'from django.db import migrations, 
models\n'), ((2558, 2686), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""small_blind"""', 'to': '"""game.Player"""'}), "(null=True, on_delete=django.db.models.deletion.\n SET_NULL, related_name='small_blind', to='game.Player')\n", (2578, 2686), False, 'from django.db import migrations, models\n')]
|
import click
@click.command()
def cli(args=None):
"""Console script for multi_notifier."""
click.echo("Replace this message by putting your code into "
"multi_notifier.cli.cli")
click.echo("See click documentation at https://click.palletsprojects.com/")
return 0
|
[
"click.echo",
"click.command"
] |
[((17, 32), 'click.command', 'click.command', ([], {}), '()\n', (30, 32), False, 'import click\n'), ((102, 190), 'click.echo', 'click.echo', (['"""Replace this message by putting your code into multi_notifier.cli.cli"""'], {}), "(\n 'Replace this message by putting your code into multi_notifier.cli.cli')\n", (112, 190), False, 'import click\n'), ((208, 283), 'click.echo', 'click.echo', (['"""See click documentation at https://click.palletsprojects.com/"""'], {}), "('See click documentation at https://click.palletsprojects.com/')\n", (218, 283), False, 'import click\n')]
|
"""basic array functions"""
import multiprocessing
import warnings
import numpy as np
try:
import numexpr
numexpr.set_num_threads(multiprocessing.cpu_count())
numexpr.set_vml_num_threads(multiprocessing.cpu_count())
except ImportError:
warnings.warn('numexpr not detected, use `sudo pip install numexpr`')
numexpr = None
def astype(array, dtype):
"""cast array to dtype
Parameters
----------
- array: array
- dtype: dtype to cast to
"""
if numexpr is None:
return array.astype(dtype)
result = np.zeros(array.shape, dtype=dtype)
return numexpr.evaluate('array', out=result, casting='unsafe')
def concatenate(arrays, axis, dtype=None, out=None):
"""concatenate arrays along axis
Parameters
----------
- arrays: iterable of arrays
- axis: int axis to concatenate
- dtype: dtype of result
- out: array in which to store result
"""
# compute sizes
ndim = arrays[0].ndim
other_axes = [other for other in range(arrays[0].ndim) if other != axis]
other_lengths = [arrays[0].shape[other_axis] for other_axis in other_axes]
axis_lengths = [array.shape[axis] for array in arrays]
axis_length = np.sum(axis_lengths)
result_shape = other_lengths[:axis] + [axis_length] + other_lengths[axis:]
# ensure sizes and dtypes are proper
for a, array in enumerate(arrays):
if len(array.shape) != ndim:
raise Exception('array' + str(a) + 'has wrong dimensions')
for ol, length in enumerate(other_lengths):
if array.shape[other_axes[ol]] != length:
raise Exception('bad axis ' + str(ol) + ' of array ' + str(a))
if out is not None:
if out.shape != result_shape:
raise Exception('out does not have shape ' + str(result_shape))
if dtype is not None and out.dtype != dtype:
raise Exception('out does not have dtype ' + str(dtype))
# initialize output
if out is None:
out = np.zeros(result_shape, dtype=dtype)
# fall back to numpy if numexpr not available
if numexpr is None:
out[:] = np.concatenate(arrays, axis=axis)
return out
# populate output
start = 0
slices = [slice(None) for d in range(ndim)]
for array in arrays:
end = start + array.shape[axis]
slices[axis] = slice(start, end)
numexpr.evaluate('array', out=out[slices])
start = end
return out
def isnan(X):
"""evaluate whether elements of X are infinite
Parameters
----------
- X: array to evaluate nan values of
"""
if numexpr is not None:
return numexpr.evaluate('X != X')
else:
return np.isnan(X)
def nan_to_num(X):
"""convert infinite values in array to 0
Parameters
----------
- X: array whose infinite values to convert
"""
if numexpr is not None:
X = copy(X)
X[isnan(X)] = 0
return X
else:
return np.nan_to_num(X)
def nan_to_num_inplace(X):
"""convert infinite values in array to 0 inplace
Parameters
----------
- X: array whose infinite values to convert
"""
if numexpr is not None:
X[isnan(X)] = 0
return X
else:
X[np.isnan(X)] = 0
return X
def copy(X):
"""return a copy of X
Parameters
----------
- X: array to copy
"""
if numexpr is not None:
return numexpr.evaluate('X')
else:
return np.copy(X)
|
[
"numpy.sum",
"numpy.nan_to_num",
"numpy.copy",
"numpy.zeros",
"numexpr.evaluate",
"numpy.isnan",
"warnings.warn",
"numpy.concatenate",
"multiprocessing.cpu_count"
] |
[((559, 593), 'numpy.zeros', 'np.zeros', (['array.shape'], {'dtype': 'dtype'}), '(array.shape, dtype=dtype)\n', (567, 593), True, 'import numpy as np\n'), ((605, 660), 'numexpr.evaluate', 'numexpr.evaluate', (['"""array"""'], {'out': 'result', 'casting': '"""unsafe"""'}), "('array', out=result, casting='unsafe')\n", (621, 660), False, 'import numexpr\n'), ((1211, 1231), 'numpy.sum', 'np.sum', (['axis_lengths'], {}), '(axis_lengths)\n', (1217, 1231), True, 'import numpy as np\n'), ((141, 168), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (166, 168), False, 'import multiprocessing\n'), ((202, 229), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (227, 229), False, 'import multiprocessing\n'), ((255, 324), 'warnings.warn', 'warnings.warn', (['"""numexpr not detected, use `sudo pip install numexpr`"""'], {}), "('numexpr not detected, use `sudo pip install numexpr`')\n", (268, 324), False, 'import warnings\n'), ((2004, 2039), 'numpy.zeros', 'np.zeros', (['result_shape'], {'dtype': 'dtype'}), '(result_shape, dtype=dtype)\n', (2012, 2039), True, 'import numpy as np\n'), ((2132, 2165), 'numpy.concatenate', 'np.concatenate', (['arrays'], {'axis': 'axis'}), '(arrays, axis=axis)\n', (2146, 2165), True, 'import numpy as np\n'), ((2384, 2426), 'numexpr.evaluate', 'numexpr.evaluate', (['"""array"""'], {'out': 'out[slices]'}), "('array', out=out[slices])\n", (2400, 2426), False, 'import numexpr\n'), ((2653, 2679), 'numexpr.evaluate', 'numexpr.evaluate', (['"""X != X"""'], {}), "('X != X')\n", (2669, 2679), False, 'import numexpr\n'), ((2705, 2716), 'numpy.isnan', 'np.isnan', (['X'], {}), '(X)\n', (2713, 2716), True, 'import numpy as np\n'), ((2984, 3000), 'numpy.nan_to_num', 'np.nan_to_num', (['X'], {}), '(X)\n', (2997, 3000), True, 'import numpy as np\n'), ((3439, 3460), 'numexpr.evaluate', 'numexpr.evaluate', (['"""X"""'], {}), "('X')\n", (3455, 3460), False, 'import numexpr\n'), ((3486, 3496), 'numpy.copy', 'np.copy', 
(['X'], {}), '(X)\n', (3493, 3496), True, 'import numpy as np\n'), ((3259, 3270), 'numpy.isnan', 'np.isnan', (['X'], {}), '(X)\n', (3267, 3270), True, 'import numpy as np\n')]
|
from django.contrib import admin
from .models import Deterrent
from .models import DeterrenceCampaign
from .models import DeterrenceMessage
class DeterrenceMessageInline(admin.TabularInline):
model = DeterrenceMessage
readonly_fields = ('date_created',
'status',
'related_phone_number',
'related_contact',
'body',
'related_campaign',
'related_deterrent')
list_display = readonly_fields
list_display_links = ('related_contact',
'related_campaign',
'related_phone_number',
'related_deterrent')
exclude = ['sid', 'deleted']
can_delete = False
def get_extra(self, request, obj=None, **kwargs):
if obj:
return 0
else:
return 1
@admin.register(Deterrent)
class DeterrentAdmin(admin.ModelAdmin):
pass
@admin.register(DeterrenceCampaign)
class DeterrenceCampaignAdmin(admin.ModelAdmin):
inlines = [DeterrenceMessageInline]
@admin.register(DeterrenceMessage)
class DeterrenceMessage(admin.ModelAdmin):
pass
|
[
"django.contrib.admin.register"
] |
[((914, 939), 'django.contrib.admin.register', 'admin.register', (['Deterrent'], {}), '(Deterrent)\n', (928, 939), False, 'from django.contrib import admin\n'), ((992, 1026), 'django.contrib.admin.register', 'admin.register', (['DeterrenceCampaign'], {}), '(DeterrenceCampaign)\n', (1006, 1026), False, 'from django.contrib import admin\n'), ((1119, 1152), 'django.contrib.admin.register', 'admin.register', (['DeterrenceMessage'], {}), '(DeterrenceMessage)\n', (1133, 1152), False, 'from django.contrib import admin\n')]
|
import asyncio
import discord
import random
from discord.ext import commands
from Cogs import Settings
from Cogs import DisplayName
from Cogs import Nullify
import requests
class Star:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True, no_pm=True)
async def randstar(self, ctx, *, text : str = None):
r = requests.get('https://sydneyerickson.me/starapi/rand.php')
await self.bot.say(r.content.decode("utf-8").replace(" ", "%20"))
|
[
"discord.ext.commands.command",
"requests.get"
] |
[((245, 292), 'discord.ext.commands.command', 'commands.command', ([], {'pass_context': '(True)', 'no_pm': '(True)'}), '(pass_context=True, no_pm=True)\n', (261, 292), False, 'from discord.ext import commands\n'), ((353, 411), 'requests.get', 'requests.get', (['"""https://sydneyerickson.me/starapi/rand.php"""'], {}), "('https://sydneyerickson.me/starapi/rand.php')\n", (365, 411), False, 'import requests\n')]
|
import pandas as pd
df = pd.read_csv('data/src/sample_pandas_normal.csv', index_col=0)
print(df)
# age state point
# name
# Alice 24 NY 64
# Bob 42 CA 92
# Charlie 18 CA 70
# Dave 68 TX 70
# Ellen 24 CA 88
# Frank 30 NY 57
print(df.index)
# Index(['Alice', 'Bob', 'Charlie', 'Dave', 'Ellen', 'Frank'], dtype='object', name='name')
print(df.index.str.contains('li'))
# [ True False True False False False]
print(df[df.index.str.contains('li')])
# age state point
# name
# Alice 24 NY 64
# Charlie 18 CA 70
print(df.index.str.endswith('e'))
# [ True False True True False False]
print(df[df.index.str.endswith('e')])
# age state point
# name
# Alice 24 NY 64
# Charlie 18 CA 70
# Dave 68 TX 70
print(df.columns)
# Index(['age', 'state', 'point'], dtype='object')
print(df.columns.str.endswith('e'))
# [ True True False]
print(df.loc[:, df.columns.str.endswith('e')])
# age state
# name
# Alice 24 NY
# Bob 42 CA
# Charlie 18 CA
# Dave 68 TX
# Ellen 24 CA
# Frank 30 NY
print(df.iloc[:, df.columns.str.endswith('e')])
# age state
# name
# Alice 24 NY
# Bob 42 CA
# Charlie 18 CA
# Dave 68 TX
# Ellen 24 CA
# Frank 30 NY
print(df.loc[df.index.str.contains('li'), df.columns.str.endswith('e')])
# age state
# name
# Alice 24 NY
# Charlie 18 CA
|
[
"pandas.read_csv"
] |
[((26, 87), 'pandas.read_csv', 'pd.read_csv', (['"""data/src/sample_pandas_normal.csv"""'], {'index_col': '(0)'}), "('data/src/sample_pandas_normal.csv', index_col=0)\n", (37, 87), True, 'import pandas as pd\n')]
|
from django.contrib import admin
from .models import Product, Category
# Pour pouvoir modifier les données de register depuis Product
# Dépuis admin ça permet d'ajouter du site ça permet de faire les modifications
admin.site.register(Product)
admin.site.register(Category)
# Register your models here.
|
[
"django.contrib.admin.site.register"
] |
[((215, 243), 'django.contrib.admin.site.register', 'admin.site.register', (['Product'], {}), '(Product)\n', (234, 243), False, 'from django.contrib import admin\n'), ((244, 273), 'django.contrib.admin.site.register', 'admin.site.register', (['Category'], {}), '(Category)\n', (263, 273), False, 'from django.contrib import admin\n')]
|
import asyncio
import datetime
import random
import time
from nonebot.command import CommandSession
from nonebot.experimental.plugin import on_command
from aiocqhttp.message import MessageSegment # aiocqhttp 是 nonebot 的自带依赖
import requests
import json
__plugin_name__ = 'english'
__plugin_usage__ = '用法: 对我说 "english",我会回复随机英语'
@on_command('english', aliases='英语')
async def _(session: CommandSession):
end_time = datetime.datetime.now()
start_time = datetime.datetime.now() + datetime.timedelta(days=-600)
a1 = tuple(start_time.timetuple()[0:9])
a2 = tuple(end_time.timetuple()[0:9])
start = time.mktime(a1)
end = time.mktime(a2)
t = random.randint(int(start), int(end))
date_touple = time.localtime(t)
date = time.strftime("%Y-%m-%d", date_touple)
res = requests.get("http://sentence.iciba.com/index.php?c=dailysentence&m=getdetail&title=" + date)
json_str = json.loads(res.text)
chinese=json_str["note"]
english=json_str["content"]
pic = json_str["picture2"]
voice=json_str["tts"]
await session.send( "英文原文:" + english
+ "\n翻译:" + chinese
+ "\n封面:" + MessageSegment.image(pic))
await session.send(MessageSegment.record(voice))
|
[
"nonebot.experimental.plugin.on_command",
"json.loads",
"aiocqhttp.message.MessageSegment.image",
"time.strftime",
"time.mktime",
"datetime.timedelta",
"aiocqhttp.message.MessageSegment.record",
"requests.get",
"datetime.datetime.now",
"time.localtime"
] |
[((334, 369), 'nonebot.experimental.plugin.on_command', 'on_command', (['"""english"""'], {'aliases': '"""英语"""'}), "('english', aliases='英语')\n", (344, 369), False, 'from nonebot.experimental.plugin import on_command\n'), ((423, 446), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (444, 446), False, 'import datetime\n'), ((618, 633), 'time.mktime', 'time.mktime', (['a1'], {}), '(a1)\n', (629, 633), False, 'import time\n'), ((644, 659), 'time.mktime', 'time.mktime', (['a2'], {}), '(a2)\n', (655, 659), False, 'import time\n'), ((723, 740), 'time.localtime', 'time.localtime', (['t'], {}), '(t)\n', (737, 740), False, 'import time\n'), ((752, 790), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""', 'date_touple'], {}), "('%Y-%m-%d', date_touple)\n", (765, 790), False, 'import time\n'), ((801, 903), 'requests.get', 'requests.get', (["('http://sentence.iciba.com/index.php?c=dailysentence&m=getdetail&title=' +\n date)"], {}), "(\n 'http://sentence.iciba.com/index.php?c=dailysentence&m=getdetail&title=' +\n date)\n", (813, 903), False, 'import requests\n'), ((910, 930), 'json.loads', 'json.loads', (['res.text'], {}), '(res.text)\n', (920, 930), False, 'import json\n'), ((464, 487), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (485, 487), False, 'import datetime\n'), ((490, 519), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(-600)'}), '(days=-600)\n', (508, 519), False, 'import datetime\n'), ((1219, 1247), 'aiocqhttp.message.MessageSegment.record', 'MessageSegment.record', (['voice'], {}), '(voice)\n', (1240, 1247), False, 'from aiocqhttp.message import MessageSegment\n'), ((1175, 1200), 'aiocqhttp.message.MessageSegment.image', 'MessageSegment.image', (['pic'], {}), '(pic)\n', (1195, 1200), False, 'from aiocqhttp.message import MessageSegment\n')]
|
#!/usr/bin/python3
"""Unit tests for checkimages script."""
#
# (C) Pywikibot team, 2015-2022
#
# Distributed under the terms of the MIT license.
#
import unittest
from pywikibot import FilePage
from scripts import checkimages
from tests.aspects import DefaultSiteTestCase, TestCase
class TestSettings(TestCase):
"""Test checkimages settings."""
family = 'commons'
code = 'commons'
login = True
def test_load(self):
"""Test loading settings."""
b = checkimages.CheckImagesBot(self.get_site())
b.takesettings()
rv = b.settings_data
item1 = rv[0]
self.assertEqual(item1[0], 1)
self.assertEqual(item1[1], 'a deprecated template')
class TestMethods(DefaultSiteTestCase):
"""Test methods of CheckImagesBot."""
def test_important_image(self):
"""Test important_image method."""
filenames = ('Example.jpg', 'Demo.jpg')
images = [(0.0, FilePage(self.site, name)) for name in filenames]
self.assertEqual(checkimages.CheckImagesBot.important_image(images),
FilePage(self.site, 'Example.jpg'))
if __name__ == '__main__': # pragma: no cover
unittest.main()
|
[
"unittest.main",
"pywikibot.FilePage",
"scripts.checkimages.CheckImagesBot.important_image"
] |
[((1188, 1203), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1201, 1203), False, 'import unittest\n'), ((1022, 1072), 'scripts.checkimages.CheckImagesBot.important_image', 'checkimages.CheckImagesBot.important_image', (['images'], {}), '(images)\n', (1064, 1072), False, 'from scripts import checkimages\n'), ((1099, 1133), 'pywikibot.FilePage', 'FilePage', (['self.site', '"""Example.jpg"""'], {}), "(self.site, 'Example.jpg')\n", (1107, 1133), False, 'from pywikibot import FilePage\n'), ((947, 972), 'pywikibot.FilePage', 'FilePage', (['self.site', 'name'], {}), '(self.site, name)\n', (955, 972), False, 'from pywikibot import FilePage\n')]
|
# -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy_redis_loadbalancing.spiders import RedisSpider
class TestSpider(RedisSpider):
name = 'test'
# allowed_domains = ['localhost']
# start_urls = ['http://localhost:8998/']
link_extractor = LinkExtractor()
def parse(self, response):
for link in self.link_extractor.extract_links(response):
yield scrapy.Request(link.url, callback=self.parse)
|
[
"scrapy.linkextractors.LinkExtractor",
"scrapy.Request"
] |
[((299, 314), 'scrapy.linkextractors.LinkExtractor', 'LinkExtractor', ([], {}), '()\n', (312, 314), False, 'from scrapy.linkextractors import LinkExtractor\n'), ((430, 475), 'scrapy.Request', 'scrapy.Request', (['link.url'], {'callback': 'self.parse'}), '(link.url, callback=self.parse)\n', (444, 475), False, 'import scrapy\n')]
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert UV crops to full UV maps."""
import os
import sys
import json
from PIL import Image
import numpy as np
def place_crop(crop, image, center_x, center_y):
"""Place the crop in the image at the specified location."""
im_height, im_width = image.shape[:2]
crop_height, crop_width = crop.shape[:2]
left = center_x - crop_width // 2
right = left + crop_width
top = center_y - crop_height // 2
bottom = top + crop_height
adjusted_crop = crop # remove regions of crop that go beyond image bounds
if left < 0:
adjusted_crop = adjusted_crop[:, -left:]
if right > im_width:
adjusted_crop = adjusted_crop[:, :(im_width - right)]
if top < 0:
adjusted_crop = adjusted_crop[-top:]
if bottom > im_height:
adjusted_crop = adjusted_crop[:(im_height - bottom)]
crop_mask = (adjusted_crop > 0).astype(crop.dtype).sum(-1, keepdims=True)
image[max(0, top):min(im_height, bottom), max(0, left):min(im_width, right)] *= (1 - crop_mask)
image[max(0, top):min(im_height, bottom), max(0, left):min(im_width, right)] += adjusted_crop
return image
def crop2full(keypoints_path, metadata_path, uvdir, outdir):
"""Create each frame's layer UVs from predicted UV crops"""
with open(keypoints_path) as f:
kp_data = json.load(f)
# Get all people ids
people_ids = set()
for frame in kp_data:
for skeleton in kp_data[frame]:
people_ids.add(skeleton['idx'])
people_ids = sorted(list(people_ids))
with open(metadata_path) as f:
metadata = json.load(f)
orig_size = np.array(metadata['alphapose_input_size'][::-1])
out_size = np.array(metadata['size_LR'][::-1])
if 'people_layers' in metadata:
people_layers = metadata['people_layers']
else:
people_layers = [[pid] for pid in people_ids]
# Create output directories.
for layer_i in range(1, 1 + len(people_layers)):
os.makedirs(os.path.join(outdir, f'{layer_i:02d}'), exist_ok=True)
print(f'Writing UVs to {outdir}')
for frame in sorted(kp_data):
for layer_i, layer in enumerate(people_layers, 1):
out_path = os.path.join(outdir, f'{layer_i:02d}', frame)
sys.stdout.flush()
sys.stdout.write('processing frame %s\r' % out_path)
uv_map = np.zeros([out_size[0], out_size[1], 4])
for person_id in layer:
matches = [p for p in kp_data[frame] if p['idx'] == person_id]
if len(matches) == 0: # person doesn't appear in this frame
continue
skeleton = matches[0]
kps = np.array(skeleton['keypoints']).reshape(17, 3)
# Get kps bounding box.
left = kps[:, 0].min()
right = kps[:, 0].max()
top = kps[:, 1].min()
bottom = kps[:, 1].max()
height = bottom - top
width = right - left
orig_crop_size = max(height, width)
orig_center_x = (left + right) // 2
orig_center_y = (top + bottom) // 2
# read predicted uv map
uv_crop_path = os.path.join(uvdir, f'{person_id:02d}_{os.path.basename(out_path)[:-4]}_output_uv.png')
if os.path.exists(uv_crop_path):
uv_crop = np.array(Image.open(uv_crop_path))
else:
uv_crop = np.zeros([256, 256, 3])
# add person ID channel
person_mask = (uv_crop[..., 0:1] > 0).astype('uint8')
person_ids = (255 - person_id) * person_mask
uv_crop = np.concatenate([uv_crop, person_ids], -1)
# scale crop to desired output size
# 256 is the crop size, 192 is the inner crop size
out_crop_size = orig_crop_size * 256./192 * out_size / orig_size
out_crop_size = out_crop_size.astype(np.int)
uv_crop = uv_crop.astype(np.uint8)
uv_crop = np.array(Image.fromarray(uv_crop).resize((out_crop_size[1], out_crop_size[0]), resample=Image.NEAREST))
# scale center coordinate accordingly
out_center_x = (orig_center_x * out_size[1] / orig_size[1]).astype(np.int)
out_center_y = (orig_center_y * out_size[0] / orig_size[0]).astype(np.int)
# Place UV crop in full UV map and save.
uv_map = place_crop(uv_crop, uv_map, out_center_x, out_center_y)
uv_map = Image.fromarray(uv_map.astype('uint8'))
uv_map.save(out_path)
if __name__ == "__main__":
import argparse
arguments = argparse.ArgumentParser()
arguments.add_argument('--dataroot', type=str)
opt = arguments.parse_args()
keypoints_path = os.path.join(opt.dataroot, 'keypoints.json')
metadata_path = os.path.join(opt.dataroot, 'metadata.json')
uvdir = os.path.join(opt.dataroot, 'kp2uv/test_latest/images')
outdir = os.path.join(opt.dataroot, 'iuv')
crop2full(keypoints_path, metadata_path, uvdir, outdir)
|
[
"sys.stdout.write",
"json.load",
"argparse.ArgumentParser",
"os.path.basename",
"numpy.zeros",
"os.path.exists",
"PIL.Image.open",
"sys.stdout.flush",
"numpy.array",
"PIL.Image.fromarray",
"os.path.join",
"numpy.concatenate"
] |
[((2186, 2234), 'numpy.array', 'np.array', (["metadata['alphapose_input_size'][::-1]"], {}), "(metadata['alphapose_input_size'][::-1])\n", (2194, 2234), True, 'import numpy as np\n'), ((2250, 2285), 'numpy.array', 'np.array', (["metadata['size_LR'][::-1]"], {}), "(metadata['size_LR'][::-1])\n", (2258, 2285), True, 'import numpy as np\n'), ((5283, 5308), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5306, 5308), False, 'import argparse\n'), ((5415, 5459), 'os.path.join', 'os.path.join', (['opt.dataroot', '"""keypoints.json"""'], {}), "(opt.dataroot, 'keypoints.json')\n", (5427, 5459), False, 'import os\n'), ((5480, 5523), 'os.path.join', 'os.path.join', (['opt.dataroot', '"""metadata.json"""'], {}), "(opt.dataroot, 'metadata.json')\n", (5492, 5523), False, 'import os\n'), ((5536, 5590), 'os.path.join', 'os.path.join', (['opt.dataroot', '"""kp2uv/test_latest/images"""'], {}), "(opt.dataroot, 'kp2uv/test_latest/images')\n", (5548, 5590), False, 'import os\n'), ((5604, 5637), 'os.path.join', 'os.path.join', (['opt.dataroot', '"""iuv"""'], {}), "(opt.dataroot, 'iuv')\n", (5616, 5637), False, 'import os\n'), ((1887, 1899), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1896, 1899), False, 'import json\n'), ((2156, 2168), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2165, 2168), False, 'import json\n'), ((2544, 2582), 'os.path.join', 'os.path.join', (['outdir', 'f"""{layer_i:02d}"""'], {}), "(outdir, f'{layer_i:02d}')\n", (2556, 2582), False, 'import os\n'), ((2754, 2799), 'os.path.join', 'os.path.join', (['outdir', 'f"""{layer_i:02d}"""', 'frame'], {}), "(outdir, f'{layer_i:02d}', frame)\n", (2766, 2799), False, 'import os\n'), ((2812, 2830), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2828, 2830), False, 'import sys\n'), ((2843, 2895), 'sys.stdout.write', 'sys.stdout.write', (["('processing frame %s\\r' % out_path)"], {}), "('processing frame %s\\r' % out_path)\n", (2859, 2895), False, 'import sys\n'), ((2917, 2956), 
'numpy.zeros', 'np.zeros', (['[out_size[0], out_size[1], 4]'], {}), '([out_size[0], out_size[1], 4])\n', (2925, 2956), True, 'import numpy as np\n'), ((3893, 3921), 'os.path.exists', 'os.path.exists', (['uv_crop_path'], {}), '(uv_crop_path)\n', (3907, 3921), False, 'import os\n'), ((4262, 4303), 'numpy.concatenate', 'np.concatenate', (['[uv_crop, person_ids]', '(-1)'], {}), '([uv_crop, person_ids], -1)\n', (4276, 4303), True, 'import numpy as np\n'), ((4040, 4063), 'numpy.zeros', 'np.zeros', (['[256, 256, 3]'], {}), '([256, 256, 3])\n', (4048, 4063), True, 'import numpy as np\n'), ((3238, 3269), 'numpy.array', 'np.array', (["skeleton['keypoints']"], {}), "(skeleton['keypoints'])\n", (3246, 3269), True, 'import numpy as np\n'), ((3962, 3986), 'PIL.Image.open', 'Image.open', (['uv_crop_path'], {}), '(uv_crop_path)\n', (3972, 3986), False, 'from PIL import Image\n'), ((4652, 4676), 'PIL.Image.fromarray', 'Image.fromarray', (['uv_crop'], {}), '(uv_crop)\n', (4667, 4676), False, 'from PIL import Image\n'), ((3825, 3851), 'os.path.basename', 'os.path.basename', (['out_path'], {}), '(out_path)\n', (3841, 3851), False, 'import os\n')]
|
import collections
import copy
import datetime
import gc
import time
# import torch
import numpy as np
from util.logconf import logging
log = logging.getLogger(__name__)
# log.setLevel(logging.WARN)
# log.setLevel(logging.INFO)
log.setLevel(logging.DEBUG)
IrcTuple = collections.namedtuple('IrcTuple', ['index', 'row', 'col'])
XyzTuple = collections.namedtuple('XyzTuple', ['x', 'y', 'z'])
def irc2xyz(coord_irc, origin_xyz, vxSize_xyz, direction_a):
cri_a = np.array(coord_irc)[::-1]
origin_a = np.array(origin_xyz)
vxSize_a = np.array(vxSize_xyz)
coords_xyz = (direction_a @ (cri_a * vxSize_a)) + origin_a
# coords_xyz = (direction_a @ (idx * vxSize_a)) + origin_a
return XyzTuple(*coords_xyz)
def xyz2irc(coord_xyz, origin_xyz, vxSize_xyz, direction_a):
origin_a = np.array(origin_xyz)
vxSize_a = np.array(vxSize_xyz)
coord_a = np.array(coord_xyz)
cri_a = ((coord_a - origin_a) @ np.linalg.inv(direction_a)) / vxSize_a
cri_a = np.round(cri_a)
return IrcTuple(int(cri_a[2]), int(cri_a[1]), int(cri_a[0]))
def importstr(module_str, from_=None):
"""
>>> importstr('os')
<module 'os' from '.../os.pyc'>
>>> importstr('math', 'fabs')
<built-in function fabs>
"""
if from_ is None and ':' in module_str:
module_str, from_ = module_str.rsplit(':')
module = __import__(module_str)
for sub_str in module_str.split('.')[1:]:
module = getattr(module, sub_str)
if from_:
try:
return getattr(module, from_)
except:
raise ImportError('{}.{}'.format(module_str, from_))
return module
# class dotdict(dict):
# '''dict where key can be access as attribute d.key -> d[key]'''
# @classmethod
# def deep(cls, dic_obj):
# '''Initialize from dict with deep conversion'''
# return cls(dic_obj).deepConvert()
#
# def __getattr__(self, attr):
# if attr in self:
# return self[attr]
# log.error(sorted(self.keys()))
# raise AttributeError(attr)
# #return self.get(attr, None)
# __setattr__= dict.__setitem__
# __delattr__= dict.__delitem__
#
#
# def __copy__(self):
# return dotdict(self)
#
# def __deepcopy__(self, memo):
# new_dict = dotdict()
# for k, v in self.items():
# new_dict[k] = copy.deepcopy(v, memo)
# return new_dict
#
# # pylint: disable=multiple-statements
# def __getstate__(self): return self.__dict__
# def __setstate__(self, d): self.__dict__.update(d)
#
# def deepConvert(self):
# '''Convert all dicts at all tree levels into dotdict'''
# for k, v in self.items():
# if type(v) is dict: # pylint: disable=unidiomatic-typecheck
# self[k] = dotdict(v)
# self[k].deepConvert()
# try: # try enumerable types
# for m, x in enumerate(v):
# if type(x) is dict: # pylint: disable=unidiomatic-typecheck
# x = dotdict(x)
# x.deepConvert()
# v[m] = x#
# except TypeError:
# pass
# return self
#
# def copy(self):
# # override dict.copy()
# return dotdict(self)
def prhist(ary, prefix_str=None, **kwargs):
if prefix_str is None:
prefix_str = ''
else:
prefix_str += ' '
count_ary, bins_ary = np.histogram(ary, **kwargs)
for i in range(count_ary.shape[0]):
print("{}{:-8.2f}".format(prefix_str, bins_ary[i]), "{:-10}".format(count_ary[i]))
print("{}{:-8.2f}".format(prefix_str, bins_ary[-1]))
# def dumpCuda():
# # small_count = 0
# total_bytes = 0
# size2count_dict = collections.defaultdict(int)
# size2bytes_dict = {}
# for obj in gc.get_objects():
# if isinstance(obj, torch.cuda._CudaBase):
# nbytes = 4
# for n in obj.size():
# nbytes *= n
#
# size2count_dict[tuple([obj.get_device()] + list(obj.size()))] += 1
# size2bytes_dict[tuple([obj.get_device()] + list(obj.size()))] = nbytes
#
# total_bytes += nbytes
#
# # print(small_count, "tensors equal to or less than than 16 bytes")
# for size, count in sorted(size2count_dict.items(), key=lambda sc: (size2bytes_dict[sc[0]] * sc[1], sc[1], sc[0])):
# print('{:4}x'.format(count), '{:10,}'.format(size2bytes_dict[size]), size)
# print('{:10,}'.format(total_bytes), "total bytes")
def enumerateWithEstimate(
iter,
desc_str,
start_ndx=0,
print_ndx=4,
backoff=None,
iter_len=None,
):
"""
In terms of behavior, `enumerateWithEstimate` is almost identical
to the standard `enumerate` (the differences are things like how
our function returns a generator, while `enumerate` returns a
specialized `<enumerate object at 0x...>`).
However, the side effects (logging, specifically) are what make the
function interesting.
:param iter: `iter` is the iterable that will be passed into
`enumerate`. Required.
:param desc_str: This is a human-readable string that describes
what the loop is doing. The value is arbitrary, but should be
kept reasonably short. Things like `"epoch 4 training"` or
`"deleting temp files"` or similar would all make sense.
:param start_ndx: This parameter defines how many iterations of the
loop should be skipped before timing actually starts. Skipping
a few iterations can be useful if there are startup costs like
caching that are only paid early on, resulting in a skewed
average when those early iterations dominate the average time
per iteration.
NOTE: Using `start_ndx` to skip some iterations makes the time
spent performing those iterations not be included in the
displayed duration. Please account for this if you use the
displayed duration for anything formal.
This parameter defaults to `0`.
:param print_ndx: determines which loop interation that the timing
logging will start on. The intent is that we don't start
logging until we've given the loop a few iterations to let the
average time-per-iteration a chance to stablize a bit. We
require that `print_ndx` not be less than `start_ndx` times
`backoff`, since `start_ndx` greater than `0` implies that the
early N iterations are unstable from a timing perspective.
`print_ndx` defaults to `4`.
:param backoff: This is used to how many iterations to skip before
logging again. Frequent logging is less interesting later on,
so by default we double the gap between logging messages each
time after the first.
`backoff` defaults to `2` unless iter_len is > 1000, in which
case it defaults to `4`.
:param iter_len: Since we need to know the number of items to
estimate when the loop will finish, that can be provided by
passing in a value for `iter_len`. If a value isn't provided,
then it will be set by using the value of `len(iter)`.
:return:
"""
if iter_len is None:
iter_len = len(iter)
if backoff is None:
backoff = 2
while backoff ** 7 < iter_len:
backoff *= 2
assert backoff >= 2
while print_ndx < start_ndx * backoff:
print_ndx *= backoff
log.warning("{} ----/{}, starting".format(
desc_str,
iter_len,
))
start_ts = time.time()
for (current_ndx, item) in enumerate(iter):
yield (current_ndx, item)
if current_ndx == print_ndx:
# ... <1>
duration_sec = ((time.time() - start_ts)
/ (current_ndx - start_ndx + 1)
* (iter_len-start_ndx)
)
done_dt = datetime.datetime.fromtimestamp(start_ts + duration_sec)
done_td = datetime.timedelta(seconds=duration_sec)
log.info("{} {:-4}/{}, done at {}, {}".format(
desc_str,
current_ndx,
iter_len,
str(done_dt).rsplit('.', 1)[0],
str(done_td).rsplit('.', 1)[0],
))
print_ndx *= backoff
if current_ndx + 1 == start_ndx:
start_ts = time.time()
log.warning("{} ----/{}, done at {}".format(
desc_str,
iter_len,
str(datetime.datetime.now()).rsplit('.', 1)[0],
))
#
# try:
# import matplotlib
# matplotlib.use('agg', warn=False)
#
# import matplotlib.pyplot as plt
# # matplotlib color maps
# cdict = {'red': ((0.0, 1.0, 1.0),
# # (0.5, 1.0, 1.0),
# (1.0, 1.0, 1.0)),
#
# 'green': ((0.0, 0.0, 0.0),
# (0.5, 0.0, 0.0),
# (1.0, 0.5, 0.5)),
#
# 'blue': ((0.0, 0.0, 0.0),
# # (0.5, 0.5, 0.5),
# # (0.75, 0.0, 0.0),
# (1.0, 0.0, 0.0)),
#
# 'alpha': ((0.0, 0.0, 0.0),
# (0.75, 0.5, 0.5),
# (1.0, 0.5, 0.5))}
#
# plt.register_cmap(name='mask', data=cdict)
#
# cdict = {'red': ((0.0, 0.0, 0.0),
# (0.25, 1.0, 1.0),
# (1.0, 1.0, 1.0)),
#
# 'green': ((0.0, 1.0, 1.0),
# (0.25, 1.0, 1.0),
# (0.5, 0.0, 0.0),
# (1.0, 0.0, 0.0)),
#
# 'blue': ((0.0, 0.0, 0.0),
# # (0.5, 0.5, 0.5),
# # (0.75, 0.0, 0.0),
# (1.0, 0.0, 0.0)),
#
# 'alpha': ((0.0, 0.15, 0.15),
# (0.5, 0.3, 0.3),
# (0.8, 0.0, 0.0),
# (1.0, 0.0, 0.0))}
#
# plt.register_cmap(name='maskinvert', data=cdict)
# except ImportError:
# pass
|
[
"datetime.datetime.now",
"time.time",
"util.logconf.logging.getLogger",
"numpy.histogram",
"numpy.array",
"collections.namedtuple",
"numpy.linalg.inv",
"datetime.datetime.fromtimestamp",
"datetime.timedelta",
"numpy.round"
] |
[((144, 171), 'util.logconf.logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (161, 171), False, 'from util.logconf import logging\n'), ((270, 329), 'collections.namedtuple', 'collections.namedtuple', (['"""IrcTuple"""', "['index', 'row', 'col']"], {}), "('IrcTuple', ['index', 'row', 'col'])\n", (292, 329), False, 'import collections\n'), ((341, 392), 'collections.namedtuple', 'collections.namedtuple', (['"""XyzTuple"""', "['x', 'y', 'z']"], {}), "('XyzTuple', ['x', 'y', 'z'])\n", (363, 392), False, 'import collections\n'), ((508, 528), 'numpy.array', 'np.array', (['origin_xyz'], {}), '(origin_xyz)\n', (516, 528), True, 'import numpy as np\n'), ((544, 564), 'numpy.array', 'np.array', (['vxSize_xyz'], {}), '(vxSize_xyz)\n', (552, 564), True, 'import numpy as np\n'), ((801, 821), 'numpy.array', 'np.array', (['origin_xyz'], {}), '(origin_xyz)\n', (809, 821), True, 'import numpy as np\n'), ((837, 857), 'numpy.array', 'np.array', (['vxSize_xyz'], {}), '(vxSize_xyz)\n', (845, 857), True, 'import numpy as np\n'), ((872, 891), 'numpy.array', 'np.array', (['coord_xyz'], {}), '(coord_xyz)\n', (880, 891), True, 'import numpy as np\n'), ((979, 994), 'numpy.round', 'np.round', (['cri_a'], {}), '(cri_a)\n', (987, 994), True, 'import numpy as np\n'), ((3456, 3483), 'numpy.histogram', 'np.histogram', (['ary'], {}), '(ary, **kwargs)\n', (3468, 3483), True, 'import numpy as np\n'), ((7606, 7617), 'time.time', 'time.time', ([], {}), '()\n', (7615, 7617), False, 'import time\n'), ((467, 486), 'numpy.array', 'np.array', (['coord_irc'], {}), '(coord_irc)\n', (475, 486), True, 'import numpy as np\n'), ((928, 954), 'numpy.linalg.inv', 'np.linalg.inv', (['direction_a'], {}), '(direction_a)\n', (941, 954), True, 'import numpy as np\n'), ((7976, 8032), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(start_ts + duration_sec)'], {}), '(start_ts + duration_sec)\n', (8007, 8032), False, 'import datetime\n'), ((8055, 8095), 
'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'duration_sec'}), '(seconds=duration_sec)\n', (8073, 8095), False, 'import datetime\n'), ((8447, 8458), 'time.time', 'time.time', ([], {}), '()\n', (8456, 8458), False, 'import time\n'), ((7788, 7799), 'time.time', 'time.time', ([], {}), '()\n', (7797, 7799), False, 'import time\n'), ((8557, 8580), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8578, 8580), False, 'import datetime\n')]
|
import os
import sys
import argparse
import datetime
import time
import os.path as osp
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import numpy as np
import torch
import torch.nn as nn
from torch.optim import lr_scheduler
import torch.backends.cudnn as cudnn
import datasets
import models
from utils import AverageMeter, Logger
from center_loss import CenterLoss
parser = argparse.ArgumentParser("Center Loss Example")
# dataset
parser.add_argument('-d', '--dataset', type=str, default='mnist', choices=['mnist'])
parser.add_argument('-j', '--workers', default=4, type=int,
help="number of data loading workers (default: 4)")
# optimization
parser.add_argument('--batch-size', type=int, default=128)
parser.add_argument('--lr-model', type=float, default=0.001, help="learning rate for model")
parser.add_argument('--lr-cent', type=float, default=0.5, help="learning rate for center loss")
parser.add_argument('--weight-cent', type=float, default=1, help="weight for center loss")
parser.add_argument('--max-epoch', type=int, default=100)
parser.add_argument('--stepsize', type=int, default=20)
parser.add_argument('--gamma', type=float, default=0.5, help="learning rate decay")
# model
parser.add_argument('--model', type=str, default='cnn')
# misc
parser.add_argument('--eval-freq', type=int, default=10)
parser.add_argument('--print-freq', type=int, default=50)
parser.add_argument('--gpu', type=str, default='0')
parser.add_argument('--seed', type=int, default=1)
parser.add_argument('--use-cpu', action='store_true')
parser.add_argument('--save-dir', type=str, default='log')
parser.add_argument('--plot', action='store_true', help="whether to plot features for every epoch")
args = parser.parse_args()
def main():
torch.manual_seed(args.seed)
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
use_gpu = torch.cuda.is_available()
if args.use_cpu: use_gpu = False
sys.stdout = Logger(osp.join(args.save_dir, 'log_' + args.dataset + '.txt'))
if use_gpu:
print("Currently using GPU: {}".format(args.gpu))
cudnn.benchmark = True
torch.cuda.manual_seed_all(args.seed)
else:
print("Currently using CPU")
print("Creating dataset: {}".format(args.dataset))
dataset = datasets.create(
name=args.dataset, batch_size=args.batch_size, use_gpu=use_gpu,
num_workers=args.workers,
)
trainloader, testloader = dataset.trainloader, dataset.testloader
print("Creating model: {}".format(args.model))
model = models.create(name=args.model, num_classes=dataset.num_classes)
if use_gpu:
model = nn.DataParallel(model).cuda()
criterion_xent = nn.CrossEntropyLoss()
criterion_cent = CenterLoss(num_classes=dataset.num_classes, feat_dim=2, use_gpu=use_gpu)
optimizer_model = torch.optim.SGD(model.parameters(), lr=args.lr_model, weight_decay=5e-04, momentum=0.9)
optimizer_centloss = torch.optim.SGD(criterion_cent.parameters(), lr=args.lr_cent)
if args.stepsize > 0:
scheduler = lr_scheduler.StepLR(optimizer_model, step_size=args.stepsize, gamma=args.gamma)
start_time = time.time()
for epoch in range(args.max_epoch):
print("==> Epoch {}/{}".format(epoch+1, args.max_epoch))
train(model, criterion_xent, criterion_cent,
optimizer_model, optimizer_centloss,
trainloader, use_gpu, dataset.num_classes, epoch)
if args.stepsize > 0: scheduler.step()
if args.eval_freq > 0 and (epoch+1) % args.eval_freq == 0 or (epoch+1) == args.max_epoch:
print("==> Test")
acc, err = test(model, testloader, use_gpu, dataset.num_classes, epoch)
print("Accuracy (%): {}\t Error rate (%): {}".format(acc, err))
elapsed = round(time.time() - start_time)
elapsed = str(datetime.timedelta(seconds=elapsed))
print("Finished. Total elapsed time (h:m:s): {}".format(elapsed))
def train(model, criterion_xent, criterion_cent,
optimizer_model, optimizer_centloss,
trainloader, use_gpu, num_classes, epoch):
model.train()
xent_losses = AverageMeter()
cent_losses = AverageMeter()
losses = AverageMeter()
if args.plot:
all_features, all_labels = [], []
for batch_idx, (data, labels) in enumerate(trainloader):
if use_gpu:
data, labels = data.cuda(), labels.cuda()
features, outputs = model(data)
loss_xent = criterion_xent(outputs, labels)
loss_cent = criterion_cent(features, labels)
loss_cent *= args.weight_cent
loss = loss_xent + loss_cent
optimizer_model.zero_grad()
optimizer_centloss.zero_grad()
loss.backward()
optimizer_model.step()
# by doing so, weight_cent would not impact on the learning of centers
for param in criterion_cent.parameters():
param.grad.data *= (1. / args.weight_cent)
optimizer_centloss.step()
losses.update(loss.item(), labels.size(0))
xent_losses.update(loss_xent.item(), labels.size(0))
cent_losses.update(loss_cent.item(), labels.size(0))
if args.plot:
if use_gpu:
all_features.append(features.data.cpu().numpy())
all_labels.append(labels.data.cpu().numpy())
else:
all_features.append(features.data.numpy())
all_labels.append(labels.data.numpy())
if (batch_idx+1) % args.print_freq == 0:
print("Batch {}/{}\t Loss {:.6f} ({:.6f}) XentLoss {:.6f} ({:.6f}) CenterLoss {:.6f} ({:.6f})" \
.format(batch_idx+1, len(trainloader), losses.val, losses.avg, xent_losses.val, xent_losses.avg, cent_losses.val, cent_losses.avg))
if args.plot:
all_features = np.concatenate(all_features, 0)
all_labels = np.concatenate(all_labels, 0)
plot_features(all_features, all_labels, num_classes, epoch, prefix='train')
def test(model, testloader, use_gpu, num_classes, epoch):
model.eval()
correct, total = 0, 0
if args.plot:
all_features, all_labels = [], []
with torch.no_grad():
for data, labels in testloader:
if use_gpu:
data, labels = data.cuda(), labels.cuda()
features, outputs = model(data)
predictions = outputs.data.max(1)[1]
total += labels.size(0)
correct += (predictions == labels.data).sum()
if args.plot:
if use_gpu:
all_features.append(features.data.cpu().numpy())
all_labels.append(labels.data.cpu().numpy())
else:
all_features.append(features.data.numpy())
all_labels.append(labels.data.numpy())
if args.plot:
all_features = np.concatenate(all_features, 0)
all_labels = np.concatenate(all_labels, 0)
plot_features(all_features, all_labels, num_classes, epoch, prefix='test')
acc = correct * 100. / total
err = 100. - acc
return acc, err
def plot_features(features, labels, num_classes, epoch, prefix):
"""Plot features on 2D plane.
Args:
features: (num_instances, num_features).
labels: (num_instances).
"""
colors = ['C0', 'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9']
for label_idx in range(num_classes):
plt.scatter(
features[labels==label_idx, 0],
features[labels==label_idx, 1],
c=colors[label_idx],
s=1,
)
plt.legend(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'], loc='upper right')
dirname = osp.join(args.save_dir, prefix)
if not osp.exists(dirname):
os.mkdir(dirname)
save_name = osp.join(dirname, 'epoch_' + str(epoch+1) + '.png')
plt.savefig(save_name, bbox_inches='tight')
plt.close()
if __name__ == '__main__':
main()
|
[
"os.mkdir",
"torch.optim.lr_scheduler.StepLR",
"argparse.ArgumentParser",
"torch.no_grad",
"os.path.join",
"models.create",
"utils.AverageMeter",
"matplotlib.pyplot.close",
"os.path.exists",
"datetime.timedelta",
"datasets.create",
"torch.manual_seed",
"matplotlib.pyplot.legend",
"center_loss.CenterLoss",
"matplotlib.use",
"torch.cuda.is_available",
"numpy.concatenate",
"matplotlib.pyplot.scatter",
"torch.nn.CrossEntropyLoss",
"time.time",
"torch.cuda.manual_seed_all",
"torch.nn.DataParallel",
"matplotlib.pyplot.savefig"
] |
[((105, 126), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (119, 126), False, 'import matplotlib\n'), ((408, 454), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Center Loss Example"""'], {}), "('Center Loss Example')\n", (431, 454), False, 'import argparse\n'), ((1781, 1809), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (1798, 1809), False, 'import torch\n'), ((1874, 1899), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1897, 1899), False, 'import torch\n'), ((2288, 2398), 'datasets.create', 'datasets.create', ([], {'name': 'args.dataset', 'batch_size': 'args.batch_size', 'use_gpu': 'use_gpu', 'num_workers': 'args.workers'}), '(name=args.dataset, batch_size=args.batch_size, use_gpu=\n use_gpu, num_workers=args.workers)\n', (2303, 2398), False, 'import datasets\n'), ((2552, 2615), 'models.create', 'models.create', ([], {'name': 'args.model', 'num_classes': 'dataset.num_classes'}), '(name=args.model, num_classes=dataset.num_classes)\n', (2565, 2615), False, 'import models\n'), ((2701, 2722), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (2720, 2722), True, 'import torch.nn as nn\n'), ((2744, 2816), 'center_loss.CenterLoss', 'CenterLoss', ([], {'num_classes': 'dataset.num_classes', 'feat_dim': '(2)', 'use_gpu': 'use_gpu'}), '(num_classes=dataset.num_classes, feat_dim=2, use_gpu=use_gpu)\n', (2754, 2816), False, 'from center_loss import CenterLoss\n'), ((3159, 3170), 'time.time', 'time.time', ([], {}), '()\n', (3168, 3170), False, 'import time\n'), ((4140, 4154), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (4152, 4154), False, 'from utils import AverageMeter, Logger\n'), ((4173, 4187), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (4185, 4187), False, 'from utils import AverageMeter, Logger\n'), ((4201, 4215), 'utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (4213, 4215), False, 'from utils import AverageMeter, 
Logger\n'), ((7606, 7692), 'matplotlib.pyplot.legend', 'plt.legend', (["['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']"], {'loc': '"""upper right"""'}), "(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'], loc=\n 'upper right')\n", (7616, 7692), True, 'from matplotlib import pyplot as plt\n'), ((7702, 7733), 'os.path.join', 'osp.join', (['args.save_dir', 'prefix'], {}), '(args.save_dir, prefix)\n', (7710, 7733), True, 'import os.path as osp\n'), ((7864, 7907), 'matplotlib.pyplot.savefig', 'plt.savefig', (['save_name'], {'bbox_inches': '"""tight"""'}), "(save_name, bbox_inches='tight')\n", (7875, 7907), True, 'from matplotlib import pyplot as plt\n'), ((7912, 7923), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (7921, 7923), True, 'from matplotlib import pyplot as plt\n'), ((1962, 2017), 'os.path.join', 'osp.join', (['args.save_dir', "('log_' + args.dataset + '.txt')"], {}), "(args.save_dir, 'log_' + args.dataset + '.txt')\n", (1970, 2017), True, 'import os.path as osp\n'), ((2133, 2170), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['args.seed'], {}), '(args.seed)\n', (2159, 2170), False, 'import torch\n'), ((3061, 3140), 'torch.optim.lr_scheduler.StepLR', 'lr_scheduler.StepLR', (['optimizer_model'], {'step_size': 'args.stepsize', 'gamma': 'args.gamma'}), '(optimizer_model, step_size=args.stepsize, gamma=args.gamma)\n', (3080, 3140), False, 'from torch.optim import lr_scheduler\n'), ((3847, 3882), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'elapsed'}), '(seconds=elapsed)\n', (3865, 3882), False, 'import datetime\n'), ((5823, 5854), 'numpy.concatenate', 'np.concatenate', (['all_features', '(0)'], {}), '(all_features, 0)\n', (5837, 5854), True, 'import numpy as np\n'), ((5876, 5905), 'numpy.concatenate', 'np.concatenate', (['all_labels', '(0)'], {}), '(all_labels, 0)\n', (5890, 5905), True, 'import numpy as np\n'), ((6162, 6177), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6175, 6177), False, 'import 
torch\n'), ((6875, 6906), 'numpy.concatenate', 'np.concatenate', (['all_features', '(0)'], {}), '(all_features, 0)\n', (6889, 6906), True, 'import numpy as np\n'), ((6928, 6957), 'numpy.concatenate', 'np.concatenate', (['all_labels', '(0)'], {}), '(all_labels, 0)\n', (6942, 6957), True, 'import numpy as np\n'), ((7441, 7550), 'matplotlib.pyplot.scatter', 'plt.scatter', (['features[labels == label_idx, 0]', 'features[labels == label_idx, 1]'], {'c': 'colors[label_idx]', 's': '(1)'}), '(features[labels == label_idx, 0], features[labels == label_idx,\n 1], c=colors[label_idx], s=1)\n', (7452, 7550), True, 'from matplotlib import pyplot as plt\n'), ((7745, 7764), 'os.path.exists', 'osp.exists', (['dirname'], {}), '(dirname)\n', (7755, 7764), True, 'import os.path as osp\n'), ((7774, 7791), 'os.mkdir', 'os.mkdir', (['dirname'], {}), '(dirname)\n', (7782, 7791), False, 'import os\n'), ((3803, 3814), 'time.time', 'time.time', ([], {}), '()\n', (3812, 3814), False, 'import time\n'), ((2649, 2671), 'torch.nn.DataParallel', 'nn.DataParallel', (['model'], {}), '(model)\n', (2664, 2671), True, 'import torch.nn as nn\n')]
|
# -*- coding: utf-8 -*-
#
#
# Author: alex
# Created Time: 2019年09月11日 星期三 18时12分55秒
from PIL import Image
from yolo import YOLO
from utils import parse_input_image, parse_output_image, \
format_input_path
# 识别模型配置
detect_configs = {
# 通用目标检测
'common': {
'model_path': 'model_data/yolov3-spp.h5',
'classes_path': 'model_data/coco_classes.txt',
},
# GF Logo&证件识别
'card': {
'model_path': 'model_data/gf_yolov3_spp_l066425.h5',
'classes_path': 'model_data/gf_yolov3_spp_classes.txt',
},
# 安全帽工作服识别
'helmet': {
'model_path': 'model_data/yolov3_shantou_0512.h5',
'classes_path': 'model_data/yolov3_shantou_classes.txt',
}
}
# 定义启动使用的模型
detect_config = detect_configs['card']
# 获取目标类别
detect_classes = []
with open(detect_config['classes_path']) as f:
detect_classes = [t.strip() for t in f.readlines()
if len(t.strip()) > 0]
# 预加载模型
yolo = YOLO(**detect_config)
def detect_images(filenames, classes=None):
"""检测多个图片
:param filenames 文件名列表
:param classes 需要检测的对象分类列表
:return
"""
res = [] # 保存结果数据
for path in filenames:
image_type = 'png' if path.lower().endswith('.png') else 'jpg'
path = format_input_path(path)
img = parse_input_image(image_path=path, image_type=image_type)
_, data = yolo.detect_image(img)
res.append(format_output_data(data, classes))
return res
def detect_b64s(b64_list, classes=None):
"""检测多个图片
:param b64_list 文件base64列表
:param classes 需要检测的对象分类列表
:return
"""
res = [] # 保存结果数据
images = [parse_input_image(image=b64) for b64 in b64_list]
for img in images:
_, data = yolo.detect_image(img)
res.append(format_output_data(data, classes))
return res
def detect_image(image='', image_path='', image_type='jpg', return_img=False):
"""通用目标检测
:param image 图片对象使用base64编码
:param image_path 图片路径
:param image_type 输入图像类型, 取值jpg或者png
:return dict
"""
if image_path != '':
image_path = format_input_path(image_path)
img = parse_input_image(image=image, image_path=image_path,
image_type=image_type)
out_img, data = yolo.detect_image(img, out_img=True)
if data['bboxes'] is None:
return {
'bboxes': [],
'classes': [],
'scores': [],
}
return {
'image': parse_output_image(out_img) if return_img else None,
'bboxes': format_bboxes(data['bboxes'].tolist()),
'scores': data['scores'].tolist(),
'classes': format_classes(data['classes'].tolist()),
}
def format_output_data(data, classes):
"""格式化目标检测的返回值
返回的时候,会转成json
"""
row = {
'bboxes': [],
'classes': [],
'scores': [],
}
if data['bboxes'] is None:
return row
data['scores'] = data['scores'].tolist()
data['bboxes'] = format_bboxes(data['bboxes'].tolist())
data['classes'] = format_classes(data['classes'].tolist())
for box, cs, score in zip(data['bboxes'], data['classes'], data['scores']):
if classes is not None and cs not in classes:
# 如果该值为None,则表示不过滤
continue
row['bboxes'].append(box)
row['classes'].append(cs)
row['scores'].append(score)
return row
def get_demo_image(path):
"""获取演示图片"""
img = Image.open(path)
return {
'image': parse_output_image(img)
}
def format_classes(classes):
"""返回适合人类阅读的类别属性"""
return [detect_classes[c] for c in classes]
def format_bboxes(bboxes):
"""bboxes数据格式转化
top, left, bottom, right ==> x, y, xb, yb
"""
return [[left, top, right, bottom] for top, left, bottom, right in bboxes]
if __name__ == '__main__':
import sys
if sys.argv[1] == 'image':
res = detect_image(image_path=sys.argv[2])
print(res)
elif sys.argv[1] == 'images':
res = detect_images(sys.argv[2].split(';'))
print(res)
|
[
"PIL.Image.open",
"utils.parse_output_image",
"utils.parse_input_image",
"utils.format_input_path",
"yolo.YOLO"
] |
[((953, 974), 'yolo.YOLO', 'YOLO', ([], {}), '(**detect_config)\n', (957, 974), False, 'from yolo import YOLO\n'), ((2128, 2204), 'utils.parse_input_image', 'parse_input_image', ([], {'image': 'image', 'image_path': 'image_path', 'image_type': 'image_type'}), '(image=image, image_path=image_path, image_type=image_type)\n', (2145, 2204), False, 'from utils import parse_input_image, parse_output_image, format_input_path\n'), ((3434, 3450), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (3444, 3450), False, 'from PIL import Image\n'), ((1252, 1275), 'utils.format_input_path', 'format_input_path', (['path'], {}), '(path)\n', (1269, 1275), False, 'from utils import parse_input_image, parse_output_image, format_input_path\n'), ((1290, 1347), 'utils.parse_input_image', 'parse_input_image', ([], {'image_path': 'path', 'image_type': 'image_type'}), '(image_path=path, image_type=image_type)\n', (1307, 1347), False, 'from utils import parse_input_image, parse_output_image, format_input_path\n'), ((1637, 1665), 'utils.parse_input_image', 'parse_input_image', ([], {'image': 'b64'}), '(image=b64)\n', (1654, 1665), False, 'from utils import parse_input_image, parse_output_image, format_input_path\n'), ((2087, 2116), 'utils.format_input_path', 'format_input_path', (['image_path'], {}), '(image_path)\n', (2104, 2116), False, 'from utils import parse_input_image, parse_output_image, format_input_path\n'), ((3481, 3504), 'utils.parse_output_image', 'parse_output_image', (['img'], {}), '(img)\n', (3499, 3504), False, 'from utils import parse_input_image, parse_output_image, format_input_path\n'), ((2458, 2485), 'utils.parse_output_image', 'parse_output_image', (['out_img'], {}), '(out_img)\n', (2476, 2485), False, 'from utils import parse_input_image, parse_output_image, format_input_path\n')]
|
from pytest import mark
from messages import show_count
@mark.parametrize('qty, expected', [
(1, '1 part'),
(2, '2 parts'),
(0, 'no parts'),
])
def test_show_count(qty: int, expected: str) -> None:
got = show_count(qty, 'part')
assert got == expected
# tag::TEST_IRREGULAR[]
@mark.parametrize('qty, expected', [
(1, '1 child'),
(2, '2 children'),
(0, 'no children'),
])
def test_irregular(qty: int, expected: str) -> None:
got = show_count(qty, 'child', 'children')
assert got == expected
# end::TEST_IRREGULAR[]
|
[
"pytest.mark.parametrize",
"messages.show_count"
] |
[((60, 147), 'pytest.mark.parametrize', 'mark.parametrize', (['"""qty, expected"""', "[(1, '1 part'), (2, '2 parts'), (0, 'no parts')]"], {}), "('qty, expected', [(1, '1 part'), (2, '2 parts'), (0,\n 'no parts')])\n", (76, 147), False, 'from pytest import mark\n'), ((301, 395), 'pytest.mark.parametrize', 'mark.parametrize', (['"""qty, expected"""', "[(1, '1 child'), (2, '2 children'), (0, 'no children')]"], {}), "('qty, expected', [(1, '1 child'), (2, '2 children'), (0,\n 'no children')])\n", (317, 395), False, 'from pytest import mark\n'), ((223, 246), 'messages.show_count', 'show_count', (['qty', '"""part"""'], {}), "(qty, 'part')\n", (233, 246), False, 'from messages import show_count\n'), ((470, 506), 'messages.show_count', 'show_count', (['qty', '"""child"""', '"""children"""'], {}), "(qty, 'child', 'children')\n", (480, 506), False, 'from messages import show_count\n')]
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
#read in ipsilateral breast labelmap/volume
#mask this patient's breast
#generate histogram of intensity
#DIR to new patient's breast
#expand/dilate region (might need to be manual)
#mask new patient's breast
#generate histogram of intensity
# In[2]:
#import modules
import SimpleITK as sitk
from platipy.imaging.visualisation.tools import ImageVisualiser
from platipy.imaging.utils.tools import get_com
import matplotlib.pyplot as plt
import numpy as np
get_ipython().run_line_magic('matplotlib', 'notebook')
# In[3]:
R_breast=sitk.ReadImage("/home/alicja/Downloads/Segmentation.nii.gz")
# In[4]:
WES_010_4_B50T=sitk.ReadImage("/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B50T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz")
WES_010_4_B800T=sitk.ReadImage("/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B800T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz")
# In[5]:
masked_R_breast = sitk.Mask(WES_010_4_B50T, R_breast)
# In[10]:
values = sitk.GetArrayViewFromImage(masked_R_breast).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(200,900,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[7]:
#Use these values to do thresholding
def estimate_tumour_vol(img_mri, lowerthreshold=300, upperthreshold=3000, hole_size=1):
label_threshold = sitk.BinaryThreshold(img_mri, lowerThreshold=lowerthreshold, upperThreshold=upperthreshold)
label_threshold_cc = sitk.RelabelComponent(sitk.ConnectedComponent(label_threshold))
label_threshold_cc_x = (label_threshold_cc==1)
label_threshold_cc_x_f = sitk.BinaryMorphologicalClosing(label_threshold_cc_x, (hole_size,hole_size,hole_size))
return(label_threshold_cc_x_f)
# In[12]:
image_mri=WES_010_4_B50T
arr_mri = sitk.GetArrayFromImage(image_mri)
arr_mri[:,:,arr_mri.shape[2]//2:] = 0
image_mri_masked=sitk.GetImageFromArray(arr_mri)
image_mri_masked.CopyInformation(image_mri)
label_threshold_cc_x_f=estimate_tumour_vol(image_mri_masked, lowerthreshold=720, upperthreshold=3000, hole_size=1)
sitk.WriteImage(label_threshold_cc_x_f,"test_label_threshold_010_4_B50T_hist.nii.gz") #works well
# In[35]:
np.max(label_threshold_cc_x_f)
# In[60]:
masked_R_breast_B800T = sitk.Mask(WES_010_4_B800T, R_breast)
values = sitk.GetArrayViewFromImage(masked_R_breast_B800T).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(1,600,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[61]:
image_mri=WES_010_4_B800T
arr_mri = sitk.GetArrayFromImage(image_mri)
arr_mri[:,:,arr_mri.shape[2]//2:] = 0
image_mri_masked=sitk.GetImageFromArray(arr_mri)
image_mri_masked.CopyInformation(image_mri)
label_threshold_cc_x_f=estimate_tumour_vol(image_mri_masked, lowerthreshold=300, upperthreshold=3000, hole_size=1)
sitk.WriteImage(label_threshold_cc_x_f,"test_label_threshold_010_4_B800T_hist.nii.gz") #works super well
# In[15]:
WES_010_4_T2w=sitk.ReadImage("/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_T2_TSE_TRA_SPAIR_TSE2D1_11_T2_TSE_TRA_SPAIR_3.nii.gz")
# In[63]:
WES_010_4_T2w=sitk.Resample(WES_010_4_T2w, WES_010_4_B50T)
masked_R_breast_T2w = sitk.Mask(WES_010_4_T2w, R_breast)
values = sitk.GetArrayViewFromImage(masked_R_breast_T2w).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(1,300,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[64]:
image_mri=WES_010_4_T2w
arr_mri = sitk.GetArrayFromImage(image_mri)
arr_mri[:,:,arr_mri.shape[2]//2:] = 0
image_mri_masked=sitk.GetImageFromArray(arr_mri)
image_mri_masked.CopyInformation(image_mri)
label_threshold_cc_x_f=estimate_tumour_vol(image_mri_masked, lowerthreshold=170, upperthreshold=3000, hole_size=1)
sitk.WriteImage(label_threshold_cc_x_f,"test_label_threshold_010_4_T2w_hist.nii.gz") #works well too
# In[13]:
WES_010_4_MPE=sitk.ReadImage("MPE_sub_WES_010_4.nii.gz")
# In[17]:
WES_010_4_MPE=sitk.Resample(WES_010_4_MPE, WES_010_4_B50T)
masked_R_breast_MPE = sitk.Mask(WES_010_4_MPE, R_breast)
values = sitk.GetArrayViewFromImage(masked_R_breast_MPE).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(150,450,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[20]:
image_mri=WES_010_4_MPE
arr_mri = sitk.GetArrayFromImage(image_mri)
arr_mri[:,:,arr_mri.shape[2]//2:] = 0
image_mri_masked=sitk.GetImageFromArray(arr_mri)
image_mri_masked.CopyInformation(image_mri)
label_threshold_cc_x_f=estimate_tumour_vol(image_mri_masked, lowerthreshold=230, upperthreshold=3000, hole_size=1)
sitk.WriteImage(label_threshold_cc_x_f,"test_label_threshold_010_4_MPE_hist.nii.gz") #good
# In[ ]:
# In[65]:
from platipy.imaging.visualisation.tools import ImageVisualiser
from platipy.imaging.registration.registration import (
initial_registration,
fast_symmetric_forces_demons_registration,
transform_propagation,
apply_field
)
# In[66]:
#DIR to Patient 8
WES_008_4_B50T=sitk.ReadImage("/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B50T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz")
image_to_0_rigid, tfm_to_0_rigid = initial_registration(
WES_008_4_B50T,
WES_010_4_B50T,
options={
'shrink_factors': [8,4],
'smooth_sigmas': [0,0],
'sampling_rate': 0.5,
'final_interp': 2,
'metric': 'mean_squares',
'optimiser': 'gradient_descent_line_search',
'number_of_iterations': 25},
reg_method='Rigid')
image_to_0_dir, tfm_to_0_dir = fast_symmetric_forces_demons_registration(
WES_008_4_B50T,
image_to_0_rigid,
resolution_staging=[4,2],
iteration_staging=[10,10]
)
R_breast_to_0_rigid = transform_propagation(
WES_008_4_B50T,
R_breast,
tfm_to_0_rigid,
structure=True
)
R_breast_to_0_dir = apply_field(
R_breast_to_0_rigid,
tfm_to_0_dir,
structure=True
)
# In[67]:
vis = ImageVisualiser(WES_008_4_B50T, axis='z', cut=get_com(R_breast_to_0_dir), window=[-250, 500])
vis.add_contour(R_breast_to_0_dir, name='BREAST', color='g')
fig = vis.show()
# In[78]:
breast_contour_dilate=sitk.BinaryDilate(R_breast_to_0_dir, (2,2,2))
# In[79]:
vis = ImageVisualiser(WES_008_4_B50T, axis='z', cut=get_com(R_breast_to_0_dir), window=[-250, 500])
vis.add_contour(breast_contour_dilate, name='BREAST', color='g')
fig = vis.show()
# In[80]:
masked_R_breast = sitk.Mask(WES_008_4_B50T, breast_contour_dilate)
# In[92]:
values = sitk.GetArrayViewFromImage(masked_R_breast).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(200,3000,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[93]:
image_mri=WES_008_4_B50T
arr_mri = sitk.GetArrayFromImage(image_mri)
arr_mri[:,:,arr_mri.shape[2]//2:] = 0
image_mri_masked=sitk.GetImageFromArray(arr_mri)
image_mri_masked.CopyInformation(image_mri)
label_threshold_cc_x_f=estimate_tumour_vol(image_mri_masked, lowerthreshold=1400, upperthreshold=5000, hole_size=1)
sitk.WriteImage(label_threshold_cc_x_f,"test_label_threshold_008_4_B50T_hist.nii.gz") #good but seems to contain
#fibroglandular tissue as well
# In[95]:
WES_008_4_B800T=sitk.ReadImage("/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B800T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz")
WES_008_4_T2w=sitk.ReadImage("/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_T2_TSE_TRA_SPAIR_TSE2D1_11_T2_TSE_TRA_SPAIR_3.nii.gz")
# In[96]:
masked_R_breast_B800T = sitk.Mask(WES_008_4_B800T, breast_contour_dilate)
values = sitk.GetArrayViewFromImage(masked_R_breast_B800T).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(1,600,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[99]:
image_mri=WES_008_4_B800T
arr_mri = sitk.GetArrayFromImage(image_mri)
arr_mri[:,:,arr_mri.shape[2]//2:] = 0
image_mri_masked=sitk.GetImageFromArray(arr_mri)
image_mri_masked.CopyInformation(image_mri)
label_threshold_cc_x_f=estimate_tumour_vol(image_mri_masked, lowerthreshold=480, upperthreshold=5000, hole_size=1)
sitk.WriteImage(label_threshold_cc_x_f,"test_label_threshold_008_4_B800T_hist.nii.gz") #good
# In[104]:
WES_008_4_T2w=sitk.Resample(WES_008_4_T2w,WES_008_4_B800T)
masked_R_breast_T2w = sitk.Mask(WES_008_4_T2w, breast_contour_dilate)
values = sitk.GetArrayViewFromImage(masked_R_breast_T2w).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(1,250,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[105]:
image_mri=WES_008_4_T2w
arr_mri = sitk.GetArrayFromImage(image_mri)
arr_mri[:,:,arr_mri.shape[2]//2:] = 0
image_mri_masked=sitk.GetImageFromArray(arr_mri)
image_mri_masked.CopyInformation(image_mri)
label_threshold_cc_x_f=estimate_tumour_vol(image_mri_masked, lowerthreshold=197, upperthreshold=5000, hole_size=1)
sitk.WriteImage(label_threshold_cc_x_f,"test_label_threshold_008_4_T2w_hist.nii.gz") #okay but picks up
#fibroglandular tissue
# In[106]:
L_breast=sitk.ReadImage("contralateral_segmentation.nii.gz")
# In[107]:
L_breast_to_0_rigid = transform_propagation(
WES_008_4_B50T,
L_breast,
tfm_to_0_rigid,
structure=True
)
L_breast_to_0_dir = apply_field(
L_breast_to_0_rigid,
tfm_to_0_dir,
structure=True
)
# In[110]:
L_breast_contour_dilate=sitk.BinaryDilate(L_breast_to_0_dir, (4,4,4))
vis = ImageVisualiser(WES_008_4_B50T, axis='z', cut=get_com(L_breast_to_0_dir), window=[-250, 500])
vis.add_contour(L_breast_contour_dilate, name='BREAST', color='g')
fig = vis.show()
# In[111]:
masked_L_breast = sitk.Mask(WES_008_4_B50T, L_breast_contour_dilate)
values = sitk.GetArrayViewFromImage(masked_L_breast).flatten()
fig, ax = plt.subplots(1,1)
ax.hist(values, bins=np.linspace(200,3000,50), histtype='stepfilled', lw=2)
#ax.set_yscale('log')
ax.grid()
ax.set_axisbelow(True)
ax.set_xlabel('Intensity')
ax.set_ylabel('Frequency')
fig.show()
# In[ ]:
|
[
"SimpleITK.BinaryThreshold",
"SimpleITK.Resample",
"platipy.imaging.registration.registration.fast_symmetric_forces_demons_registration",
"SimpleITK.ConnectedComponent",
"SimpleITK.GetArrayViewFromImage",
"SimpleITK.ReadImage",
"SimpleITK.GetArrayFromImage",
"numpy.max",
"SimpleITK.BinaryMorphologicalClosing",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"platipy.imaging.utils.tools.get_com",
"SimpleITK.Mask",
"SimpleITK.WriteImage",
"platipy.imaging.registration.registration.initial_registration",
"platipy.imaging.registration.registration.apply_field",
"SimpleITK.BinaryDilate",
"SimpleITK.GetImageFromArray",
"platipy.imaging.registration.registration.transform_propagation"
] |
[((588, 648), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""/home/alicja/Downloads/Segmentation.nii.gz"""'], {}), "('/home/alicja/Downloads/Segmentation.nii.gz')\n", (602, 648), True, 'import SimpleITK as sitk\n'), ((677, 850), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B50T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz"""'], {}), "(\n '/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B50T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz'\n )\n", (691, 850), True, 'import SimpleITK as sitk\n'), ((857, 1031), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B800T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz"""'], {}), "(\n '/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B800T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz'\n )\n", (871, 1031), True, 'import SimpleITK as sitk\n'), ((1053, 1088), 'SimpleITK.Mask', 'sitk.Mask', (['WES_010_4_B50T', 'R_breast'], {}), '(WES_010_4_B50T, R_breast)\n', (1062, 1088), True, 'import SimpleITK as sitk\n'), ((1177, 1195), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (1189, 1195), True, 'import matplotlib.pyplot as plt\n'), ((1982, 2015), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['image_mri'], {}), '(image_mri)\n', (2004, 2015), True, 'import SimpleITK as sitk\n'), ((2071, 2102), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['arr_mri'], {}), '(arr_mri)\n', (2093, 2102), True, 'import SimpleITK as sitk\n'), ((2264, 2354), 'SimpleITK.WriteImage', 'sitk.WriteImage', (['label_threshold_cc_x_f', '"""test_label_threshold_010_4_B50T_hist.nii.gz"""'], {}), "(label_threshold_cc_x_f,\n 'test_label_threshold_010_4_B50T_hist.nii.gz')\n", (2279, 2354), True, 'import SimpleITK as sitk\n'), ((2376, 
2406), 'numpy.max', 'np.max', (['label_threshold_cc_x_f'], {}), '(label_threshold_cc_x_f)\n', (2382, 2406), True, 'import numpy as np\n'), ((2445, 2481), 'SimpleITK.Mask', 'sitk.Mask', (['WES_010_4_B800T', 'R_breast'], {}), '(WES_010_4_B800T, R_breast)\n', (2454, 2481), True, 'import SimpleITK as sitk\n'), ((2562, 2580), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (2574, 2580), True, 'import matplotlib.pyplot as plt\n'), ((2823, 2856), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['image_mri'], {}), '(image_mri)\n', (2845, 2856), True, 'import SimpleITK as sitk\n'), ((2912, 2943), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['arr_mri'], {}), '(arr_mri)\n', (2934, 2943), True, 'import SimpleITK as sitk\n'), ((3105, 3196), 'SimpleITK.WriteImage', 'sitk.WriteImage', (['label_threshold_cc_x_f', '"""test_label_threshold_010_4_B800T_hist.nii.gz"""'], {}), "(label_threshold_cc_x_f,\n 'test_label_threshold_010_4_B800T_hist.nii.gz')\n", (3120, 3196), True, 'import SimpleITK as sitk\n'), ((3238, 3378), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_T2_TSE_TRA_SPAIR_TSE2D1_11_T2_TSE_TRA_SPAIR_3.nii.gz"""'], {}), "(\n '/home/alicja/Documents/WES_010/IMAGES/WES_010_4_20180829_MR_T2_TSE_TRA_SPAIR_TSE2D1_11_T2_TSE_TRA_SPAIR_3.nii.gz'\n )\n", (3252, 3378), True, 'import SimpleITK as sitk\n'), ((3397, 3441), 'SimpleITK.Resample', 'sitk.Resample', (['WES_010_4_T2w', 'WES_010_4_B50T'], {}), '(WES_010_4_T2w, WES_010_4_B50T)\n', (3410, 3441), True, 'import SimpleITK as sitk\n'), ((3464, 3498), 'SimpleITK.Mask', 'sitk.Mask', (['WES_010_4_T2w', 'R_breast'], {}), '(WES_010_4_T2w, R_breast)\n', (3473, 3498), True, 'import SimpleITK as sitk\n'), ((3577, 3595), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (3589, 3595), True, 'import matplotlib.pyplot as plt\n'), ((3836, 3869), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', 
(['image_mri'], {}), '(image_mri)\n', (3858, 3869), True, 'import SimpleITK as sitk\n'), ((3925, 3956), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['arr_mri'], {}), '(arr_mri)\n', (3947, 3956), True, 'import SimpleITK as sitk\n'), ((4118, 4207), 'SimpleITK.WriteImage', 'sitk.WriteImage', (['label_threshold_cc_x_f', '"""test_label_threshold_010_4_T2w_hist.nii.gz"""'], {}), "(label_threshold_cc_x_f,\n 'test_label_threshold_010_4_T2w_hist.nii.gz')\n", (4133, 4207), True, 'import SimpleITK as sitk\n'), ((4247, 4289), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""MPE_sub_WES_010_4.nii.gz"""'], {}), "('MPE_sub_WES_010_4.nii.gz')\n", (4261, 4289), True, 'import SimpleITK as sitk\n'), ((4318, 4362), 'SimpleITK.Resample', 'sitk.Resample', (['WES_010_4_MPE', 'WES_010_4_B50T'], {}), '(WES_010_4_MPE, WES_010_4_B50T)\n', (4331, 4362), True, 'import SimpleITK as sitk\n'), ((4385, 4419), 'SimpleITK.Mask', 'sitk.Mask', (['WES_010_4_MPE', 'R_breast'], {}), '(WES_010_4_MPE, R_breast)\n', (4394, 4419), True, 'import SimpleITK as sitk\n'), ((4498, 4516), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (4510, 4516), True, 'import matplotlib.pyplot as plt\n'), ((4759, 4792), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['image_mri'], {}), '(image_mri)\n', (4781, 4792), True, 'import SimpleITK as sitk\n'), ((4848, 4879), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['arr_mri'], {}), '(arr_mri)\n', (4870, 4879), True, 'import SimpleITK as sitk\n'), ((5041, 5130), 'SimpleITK.WriteImage', 'sitk.WriteImage', (['label_threshold_cc_x_f', '"""test_label_threshold_010_4_MPE_hist.nii.gz"""'], {}), "(label_threshold_cc_x_f,\n 'test_label_threshold_010_4_MPE_hist.nii.gz')\n", (5056, 5130), True, 'import SimpleITK as sitk\n'), ((5446, 5619), 'SimpleITK.ReadImage', 'sitk.ReadImage', 
(['"""/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B50T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz"""'], {}), "(\n '/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B50T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz'\n )\n", (5460, 5619), True, 'import SimpleITK as sitk\n'), ((5645, 5936), 'platipy.imaging.registration.registration.initial_registration', 'initial_registration', (['WES_008_4_B50T', 'WES_010_4_B50T'], {'options': "{'shrink_factors': [8, 4], 'smooth_sigmas': [0, 0], 'sampling_rate': 0.5,\n 'final_interp': 2, 'metric': 'mean_squares', 'optimiser':\n 'gradient_descent_line_search', 'number_of_iterations': 25}", 'reg_method': '"""Rigid"""'}), "(WES_008_4_B50T, WES_010_4_B50T, options={\n 'shrink_factors': [8, 4], 'smooth_sigmas': [0, 0], 'sampling_rate': 0.5,\n 'final_interp': 2, 'metric': 'mean_squares', 'optimiser':\n 'gradient_descent_line_search', 'number_of_iterations': 25}, reg_method\n ='Rigid')\n", (5665, 5936), False, 'from platipy.imaging.registration.registration import initial_registration, fast_symmetric_forces_demons_registration, transform_propagation, apply_field\n'), ((6023, 6157), 'platipy.imaging.registration.registration.fast_symmetric_forces_demons_registration', 'fast_symmetric_forces_demons_registration', (['WES_008_4_B50T', 'image_to_0_rigid'], {'resolution_staging': '[4, 2]', 'iteration_staging': '[10, 10]'}), '(WES_008_4_B50T, image_to_0_rigid,\n resolution_staging=[4, 2], iteration_staging=[10, 10])\n', (6064, 6157), False, 'from platipy.imaging.registration.registration import initial_registration, fast_symmetric_forces_demons_registration, transform_propagation, apply_field\n'), ((6193, 6272), 'platipy.imaging.registration.registration.transform_propagation', 'transform_propagation', (['WES_008_4_B50T', 'R_breast', 'tfm_to_0_rigid'], {'structure': '(True)'}), '(WES_008_4_B50T, R_breast, tfm_to_0_rigid, structure=True)\n', (6214, 
6272), False, 'from platipy.imaging.registration.registration import initial_registration, fast_symmetric_forces_demons_registration, transform_propagation, apply_field\n'), ((6312, 6374), 'platipy.imaging.registration.registration.apply_field', 'apply_field', (['R_breast_to_0_rigid', 'tfm_to_0_dir'], {'structure': '(True)'}), '(R_breast_to_0_rigid, tfm_to_0_dir, structure=True)\n', (6323, 6374), False, 'from platipy.imaging.registration.registration import initial_registration, fast_symmetric_forces_demons_registration, transform_propagation, apply_field\n'), ((6617, 6664), 'SimpleITK.BinaryDilate', 'sitk.BinaryDilate', (['R_breast_to_0_dir', '(2, 2, 2)'], {}), '(R_breast_to_0_dir, (2, 2, 2))\n', (6634, 6664), True, 'import SimpleITK as sitk\n'), ((6891, 6939), 'SimpleITK.Mask', 'sitk.Mask', (['WES_008_4_B50T', 'breast_contour_dilate'], {}), '(WES_008_4_B50T, breast_contour_dilate)\n', (6900, 6939), True, 'import SimpleITK as sitk\n'), ((7028, 7046), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (7040, 7046), True, 'import matplotlib.pyplot as plt\n'), ((7291, 7324), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['image_mri'], {}), '(image_mri)\n', (7313, 7324), True, 'import SimpleITK as sitk\n'), ((7380, 7411), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['arr_mri'], {}), '(arr_mri)\n', (7402, 7411), True, 'import SimpleITK as sitk\n'), ((7574, 7664), 'SimpleITK.WriteImage', 'sitk.WriteImage', (['label_threshold_cc_x_f', '"""test_label_threshold_008_4_B50T_hist.nii.gz"""'], {}), "(label_threshold_cc_x_f,\n 'test_label_threshold_008_4_B50T_hist.nii.gz')\n", (7589, 7664), True, 'import SimpleITK as sitk\n'), ((7748, 7922), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B800T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz"""'], {}), "(\n 
'/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_EP2D_DIFF_TRA_SPAIR_ZOOMIT_EZ_B800T_EP2D_DIFF_TRA_SPAIR_ZOOMIT_TRACEW_DFC_MIX_5.nii.gz'\n )\n", (7762, 7922), True, 'import SimpleITK as sitk\n'), ((7927, 8067), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_T2_TSE_TRA_SPAIR_TSE2D1_11_T2_TSE_TRA_SPAIR_3.nii.gz"""'], {}), "(\n '/home/alicja/Documents/WES_008/IMAGES/WES_008_4_20180619_MR_T2_TSE_TRA_SPAIR_TSE2D1_11_T2_TSE_TRA_SPAIR_3.nii.gz'\n )\n", (7941, 8067), True, 'import SimpleITK as sitk\n'), ((8096, 8145), 'SimpleITK.Mask', 'sitk.Mask', (['WES_008_4_B800T', 'breast_contour_dilate'], {}), '(WES_008_4_B800T, breast_contour_dilate)\n', (8105, 8145), True, 'import SimpleITK as sitk\n'), ((8226, 8244), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (8238, 8244), True, 'import matplotlib.pyplot as plt\n'), ((8487, 8520), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['image_mri'], {}), '(image_mri)\n', (8509, 8520), True, 'import SimpleITK as sitk\n'), ((8576, 8607), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['arr_mri'], {}), '(arr_mri)\n', (8598, 8607), True, 'import SimpleITK as sitk\n'), ((8769, 8860), 'SimpleITK.WriteImage', 'sitk.WriteImage', (['label_threshold_cc_x_f', '"""test_label_threshold_008_4_B800T_hist.nii.gz"""'], {}), "(label_threshold_cc_x_f,\n 'test_label_threshold_008_4_B800T_hist.nii.gz')\n", (8784, 8860), True, 'import SimpleITK as sitk\n'), ((8891, 8936), 'SimpleITK.Resample', 'sitk.Resample', (['WES_008_4_T2w', 'WES_008_4_B800T'], {}), '(WES_008_4_T2w, WES_008_4_B800T)\n', (8904, 8936), True, 'import SimpleITK as sitk\n'), ((8959, 9006), 'SimpleITK.Mask', 'sitk.Mask', (['WES_008_4_T2w', 'breast_contour_dilate'], {}), '(WES_008_4_T2w, breast_contour_dilate)\n', (8968, 9006), True, 'import SimpleITK as sitk\n'), ((9085, 9103), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (9097, 
9103), True, 'import matplotlib.pyplot as plt\n'), ((9345, 9378), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['image_mri'], {}), '(image_mri)\n', (9367, 9378), True, 'import SimpleITK as sitk\n'), ((9434, 9465), 'SimpleITK.GetImageFromArray', 'sitk.GetImageFromArray', (['arr_mri'], {}), '(arr_mri)\n', (9456, 9465), True, 'import SimpleITK as sitk\n'), ((9627, 9716), 'SimpleITK.WriteImage', 'sitk.WriteImage', (['label_threshold_cc_x_f', '"""test_label_threshold_008_4_T2w_hist.nii.gz"""'], {}), "(label_threshold_cc_x_f,\n 'test_label_threshold_008_4_T2w_hist.nii.gz')\n", (9642, 9716), True, 'import SimpleITK as sitk\n'), ((9778, 9829), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['"""contralateral_segmentation.nii.gz"""'], {}), "('contralateral_segmentation.nii.gz')\n", (9792, 9829), True, 'import SimpleITK as sitk\n'), ((9867, 9946), 'platipy.imaging.registration.registration.transform_propagation', 'transform_propagation', (['WES_008_4_B50T', 'L_breast', 'tfm_to_0_rigid'], {'structure': '(True)'}), '(WES_008_4_B50T, L_breast, tfm_to_0_rigid, structure=True)\n', (9888, 9946), False, 'from platipy.imaging.registration.registration import initial_registration, fast_symmetric_forces_demons_registration, transform_propagation, apply_field\n'), ((9986, 10048), 'platipy.imaging.registration.registration.apply_field', 'apply_field', (['L_breast_to_0_rigid', 'tfm_to_0_dir'], {'structure': '(True)'}), '(L_breast_to_0_rigid, tfm_to_0_dir, structure=True)\n', (9997, 10048), False, 'from platipy.imaging.registration.registration import initial_registration, fast_symmetric_forces_demons_registration, transform_propagation, apply_field\n'), ((10102, 10149), 'SimpleITK.BinaryDilate', 'sitk.BinaryDilate', (['L_breast_to_0_dir', '(4, 4, 4)'], {}), '(L_breast_to_0_dir, (4, 4, 4))\n', (10119, 10149), True, 'import SimpleITK as sitk\n'), ((10365, 10415), 'SimpleITK.Mask', 'sitk.Mask', (['WES_008_4_B50T', 'L_breast_contour_dilate'], {}), '(WES_008_4_B50T, 
L_breast_contour_dilate)\n', (10374, 10415), True, 'import SimpleITK as sitk\n'), ((10491, 10509), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (10503, 10509), True, 'import matplotlib.pyplot as plt\n'), ((1550, 1646), 'SimpleITK.BinaryThreshold', 'sitk.BinaryThreshold', (['img_mri'], {'lowerThreshold': 'lowerthreshold', 'upperThreshold': 'upperthreshold'}), '(img_mri, lowerThreshold=lowerthreshold, upperThreshold\n =upperthreshold)\n', (1570, 1646), True, 'import SimpleITK as sitk\n'), ((1811, 1903), 'SimpleITK.BinaryMorphologicalClosing', 'sitk.BinaryMorphologicalClosing', (['label_threshold_cc_x', '(hole_size, hole_size, hole_size)'], {}), '(label_threshold_cc_x, (hole_size, hole_size,\n hole_size))\n', (1842, 1903), True, 'import SimpleITK as sitk\n'), ((1112, 1155), 'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', (['masked_R_breast'], {}), '(masked_R_breast)\n', (1138, 1155), True, 'import SimpleITK as sitk\n'), ((1216, 1241), 'numpy.linspace', 'np.linspace', (['(200)', '(900)', '(50)'], {}), '(200, 900, 50)\n', (1227, 1241), True, 'import numpy as np\n'), ((1689, 1729), 'SimpleITK.ConnectedComponent', 'sitk.ConnectedComponent', (['label_threshold'], {}), '(label_threshold)\n', (1712, 1729), True, 'import SimpleITK as sitk\n'), ((2491, 2540), 'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', (['masked_R_breast_B800T'], {}), '(masked_R_breast_B800T)\n', (2517, 2540), True, 'import SimpleITK as sitk\n'), ((2601, 2624), 'numpy.linspace', 'np.linspace', (['(1)', '(600)', '(50)'], {}), '(1, 600, 50)\n', (2612, 2624), True, 'import numpy as np\n'), ((3508, 3555), 'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', (['masked_R_breast_T2w'], {}), '(masked_R_breast_T2w)\n', (3534, 3555), True, 'import SimpleITK as sitk\n'), ((3616, 3639), 'numpy.linspace', 'np.linspace', (['(1)', '(300)', '(50)'], {}), '(1, 300, 50)\n', (3627, 3639), True, 'import numpy as np\n'), ((4429, 4476), 
'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', (['masked_R_breast_MPE'], {}), '(masked_R_breast_MPE)\n', (4455, 4476), True, 'import SimpleITK as sitk\n'), ((4537, 4562), 'numpy.linspace', 'np.linspace', (['(150)', '(450)', '(50)'], {}), '(150, 450, 50)\n', (4548, 4562), True, 'import numpy as np\n'), ((6455, 6481), 'platipy.imaging.utils.tools.get_com', 'get_com', (['R_breast_to_0_dir'], {}), '(R_breast_to_0_dir)\n', (6462, 6481), False, 'from platipy.imaging.utils.tools import get_com\n'), ((6729, 6755), 'platipy.imaging.utils.tools.get_com', 'get_com', (['R_breast_to_0_dir'], {}), '(R_breast_to_0_dir)\n', (6736, 6755), False, 'from platipy.imaging.utils.tools import get_com\n'), ((6963, 7006), 'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', (['masked_R_breast'], {}), '(masked_R_breast)\n', (6989, 7006), True, 'import SimpleITK as sitk\n'), ((7067, 7093), 'numpy.linspace', 'np.linspace', (['(200)', '(3000)', '(50)'], {}), '(200, 3000, 50)\n', (7078, 7093), True, 'import numpy as np\n'), ((8155, 8204), 'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', (['masked_R_breast_B800T'], {}), '(masked_R_breast_B800T)\n', (8181, 8204), True, 'import SimpleITK as sitk\n'), ((8265, 8288), 'numpy.linspace', 'np.linspace', (['(1)', '(600)', '(50)'], {}), '(1, 600, 50)\n', (8276, 8288), True, 'import numpy as np\n'), ((9016, 9063), 'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', (['masked_R_breast_T2w'], {}), '(masked_R_breast_T2w)\n', (9042, 9063), True, 'import SimpleITK as sitk\n'), ((9124, 9147), 'numpy.linspace', 'np.linspace', (['(1)', '(250)', '(50)'], {}), '(1, 250, 50)\n', (9135, 9147), True, 'import numpy as np\n'), ((10200, 10226), 'platipy.imaging.utils.tools.get_com', 'get_com', (['L_breast_to_0_dir'], {}), '(L_breast_to_0_dir)\n', (10207, 10226), False, 'from platipy.imaging.utils.tools import get_com\n'), ((10426, 10469), 'SimpleITK.GetArrayViewFromImage', 'sitk.GetArrayViewFromImage', 
(['masked_L_breast'], {}), '(masked_L_breast)\n', (10452, 10469), True, 'import SimpleITK as sitk\n'), ((10530, 10556), 'numpy.linspace', 'np.linspace', (['(200)', '(3000)', '(50)'], {}), '(200, 3000, 50)\n', (10541, 10556), True, 'import numpy as np\n')]
|
import numpy as np
def hole_filling(img, kernel=3):
    """Fill zero-valued pixels ("holes") with the max of their local window.

    Each zero pixel is replaced by the maximum value found in a
    ``kernel x kernel`` window centred on it (clipped at the image border).
    Operates in place on ``img`` and also returns it for convenience.

    Parameters
    ----------
    img : numpy.ndarray
        2-D image array; zeros are treated as holes.
    kernel : int, optional
        Window side length; expected odd (default 3).

    Returns
    -------
    numpy.ndarray
        The same array, with holes filled.

    Notes
    -----
    Filled values can propagate: a hole filled early in the scan may feed
    into the window of a later hole (same behaviour as the original code).
    """
    n_rows, n_cols = img.shape
    half = (kernel - 1) // 2
    for i in range(n_rows):
        for j in range(n_cols):
            if img[i, j] == 0:
                # BUG FIX: the slice end must be i + half + 1 (exclusive) so the
                # window is symmetric; previously the bottom/right neighbours
                # were dropped.
                window = img[max(i - half, 0):min(i + half + 1, n_rows),
                             max(j - half, 0):min(j + half + 1, n_cols)]
                # .size (not len) also catches windows with zero columns.
                if window.size == 0:
                    continue
                img[i, j] = np.amax(window)
    return img
|
[
"numpy.amax"
] |
[((429, 447), 'numpy.amax', 'np.amax', (['neighbour'], {}), '(neighbour)\n', (436, 447), True, 'import numpy as np\n')]
|
from django.conf import settings
from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote
from actstream import action, actions, registry
#Tune = apps.get_model('archiver', 'Tune')
# Back-fill django-activity-stream "follow" relations for existing archive
# objects.  `send_action=False` suppresses emitting a new activity for each
# historical record; `actor_only=False` allows following non-actor objects.
registry.register(Tune)
folkrnn_anon_submission_default_author_id = 1
for tune in Tune.objects.all():
    # Skip tunes attributed to the shared anonymous-submission account.
    if tune.author.id == folkrnn_anon_submission_default_author_id:
        continue
    actions.follow(tune.author, tune, actor_only=False, send_action=False)
#Setting = apps.get_model('archiver', 'Setting')
registry.register(Setting)
for setting in Setting.objects.all():
    actions.follow(setting.author, setting.tune, actor_only=False, send_action=False)
#Competition = apps.get_model('archiver', 'Competition')
registry.register(Competition)
#CompetitionRecording = apps.get_model('archiver', 'CompetitionRecording')
registry.register(CompetitionRecording)
for cr in CompetitionRecording.objects.all():
    actions.follow(cr.recording.author, cr.competition, actor_only=False, send_action=False)
#TuneComment = apps.get_model('archiver', 'TuneComment')
registry.register(TuneComment)
for tc in TuneComment.objects.all():
    actions.follow(tc.author, tc.tune, actor_only=False, send_action=False)
#CompetitionComment = apps.get_model('archiver', 'CompetitionComment')
registry.register(CompetitionComment)
for cc in CompetitionComment.objects.all():
    actions.follow(cc.author, cc.competition, actor_only=False, send_action=False)
#Collection = apps.get_model('archiver', 'Collection')
registry.register(Collection)
#CollectionEntry = apps.get_model('archiver', 'CollectionEntry')
registry.register(CollectionEntry)
for ce in CollectionEntry.objects.all():
    # An entry points at either a tune or a setting; follow whichever is set.
    if ce.tune:
        actions.follow(ce.collection.user, ce.tune, actor_only=False, send_action=False)
    else:
        actions.follow(ce.collection.user, ce.setting, actor_only=False, send_action=False)
#CompetitionTuneVote = apps.get_model('archiver', 'CompetitionTuneVote')
registry.register(CompetitionTuneVote)
for ctv in CompetitionTuneVote.objects.all():
    actions.follow(ctv.user, ctv.votable.competition, actor_only=False, send_action=False)
    actions.follow(ctv.user, ctv.votable.tune, actor_only=False, send_action=False)
#CompetitionRecordingVote = apps.get_model('archiver', 'CompetitionRecordingVote')
registry.register(CompetitionRecordingVote)
for crv in CompetitionRecordingVote.objects.all():
    # BUG FIX: this loop previously reused the stale `ctv` left over from the
    # CompetitionTuneVote loop above; follow the recording-vote's own
    # user/competition instead.
    actions.follow(crv.user, crv.votable.competition, actor_only=False, send_action=False)
    actions.follow(crv.user, crv.votable.recording, actor_only=False, send_action=False)
|
[
"archiver.models.CompetitionRecordingVote.objects.all",
"archiver.models.CompetitionRecording.objects.all",
"archiver.models.CollectionEntry.objects.all",
"archiver.models.CompetitionTuneVote.objects.all",
"actstream.registry.register",
"archiver.models.TuneComment.objects.all",
"archiver.models.CompetitionComment.objects.all",
"archiver.models.Tune.objects.all",
"actstream.actions.follow",
"archiver.models.Setting.objects.all"
] |
[((311, 334), 'actstream.registry.register', 'registry.register', (['Tune'], {}), '(Tune)\n', (328, 334), False, 'from actstream import action, actions, registry\n'), ((393, 411), 'archiver.models.Tune.objects.all', 'Tune.objects.all', ([], {}), '()\n', (409, 411), False, 'from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((623, 649), 'actstream.registry.register', 'registry.register', (['Setting'], {}), '(Setting)\n', (640, 649), False, 'from actstream import action, actions, registry\n'), ((665, 686), 'archiver.models.Setting.objects.all', 'Setting.objects.all', ([], {}), '()\n', (684, 686), False, 'from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((832, 862), 'actstream.registry.register', 'registry.register', (['Competition'], {}), '(Competition)\n', (849, 862), False, 'from actstream import action, actions, registry\n'), ((938, 977), 'actstream.registry.register', 'registry.register', (['CompetitionRecording'], {}), '(CompetitionRecording)\n', (955, 977), False, 'from actstream import action, actions, registry\n'), ((988, 1022), 'archiver.models.CompetitionRecording.objects.all', 'CompetitionRecording.objects.all', ([], {}), '()\n', (1020, 1022), False, 'from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((1175, 1205), 'actstream.registry.register', 'registry.register', (['TuneComment'], {}), '(TuneComment)\n', (1192, 1205), False, 'from actstream import action, actions, registry\n'), ((1216, 1241), 'archiver.models.TuneComment.objects.all', 'TuneComment.objects.all', ([], {}), '()\n', (1239, 1241), False, 'from archiver.models import Tune, 
Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((1391, 1428), 'actstream.registry.register', 'registry.register', (['CompetitionComment'], {}), '(CompetitionComment)\n', (1408, 1428), False, 'from actstream import action, actions, registry\n'), ((1439, 1471), 'archiver.models.CompetitionComment.objects.all', 'CompetitionComment.objects.all', ([], {}), '()\n', (1469, 1471), False, 'from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((1612, 1641), 'actstream.registry.register', 'registry.register', (['Collection'], {}), '(Collection)\n', (1629, 1641), False, 'from actstream import action, actions, registry\n'), ((1707, 1741), 'actstream.registry.register', 'registry.register', (['CollectionEntry'], {}), '(CollectionEntry)\n', (1724, 1741), False, 'from actstream import action, actions, registry\n'), ((1752, 1781), 'archiver.models.CollectionEntry.objects.all', 'CollectionEntry.objects.all', ([], {}), '()\n', (1779, 1781), False, 'from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((2064, 2102), 'actstream.registry.register', 'registry.register', (['CompetitionTuneVote'], {}), '(CompetitionTuneVote)\n', (2081, 2102), False, 'from actstream import action, actions, registry\n'), ((2114, 2147), 'archiver.models.CompetitionTuneVote.objects.all', 'CompetitionTuneVote.objects.all', ([], {}), '()\n', (2145, 2147), False, 'from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((2408, 2451), 'actstream.registry.register', 'registry.register', 
(['CompetitionRecordingVote'], {}), '(CompetitionRecordingVote)\n', (2425, 2451), False, 'from actstream import action, actions, registry\n'), ((2463, 2501), 'archiver.models.CompetitionRecordingVote.objects.all', 'CompetitionRecordingVote.objects.all', ([], {}), '()\n', (2499, 2501), False, 'from archiver.models import Tune, Setting, Competition, CompetitionRecording, TuneComment, CompetitionComment, Collection, CollectionEntry, CompetitionTuneVote, CompetitionRecordingVote\n'), ((502, 572), 'actstream.actions.follow', 'actions.follow', (['tune.author', 'tune'], {'actor_only': '(False)', 'send_action': '(False)'}), '(tune.author, tune, actor_only=False, send_action=False)\n', (516, 572), False, 'from actstream import action, actions, registry\n'), ((692, 778), 'actstream.actions.follow', 'actions.follow', (['setting.author', 'setting.tune'], {'actor_only': '(False)', 'send_action': '(False)'}), '(setting.author, setting.tune, actor_only=False, send_action=\n False)\n', (706, 778), False, 'from actstream import action, actions, registry\n'), ((1028, 1120), 'actstream.actions.follow', 'actions.follow', (['cr.recording.author', 'cr.competition'], {'actor_only': '(False)', 'send_action': '(False)'}), '(cr.recording.author, cr.competition, actor_only=False,\n send_action=False)\n', (1042, 1120), False, 'from actstream import action, actions, registry\n'), ((1247, 1318), 'actstream.actions.follow', 'actions.follow', (['tc.author', 'tc.tune'], {'actor_only': '(False)', 'send_action': '(False)'}), '(tc.author, tc.tune, actor_only=False, send_action=False)\n', (1261, 1318), False, 'from actstream import action, actions, registry\n'), ((1477, 1555), 'actstream.actions.follow', 'actions.follow', (['cc.author', 'cc.competition'], {'actor_only': '(False)', 'send_action': '(False)'}), '(cc.author, cc.competition, actor_only=False, send_action=False)\n', (1491, 1555), False, 'from actstream import action, actions, registry\n'), ((2153, 2243), 'actstream.actions.follow', 
'actions.follow', (['ctv.user', 'ctv.votable.competition'], {'actor_only': '(False)', 'send_action': '(False)'}), '(ctv.user, ctv.votable.competition, actor_only=False,\n send_action=False)\n', (2167, 2243), False, 'from actstream import action, actions, registry\n'), ((2244, 2323), 'actstream.actions.follow', 'actions.follow', (['ctv.user', 'ctv.votable.tune'], {'actor_only': '(False)', 'send_action': '(False)'}), '(ctv.user, ctv.votable.tune, actor_only=False, send_action=False)\n', (2258, 2323), False, 'from actstream import action, actions, registry\n'), ((2507, 2597), 'actstream.actions.follow', 'actions.follow', (['ctv.user', 'ctv.votable.competition'], {'actor_only': '(False)', 'send_action': '(False)'}), '(ctv.user, ctv.votable.competition, actor_only=False,\n send_action=False)\n', (2521, 2597), False, 'from actstream import action, actions, registry\n'), ((2598, 2686), 'actstream.actions.follow', 'actions.follow', (['crv.user', 'crv.votable.recording'], {'actor_only': '(False)', 'send_action': '(False)'}), '(crv.user, crv.votable.recording, actor_only=False,\n send_action=False)\n', (2612, 2686), False, 'from actstream import action, actions, registry\n'), ((1807, 1892), 'actstream.actions.follow', 'actions.follow', (['ce.collection.user', 'ce.tune'], {'actor_only': '(False)', 'send_action': '(False)'}), '(ce.collection.user, ce.tune, actor_only=False, send_action=False\n )\n', (1821, 1892), False, 'from actstream import action, actions, registry\n'), ((1906, 1993), 'actstream.actions.follow', 'actions.follow', (['ce.collection.user', 'ce.setting'], {'actor_only': '(False)', 'send_action': '(False)'}), '(ce.collection.user, ce.setting, actor_only=False,\n send_action=False)\n', (1920, 1993), False, 'from actstream import action, actions, registry\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('blog', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='blogcategory',
options={'ordering': ['name'], 'verbose_name_plural': 'Blog Categories'},
),
migrations.AddField(
model_name='blogpage',
name='date',
field=models.DateField(verbose_name='Post date', default=datetime.datetime(2015, 2, 26, 23, 5, 30, 771014)),
preserve_default=False,
),
]
|
[
"django.db.migrations.AlterModelOptions",
"datetime.datetime"
] |
[((253, 381), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""blogcategory"""', 'options': "{'ordering': ['name'], 'verbose_name_plural': 'Blog Categories'}"}), "(name='blogcategory', options={'ordering': [\n 'name'], 'verbose_name_plural': 'Blog Categories'})\n", (281, 381), False, 'from django.db import models, migrations\n'), ((571, 620), 'datetime.datetime', 'datetime.datetime', (['(2015)', '(2)', '(26)', '(23)', '(5)', '(30)', '(771014)'], {}), '(2015, 2, 26, 23, 5, 30, 771014)\n', (588, 620), False, 'import datetime\n')]
|
from core.advbase import *
def module():
    """Return the adventurer class this module provides to the simulator."""
    return Julietta
class Julietta(Adv):
    # Simulation configuration for Julietta, expressed as a single literal:
    # equipment slots, action control list, co-abilities and shared skill.
    conf = {
        'slots.a': ['Valiant_Crown', 'Primal_Crisis'],
        'slots.d': 'Gala_Thor',
        'acl': """
        `dragon, self.energy()<4
        `s3, not buff(s3)
        `s2
        `s1
        `s4, s1.charged<s1.sp/2
        `fs, x=4 and c_fs(enhanced)>0
    """,
        'coabs': ['Blade', 'Lucretia', 'Peony'],
        'share': ['Summer_Cleo'],
    }
if __name__ == '__main__':
    # Explicitly import sys rather than relying on it leaking out of the
    # `from core.advbase import *` star-import above; keeps the script
    # runnable even if that module stops re-exporting sys.
    import sys
    from core.simulate import test_with_argv
    test_with_argv(None, *sys.argv)
|
[
"core.simulate.test_with_argv"
] |
[((539, 570), 'core.simulate.test_with_argv', 'test_with_argv', (['None', '*sys.argv'], {}), '(None, *sys.argv)\n', (553, 570), False, 'from core.simulate import test_with_argv\n')]
|
"""
A set of functions that should not be publically accessible.
"""
from typing import List
import matplotlib.pyplot as plt
import numpy as np
def _preamble(
data, axis, plot_kwargs, positions, vertical_violins, sides="both"
):
if vertical_violins is True:
assert sides in ["both", "left", "right"]
else: # horizontal violins
assert sides in ["both", "top", "bottom"]
if axis is None:
fig, axis = plt.subplots()
else:
fig = axis.get_figure()
if isinstance(plot_kwargs, list):
assert len(data) == len(plot_kwargs)
if positions is not None:
assert len(data) == len(positions)
else:
# Horizontal positions of the centers of the violins
positions = np.arange(0, len(data))
# Center positions between integers
if vertical_violins:
axis.set_xlim(positions.min() - 0.5, positions.max() + 0.5)
else:
axis.set_ylim(positions.min() - 0.5, positions.max() + 0.5)
return fig, axis, positions
def _xy_order(domain: List, dist: List, vertical_violin: bool):
if vertical_violin:
return dist, domain
return domain, dist
def _plot_from_x_dist(
    axis, x, y, index, kwargs, vertical_violins, sides="both", fill=False
):
    """Draw one half (or both halves) of a violin from a precomputed density.

    ``x`` is the domain, ``y`` the density values; the density peak is
    normalised to a half-width of 0.4 around the violin centre ``index``.
    Outlines are drawn with ``axis.plot``; with ``fill=True`` the area is
    filled instead (``fill_betweenx`` for vertical violins).
    """
    scale = 0.4 / y.max()
    if fill:
        plot_func = (
            axis.fill_betweenx if vertical_violins else axis.fill_between
        )
    else:
        plot_func = axis.plot
    # Offsets of the requested halves, left/top first to match draw order.
    half_curves = []
    if sides in ("both", "left", "top"):
        half_curves.append(index - y * scale)
    if sides in ("both", "right", "bottom"):
        half_curves.append(index + y * scale)
    for curve in half_curves:
        plot_func(*_xy_order(x, curve, vertical_violins), **kwargs)
    return
def _inner_from_x_and_kde(
    axis, x, y, index, inner, scale, vertical_violins, sides="both"
):
    """Draw inner marks (sticks or quartile lines) across a violin.

    For each point ``(xi, yi)`` a short line is drawn from the violin centre
    ``index`` out to the KDE extent on each requested side.  With
    ``inner == "quartiles"`` the second line (i == 1, the median) is solid;
    all other sticks are dotted.
    """
    draw_left = sides in ("both", "left", "top")
    draw_right = sides in ("both", "right", "bottom")
    for i, (xi, yi) in enumerate(zip(x, y)):
        extents = []
        if draw_left:
            extents.append(index - yi * scale)
        if draw_right:
            extents.append(index + yi * scale)
        for extent in extents:
            xii, yii = _xy_order([xi, xi], [index, extent], vertical_violins)
            if inner in ("stick", "quartiles"):
                if inner == "quartiles" and i == 1:
                    # Median drawn as a solid line.
                    axis.plot(xii, yii, c="k", alpha=0.5)
                else:
                    axis.plot(xii, yii, c="k", alpha=0.5, ls=":")
    return
|
[
"matplotlib.pyplot.subplots"
] |
[((444, 458), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (456, 458), True, 'import matplotlib.pyplot as plt\n')]
|
# !/usr/local/python/bin/python
# -*- coding: utf-8 -*-
# (C) <NAME>, 2020
# All rights reserved
# @Author: '<NAME> <<EMAIL>>'
# @Time: '2020-07-11 09:37'
from flask import Flask
from flask_redis import Redis
# Extension instance created first and bound to the app below
# (the usual Flask init_app pattern).
redis = Redis()

app = Flask(__name__)
app.config.update(
    REDIS_PREFIX="EG:",
    REDIS_URL="redis://:@127.0.0.1:6379/0",
    REDIS_DECODE_RESPONSES=True,
)
redis.init_app(app)

if __name__ == "__main__":
    # SETNX semantics: the first write for a key succeeds, repeats fail.
    assert redis.setnx("SET:NX:1", "VALUE1") is True
    assert redis.setnx("SET:NX:1", "VALUE2") is False
    assert redis.setnx("-SET:NX:1", "VALUE1") is True
    assert redis.setnx("-SET:NX:1", "VALUE1") is False
    # Expire both demo keys so reruns start from a clean slate.
    redis.expire("SET:NX:1", 60)
    redis.expire("-SET:NX:1", 70)
|
[
"flask_redis.Redis",
"flask.Flask"
] |
[((218, 225), 'flask_redis.Redis', 'Redis', ([], {}), '()\n', (223, 225), False, 'from flask_redis import Redis\n'), ((232, 247), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (237, 247), False, 'from flask import Flask\n')]
|
# -*- coding: utf-8 -*-
# how to use https://github.com/getsentry/responses
import json
import pytest
from aiohttp import web
from src import client_interface as api
# Template request body for the /live endpoint exercised in these tests.
payload = {"name": "", "date": "", "requests_sent": 0}
# JSON-schema-like description of the expected POST body.
# NOTE(review): "required": "true" is a string, not a boolean — confirm this
# is the form whatever validator consumes `schemas` actually expects.
schemas = {
    "post": {
        "type": "object",
        "properties": {
            "name": {"type": "string", "required": "true"},
            "date": {"type": "string"},
            "requests_sent": {"type": "number"},
        },
    }
}
async def post_ok(request):
    """Handle POST: stash the posted ``value`` on the app and reply 200.

    NOTE(review): ``web.Response(body=...)`` is handed a dict here, but
    aiohttp expects bytes/str for ``body`` — this presumably errors at
    response time, which appears to be what the 500-expecting test below
    relies on.  Confirm before "fixing" (``web.json_response`` would be the
    correct form for a genuine 200).
    """
    if request.method == 'POST':
        request.app['value'] = (await request.post())['value']
        return web.Response(body={"successful": True}, status=200, )
    raise Exception('not supported')
@pytest.fixture
def client(loop, aiohttp_client):
    """aiohttp test client for an app with POST /live routed to ``post_ok``."""
    app = web.Application()
    app.router.add_post('/live', post_ok)
    return loop.run_until_complete(aiohttp_client(app))
async def test_should_make_post_request_failed(client):
    """POST to /live via the client wrapper should surface HTTP 500.

    NOTE(review): ``api.post`` is project-local; ``resp.code`` suggests it
    wraps the aiohttp response in its own object — verify against
    ``src.client_interface``.
    """
    resp = await api.post(client, url='/live', payload={'value': 'foo'}, auth='')
    assert resp.code == 500
|
[
"aiohttp.web.Response",
"aiohttp.web.Application",
"src.client_interface.post"
] |
[((763, 780), 'aiohttp.web.Application', 'web.Application', ([], {}), '()\n', (778, 780), False, 'from aiohttp import web\n'), ((610, 661), 'aiohttp.web.Response', 'web.Response', ([], {'body': "{'successful': True}", 'status': '(200)'}), "(body={'successful': True}, status=200)\n", (622, 661), False, 'from aiohttp import web\n'), ((954, 1018), 'src.client_interface.post', 'api.post', (['client'], {'url': '"""/live"""', 'payload': "{'value': 'foo'}", 'auth': '""""""'}), "(client, url='/live', payload={'value': 'foo'}, auth='')\n", (962, 1018), True, 'from src import client_interface as api\n')]
|
"""FastStars specific catalog class."""
import codecs
import json
import os
from collections import OrderedDict
from datetime import datetime
from subprocess import check_output
from astrocats.catalog.catalog import Catalog
from astrocats.catalog.quantity import QUANTITY
from astrocats.catalog.utils import read_json_arr, read_json_dict
from .faststars import FASTSTARS, FastStars
from .utils import name_clean
class FastStarsCatalog(Catalog):
    """Catalog class for `FastStars` objects."""
    class PATHS(Catalog.PATHS):
        """Paths to catalog inputs/outputs."""
        # Directory containing this module; relative paths resolve from here.
        PATH_BASE = os.path.abspath(os.path.dirname(__file__))
        def __init__(self, catalog):
            """Initialize paths.

            Builds the auxiliary input-file paths (under ``PATH_INPUT``) and
            the cache-file paths (under ``PATH_OUTPUT``/cache) for *catalog*.
            """
            super(FastStarsCatalog.PATHS, self).__init__(catalog)
            # auxiliary datafiles
            self.TYPE_SYNONYMS = os.path.join(
                self.PATH_INPUT, 'type-synonyms.json')
            self.SOURCE_SYNONYMS = os.path.join(
                self.PATH_INPUT, 'source-synonyms.json')
            self.URL_REDIRECTS = os.path.join(
                self.PATH_INPUT, 'url-redirects.json')
            self.BIBERRORS = os.path.join(self.PATH_INPUT, 'biberrors.json')
            # cached datafiles
            self.BIBAUTHORS = os.path.join(
                self.PATH_OUTPUT, 'cache', 'bibauthors.json')
            self.EXTINCT = os.path.join(
                self.PATH_OUTPUT, 'cache', 'extinctions.json')
        def get_repo_years(self):
            """Return an array of years based upon output repositories."""
            # Each output repo folder name ends in a 4-digit year.
            repo_folders = self.get_repo_output_folders(bones=False)
            repo_years = [int(repo_folders[x][-4:])
                          for x in range(len(repo_folders))]
            # First repository covers everything up to its label year, so its
            # effective start year is shifted back by one.
            repo_years[0] -= 1
            return repo_years
    class SCHEMA(object):
        """Define the HASH/URL associated with the present schema."""
        # NOTE(review): `check_output` runs `git` at class-definition (import)
        # time; importing this module outside a git checkout or without git
        # installed will raise.  The literal quotes around %h are stripped.
        HASH = (check_output(['git', '-C', 'astrocats/faststars',
                              'log', '-n', '1', '--format="%h"',
                              '--', 'SCHEMA.md'])
                .decode('ascii').strip().strip('"').strip())
        URL = ('https://github.com/astrocatalogs/faststars/blob/' + HASH +
               '/SCHEMA.md')
    def __init__(self, args, log):
        """Initialize catalog."""
        # Initialize super `astrocats.catalog.catalog.Catalog` object
        super(FastStarsCatalog, self).__init__(args, log)
        # Entry class produced by this catalog.
        self.proto = FastStars
        self._load_aux_data()
        return
    def should_bury(self, name):
        """Determine whether a fast star should be "buried".
        For fast stars, objects that have enough data such that they can be
        definitively determined to be "bound" are buried.
        """
        bury_entry = False
        # Bury when a bound probability exists, is not an upper limit, and
        # is >= 0.9998 (i.e. the star is almost certainly bound).
        if (FASTSTARS.BOUND_PROBABILITY in self.entries[name] and
                not self.entries[name][
                    FASTSTARS.BOUND_PROBABILITY][0].get(
                        QUANTITY.UPPER_LIMIT, False) and float(
                            self.entries[name][FASTSTARS.BOUND_PROBABILITY][0][
                                QUANTITY.VALUE]) >= 0.9998):
            bury_entry = True
        # NOTE(review): second tuple element presumably means "still save the
        # entry" — confirm against the base-class `should_bury` contract.
        return (bury_entry, True)
    def _load_aux_data(self):
        """Load auxiliary dictionaries for use in this catalog."""
        # Create/Load auxiliary dictionaries
        self.nedd_dict = OrderedDict()
        self.bibauthor_dict = read_json_dict(self.PATHS.BIBAUTHORS)
        self.biberror_dict = read_json_dict(self.PATHS.BIBERRORS)
        self.extinctions_dict = read_json_dict(self.PATHS.EXTINCT)
        self.source_syns = read_json_dict(self.PATHS.SOURCE_SYNONYMS)
        self.url_redirs = read_json_dict(self.PATHS.URL_REDIRECTS)
        self.type_syns = read_json_dict(self.PATHS.TYPE_SYNONYMS)
        # Create/Load auxiliary arrays
        #self.nonsnetypes = read_json_arr(self.PATHS.NON_SNE_TYPES)
        return
    def save_caches(self):
        """Save caches to JSON files."""
        # Tab-indented output; ensure_ascii=False keeps non-ASCII author
        # names human-readable in the cache files.
        jsonstring = json.dumps(self.bibauthor_dict, indent='\t',
                                separators=(',', ':'), ensure_ascii=False)
        with codecs.open(self.PATHS.BIBAUTHORS, 'w', encoding='utf8') as f:
            f.write(jsonstring)
        jsonstring = json.dumps(self.extinctions_dict, indent='\t',
                                separators=(',', ':'), ensure_ascii=False)
        with codecs.open(self.PATHS.EXTINCT, 'w', encoding='utf8') as f:
            f.write(jsonstring)
    def clean_entry_name(self, name):
        """Clean entry's name."""
        return name_clean(name)
|
[
"codecs.open",
"astrocats.catalog.utils.read_json_dict",
"os.path.dirname",
"subprocess.check_output",
"json.dumps",
"collections.OrderedDict",
"os.path.join"
] |
[((3358, 3371), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3369, 3371), False, 'from collections import OrderedDict\n'), ((3402, 3439), 'astrocats.catalog.utils.read_json_dict', 'read_json_dict', (['self.PATHS.BIBAUTHORS'], {}), '(self.PATHS.BIBAUTHORS)\n', (3416, 3439), False, 'from astrocats.catalog.utils import read_json_arr, read_json_dict\n'), ((3469, 3505), 'astrocats.catalog.utils.read_json_dict', 'read_json_dict', (['self.PATHS.BIBERRORS'], {}), '(self.PATHS.BIBERRORS)\n', (3483, 3505), False, 'from astrocats.catalog.utils import read_json_arr, read_json_dict\n'), ((3538, 3572), 'astrocats.catalog.utils.read_json_dict', 'read_json_dict', (['self.PATHS.EXTINCT'], {}), '(self.PATHS.EXTINCT)\n', (3552, 3572), False, 'from astrocats.catalog.utils import read_json_arr, read_json_dict\n'), ((3600, 3642), 'astrocats.catalog.utils.read_json_dict', 'read_json_dict', (['self.PATHS.SOURCE_SYNONYMS'], {}), '(self.PATHS.SOURCE_SYNONYMS)\n', (3614, 3642), False, 'from astrocats.catalog.utils import read_json_arr, read_json_dict\n'), ((3669, 3709), 'astrocats.catalog.utils.read_json_dict', 'read_json_dict', (['self.PATHS.URL_REDIRECTS'], {}), '(self.PATHS.URL_REDIRECTS)\n', (3683, 3709), False, 'from astrocats.catalog.utils import read_json_arr, read_json_dict\n'), ((3735, 3775), 'astrocats.catalog.utils.read_json_dict', 'read_json_dict', (['self.PATHS.TYPE_SYNONYMS'], {}), '(self.PATHS.TYPE_SYNONYMS)\n', (3749, 3775), False, 'from astrocats.catalog.utils import read_json_arr, read_json_dict\n'), ((3988, 4079), 'json.dumps', 'json.dumps', (['self.bibauthor_dict'], {'indent': '"""\t"""', 'separators': "(',', ':')", 'ensure_ascii': '(False)'}), "(self.bibauthor_dict, indent='\\t', separators=(',', ':'),\n ensure_ascii=False)\n", (3998, 4079), False, 'import json\n'), ((4237, 4330), 'json.dumps', 'json.dumps', (['self.extinctions_dict'], {'indent': '"""\t"""', 'separators': "(',', ':')", 'ensure_ascii': '(False)'}), "(self.extinctions_dict, indent='\\t', 
separators=(',', ':'),\n ensure_ascii=False)\n", (4247, 4330), False, 'import json\n'), ((615, 640), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (630, 640), False, 'import os\n'), ((849, 900), 'os.path.join', 'os.path.join', (['self.PATH_INPUT', '"""type-synonyms.json"""'], {}), "(self.PATH_INPUT, 'type-synonyms.json')\n", (861, 900), False, 'import os\n'), ((953, 1006), 'os.path.join', 'os.path.join', (['self.PATH_INPUT', '"""source-synonyms.json"""'], {}), "(self.PATH_INPUT, 'source-synonyms.json')\n", (965, 1006), False, 'import os\n'), ((1057, 1108), 'os.path.join', 'os.path.join', (['self.PATH_INPUT', '"""url-redirects.json"""'], {}), "(self.PATH_INPUT, 'url-redirects.json')\n", (1069, 1108), False, 'import os\n'), ((1155, 1202), 'os.path.join', 'os.path.join', (['self.PATH_INPUT', '"""biberrors.json"""'], {}), "(self.PATH_INPUT, 'biberrors.json')\n", (1167, 1202), False, 'import os\n'), ((1264, 1322), 'os.path.join', 'os.path.join', (['self.PATH_OUTPUT', '"""cache"""', '"""bibauthors.json"""'], {}), "(self.PATH_OUTPUT, 'cache', 'bibauthors.json')\n", (1276, 1322), False, 'import os\n'), ((1367, 1426), 'os.path.join', 'os.path.join', (['self.PATH_OUTPUT', '"""cache"""', '"""extinctions.json"""'], {}), "(self.PATH_OUTPUT, 'cache', 'extinctions.json')\n", (1379, 1426), False, 'import os\n'), ((4121, 4177), 'codecs.open', 'codecs.open', (['self.PATHS.BIBAUTHORS', '"""w"""'], {'encoding': '"""utf8"""'}), "(self.PATHS.BIBAUTHORS, 'w', encoding='utf8')\n", (4132, 4177), False, 'import codecs\n'), ((4372, 4425), 'codecs.open', 'codecs.open', (['self.PATHS.EXTINCT', '"""w"""'], {'encoding': '"""utf8"""'}), "(self.PATHS.EXTINCT, 'w', encoding='utf8')\n", (4383, 4425), False, 'import codecs\n'), ((1911, 2019), 'subprocess.check_output', 'check_output', (['[\'git\', \'-C\', \'astrocats/faststars\', \'log\', \'-n\', \'1\', \'--format="%h"\',\n \'--\', \'SCHEMA.md\']'], {}), '([\'git\', \'-C\', \'astrocats/faststars\', \'log\', \'-n\', 
\'1\',\n \'--format="%h"\', \'--\', \'SCHEMA.md\'])\n', (1923, 2019), False, 'from subprocess import check_output\n')]
|
#!/usr/bin/env python3
# Run a command on the hub.
# Guts were lifted from spikeprime-tools/spiketools/spikejsonrpcapispike.py
import base64
from comm.DirectConnectionMonitor import DirectConnectionMonitor
from comm.UsbConnectionMonitor import UsbConnectionMonitor
from comm.SerialConnection import SerialConnection
import os
import argparse
from tqdm import tqdm
import time
import json
import logging
from datetime import datetime
from comm.HubClient import ConnectionState, HubClient
from data.HubMonitor import HubMonitor
from utils.setup import setup_logging
import mpy_cross
from pathlib import Path
logger = logging.getLogger("App")
class RPC:
    """High-level wrapper around the SPIKE hub JSON-RPC protocol.

    Owns a `HubClient` over a USB connection and exposes the hub's
    program/storage/display operations as methods.
    """
    def __init__(self):
        cm = UsbConnectionMonitor()
        # below is an alternative if the USB scanning does not work
        #cm = DirectConnectionMonitor(SerialConnection("/dev/ttyACM0"))
        self._client = HubClient(cm)
        self._hm = HubMonitor(self._client)
        self._hm.events.console_print += self._console_print
        self._client.start()
    def _console_print(self, msg):
        # Hub console messages already carry their own line endings.
        print(msg, end='')
    def _gen_random_id(self, length=4):
        """Return a random identifier of `length` characters from [A-Za-z0-9_]."""
        import string, random
        letters = string.ascii_letters + string.digits + '_'
        return ''.join(random.choice(letters) for _ in range(length))
    def send_message(self, name, params=None):
        """Send one RPC message, blocking until the hub reports telemetry.

        Fixes: `params` had a mutable `{}` default, and the loop read the
        module-level `rpc` singleton instead of `self`.
        """
        if params is None:
            params = {}
        while self._client.state is not ConnectionState.TELEMETRY:
            logger.info('waiting for hub to connect')
            time.sleep(0.2)
        return self._client.send_message(name, params)
    # Program Methods
    def program_execute(self, n: int, wait: bool = True, terminate_on_ctrl_c: bool = True):
        """Start the program stored in slot `n`.

        :param n: slot number to run
        :param wait: block until the program finishes
        :param terminate_on_ctrl_c: on Ctrl-C while waiting, stop the program
        :return: the hub's reply to 'program_execute' (None if slot is empty)
        """
        info = self.get_storage_information()  # was: rpc.get_storage_information()
        if info is None:
            logger.error('Cannot get storage information from Hub')
            raise SystemExit
        slots = info['slots']
        if str(n) not in slots:
            logger.error(f'Cannot find program in slot {n}')
            return
        project = slots[str(n)]
        project_id = project['project_id']
        res = self.send_message('program_execute', {'slotid': n})
        if not wait:
            return res
        try:
            # Poll until the monitor reports (project_id, not-running).
            while self._hm.execution_status != (project_id, False):
                time.sleep(0.5)
        except KeyboardInterrupt:
            if terminate_on_ctrl_c and self._hm.execution_status[1]:  # take care of 'None' and 'False'
                logger.warning('Ctrl-C received, terminating program...')
                self.program_terminate()
            else:
                logger.warning('Ctrl-C received.')
        return res
    def program_terminate(self):
        """Stop the currently running program."""
        return self.send_message('program_terminate')
    def get_storage_information(self) -> dict:
        """Return the hub's storage status (slot table and free space)."""
        return self.send_message('get_storage_status')
    def program_compile(self, src_file: str, out_file: str = None, opt: int = 0) -> str:
        """Compile a .py source to .mpy with mpy-cross.

        :param src_file: path of the python source
        :param out_file: output path (defaults to `src_file` with .mpy suffix)
        :param opt: mpy-cross optimisation level (1-3; 0 omits the flag)
        :return: the output path, or None if compilation failed
        """
        cmd = f'-municode {src_file}'
        if not out_file:
            out_file = Path(src_file).with_suffix('.mpy')
        cmd += f' -o {out_file}'
        if 0 < opt <= 3:
            cmd += f' -O{opt}'
        try:
            logger.info(f'Executing mpy_cross with args: {cmd}')
            res = mpy_cross.run(*cmd.split())
            res.wait()
        except Exception:  # was a bare `except:` that also swallowed Ctrl-C
            logger.warning(f'Failed to compile: {src_file}')
            return None
        logger.info(f'Successfully compiled: {src_file}')
        return out_file
    def program_write(self, file: str, name: str = None, slot: int = 0, vm: bool = False, compile: bool = False) -> bool:
        """Upload a .py/.mpy program file into a hub slot.

        :param file: path of the program file
        :param name: display name stored in the slot meta (defaults to `file`)
        :param slot: destination slot id
        :param vm: mark the program as a scratch (VM) program instead of python
        :param compile: pre-compile .py sources with mpy-cross before upload
        :return: True on success
        """
        def _start_write_program(name, size, slot, created, modified, filename: str = '__init__.py'):
            # Announce the transfer; the hub replies with block size and transfer id.
            project_id = self._gen_random_id(12)
            prog_type = 'scratch' if vm else 'python'  # avoid shadowing builtin `type`
            meta = {'created': created, 'modified': modified, 'name': str(base64.b64encode(name.encode()), 'utf-8'),
                    'type': prog_type, 'project_id': project_id}
            return self.send_message('start_write_program', {'slotid': slot, 'size': size, 'meta': meta, 'filename': filename})
        def _write_package(data, transferid):
            return self.send_message('write_package', {'data': str(base64.b64encode(data), 'utf-8'), 'transferid': transferid})
        filepath = Path(file)
        if not filepath.exists():
            logger.error(f'File {filepath} does not exists')
            return False
        is_py = filepath.suffix.lower() == '.py'
        is_mpy = filepath.suffix.lower() == '.mpy'
        # Fixed: honor the `compile` parameter (was reading the global `args`
        # and calling methods through the global `rpc`).
        if compile:
            if is_mpy:
                logger.warning(f'Skip compiling mpy file: {filepath.name}')
            else:
                import tempfile
                out_file = tempfile.NamedTemporaryFile(suffix='.mpy')
                mpy_name = out_file.name
                out_file.close()
                mpy_file = self.program_compile(filepath, out_file=mpy_name)
                if mpy_file:
                    is_mpy = True
                    is_py = False
                    filepath = mpy_file
        logger.info(f'Uploading file: {filepath}')
        if not is_py and not is_mpy:
            logger.error(f'File {filepath} is not a valide .py or .mpy file')
            return False
        dest_file = '__init__.py' if is_py else '__init__.mpy'
        with open(filepath, "rb") as f:
            size = os.path.getsize(filepath)
            name = name if name else file
            now = int(time.time() * 1000)
            start = _start_write_program(name, size, slot, now, now, filename=dest_file)
            bs = start['blocksize']
            transfer_id = start['transferid']
            with tqdm(total=size, unit='B', unit_scale=True) as pbar:
                b = f.read(bs)
                while b:
                    _write_package(b, transfer_id)
                    pbar.update(len(b))
                    b = f.read(bs)
        return True
    def move_project(self, from_slot, to_slot):
        """Move a stored program from one slot to another."""
        return self.send_message('move_project', {'old_slotid': from_slot, 'new_slotid': to_slot})
    def remove_project(self, from_slot):
        """Delete the program stored in `from_slot`."""
        return self.send_message('remove_project', {'slotid': from_slot})
    # Light Methods
    def display_set_pixel(self, x, y, brightness=9):
        """Set one LED of the 5x5 matrix to the given brightness (0-9)."""
        return self.send_message('scratch.display_set_pixel', {'x': x, 'y': y, 'brightness': brightness})
    def display_clear(self):
        """Turn off all LEDs."""
        return self.send_message('scratch.display_clear')
    def display_image(self, image):
        """Show a static image ('xxxxx:...' where x is a brightness digit)."""
        return self.send_message('scratch.display_image', {'image': image})
    def display_image_for(self, image, duration_ms):
        """Show an image for `duration_ms` milliseconds."""
        return self.send_message('scratch.display_image_for', {'image': image, 'duration': duration_ms})
    def display_text(self, text):
        """Scroll `text` across the LED matrix."""
        return self.send_message('scratch.display_text', {'text': text})
    # Hub Methods
    def get_firmware_info(self):
        """Return firmware/runtime version information."""
        return self.send_message('get_hub_info')
if __name__ == "__main__":
    def handle_list():
        """Print a table of the programs stored in the hub's 20 slots."""
        info = rpc.get_storage_information()
        storage = info['storage']
        slots = info['slots']
        print("%4s %-40s %6s %-20s %-12s %-10s" % ("Slot", "Decoded Name", "Size", "Last Modified", "Project_id", "Type"))
        for i in range(20):
            if str(i) in slots:
                sl = slots[str(i)]
                modified = datetime.utcfromtimestamp(sl['modified']/1000).strftime('%Y-%m-%d %H:%M:%S')
                # Slot names are base64-encoded; fall back to the raw value.
                try:
                    decoded_name = base64.b64decode(sl['name']).decode('utf-8')
                except:
                    decoded_name = sl['name']
                # 'project_id' and 'type' are optional slot fields.
                try:
                    project = sl['project_id']
                except:
                    project = " "
                try:
                    type = sl['type']
                except:
                    type = " "
                # print("%2s %-40s %-40s %5db %6s %-20s %-20s %-10s" % (i, sl['name'], decoded_name, sl['size'], sl['id'], modified, project, type))
                print("%4s %-40s %5db %-20s %-12s %-10s" % (i, decoded_name, sl['size'], modified, project, type))
        print(("Storage free %s%s of total %s%s" % (storage['free'], storage['unit'], storage['total'], storage['unit'])))
    def handle_fwinfo():
        """Print the hub's firmware and runtime versions."""
        info = rpc.get_firmware_info()
        fw = '.'.join(str(x) for x in info['firmware']['version'])
        rt = '.'.join(str(x) for x in info['runtime']['version'])
        print("Firmware version: %s; Runtime version: %s" % (fw, rt))
    def handle_upload():
        """Upload the file given on the command line, optionally starting it."""
        res = rpc.program_write(args.file, args.name, args.to_slot, vm=args.vm, compile=args.compile)
        if not res:
            logger.error(f'Fail to write file: {args.file}')
            return False
        if args.start or args.wait: # either -s or/and -w are specified
            rpc.program_execute(args.to_slot, wait=args.wait)
        return True
    # Command-line interface: one sub-command per hub operation; each
    # sub-parser installs its handler via set_defaults(func=...).
    parser = argparse.ArgumentParser(description='Tools for Spike Hub RPC protocol')
    parser.add_argument('--verbose', '-v', help='print informational messages to console', action='store_true')
    # With no sub-command selected, just show the help text.
    parser.set_defaults(func=lambda: parser.print_help())
    sub_parsers = parser.add_subparsers()
    list_parser = sub_parsers.add_parser('list', aliases=['ls'], help='List stored programs')
    list_parser.set_defaults(func=handle_list)
    fwinfo_parser = sub_parsers.add_parser('fwinfo', help='Show firmware version')
    fwinfo_parser.set_defaults(func=handle_fwinfo)
    mvprogram_parser = sub_parsers.add_parser('mv', help='Changes program slot')
    mvprogram_parser.add_argument('from_slot', type=int)
    mvprogram_parser.add_argument('to_slot', type=int)
    mvprogram_parser.set_defaults(func=lambda: rpc.move_project(args.from_slot, args.to_slot))
    cpprogram_parser = sub_parsers.add_parser('upload', aliases=['cp'], help='Uploads a program')
    cpprogram_parser.add_argument('file')
    cpprogram_parser.add_argument('to_slot', type=int)
    cpprogram_parser.add_argument('name', nargs='?')
    cpprogram_parser.add_argument('--start', '-s', help='Start after upload', action='store_true')
    cpprogram_parser.add_argument('--wait', '-w', help='Start and wait for program to finish', action='store_true')
    cpprogram_parser.add_argument('--vm', help='Virtualmachine-based python program', action='store_true')
    cpprogram_parser.add_argument('--compile', '-c', help='Compile python program before upload', action='store_true')
    cpprogram_parser.set_defaults(func=handle_upload)
    rmprogram_parser = sub_parsers.add_parser('rm', help='Removes the program at a given slot')
    rmprogram_parser.add_argument('from_slot', type=int)
    rmprogram_parser.set_defaults(func=lambda: rpc.remove_project(args.from_slot))
    startprogram_parser = sub_parsers.add_parser('start', help='Starts a program')
    startprogram_parser.add_argument('slot', type=int)
    startprogram_parser.add_argument('--wait', '-w', help='Wait for program to finish', action='store_true')
    startprogram_parser.set_defaults(func=lambda: rpc.program_execute(args.slot, wait=args.wait))
    stopprogram_parser = sub_parsers.add_parser('stop', help='Stop program execution')
    stopprogram_parser.set_defaults(func=lambda: rpc.program_terminate())
    display_parser = sub_parsers.add_parser('display', help='Controls 5x5 LED matrix display')
    display_parser.set_defaults(func=lambda: display_parser.print_help())
    display_parsers = display_parser.add_subparsers()
    display_image_parser = display_parsers.add_parser('image', help='Displays image on the LED matrix')
    display_image_parser.add_argument('image', help='format xxxxx:xxxxx:xxxxx:xxxxx:xxxx, where x is the pixel brigthness in range 0-9')
    display_image_parser.set_defaults(func=lambda: rpc.display_image(args.image))
    display_text_parser = display_parsers.add_parser('text', help='Displays scrolling text on the LED matrix')
    display_text_parser.add_argument('text')
    display_text_parser.set_defaults(func=lambda: rpc.display_text(args.text))
    display_clear_parser = display_parsers.add_parser('clear', help='Clears display')
    display_clear_parser.set_defaults(func=lambda: rpc.display_clear())
    display_pixel_parser = display_parsers.add_parser('setpixel', help='Sets individual LED brightness')
    display_pixel_parser.add_argument('x', type=int)
    display_pixel_parser.add_argument('y', type=int)
    display_pixel_parser.add_argument('brightness', nargs='?', type=int, default=9, help='pixel brightness 0-9')
    display_pixel_parser.set_defaults(func=lambda: rpc.display_set_pixel(args.x, args.y, args.brightness))
    log_level = logging.WARNING
    args = parser.parse_args()
    if args.verbose:
        log_level = logging.DEBUG
    setup_logging(os.path.dirname(__file__) + "/logs/run_command.log", log_level)
    # Connect to the hub, then dispatch to the selected sub-command handler.
    rpc = RPC()
    args.func()
|
[
"tempfile.NamedTemporaryFile",
"tqdm.tqdm",
"argparse.ArgumentParser",
"os.path.getsize",
"comm.UsbConnectionMonitor.UsbConnectionMonitor",
"os.path.dirname",
"data.HubMonitor.HubMonitor",
"random.choice",
"logging.getLogger",
"time.sleep",
"time.time",
"datetime.datetime.utcfromtimestamp",
"pathlib.Path",
"base64.b64decode",
"base64.b64encode",
"comm.HubClient.HubClient"
] |
[((618, 642), 'logging.getLogger', 'logging.getLogger', (['"""App"""'], {}), "('App')\n", (635, 642), False, 'import logging\n'), ((8100, 8171), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Tools for Spike Hub RPC protocol"""'}), "(description='Tools for Spike Hub RPC protocol')\n", (8123, 8171), False, 'import argparse\n'), ((687, 709), 'comm.UsbConnectionMonitor.UsbConnectionMonitor', 'UsbConnectionMonitor', ([], {}), '()\n', (707, 709), False, 'from comm.UsbConnectionMonitor import UsbConnectionMonitor\n'), ((861, 874), 'comm.HubClient.HubClient', 'HubClient', (['cm'], {}), '(cm)\n', (870, 874), False, 'from comm.HubClient import ConnectionState, HubClient\n'), ((890, 914), 'data.HubMonitor.HubMonitor', 'HubMonitor', (['self._client'], {}), '(self._client)\n', (900, 914), False, 'from data.HubMonitor import HubMonitor\n'), ((4022, 4032), 'pathlib.Path', 'Path', (['file'], {}), '(file)\n', (4026, 4032), False, 'from pathlib import Path\n'), ((1414, 1429), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (1424, 1429), False, 'import time\n'), ((4960, 4985), 'os.path.getsize', 'os.path.getsize', (['filepath'], {}), '(filepath)\n', (4975, 4985), False, 'import os\n'), ((11850, 11875), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (11865, 11875), False, 'import os\n'), ((1203, 1225), 'random.choice', 'random.choice', (['letters'], {}), '(letters)\n', (1216, 1225), False, 'import string, random\n'), ((2150, 2165), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2160, 2165), False, 'import time\n'), ((4397, 4439), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".mpy"""'}), "(suffix='.mpy')\n", (4424, 4439), False, 'import tempfile\n'), ((5213, 5256), 'tqdm.tqdm', 'tqdm', ([], {'total': 'size', 'unit': '"""B"""', 'unit_scale': '(True)'}), "(total=size, unit='B', unit_scale=True)\n", (5217, 5256), False, 'from tqdm import tqdm\n'), ((2834, 2848), 
'pathlib.Path', 'Path', (['src_file'], {}), '(src_file)\n', (2838, 2848), False, 'from pathlib import Path\n'), ((5038, 5049), 'time.time', 'time.time', ([], {}), '()\n', (5047, 5049), False, 'import time\n'), ((3941, 3963), 'base64.b64encode', 'base64.b64encode', (['data'], {}), '(data)\n', (3957, 3963), False, 'import base64\n'), ((6734, 6782), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (["(sl['modified'] / 1000)"], {}), "(sl['modified'] / 1000)\n", (6759, 6782), False, 'from datetime import datetime\n'), ((6849, 6877), 'base64.b64decode', 'base64.b64decode', (["sl['name']"], {}), "(sl['name'])\n", (6865, 6877), False, 'import base64\n')]
|
'''
adapted from Harry
'''
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
from pyPCGA import PCGA
# import mf
import math
import datetime as dt
import os
import sys
from poro import Model
#print(np.__version__)
# domain parameters: 128x128 structured grid on the unit square
nx = 128
ny = 128
N = np.array([nx, ny])
m = np.prod(N)  # total number of unknowns in the parameter field
x = np.linspace(0., 1., N[0])
y = np.linspace(0., 1., N[1])
xmin = np.array([x[0], y[0]])
xmax = np.array([x[-1], y[-1]])
# forward problem parameters
pts_fem = np.loadtxt('dof_perm_dg0.csv', delimiter=',')  # DG0 dof coordinates of the FEM mesh
ptx = np.linspace(0,1,nx)
pty = np.linspace(0,1,ny)
logk_idx = np.loadtxt('logk_idx.txt').astype(int)
forward_params = {'ptx': ptx, 'pty': pty, 'pts_fem': pts_fem, 'logk_idx': logk_idx}
# Load files for s_true and obs (both as column vectors)
s_true = np.loadtxt('s_true.txt').reshape(-1,1)
obs = np.loadtxt('obs.txt').reshape(-1,1)  # generated noisy obs from poro.py
# covariance kernel and scale parameters
prior_std = 2.0
prior_cov_scale = np.array([0.1, 0.1])
def kernel(r): return (prior_std ** 2) * np.exp(-r)
XX, YY = np.meshgrid(x, y)  # grid coordinate matrices (not passed to PCGA below)
pts = None  # for uniform grids, you don't need pts of s
# prepare interface to run as a function
def forward_model(s, parallelization, ncores=None):
    """Run the poroelasticity forward model for parameter field ``s``.

    ``ncores`` is only passed through when ``parallelization`` is truthy.
    Returns the simulated observations from ``Model.run``.
    """
    simulator = Model(forward_params)
    if parallelization:
        return simulator.run(s, parallelization, ncores)
    return simulator.run(s, parallelization)
# PCGA inversion settings. NOTE(review): 'R' appears to be the measurement
# error parameter (50.0 squared) and 'n_pc' the number of principal
# components — confirm against the pyPCGA documentation.
params = {'R': (50.0) ** 2, 'n_pc': 96,
          'maxiter': 10, 'restol': 0.1,
          'matvec': 'FFT', 'xmin': xmin, 'xmax': xmax, 'N': N,
          'prior_std': prior_std, 'prior_cov_scale': prior_cov_scale,
          'kernel': kernel, 'post_cov': 'diag',
          'precond': False, 'LM': True, # 'LM_smin' : -30.0, 'LM_smax' : 5.0, # 'alphamax_LM' : 1.E+5,
          'parallel': True, 'linesearch': True, #'precision': 1.e-4,
          'forward_model_verbose': True, 'verbose': True,
          'iter_save': True}
# Constant initial guess for the log-parameter field.
#s_init = np.mean(s_true) * np.ones((m, 1))
s_init = -20. * np.ones((m, 1))
# initialize
prob = PCGA(forward_model, s_init, pts, params, s_true, obs)
# prob = PCGA(forward_model, s_init, pts, params, s_true, obs, X = X) #if you want to add your own drift X
# run inversion
s_hat, simul_obs, post_diagv, iter_best = prob.Run()
|
[
"numpy.meshgrid",
"poro.Model",
"pyPCGA.PCGA",
"numpy.ones",
"matplotlib.use",
"numpy.array",
"numpy.loadtxt",
"numpy.linspace",
"numpy.exp",
"numpy.prod"
] |
[((49, 70), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (63, 70), False, 'import matplotlib\n'), ((323, 341), 'numpy.array', 'np.array', (['[nx, ny]'], {}), '([nx, ny])\n', (331, 341), True, 'import numpy as np\n'), ((347, 357), 'numpy.prod', 'np.prod', (['N'], {}), '(N)\n', (354, 357), True, 'import numpy as np\n'), ((365, 392), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', 'N[0]'], {}), '(0.0, 1.0, N[0])\n', (376, 392), True, 'import numpy as np\n'), ((396, 423), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', 'N[1]'], {}), '(0.0, 1.0, N[1])\n', (407, 423), True, 'import numpy as np\n'), ((432, 454), 'numpy.array', 'np.array', (['[x[0], y[0]]'], {}), '([x[0], y[0]])\n', (440, 454), True, 'import numpy as np\n'), ((463, 487), 'numpy.array', 'np.array', (['[x[-1], y[-1]]'], {}), '([x[-1], y[-1]])\n', (471, 487), True, 'import numpy as np\n'), ((531, 576), 'numpy.loadtxt', 'np.loadtxt', (['"""dof_perm_dg0.csv"""'], {'delimiter': '""","""'}), "('dof_perm_dg0.csv', delimiter=',')\n", (541, 576), True, 'import numpy as np\n'), ((584, 605), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'nx'], {}), '(0, 1, nx)\n', (595, 605), True, 'import numpy as np\n'), ((611, 632), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'ny'], {}), '(0, 1, ny)\n', (622, 632), True, 'import numpy as np\n'), ((1014, 1034), 'numpy.array', 'np.array', (['[0.1, 0.1]'], {}), '([0.1, 0.1])\n', (1022, 1034), True, 'import numpy as np\n'), ((1102, 1119), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (1113, 1119), True, 'import numpy as np\n'), ((2115, 2168), 'pyPCGA.PCGA', 'PCGA', (['forward_model', 's_init', 'pts', 'params', 's_true', 'obs'], {}), '(forward_model, s_init, pts, params, s_true, obs)\n', (2119, 2168), False, 'from pyPCGA import PCGA\n'), ((1288, 1309), 'poro.Model', 'Model', (['forward_params'], {}), '(forward_params)\n', (1293, 1309), False, 'from poro import Model\n'), ((2077, 2092), 'numpy.ones', 'np.ones', (['(m, 1)'], {}), 
'((m, 1))\n', (2084, 2092), True, 'import numpy as np\n'), ((643, 669), 'numpy.loadtxt', 'np.loadtxt', (['"""logk_idx.txt"""'], {}), "('logk_idx.txt')\n", (653, 669), True, 'import numpy as np\n'), ((815, 839), 'numpy.loadtxt', 'np.loadtxt', (['"""s_true.txt"""'], {}), "('s_true.txt')\n", (825, 839), True, 'import numpy as np\n'), ((861, 882), 'numpy.loadtxt', 'np.loadtxt', (['"""obs.txt"""'], {}), "('obs.txt')\n", (871, 882), True, 'import numpy as np\n'), ((1079, 1089), 'numpy.exp', 'np.exp', (['(-r)'], {}), '(-r)\n', (1085, 1089), True, 'import numpy as np\n')]
|
import os
from pathlib import Path
import sys
import time
from termcolor import colored
import traceback
from app.commands import Adventure
# Ask for a character name and build the game world around it.
name = input("Welcome to the world, adventurer! What name would you like to be "
             "known as in this land? \n")
adventure = Adventure(name)
print(f"Nice to meet you, {name}!\nUse commands to interact with your world. At any time, type 'help' to see all "
      f"available commands.\nHere is your current status: \n")
print(adventure.player.status())
print()
adventure.look_around()
# Main game loop: keep playing while the player is alive. Each pass of the
# inner loop is one game cycle lasting up to 80 seconds of wall-clock time.
while adventure.player.health > 0:
    cycle_start = time.time()
    while adventure.player.health > 0 and time.time() - cycle_start < 80:
        try:
            adventure.commands_manager(input())
        except EOFError:
            pass
        except Exception as err:
            # Unexpected failure: tell the player, log the traceback next to
            # this script, and keep the session running.
            print(colored(f"Holy moly, you came across a huge issues!", 'red'))
            with open(Path(os.path.abspath(os.path.dirname(__file__)), 'errors.log'), 'a', encoding='utf-8') as err_log:
                err_log.write(traceback.format_exc())
                err_log.write("\n\n")
            print('We wrote the issue to "errors.log", I will try to keep going, but I am unsure if it will work')
        if adventure.player.health <= 0:
            break
    else:
        # while-else: runs only when the inner loop ended without `break`
        # (the player survived the cycle) — advance the day/night phase.
        if adventure.player.health > 0:
            adventure.player.phase_change(adventure.map)
            print(colored(f"It is now {adventure.player.phase}time.", "blue"))
input('Press enter to exit.')
sys.exit(0)
# next version ideas
# inventory limits
# loose a bit of health if you don't sleep at night
# possibility of highway robbery when travelling
# sell inventory items for money
# mini games to earn money
# have the interview process include drug testing - screen for mushrooms, magic pills, etc
# multiplayer
# hospital doctors can heal you (but you need health insurance probably)
# adopt animals
# have multiple choice conversations with mobs, trade items, etc
# map items evolve over time as player levels up
# home base on spawn to store stuff
# armor to not die as fast
# magic spells
|
[
"app.commands.Adventure",
"os.path.dirname",
"time.time",
"termcolor.colored",
"traceback.format_exc",
"sys.exit"
] |
[((279, 294), 'app.commands.Adventure', 'Adventure', (['name'], {}), '(name)\n', (288, 294), False, 'from app.commands import Adventure\n'), ((1501, 1512), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1509, 1512), False, 'import sys\n'), ((594, 605), 'time.time', 'time.time', ([], {}), '()\n', (603, 605), False, 'import time\n'), ((648, 659), 'time.time', 'time.time', ([], {}), '()\n', (657, 659), False, 'import time\n'), ((1409, 1468), 'termcolor.colored', 'colored', (['f"""It is now {adventure.player.phase}time."""', '"""blue"""'], {}), "(f'It is now {adventure.player.phase}time.', 'blue')\n", (1416, 1468), False, 'from termcolor import colored\n'), ((834, 894), 'termcolor.colored', 'colored', (['f"""Holy moly, you came across a huge issues!"""', '"""red"""'], {}), "(f'Holy moly, you came across a huge issues!', 'red')\n", (841, 894), False, 'from termcolor import colored\n'), ((1047, 1069), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1067, 1069), False, 'import traceback\n'), ((939, 964), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (954, 964), False, 'import os\n')]
|
import click
from the_price.search_engines.search_engine import SearchEngine
@click.command()
@click.argument('item', type=click.STRING)
@click.option('--shop', help='Narrow done the search to a specific shop')
def ask_the_price_of(item, shop):
    """Main function to get the price of an item from a specific shop.
    If no shop provided, default strategy will apply"""
    try:
        engine = SearchEngine(shop)
        title, price, currency = engine.find(item)
        message = title + ' cost ' + str(price) + ' ' + currency + ' on ' + engine.finder.name
        click.echo(message)
    except Exception:
        click.echo('Item not found ')
|
[
"click.argument",
"click.echo",
"click.option",
"click.command",
"the_price.search_engines.search_engine.SearchEngine"
] |
[((79, 94), 'click.command', 'click.command', ([], {}), '()\n', (92, 94), False, 'import click\n'), ((96, 137), 'click.argument', 'click.argument', (['"""item"""'], {'type': 'click.STRING'}), "('item', type=click.STRING)\n", (110, 137), False, 'import click\n'), ((139, 211), 'click.option', 'click.option', (['"""--shop"""'], {'help': '"""Narrow done the search to a specific shop"""'}), "('--shop', help='Narrow done the search to a specific shop')\n", (151, 211), False, 'import click\n'), ((405, 423), 'the_price.search_engines.search_engine.SearchEngine', 'SearchEngine', (['shop'], {}), '(shop)\n', (417, 423), False, 'from the_price.search_engines.search_engine import SearchEngine\n'), ((619, 648), 'click.echo', 'click.echo', (['"""Item not found """'], {}), "('Item not found ')\n", (629, 648), False, 'import click\n')]
|
import os
from optparse import OptionParser
import appstore_scrape
import play_scrape
# Constants
PLATFORM_ANDROID_FILENAME_SUFFIX = 'android'
PLATFORM_IOS_FILENAME_SUFFIX = 'ios'
def append_platform_to_filename(base_filename, platform_suffix):
    """
    Appends the platform suffix to the end of the file name
    :param base_filename:
        The file name to be appended on
    :param platform_suffix:
        The suffix to be appended
    :return:
        The formatted file name with the suffix
    """
    root, extension = os.path.splitext(base_filename)
    return f"{root}_{platform_suffix}{extension}"
def main():
    """Parse CLI options and scrape app reviews for the requested platforms.

    Scrapes Google Play reviews when --android is given and App Store
    reviews when --ios is given; each platform writes to the output file
    name suffixed with its platform tag.
    """
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option("-p", "--pages",
                      action="store",
                      dest="pages",
                      default=5,
                      help="The number of pages you want to scrape")
    parser.add_option("-o", "--output",
                      action="store",
                      dest="output",
                      default="output.csv",
                      help="The output file where you want to dump results")
    parser.add_option("--android",
                      action="store",
                      dest="android_app_id",
                      help="The Google Play Store App ID of the app you want to scrape reviews")
    parser.add_option("--ios",
                      action="store",
                      dest="ios_app_id",
                      help="The App Store App ID of the app you want to scrape reviews")
    parser.add_option("-c", "--country",
                      action="store",
                      dest="country",
                      default="ph",
                      help="The country code where the reviews will be scraped (only for iOS)", )

    (options, args) = parser.parse_args()

    if options.android_app_id:
        # Fixed: the message wrongly said "iOS" and passed the app id as a
        # second print argument (printing a tuple) instead of interpolating.
        print("[*] Starting scraping for Android (%s)" % options.android_app_id)
        save_path = append_platform_to_filename(options.output, PLATFORM_ANDROID_FILENAME_SUFFIX)
        play_scrape.save_page_reviews(options.android_app_id, options.pages, save_path)
        print("[*] Finished scraping to file " + save_path)
        print()

    if options.ios_app_id:
        # Fixed: interpolate the app id instead of printing a tuple.
        print("[*] Starting scraping for iOS (%s)" % options.ios_app_id)
        save_path = append_platform_to_filename(options.output, PLATFORM_IOS_FILENAME_SUFFIX)
        appstore_scrape.save_page_reviews(options.ios_app_id, options.country, options.pages, save_path)
        print("[*] Finished scraping to file " + save_path)
        print()
if __name__ == '__main__':
main()
|
[
"appstore_scrape.save_page_reviews",
"play_scrape.save_page_reviews",
"os.path.splitext",
"optparse.OptionParser"
] |
[((534, 565), 'os.path.splitext', 'os.path.splitext', (['base_filename'], {}), '(base_filename)\n', (550, 565), False, 'import os\n'), ((672, 746), 'optparse.OptionParser', 'OptionParser', ([], {'usage': '"""usage: %prog [options] filename"""', 'version': '"""%prog 1.0"""'}), "(usage='usage: %prog [options] filename', version='%prog 1.0')\n", (684, 746), False, 'from optparse import OptionParser\n'), ((2185, 2264), 'play_scrape.save_page_reviews', 'play_scrape.save_page_reviews', (['options.android_app_id', 'options.pages', 'save_path'], {}), '(options.android_app_id, options.pages, save_path)\n', (2214, 2264), False, 'import play_scrape\n'), ((2578, 2678), 'appstore_scrape.save_page_reviews', 'appstore_scrape.save_page_reviews', (['options.ios_app_id', 'options.country', 'options.pages', 'save_path'], {}), '(options.ios_app_id, options.country,\n options.pages, save_path)\n', (2611, 2678), False, 'import appstore_scrape\n')]
|
from django.urls import path
from . import views
from rest_framework.authtoken.views import obtain_auth_token
# URL routing table for the GIS portal API.
# BUG FIX: three routes were all registered with name='map' ('maps/map/',
# 'sites/site/', 'geosubscriptions/geosubscription/'), which makes
# reverse('map') / {% url 'map' %} ambiguous (Django silently resolves to the
# last one registered). The site and geosubscription duplicates are renamed to
# 'site' and 'geosubscription' so each route name is unique.
urlpatterns = [
    # --- Maps ---
    path('maps/map/', views.mapView.as_view(), name='map'),
    path('maps/map_image/', views.mapImageView.as_view(), name='map_image'),
    path('maps/duplicate_map/', views.duplicateMapView.as_view(), name='duplicate_map'),
    path('maps/personal_maps/', views.personalMapsView.as_view(), name='personal_maps'),
    path('maps/all_maps/', views.allMapsView.as_view(), name='all_maps'),
    path('maps/analytics/', views.analyticsMapView.as_view(), name='map_analytics'),
    # --- Apps ---
    path('apps/app/', views.appView.as_view(), name='app'),
    path('apps/app_image/', views.appImageView.as_view(), name='app_image'),
    path('apps/duplicate_app/', views.duplicateAppView.as_view(), name='duplicate_app'),
    path('apps/personal_apps/', views.personalAppsView.as_view(), name='personal_apps'),
    path('apps/all_apps/', views.allAppsView.as_view(), name='all_apps'),
    path('apps/analytics/', views.analyticsAppView.as_view(), name='app_analytics'),
    # --- Groups and search ---
    path('basic/group/', views.groupView.as_view(), name='group'),
    path('basic/groups/', views.allGroupsView.as_view(), name='groups'),
    path('basic/user_search/', views.userSearchView.as_view(), name='user_search'),
    path('basic/group_search/', views.groupSearchView.as_view(), name='group_search'),
    path('basic/global_search/', views.globalSearchView.as_view(), name='global_search'),
    # --- Data import ---
    path('import/import_geographic_file/', views.importGeographicFileView.as_view(), name='import_geographic_file'),
    path('import/import_point_file/', views.importPointFileView.as_view(), name='import_point_file'),
    path('import/esri_service/', views.importEsriUrlView.as_view(), name='esri_service'),
    # --- Tables ---
    path('tables/table/', views.tableView.as_view(), name='table'),
    path('tables/table_image/', views.tableImageView.as_view(), name='table_image'),
    path('tables/duplicate_table/', views.duplicateTableView.as_view(), name='duplicate_table'),
    path('tables/personal_tables/', views.personalTablesView.as_view(), name='personal_tables'),
    path('tables/all_tables/', views.allTablesView.as_view(), name='all_tables'),
    path('tables/analytics/', views.analyticsTableView.as_view(), name='table_analytics'),
    # --- Features ---
    path('features/columns/', views.featureColumnsView.as_view(), name='columns'),
    path('features/query/', views.featureQueryView.as_view(), name='query'),
    path('features/statistics/', views.featureStatisticsView.as_view(), name='statistics'),
    # --- Sites ---
    path('sites/site/', views.siteView.as_view(), name='site'),  # was name='map' (copy-paste duplicate)
    path('sites/site_image/', views.siteImageView.as_view(), name='site_image'),
    path('sites/duplicate_site/', views.duplicateSiteView.as_view(), name='duplicate_site'),
    path('sites/personal_sites/', views.personalSitesView.as_view(), name='personal_sites'),
    path('sites/all_sites/', views.allSitesView.as_view(), name='all_sites'),
    path('sites/analytics/', views.analyticsSiteView.as_view(), name='site_analytics'),
    # --- Geosubscriptions ---
    path('geosubscriptions/geosubscription/', views.geosubscriptionView.as_view(), name='geosubscription'),  # was name='map' (copy-paste duplicate)
    path('geosubscriptions/geosubscription_image/', views.geosubscriptionImageView.as_view(), name='geosubscription_image'),
    path('geosubscriptions/duplicate_geosubscription/', views.duplicateGeosubscriptionView.as_view(), name='duplicate_geosubscription'),
    path('geosubscriptions/personal_geosubscriptions/', views.personalGeosubscriptionsView.as_view(), name='personal_geosubscriptions'),
    path('geosubscriptions/all_geosubscriptions/', views.allGeosubscriptionsView.as_view(), name='all_geosubscriptions'),
    path('geosubscriptions/analytics/', views.analyticsGeosubscriptionView.as_view(), name='geosubscription_analytics'),
    # --- Administration ---
    # NOTE(review): 'congfiguration' typo kept in the URL path below — changing
    # the path would break existing clients; only fix together with callers.
    path('administration/map_service_congfiguration/', views.mapServiceConfigurationView.as_view(), name='map_service_congfiguration'),
    path('administration/map_service_security/', views.mapServiceSecurityConfigurationView.as_view(), name='map_service_security'),
    path('administration/blocked_user/', views.blockedUserView.as_view(), name='blocked_user'),
    path('administration/alert/', views.alertView.as_view(), name='alert'),
    # --- Vector tiles and supporting services ---
    path('tiles/<str:database>/<str:table_name>/<int:z>/<int:x>/<int:y>.pbf', views.tilesView.as_view(), name='tiles'),
    path('services/geocode/', views.geocodeView.as_view(), name='geocode'),
    path('services/map_query/', views.mapQueryView.as_view(), name='map_query'),
    path('services/portal_tables/', views.portalTablesView.as_view(), name='portal_tables'),
    path('services/autocomplete/', views.autocompleteView.as_view(), name='autocomplete'),
    path('services/wms_search/', views.wmsSearchView.as_view(), name='wms_search'),
    path('services/portal_search/', views.portalSearchView.as_view(), name='portal_search'),
    # --- Remote datasets, authentication, registration ---
    path('remote_datasets/remote_dataset/', views.remoteDataView.as_view(), name='remote_dataset'),
    path('authentication/get_token/', obtain_auth_token, name="get_token"),
    path('register/register_user/', views.registerView.as_view(), name="register_user"),
]
|
[
"django.urls.path"
] |
[((4973, 5043), 'django.urls.path', 'path', (['"""authentication/get_token/"""', 'obtain_auth_token'], {'name': '"""get_token"""'}), "('authentication/get_token/', obtain_auth_token, name='get_token')\n", (4977, 5043), False, 'from django.urls import path\n')]
|
# Copyright (c) 2021. <NAME>
#
# This software is licensed under the The MIT License.
# You should have received a copy of the license terms with the software.
# Otherwise, you can find the text here: https://opensource.org/licenses/MIT
#
#
# This software is licensed under the The MIT License.
# You should have received a copy of the license terms with the software.
# Otherwise, you can find the text here: https://opensource.org/licenses/MIT
#
from typing import Optional, Union, List
from textstat import textstat
from fairest.models import BaseDocumentRule, Request, DocumentModel, Report, RuleDescription
class DocumentStatisticsRule(BaseDocumentRule):
    """Reports basic readability statistics for a document, computed with textstat."""

    @classmethod
    def describe(cls) -> RuleDescription:
        """Return the static metadata describing this rule."""
        return RuleDescription(
            title="Document Statistics Rule",
            description="Produces a report of some useful statistics of the document. Uses textstat.",
            author="Core Fairest Plugin",
        )

    def run_document_rule(self, request: Request, model: DocumentModel) -> Optional[Union[Report, List[Report]]]:
        """Build a single Report with word/sentence counts and readability scores for the document text."""
        text = model.get_nlp_text().text
        # Indentation inside the triple-quoted string is intentional and part of
        # the rendered report message.
        message = f"""
            Word count: {textstat.lexicon_count(text)}
            Sentence count: {textstat.sentence_count(text)}
            Average sentence length: {textstat.avg_sentence_length(text)}
            Readability: {textstat.text_standard(text)}
            Reading time: {textstat.reading_time(text)}s
            """
        return Report(
            title="Document Statistics",
            message=message,
            rule_id=self.get_rule_name(),
        )
|
[
"textstat.textstat.text_standard",
"textstat.textstat.sentence_count",
"fairest.models.RuleDescription",
"textstat.textstat.lexicon_count",
"textstat.textstat.avg_sentence_length",
"textstat.textstat.reading_time"
] |
[((845, 1025), 'fairest.models.RuleDescription', 'RuleDescription', ([], {'title': '"""Document Statistics Rule"""', 'description': '"""Produces a report of some useful statistics of the document. Uses textstat."""', 'author': '"""Core Fairest Plugin"""'}), "(title='Document Statistics Rule', description=\n 'Produces a report of some useful statistics of the document. Uses textstat.'\n , author='Core Fairest Plugin')\n", (860, 1025), False, 'from fairest.models import BaseDocumentRule, Request, DocumentModel, Report, RuleDescription\n'), ((1364, 1392), 'textstat.textstat.lexicon_count', 'textstat.lexicon_count', (['text'], {}), '(text)\n', (1386, 1392), False, 'from textstat import textstat\n'), ((1411, 1440), 'textstat.textstat.sentence_count', 'textstat.sentence_count', (['text'], {}), '(text)\n', (1434, 1440), False, 'from textstat import textstat\n'), ((1468, 1502), 'textstat.textstat.avg_sentence_length', 'textstat.avg_sentence_length', (['text'], {}), '(text)\n', (1496, 1502), False, 'from textstat import textstat\n'), ((1518, 1546), 'textstat.textstat.text_standard', 'textstat.text_standard', (['text'], {}), '(text)\n', (1540, 1546), False, 'from textstat import textstat\n'), ((1563, 1590), 'textstat.textstat.reading_time', 'textstat.reading_time', (['text'], {}), '(text)\n', (1584, 1590), False, 'from textstat import textstat\n')]
|
# encoding: utf-8
from __future__ import unicode_literals
import argparse
from gym_bot_app.models import Admin
from gym_bot_app.commands import Command
from gym_bot_app.tasks import (GoToGymTask,
WentToGymTask,
NewWeekSelectDaysTask)
class AdminCommand(Command):
    """Telegram gym bot admin command.

    Allows to execute admin commands.

    Only users registered as admins (per ``Admin.objects.is_admin``) may use
    it; everyone else receives ``NOT_ADMIN_MSG``.

    Options:
        --run-task task_name: run the given task right now.
    """
    DEFAULT_COMMAND_NAME = 'admin'
    # Bot reply strings (Hebrew): "shut up" / "done" / "failed".
    NOT_ADMIN_MSG = 'שתוק'
    SUCCEEDED_TO_RUN_COMMAND_MSG = 'בוצע'
    FAILED_TO_RUN_COMMAND_MSG = 'נכשל'
    SOMETHING_WENT_WRONG_MSG = 'exception'
    # Maps the value of --run-task to the task class that will be executed.
    TASKS = {
        'go_to_gym': GoToGymTask,
        'went_to_gym': WentToGymTask,
        'new_week_select_days': NewWeekSelectDaysTask
    }

    def __init__(self, *args, **kwargs):
        """Initialize the command and the argparse parser for the admin arguments."""
        super(AdminCommand, self).__init__(*args, **kwargs)
        self.parser = argparse.ArgumentParser()
        self.parser.add_argument('--run-task', dest='task_name')

    def _handler(self, bot, update, args):
        """Override method to handle admin command.
        Execute admin commands.
        """
        self.logger.info('Admin command from %s with args %s', update.effective_user.id, args)
        if not Admin.objects.is_admin(update.effective_user.id):  # not an admin
            self.logger.debug('User is not an admin')
            update.message.reply_text(quote=True, text=self.NOT_ADMIN_MSG)
            return
        try:
            parsed_args = self.parser.parse_args(args)
            task = self.TASKS[parsed_args.task_name]
            # Tasks are constructed fresh and executed immediately (not scheduled).
            task(updater=self.updater, logger=self.logger).execute()
            self.logger.debug('Finished to execute %s via admin command', parsed_args.task_name)
            update.message.reply_text(quote=True, text=self.SUCCEEDED_TO_RUN_COMMAND_MSG)
        except (AttributeError, KeyError, SystemExit) as e:
            # SystemExit is raised by argparse on invalid arguments; KeyError
            # means an unknown task name — both count as wrong usage.
            self.logger.error('Failed to execute task via admin due to wrong usage with args %s, exception %s', args, e)
            update.message.reply_text(quote=True, text=self.FAILED_TO_RUN_COMMAND_MSG)
        except Exception as e:
            # Any other failure inside the task itself: report it back to the admin.
            self.logger.error('Failed to execute task via admin with args %s, exception %s', args, e)
            exception_msg = '{msg} {exc}'.format(msg=self.SOMETHING_WENT_WRONG_MSG, exc=e)
            update.message.reply_text(quote=True, text=exception_msg)

    def start(self, *args, **kwargs):
        """Override method to update kwargs in order to request to pass args in command handler."""
        kwargs['pass_args'] = True
        return super(AdminCommand, self).start(*args, **kwargs)
|
[
"gym_bot_app.models.Admin.objects.is_admin",
"argparse.ArgumentParser"
] |
[((948, 973), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (971, 973), False, 'import argparse\n'), ((1292, 1340), 'gym_bot_app.models.Admin.objects.is_admin', 'Admin.objects.is_admin', (['update.effective_user.id'], {}), '(update.effective_user.id)\n', (1314, 1340), False, 'from gym_bot_app.models import Admin\n')]
|
""" Userbot module for other small commands. """
from userbot import CMD_HANDLER as cmd
from userbot import CMD_HELP
from userbot.utils import edit_or_reply, man_cmd
@man_cmd(pattern="ihelp$")
async def usit(event):
    """Reply with the support, channel, owner and repository links for Adam-Userbot."""
    me = await event.client.get_me()
    # Adjacent f-string literals concatenate into one message.
    text = (
        f"**Hai {me.first_name} Kalo Anda Tidak Tau Perintah Untuk Memerintah Ku Ketik** `.help` Atau Bisa Minta Bantuan Ke:\n"
        f"✣ **Group Support :** [Sharing Userbot](t.me/wibu_telegram)\n"
        f"✣ **Channel Man :** [Lunatic0de](t.me/adamcodeubot)\n"
        f"✣ **Owner Repo :** [Arul](t.me/skyiarul)\n"
        f"✣ **Repo :** [Adam-Userbot](https://github.com/userkontol/Adam-Userbot)\n"
    )
    await edit_or_reply(event, text)
@man_cmd(pattern="listvar$")
async def var(event):
    """Reply with a link to the full list of Heroku config vars for Adam-Userbot."""
    await edit_or_reply(
        event,
        "**Daftar Lengkap Vars Dari Adam-Userbot:** [KLIK DISINI](https://telegra.ph/List-Variabel-Heroku-untuk-Man-Userbot-09-22)",
    )
CMD_HELP.update(
{
"helper": f"**Plugin : **`helper`\
\n\n • **Syntax :** `{cmd}ihelp`\
\n • **Function : **Bantuan Untuk Adam-Userbot.\
\n\n • **Syntax :** `{cmd}listvar`\
\n • **Function : **Melihat Daftar Vars.\
\n\n • **Syntax :** `{cmd}repo`\
\n • **Function : **Melihat Repository Adam-Userbot.\
\n\n • **Syntax :** `{cmd}string`\
\n • **Function : **Link untuk mengambil String Adam-Userbot.\
"
}
)
|
[
"userbot.utils.man_cmd",
"userbot.CMD_HELP.update",
"userbot.utils.edit_or_reply"
] |
[((169, 194), 'userbot.utils.man_cmd', 'man_cmd', ([], {'pattern': '"""ihelp$"""'}), "(pattern='ihelp$')\n", (176, 194), False, 'from userbot.utils import edit_or_reply, man_cmd\n'), ((710, 737), 'userbot.utils.man_cmd', 'man_cmd', ([], {'pattern': '"""listvar$"""'}), "(pattern='listvar$')\n", (717, 737), False, 'from userbot.utils import edit_or_reply, man_cmd\n'), ((941, 1409), 'userbot.CMD_HELP.update', 'CMD_HELP.update', (['{\'helper\':\n f"""**Plugin : **`helper` \n\n • **Syntax :** `{cmd}ihelp` \n • **Function : **Bantuan Untuk Adam-Userbot. \n\n • **Syntax :** `{cmd}listvar` \n • **Function : **Melihat Daftar Vars. \n\n • **Syntax :** `{cmd}repo` \n • **Function : **Melihat Repository Adam-Userbot. \n\n • **Syntax :** `{cmd}string` \n • **Function : **Link untuk mengambil String Adam-Userbot. """\n }'], {}), '({\'helper\':\n f"""**Plugin : **`helper` \n\n • **Syntax :** `{cmd}ihelp` \n • **Function : **Bantuan Untuk Adam-Userbot. \n\n • **Syntax :** `{cmd}listvar` \n • **Function : **Melihat Daftar Vars. \n\n • **Syntax :** `{cmd}repo` \n • **Function : **Melihat Repository Adam-Userbot. \n\n • **Syntax :** `{cmd}string` \n • **Function : **Link untuk mengambil String Adam-Userbot. 
"""\n })\n', (956, 1409), False, 'from userbot import CMD_HELP\n'), ((265, 643), 'userbot.utils.edit_or_reply', 'edit_or_reply', (['event', 'f"""**Hai {me.first_name} Kalo Anda Tidak Tau Perintah Untuk Memerintah Ku Ketik** `.help` Atau Bisa Minta Bantuan Ke:\n✣ **Group Support :** [Sharing Userbot](t.me/wibu_telegram)\n✣ **Channel Man :** [Lunatic0de](t.me/adamcodeubot)\n✣ **Owner Repo :** [Arul](t.me/skyiarul)\n✣ **Repo :** [Adam-Userbot](https://github.com/userkontol/Adam-Userbot)\n"""'], {}), '(event,\n f"""**Hai {me.first_name} Kalo Anda Tidak Tau Perintah Untuk Memerintah Ku Ketik** `.help` Atau Bisa Minta Bantuan Ke:\n✣ **Group Support :** [Sharing Userbot](t.me/wibu_telegram)\n✣ **Channel Man :** [Lunatic0de](t.me/adamcodeubot)\n✣ **Owner Repo :** [Arul](t.me/skyiarul)\n✣ **Repo :** [Adam-Userbot](https://github.com/userkontol/Adam-Userbot)\n"""\n )\n', (278, 643), False, 'from userbot.utils import edit_or_reply, man_cmd\n'), ((770, 924), 'userbot.utils.edit_or_reply', 'edit_or_reply', (['event', '"""**Daftar Lengkap Vars Dari Adam-Userbot:** [KLIK DISINI](https://telegra.ph/List-Variabel-Heroku-untuk-Man-Userbot-09-22)"""'], {}), "(event,\n '**Daftar Lengkap Vars Dari Adam-Userbot:** [KLIK DISINI](https://telegra.ph/List-Variabel-Heroku-untuk-Man-Userbot-09-22)'\n )\n", (783, 924), False, 'from userbot.utils import edit_or_reply, man_cmd\n')]
|
import torch
import torch.nn as nn
from EncoderLayer import EncoderLayer
import math
class Encoder(nn.Module):
    """Transformer encoder: token + fixed sinusoidal position embeddings
    followed by a stack of EncoderLayer blocks."""

    def __init__(self, input_dim, embed_dim, num_layers, num_heads, expand_dim, dropout, device, max_length = 30):
        super().__init__()
        self.device = device
        self.tok_embedding = nn.Embedding(input_dim, embed_dim)
        # Fixed (non-trainable) sinusoidal position table; from_pretrained
        # freezes the weights by default.
        pe_table = self.get_positional_encoding(max_length, embed_dim)
        self.pos_embedding = nn.Embedding.from_pretrained(pe_table)
        stack = [EncoderLayer(embed_dim, num_heads, expand_dim, dropout) for _ in range(num_layers)]
        self.layers = nn.ModuleList(stack)
        self.dropout = nn.Dropout(dropout)
        # sqrt(d_model) scaling applied to token embeddings (as in the original paper).
        self.scale = torch.sqrt(torch.FloatTensor([embed_dim])).to(device)

    def forward(self, src, src_mask):
        """Encode src = [batch, src_len] into [batch, src_len, embed_dim],
        masking attention with src_mask = [batch, 1, 1, src_len]."""
        batch_size, src_len = src.shape[0], src.shape[1]
        # Position index per token: [batch, src_len]
        pos = torch.arange(src_len).unsqueeze(0).repeat(batch_size, 1).to(self.device)
        x = self.dropout(self.tok_embedding(src) * self.scale + self.pos_embedding(pos))
        for layer in self.layers:
            x = layer(x, src_mask)
        # x = [batch, src_len, embed_dim]
        return x

    def get_positional_encoding(self, max_seq_len, embed_dim):
        """Build the (max_seq_len, embed_dim) sinusoidal position-encoding table:
        sin on even dimensions, cos on odd ones."""
        position = torch.arange(0, max_seq_len).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, embed_dim, 2) * (-math.log(10000.0) / embed_dim))
        angles = position * div_term
        table = torch.zeros(max_seq_len, embed_dim)
        table[:, 0::2] = torch.sin(angles)
        table[:, 1::2] = torch.cos(angles)
        return table
|
[
"torch.nn.Dropout",
"EncoderLayer.EncoderLayer",
"torch.nn.Embedding",
"torch.FloatTensor",
"torch.cos",
"torch.arange",
"torch.zeros",
"math.log",
"torch.sin"
] |
[((291, 325), 'torch.nn.Embedding', 'nn.Embedding', (['input_dim', 'embed_dim'], {}), '(input_dim, embed_dim)\n', (303, 325), True, 'import torch.nn as nn\n'), ((652, 671), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (662, 671), True, 'import torch.nn as nn\n'), ((1564, 1599), 'torch.zeros', 'torch.zeros', (['max_seq_len', 'embed_dim'], {}), '(max_seq_len, embed_dim)\n', (1575, 1599), False, 'import torch\n'), ((1788, 1818), 'torch.sin', 'torch.sin', (['(position * div_term)'], {}), '(position * div_term)\n', (1797, 1818), False, 'import torch\n'), ((1847, 1877), 'torch.cos', 'torch.cos', (['(position * div_term)'], {}), '(position * div_term)\n', (1856, 1877), False, 'import torch\n'), ((543, 598), 'EncoderLayer.EncoderLayer', 'EncoderLayer', (['embed_dim', 'num_heads', 'expand_dim', 'dropout'], {}), '(embed_dim, num_heads, expand_dim, dropout)\n', (555, 598), False, 'from EncoderLayer import EncoderLayer\n'), ((1620, 1648), 'torch.arange', 'torch.arange', (['(0)', 'max_seq_len'], {}), '(0, max_seq_len)\n', (1632, 1648), False, 'import torch\n'), ((1692, 1721), 'torch.arange', 'torch.arange', (['(0)', 'embed_dim', '(2)'], {}), '(0, embed_dim, 2)\n', (1704, 1721), False, 'import torch\n'), ((705, 735), 'torch.FloatTensor', 'torch.FloatTensor', (['[embed_dim]'], {}), '([embed_dim])\n', (722, 735), False, 'import torch\n'), ((1726, 1743), 'math.log', 'math.log', (['(10000.0)'], {}), '(10000.0)\n', (1734, 1743), False, 'import math\n'), ((1006, 1030), 'torch.arange', 'torch.arange', (['(0)', 'src_len'], {}), '(0, src_len)\n', (1018, 1030), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
import pickle
class Color:
    """An RGB color with integer channels in [0, 255]."""

    def __init__(self, red, green, blue):
        self.red = int(red)
        self.green = int(green)
        self.blue = int(blue)

    def toHash(self):
        """Hash of the (red, green, blue) triple."""
        return hash(self.toTuple())

    def regFormat(self):
        """Comma-separated 'r,g,b' string."""
        return "{},{},{}".format(self.red, self.green, self.blue)

    def toTuple(self):
        """The (red, green, blue) tuple."""
        return (self.red, self.green, self.blue)

    @staticmethod
    def mix(color1, color2):
        """Return the channel-wise average of two colors."""
        averaged = [int((a + b) / 2) for a, b in zip(color1.toTuple(), color2.toTuple())]
        return Color(*averaged)
class Theme(object):
    """An ordered 22-slot terminal color scheme.

    The index constants below name each slot in ``colors``. A theme is only
    considered complete (and convertible to hex) once all 22 slots are filled.
    """
    DEFAULT_FOREGROUND = 0
    DEFAULT_BOLD_FOREGROUND = 1
    DEFAULT_BACKGROUND = 2
    DEFAULT_BOLD_BACKGROUND = 3
    CURSOR_TEXT = 4
    CURSOR_COLOR = 5
    ANSI_BLACK = 6
    ANSI_BLACK_BOLD = 7
    ANSI_RED = 8
    ANSI_RED_BOLD = 9
    ANSI_GREEN = 10
    ANSI_GREEN_BOLD = 11
    ANSI_YELLOW = 12
    ANSI_YELLOW_BOLD = 13
    ANSI_BLUE = 14
    ANSI_BLUE_BOLD = 15
    ANSI_MAGENTA = 16
    ANSI_MAGENTA_BOLD = 17
    ANSI_CYAN = 18
    ANSI_CYAN_BOLD = 19
    ANSI_WHITE = 20
    ANSI_WHITE_BOLD = 21

    def __init__(self, name=None):
        # Colors are appended in slot order; see the index constants above.
        self.colors = []
        self.name = name

    def addColorValues(self, red=0, green=0, blue=0):
        """Append a color built from channel values; returns self for chaining."""
        self.colors.append(Color(red, green, blue))
        return self

    def addColor(self, color=None):
        """Append an existing Color object; returns self for chaining.

        Bug fix: the default used to be the ``Color`` class itself, so a
        no-argument call appended the class object into ``colors``. A missing
        argument now falls back to black (0, 0, 0), consistent with
        ``addColorValues``'s defaults.
        """
        if color is None:
            color = Color(0, 0, 0)
        self.colors.append(color)
        return self

    def describe(self):
        """Print each color as decimal 'r g b', one per line."""
        for c in self.colors:
            print("{} {} {}".format(c.red, c.green, c.blue))

    def describeHex(self):
        """Print each color as hex '0xrrggbb', one per line."""
        for c in self.colors:
            print("0x{:02x}{:02x}{:02x}".format(c.red, c.green, c.blue))

    def toHash(self):
        """Hash of the pickled colors list (stable within one interpreter run)."""
        p = pickle.dumps(self.colors, -1)
        return hash(p)

    def getColorHexByIndex(self, index):
        """Return '#rrggbb' for the color at *index*, or None if the theme is
        incomplete (fewer/more than 22 colors) or the index is out of range."""
        if len(self.colors) != 22 or index > 21 or index < 0:
            return None
        color_hex_str = "".join('{:02x}'.format(a)
                                for a in self.colors[index].toTuple())
        return "#{}".format(color_hex_str)

    def getColorHex(self, color_index):
        """Return '#rrggbb' for *color_index*, or None if the theme is incomplete."""
        if len(self.colors) != 22:
            return None
        color_hex_str = self.getColorHexByIndex(color_index)
        return color_hex_str

    @staticmethod
    def default():
        """Build the built-in 'Default' 22-color theme."""
        theme = Theme("Default")
        theme.addColorValues(187, 187, 187)
        theme.addColorValues(255, 255, 255)
        theme.addColorValues(0, 0, 0)
        theme.addColorValues(85, 85, 85)
        theme.addColorValues(0, 0, 0)
        theme.addColorValues(0, 255, 0)
        theme.addColorValues(0, 0, 0)
        theme.addColorValues(85, 85, 85)
        theme.addColorValues(187, 0, 0)
        theme.addColorValues(255, 85, 85)
        theme.addColorValues(0, 187, 0)
        theme.addColorValues(85, 255, 85)
        theme.addColorValues(187, 187, 0)
        theme.addColorValues(255, 255, 85)
        theme.addColorValues(0, 0, 187)
        theme.addColorValues(85, 85, 255)
        theme.addColorValues(187, 0, 187)
        theme.addColorValues(255, 85, 255)
        theme.addColorValues(0, 187, 187)
        theme.addColorValues(85, 255, 255)
        theme.addColorValues(187, 187, 187)
        theme.addColorValues(255, 255, 255)
        return theme
|
[
"pickle.dumps"
] |
[((1783, 1812), 'pickle.dumps', 'pickle.dumps', (['self.colors', '(-1)'], {}), '(self.colors, -1)\n', (1795, 1812), False, 'import pickle\n')]
|
#!/usr/bin/env python3
import tensorflow as tf
# Run tf.functions eagerly so tensor values can be inspected directly in tests.
tf.config.run_functions_eagerly(True)
import numpy as np
from graph2tensor.model.layers import GCNConv
from graph2tensor.model.models import MessagePassing
from unittest import TestCase, main

# A three-layer GCN stack; the same list object is reused for all three
# meta-paths of the MessagePassing model under test.
conv_layers = [
    GCNConv(units=32, name="layer1"),
    GCNConv(units=32, name="layer2"),
    GCNConv(units=8, name="layer3"),
]
mp = MessagePassing([conv_layers, conv_layers, conv_layers], name="sage", concat_hidden=False)

# Random float32 node features: 4 seed nodes plus three hops of sampled neighbors.
src = {'feat': tf.constant(np.random.random(size=(4, 16)), dtype=tf.float32)}
hop1 = {'feat': tf.constant(np.random.random(size=(10, 16)), dtype=tf.float32)}
hop2 = {'feat': tf.constant(np.random.random(size=(55, 16)), dtype=tf.float32)}
hop3 = {'feat': tf.constant(np.random.random(size=(110, 16)), dtype=tf.float32)}
edge = {}
# Each hop is (dst-node attrs, edge attrs, src-node attrs, segment ids); the
# tf.repeat tensor presumably maps each sampled neighbor back to its parent
# node — confirm against the graph2tensor sampling docs.
hops = (
    (src, edge, hop1, tf.repeat(tf.range(4), tf.range(1, 5))),
    (hop1, edge, hop2, tf.repeat(tf.range(10), tf.range(1, 11))),
    (hop2, edge, hop3, tf.repeat(tf.range(55), 2))
)
)
class TestMsgPassing(TestCase):
    """Unit tests for MessagePassing built from stacked GCNConv layers."""

    def test_config(self):
        """get_config() exposes name/flags and the nested per-path layer configs,
        and from_config() can rebuild the model from them."""
        config = mp.get_config()
        assert config["name"] == "sage"
        assert config["concat_hidden"] is False
        assert config["attr_reduce_mode"] == 'concat'
        # Three meta-paths, each with three serialized conv layers.
        assert config["conv_layers"].__len__() == 3
        assert config["conv_layers"][0].__len__() == 3
        assert config["conv_layers"][0][1]["class_name"] == "GCNConv"
        assert config["conv_layers"][0][1]["config"]["name"] == "layer2"
        assert config["conv_layers"][0][1]["config"]["units"] == 32
        # Custom layer classes must be supplied explicitly on deserialization.
        custom_objects = {
            "GCNConv": GCNConv,
        }
        _ = MessagePassing.from_config(config, custom_objects)

    def test_save_load(self):
        """Saving weights and loading them into a fresh model reproduces the output."""
        x = mp((hops, hops, hops)).numpy()
        mp.save_weights("/tmp/sage")
        mp1 = MessagePassing([conv_layers, conv_layers, conv_layers], name="sage", concat_hidden=False)
        mp1.load_weights("/tmp/sage")
        x1 = mp1((hops, hops, hops)).numpy()
        np.testing.assert_allclose(x, x1, atol=1e-6)

    def test_call(self):
        """Output is (num_seeds, 8) normally, (num_seeds, 72) when hidden states are concatenated."""
        assert mp((hops, hops, hops)).numpy().shape == (4, 8)
        mp1 = MessagePassing([conv_layers, conv_layers, conv_layers], name="sage", concat_hidden=True)
        assert mp1((hops, hops, hops)).numpy().shape == (4, 72)

if __name__ == "__main__":
    main()
|
[
"unittest.main",
"tensorflow.config.run_functions_eagerly",
"graph2tensor.model.layers.GCNConv",
"tensorflow.range",
"graph2tensor.model.models.MessagePassing.from_config",
"numpy.random.random",
"numpy.testing.assert_allclose",
"graph2tensor.model.models.MessagePassing"
] |
[((47, 84), 'tensorflow.config.run_functions_eagerly', 'tf.config.run_functions_eagerly', (['(True)'], {}), '(True)\n', (78, 84), True, 'import tensorflow as tf\n'), ((376, 469), 'graph2tensor.model.models.MessagePassing', 'MessagePassing', (['[conv_layers, conv_layers, conv_layers]'], {'name': '"""sage"""', 'concat_hidden': '(False)'}), "([conv_layers, conv_layers, conv_layers], name='sage',\n concat_hidden=False)\n", (390, 469), False, 'from graph2tensor.model.models import MessagePassing\n'), ((260, 292), 'graph2tensor.model.layers.GCNConv', 'GCNConv', ([], {'units': '(32)', 'name': '"""layer1"""'}), "(units=32, name='layer1')\n", (267, 292), False, 'from graph2tensor.model.layers import GCNConv\n'), ((298, 330), 'graph2tensor.model.layers.GCNConv', 'GCNConv', ([], {'units': '(32)', 'name': '"""layer2"""'}), "(units=32, name='layer2')\n", (305, 330), False, 'from graph2tensor.model.layers import GCNConv\n'), ((336, 367), 'graph2tensor.model.layers.GCNConv', 'GCNConv', ([], {'units': '(8)', 'name': '"""layer3"""'}), "(units=8, name='layer3')\n", (343, 367), False, 'from graph2tensor.model.layers import GCNConv\n'), ((2312, 2318), 'unittest.main', 'main', ([], {}), '()\n', (2316, 2318), False, 'from unittest import TestCase, main\n'), ((493, 523), 'numpy.random.random', 'np.random.random', ([], {'size': '(4, 16)'}), '(size=(4, 16))\n', (509, 523), True, 'import numpy as np\n'), ((572, 603), 'numpy.random.random', 'np.random.random', ([], {'size': '(10, 16)'}), '(size=(10, 16))\n', (588, 603), True, 'import numpy as np\n'), ((652, 683), 'numpy.random.random', 'np.random.random', ([], {'size': '(55, 16)'}), '(size=(55, 16))\n', (668, 683), True, 'import numpy as np\n'), ((732, 764), 'numpy.random.random', 'np.random.random', ([], {'size': '(110, 16)'}), '(size=(110, 16))\n', (748, 764), True, 'import numpy as np\n'), ((1622, 1672), 'graph2tensor.model.models.MessagePassing.from_config', 'MessagePassing.from_config', (['config', 'custom_objects'], {}), '(config, 
custom_objects)\n', (1648, 1672), False, 'from graph2tensor.model.models import MessagePassing\n'), ((1798, 1891), 'graph2tensor.model.models.MessagePassing', 'MessagePassing', (['[conv_layers, conv_layers, conv_layers]'], {'name': '"""sage"""', 'concat_hidden': '(False)'}), "([conv_layers, conv_layers, conv_layers], name='sage',\n concat_hidden=False)\n", (1812, 1891), False, 'from graph2tensor.model.models import MessagePassing\n'), ((1979, 2024), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['x', 'x1'], {'atol': '(1e-06)'}), '(x, x1, atol=1e-06)\n', (2005, 2024), True, 'import numpy as np\n'), ((2126, 2218), 'graph2tensor.model.models.MessagePassing', 'MessagePassing', (['[conv_layers, conv_layers, conv_layers]'], {'name': '"""sage"""', 'concat_hidden': '(True)'}), "([conv_layers, conv_layers, conv_layers], name='sage',\n concat_hidden=True)\n", (2140, 2218), False, 'from graph2tensor.model.models import MessagePassing\n'), ((836, 847), 'tensorflow.range', 'tf.range', (['(4)'], {}), '(4)\n', (844, 847), True, 'import tensorflow as tf\n'), ((849, 863), 'tensorflow.range', 'tf.range', (['(1)', '(5)'], {}), '(1, 5)\n', (857, 863), True, 'import tensorflow as tf\n'), ((900, 912), 'tensorflow.range', 'tf.range', (['(10)'], {}), '(10)\n', (908, 912), True, 'import tensorflow as tf\n'), ((914, 929), 'tensorflow.range', 'tf.range', (['(1)', '(11)'], {}), '(1, 11)\n', (922, 929), True, 'import tensorflow as tf\n'), ((966, 978), 'tensorflow.range', 'tf.range', (['(55)'], {}), '(55)\n', (974, 978), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
High level functions for signal characterization from 1D signals
Code licensed under both GPL and BSD licenses
Authors: <NAME> <<EMAIL>>
<NAME> <<EMAIL>>
"""
from scipy.signal import periodogram, welch
import pandas as pd
import numpy as np
def psd(s, fs, nperseg=256, method='welch', window='hanning', nfft=None, tlims=None):
    """
    Estimates power spectral density of a 1D signal using Welch's or
    periodogram methods.

    Note: this is a wrapper around functions from the scipy.signal module.

    Parameters
    ----------
    s: 1D array
        Input signal to process
    fs: float
        Sampling frequency of audio signal
    nperseg: int, optional
        Length of segment for 'welch' method, default is 256
    method: {'welch', 'periodogram'}
        Method used to estimate the power spectral density of the signal
    window: str, optional
        Window name passed to scipy ('hanning' is accepted and mapped to 'hann')
    nfft: int, optional
        Length of FFT for periodogram method. If None, length of signal will be used.
        Length of FFT for welch method if zero padding is desired. If None,
        length of nperseg will be used.
    tlims: tuple of two ints or floats, optional
        Temporal limits to compute the power spectral density, in seconds (s)

    Returns
    -------
    psd: pandas Series
        Estimate of power spectral density
    f_idx: pandas Series
        Index of sample frequencies

    Raises
    ------
    ValueError
        If `tlims` does not have exactly two elements, or `method` is invalid.

    Example
    -------
    s, fs = sound.load('spinetail.wav')
    psd, f_idx = psd(s, fs, nperseg=512)
    """
    if tlims is not None:
        # Explicit validation instead of the previous bare `except:` which
        # silently swallowed every error (wrong dtype, bad slice, ...).
        if len(tlims) != 2:
            raise ValueError('length of tlims tuple should be 2')
        s = s[int(tlims[0] * fs): int(tlims[1] * fs)]

    # SciPy renamed the window to 'hann'; keep the historical default working
    # on modern SciPy versions where the 'hanning' alias was removed.
    if window == 'hanning':
        window = 'hann'

    if method == 'welch':
        f_idx, psd_s = welch(s, fs, window, nperseg, nfft)
    elif method == 'periodogram':
        f_idx, psd_s = periodogram(s, fs, window, nfft, scaling='spectrum')
    else:
        # ValueError (a subclass of Exception) keeps old `except Exception` callers working.
        raise ValueError("Invalid method. Method should be 'welch' or 'periodogram' ")

    # One zero-padded label per frequency bin: psd_001, psd_002, ...
    index_names = ['psd_' + str(idx).zfill(3) for idx in range(1, len(psd_s) + 1)]
    psd_s = pd.Series(psd_s, index=index_names)
    f_idx = pd.Series(f_idx, index=index_names)
    return psd_s, f_idx
return psd_s, f_idx
def rms(s):
    """
    Computes the root-mean-square (RMS) of a signal.

    Parameters
    ----------
    s : ndarray
        1D audio signal

    Returns
    -------
    rms : float
        Root mean square of the signal
    """
    # RMS = sqrt(mean(s^2))
    mean_power = np.mean(s ** 2)
    return np.sqrt(mean_power)
|
[
"scipy.signal.periodogram",
"numpy.mean",
"pandas.Series",
"scipy.signal.welch"
] |
[((2212, 2247), 'pandas.Series', 'pd.Series', (['psd_s'], {'index': 'index_names'}), '(psd_s, index=index_names)\n', (2221, 2247), True, 'import pandas as pd\n'), ((2260, 2295), 'pandas.Series', 'pd.Series', (['f_idx'], {'index': 'index_names'}), '(f_idx, index=index_names)\n', (2269, 2295), True, 'import pandas as pd\n'), ((1856, 1891), 'scipy.signal.welch', 'welch', (['s', 'fs', 'window', 'nperseg', 'nfft'], {}), '(s, fs, window, nperseg, nfft)\n', (1861, 1891), False, 'from scipy.signal import periodogram, welch\n'), ((2567, 2582), 'numpy.mean', 'np.mean', (['(s ** 2)'], {}), '(s ** 2)\n', (2574, 2582), True, 'import numpy as np\n'), ((1952, 2004), 'scipy.signal.periodogram', 'periodogram', (['s', 'fs', 'window', 'nfft'], {'scaling': '"""spectrum"""'}), "(s, fs, window, nfft, scaling='spectrum')\n", (1963, 2004), False, 'from scipy.signal import periodogram, welch\n')]
|
import json
import logging
import sys
from datetime import datetime, timedelta
import discord
from discord.ext import commands
import urbandictionary as ud
import shinden as sh
import covid19
import timer
import languages
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', datefmt='%d/%m/%Y %H:%M:%S', level=logging.INFO)
cv = covid19.Covid_data()
t = timer.Timer()
# On the first execution, there will be no settings.json, so we will create one
try:
with open('settings.json', 'r') as f:
content = json.load(f)
api_key = content['api']
prefix = content['prefix']
starting_language = content['language']
except:
logging.warning("Proper settings.json file wasn't found, creating a default one")
default_settings = {'api': 'your_api_token', 'prefix': '!', 'language':'EN'}
with open('settings.json', 'w') as f:
json.dump(default_settings, f, indent=4)
# After creating the settings file, code execution will stop, so the user can enter discord api token and run the code again
logging.debug('Program will now shutdown, please apply settings in settings.json')
sys.exit()
# Applying prefix
bot = commands.Bot(command_prefix=prefix, help_command=None)
# Applying language
lg = languages.Language(starting_language)
logging.debug('Bot ready to connect with prefix {} and language {}'.format(prefix, starting_language))
# Excuted when bot is connected and ready
@bot.event
async def on_ready():
logging.info('We have logged in as {0.user}'.format(bot)) # Logging the bot's nickname
for guild in bot.guilds:
logging.debug('Logged in ' + str(guild.name) + ' (id: '+ str(guild.id) +')') # Printing out server name, which the bot is connected to
members = ' - '.join([member.name for member in guild.members])
logging.debug(f'Guild Members:\n - {members}') # Logging a list of server members
# Setting bot status to streaming (Never gonna give you up)
stream = discord.Streaming(name=prefix + 'helperino', url='https://www.youtube.com/watch?v=dQw4w9WgXcQ')
await bot.change_presence(activity=stream)
await lg.update(starting_language)
logging.info("------ | Ready | ------")
# Help command
@bot.command(name='help', aliases=['helperino'], help='Lists available commands')
async def help(ctx):
color = discord.Colour(16777215)
response = discord.Embed(title='***Flop***', type='rich', description=lg.help[1].format(prefix), colour = color.dark_magenta(), url = 'https://github.com/ShaderLight/flop_discord_bot')
response.add_field(name=lg.help[2].format(prefix), value=lg.help[3], inline=False)
response.add_field(name=lg.help[4].format(prefix), value=lg.help[5], inline=False)
response.add_field(name=lg.help[6].format(prefix), value=lg.help[7], inline=False)
response.add_field(name=lg.help[8].format(prefix), value=lg.help[9], inline=False)
response.add_field(name=lg.help[10].format(prefix), value=lg.help[11], inline = False)
response.add_field(name=lg.help[12].format(prefix), value=lg.help[13], inline = False)
response.add_field(name=lg.help[14].format(prefix), value=lg.help[15], inline = False)
response.add_field(name=lg.help[16].format(prefix), value=lg.help[17], inline=False)
response.add_field(name=lg.help[18].format(prefix), value=lg.help[19], inline=False)
response.add_field(name=lg.help[20].format(prefix), value=lg.help[21], inline=False)
response.add_field(name=lg.help[22].format(prefix), value=lg.help[23], inline=False)
response.add_field(name=lg.help[24].format(prefix), value=lg.help[25], inline=False)
await ctx.send(embed=response)
# Language command
@bot.command(name='language', aliases=['lang'])
async def language(ctx, *args):
logging.debug('Executing command {}language'.format(prefix))
if len(args) != 1:
help_string = (lg.language[0].format(prefix))
return await ctx.send(help_string)
desired_language = ''.join(args)
desired_language = desired_language.upper()
if desired_language == lg.lang_set:
return await ctx.send(lg.language[2])
try:
await lg.update(desired_language)
except languages.LanguageNotSupportedError:
return await ctx.send(lg.language[1])
await ctx.send(lg.language[3].format(lg.lang_set))
# Urban Dictionary related commands
@bot.command(name='urban', aliases=['u','ud'], help='Responds with urban dictionary definition')
async def urban(ctx, *args):
logging.debug('Executing command {}urban'.format(prefix))
if args == (): # If no arguments were passed, then respond with help message
help_string = (lg.urban[0].format(prefix))
return await ctx.send(help_string)
if len(args) >= 2:
args_list = list(args)
which_result = 1
possible_int = args_list.pop()
try:
which_result = int(possible_int)
except:
args_list.append(possible_int)
words = ' '.join(args_list)
defs = ud.define(words) # Using UrbanDictionary library to search for Urban Dictionary definitions
try:
definition = defs[which_result-1] # Selecting one result, based on which_result parameter (first result by default)
except IndexError:
await ctx.send(lg.urban[1]) # If index is out of range, then prints out that there was no result found
response = lg.urban[2].format(definition) # Reponse with some discord formatting for a nicer look
await ctx.send(response)
else:
words = ' '.join(args)
defs = ud.define(words) # Using UrbanDictionary library to search for Urban Dictionary definitions
try:
definition = defs[0]
except IndexError:
await ctx.send(lg.urban[1]) # If index is out of range, then prints out that there was no result found
response = lg.urban[2].format(definition) # Reponse with some discord formatting for a nicer look
await ctx.send(response)
@bot.command(name='urbanlist', aliases=['ul','udlist','udl', 'ulist'], help='Responds with urban dictionary definition list')
async def urbanlist(ctx, *args): # This function responds with every definition found on UD (maximum result count is 10 and maximum word count for every definition is 75, urban command does not have that restriction)
logging.debug('Executing command {}urbanlist'.format(prefix))
if args == ():
help_string = (lg.urbanlist[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
words = ' '.join(args)
defs = ud.define(words)
response = discord.Embed(title=words, type='rich', description='Results for maximum 10 first results from Urban Dictionary' )
try:
check = 0 # This variable checks if there was at least one successful iteration
for i in range(10):
result = defs[i]
check = 1
text = (result.definition[:75] + '...') if len(result.definition) > 75 else result.definition # This line checks if the word count of the definition is over 75, if true, then cuts it and adds '...'
response.add_field(name=result.word, value=text, inline=False)
except IndexError:
if check == 0: # If there wasnt any correct iteration, then bot responds with No result message
t.stop()
return await ctx.send(lg.urbanlist[0])
execution_time = str(t.stop())
response.set_footer(text=lg.urbanlist[1].format(execution_time[:5]))
await ctx.send(embed=response)
@bot.command(name='urbanrandom', aliases=['ur', 'udrandom', 'udr', 'urandom'], help='Returns random Urban Dictionary definition')
async def urbanrandom(ctx):
logging.debug('Executing command {}urbanrandom'.format(prefix))
definition = ud.random()[0] # selecting first definition from the list of random definitions
response = '***{0.word}***\n\n`{0.definition}\n\n{0.example}`'.format(definition)
await ctx.send(response)
# Shinden related commands
@bot.command(name='shindenanime', aliases=['sa', 'shindena', 'sha', 'sanime', 'shanime'], help='Returns an anime from shinden.pl')
async def shindenanime(ctx, *args):
logging.debug('Executing command {}shindenanime'.format(prefix))
if args == ():
help_string = (lg.s_anime[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
if len(args) >= 2:
args_list = list(args)
which_result = 1
possible_int = args_list.pop()
try:
which_result = int(possible_int)
except:
args_list.append(possible_int)
title = ' '.join(args_list)
anime_list = sh.search_titles(title)
try:
anime = anime_list[which_result-1] # Selecting one anime result from the list of all found results
except TypeError:
t.stop()
return await ctx.send(lg.s_anime[1])
except IndexError:
await ctx.send(lg.s_anime[2])
anime = anime_list[-1]
color = discord.Colour(16777215)
# Creating a discord embed message object and adding fields with information
response = discord.Embed(title='***{0.title}***'.format(anime), type='rich', description=lg.s_anime[3] + str(anime.tags), colour=color.teal(), url=anime.url)
response.add_field(name=lg.s_anime[4], value=anime.top_score)
response.add_field(name=lg.s_anime[5], value=anime.episodes)
response.add_field(name=lg.s_anime[6], value=anime.status)
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_anime[7].format(execution_time))
await ctx.send(embed=response)
else:
title = ' '.join(args)
anime_list = sh.search_titles(title)
try:
anime = anime_list[0] # Selecting one anime result from the list of all found results
except TypeError:
t.stop()
return await ctx.send(lg.s_anime[1])
color = discord.Colour(16777215)
# Creating a discord embed message object and adding fields with information
response = discord.Embed(title='***{0.title}***'.format(anime), type='rich', description=lg.s_anime[3] + str(anime.tags), colour=color.teal(), url=anime.url)
response.add_field(name=lg.s_anime[4], value=anime.top_score)
response.add_field(name=lg.s_anime[5], value=anime.episodes)
response.add_field(name=lg.s_anime[6], value=anime.status)
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_anime[7].format(execution_time))
await ctx.send(embed=response)
@bot.command(name='shindenmanga', aliases=['sm', 'shindenm', 'shm','smanga', 'shmanga'], help='Returns a manga from shinden.pl')
async def shindenmanga(ctx, *args):
logging.debug('Executing command {}shindenmanga'.format(prefix))
if args == ():
help_string = (lg.s_manga[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
if len(args) >= 2:
args_list = list(args)
which_result = 1
possible_int = args_list.pop()
try:
which_result = int(possible_int)
except:
args_list.append(possible_int)
title = ' '.join(args_list)
manga_list = sh.search_titles(title, anime_or_manga='manga')
try:
manga = manga_list[which_result-1]
except TypeError:
t.stop()
return await ctx.send(lg.s_manga[1])
except IndexError:
await ctx.send(lg.s_manga[2])
manga = manga_list[-1]
color = discord.Colour(16777215)
response = discord.Embed(title='***{0.title}***'.format(manga), type='rich', description=lg.s_manga[3] + str(manga.tags), colour=color.teal(), url=manga.url)
response.add_field(name=lg.s_manga[4], value=manga.top_score)
response.add_field(name=lg.s_manga[5], value=manga.episodes)
response.add_field(name=lg.s_manga[6], value=manga.status)
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_manga[7].format(execution_time))
await ctx.send(embed=response)
else:
title = ' '.join(args)
manga_list = sh.search_titles(title, anime_or_manga='manga')
try:
manga = manga_list[0]
except TypeError:
t.stop()
return await ctx.send(lg.s_manga[1])
except IndexError:
await ctx.send(lg.s_manga[2])
manga = manga_list[-1]
color = discord.Colour(16777215)
response = discord.Embed(title='***{0.title}***'.format(manga), type='rich', description=lg.s_manga[3] + str(manga.tags), colour=color.teal(), url=manga.url)
response.add_field(name=lg.s_manga[4], value=manga.top_score)
response.add_field(name = lg.s_manga[5], value=manga.episodes)
response.add_field(name=lg.s_manga[6], value=manga.status)
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_manga[7].format(execution_time))
await ctx.send(embed=response)
@bot.command(name='shindenanimelist', aliases=['sal', 'shindenal', 'shal', 'sanimelist', 'shanimelist'], help='Returns a list of anime from shinden.pl')
async def shindenanimelist(ctx, *args):
logging.debug('Executing command {}shindenanimelist'.format(prefix))
if args == ():
help_string = (lg.s_animelist[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
title = ' '.join(args)
anime_list = sh.search_titles(title)
if anime_list == None:
t.stop()
return await ctx.send(lg.s_animelist[1])
color = discord.Colour(16777215)
response = discord.Embed(title=lg.s_animelist[2], type='rich', description=lg.s_animelist[3].format(title), colour=color.teal())
counter = 1
for anime in anime_list:
value_text = '[{0.title}]({0.url})'.format(anime)
response.add_field(name=str(counter) + '.', value=value_text) # Counter variable helps with returning many anime titles in a row (1. 2. 3. etc)
counter = counter + 1
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_animelist[4].format(execution_time))
await ctx.send(embed=response)
@bot.command(name='shindenmangalist', aliases=['sml', 'shindenml', 'shml', 'smangalist', 'shmangalist'], help='Returns a list of manga results')
async def shindenmangalist(ctx, *args):
logging.debug('Executing command {}shindenmangalist'.format(prefix))
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
if args == ():
help_string = (lg.s_mangalist[0].format(prefix))
return await ctx.send(help_string)
title = ' '.join(args)
manga_list = sh.search_titles(title, anime_or_manga='manga')
if manga_list == None:
t.stop()
return await ctx.send(lg.s_mangalist[1])
color = discord.Colour(16777215)
response = discord.Embed(title=lg.s_mangalist[2], type='rich', description=lg.s_mangalist[3].format(title), colour=color.teal())
counter = 1
for manga in manga_list:
value_text = '[{0.title}]({0.url})'.format(manga)
response.add_field(name=str(counter) + '.', value=value_text)
counter = counter + 1
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_mangalist[4].format(execution_time))
await ctx.send(embed=response)
@bot.command(name='shindencharacter', aliases=['sc', 'shindenc', 'shc', 'scharacter', 'shcharacter', 'sch', 'shindench', 'shch'], help='Returns a character result from shinden.pl')
async def shindencharacter(ctx, *args):
logging.debug('Executing command {}shindencharacter'.format(prefix))
if args == ():
help_string = (lg.s_character[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
if len(args) >= 2:
args_list = list(args)
which_result = 1
possible_int = args_list.pop()
try:
which_result = int(possible_int)
except:
args_list.append(possible_int)
name = ' '.join(args_list)
character_list = sh.search_characters(name)
try:
character = character_list[which_result-1]
except TypeError:
t.stop()
return await ctx.send(lg.s_character[1])
except IndexError:
await ctx.send(lg.s_character[2])
character = character_list[-1]
color = discord.Colour(16777215)
try:
if len(character.description) > 2000: # Description of discord embed must be under 2048 characters
desc = character.description[:2000] + '...'
else:
desc = character.description
response = discord.Embed(title='***{0.name}***'.format(character), type='rich', description='`' + desc + '`', colour=color.dark_gold(), url=character.url)
except TypeError: # In case the character has no description (character.description = None)
response = discord.Embed(title='***{0.name}***'.format(character), type='rich', colour=color.dark_gold(), url=character.url)
response.add_field(name=lg.s_character[3], value=character.gender)
response.add_field(name=lg.s_character[4], value=character.is_historical)
response.add_field(name=lg.s_character[5], value=(', '.join(character.appearance_list)), inline=False)
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_character[6].format(execution_time))
await ctx.send(embed=response)
else:
name = ' '.join(args)
character_list = sh.search_characters(name)
try:
character = character_list[0]
except TypeError:
t.stop()
return await ctx.send(lg.s_character[1])
except IndexError:
await ctx.send(lg.s_character[2])
character = character_list[-1]
color = discord.Colour(16777215)
try:
if len(character.description) > 2000: # Description of discord embed must be under 2048 characters
desc = character.description[:2000] + '...'
else:
desc = character.description
response = discord.Embed(title='***{0.name}***'.format(character), type='rich', description='`' + desc + '`', colour=color.dark_gold(), url=character.url)
except TypeError: # In case the character has no description (character.description = None)
response = discord.Embed(title='***{0.name}***'.format(character), type='rich', colour=color.dark_gold(), url=character.url)
response = discord.Embed(title='***{0.name}***'.format(character), type='rich', description='`' + desc + '`', colour=color.dark_gold(), url=character.url)
response.add_field(name=lg.s_character[3], value=character.gender)
response.add_field(name=lg.s_character[4], value=character.is_historical)
response.add_field(name=lg.s_character[5], value=(', '.join(character.appearance_list)), inline=False)
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_character[6].format(execution_time))
await ctx.send(embed=response)
@bot.command(name='shindencharacterlist', aliases=['scl', 'shindencl', 'shcl', 'scharacterlist', 'shcharacterlist', 'schl', 'shindenchl', 'shchl'], help='Responds with a list of character results')
async def shindencharacterlist(ctx, *args):
logging.debug('Executing command {}shindencharacterlist'.format(prefix))
if args == ():
help_string = (lg.s_characterlist[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
name = ' '.join(args)
character_list = sh.search_characters(name, get_description=False)
if character_list == None:
t.stop()
return await ctx.send(lg.s_characterlist[1])
color = discord.Colour(16777215)
response = discord.Embed(title=lg.s_characterlist[2], type='rich', description=lg.s_characterlist[3].format(name), colour=color.green())
counter = 1
for character in character_list:
info = lg.s_characterlist[4].format(character)
for appear in character.appearance_list:
info = info + str(appear) + ', '
response.add_field(name='`{0}. {1.name}`'.format(counter, character), value=info[:-2], inline=False)
counter = counter + 1
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_characterlist[5].format(execution_time))
await ctx.send(embed=response)
@bot.command(name='shindenuser', aliases=['su', 'shindenu', 'shu', 'suser', 'shuser'], help='Searches for a shinden user')
async def shindenuser(ctx, *args):
logging.debug('Executing command {}shindenuser'.format(prefix))
if args == ():
help_string = (lg.s_user[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
if len(args) >= 2:
args_list = list(args)
which_result = 1
possible_int = args_list.pop()
try:
which_result = int(possible_int)
except:
args_list.append(possible_int)
nickname = ' '.join(args_list)
user_list = sh.search_users(nickname)
try:
user = user_list[which_result-1]
except IndexError:
await ctx.send(lg.s_user[1])
user = user_list[-1]
except TypeError:
t.stop()
return await ctx.send(lg.s_user[2])
color = discord.Colour(16777215)
response = discord.Embed(title='**{0.nickname}**'.format(user), type='rich', colour=color.red(), url=user.url)
response.add_field(name=lg.s_user[3], value='`{0.anime_titles_watched}`'.format(user))
hours_watched = int(user.anime_minutes_watched/60)
response.add_field(name=lg.s_user[4], value='`{}`'.format(hours_watched))
response.add_field(name=lg.s_user[5], value='`{0.anime_episodes_watched}`'.format(user))
response.add_field(name=lg.s_user[6], value='`{0.average_anime_ratings}`'.format(user))
response.add_field(name=lg.s_user[7], value='`{0.achievement_count}`'.format(user))
response.add_field(name=lg.s_user[8], value='`{0.points}`'.format(user))
formatted_last_seen = user.last_seen.strftime('%H:%M %d.%m.%Y')
response.add_field(name=lg.s_user[9], value='`{}`'.format(formatted_last_seen))
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_user[10].format(execution_time))
await ctx.send(embed=response)
else:
nickname = ' '.join(args)
user_list = sh.search_users(nickname)
try:
user = user_list[0]
except TypeError:
t.stop()
return await ctx.send(lg.s_user[1])
except TypeError:
t.stop()
return await ctx.send(lg.s_user[2])
color = discord.Colour(16777215)
response = discord.Embed(title='**{0.nickname}**'.format(user), type='rich', colour=color.red(), url=user.url)
response.add_field(name=lg.s_user[3], value='`{0.anime_titles_watched}`'.format(user))
hours_watched = int(user.anime_minutes_watched/60)
response.add_field(name=lg.s_user[4], value='`{}`'.format(hours_watched))
response.add_field(name=lg.s_user[5], value='`{0.anime_episodes_watched}`'.format(user))
response.add_field(name=lg.s_user[6], value='`{0.average_anime_ratings}`'.format(user))
response.add_field(name=lg.s_user[7], value='`{0.achievement_count}`'.format(user))
response.add_field(name=lg.s_user[8], value='`{0.points}`'.format(user))
formatted_last_seen = user.last_seen.strftime('%H:%M %d.%m.%Y')
response.add_field(name=lg.s_user[9], value='`{}`'.format(formatted_last_seen))
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_user[10].format(execution_time))
await ctx.send(embed=response)
@bot.command(name='shindenuserlist', aliases=['sul', 'shindenul', 'shul', 'suserlist', 'shuserlist'], help='Lists shinden users found')
async def shindenuserlist(ctx, *args):
logging.debug('Executing command {}shindenuserlist'.format(prefix))
if args == ():
help_string = (lg.s_userlist[0].format(prefix))
return await ctx.send(help_string)
try:
t.start()
except timer.TimerAlreadyRunning:
t.stop()
t.start()
nickname = ' '.join(args)
user_list = sh.search_users(nickname, detailed_info=True)
if user_list == None:
t.stop()
return await ctx.send(lg.s_userlist[1])
color = discord.Colour(16777215)
response = discord.Embed(title=lg.s_userlist[2] , type='rich', description=lg.s_userlist[3].format(nickname), colour=color.purple())
counter = 1
for user in user_list: # Formatting the data using datatime's strftime method
profile_hyperlink = lg.s_userlist[4].format(user)
formatted_last_seen = user.last_seen.strftime('%H:%M %d.%m.%Y')
hours_watched = int(user.anime_minutes_watched/60)
info = lg.s_userlist[5].format(formatted_last_seen, hours_watched, profile_hyperlink)
response.add_field(name='`{0}. {1.nickname}`'.format(counter, user), value=info, inline=False)
counter = counter + 1
execution_time = round(t.stop(), 3)
response.set_footer(text=lg.s_userlist[6].format(execution_time))
await ctx.send(embed=response)
# Other commands
@bot.command(name='covid', aliases=['ncov', 'covid19', 'coronavirus'])
async def covid(ctx):
logging.debug('Executing command {}covid'.format(prefix))
t.start()
time_of_update = await cv.when_last_update()
if time_of_update == 'never':
await cv.update()
elif (datetime.now() - time_of_update) > timedelta(hours=12): # if covid data hasnt been updated in 12 hours, then update (in order to minimalise requests sent)
await cv.update()
data = await cv.read_data()
# Creating two separate embeds for world and poland respectively
color = discord.Colour(16777215)
world_embed = discord.Embed(title=lg.covid[0], type='rich', colour=color.red(), url='https://worldometers.info/coronavirus/')
world_embed.add_field(name=lg.covid[2], value=data['world'].cases)
world_embed.add_field(name=lg.covid[3], value=data['world'].deaths)
world_embed.add_field(name=lg.covid[4], value=data['world'].recovered)
poland_embed = discord.Embed(title=lg.covid[1], type='rich', colour=color.red(), url='https://worldometers.info/coronavirus/country/poland')
poland_embed.add_field(name=lg.covid[2], value=data['poland'].cases)
poland_embed.add_field(name=lg.covid[3], value=data['poland'].deaths)
poland_embed.add_field(name=lg.covid[4], value=data['poland'].recovered)
execution_time = round(t.stop(), 3)
world_embed.set_footer(text=lg.covid[5].format(execution_time))
poland_embed.set_footer(text=lg.covid[5].format(execution_time))
await ctx.send(embed=world_embed)
await ctx.send(embed=poland_embed)
@bot.command(name='truth', help='This basically responds with dino earth image and nothing else')
async def truth(ctx):
logging.debug('Executing command {}truth'.format(prefix))
response = discord.Embed(title='The truth')
response.set_image(url='https://pbs.twimg.com/profile_images/1116994465464508418/E9UB9VPx.png')
await ctx.send(embed=response)
# Finally running the bot with our api key from settings.json
bot.run(api_key)
|
[
"discord.Embed",
"languages.Language",
"discord.ext.commands.Bot",
"shinden.search_characters",
"timer.Timer",
"urbandictionary.define",
"logging.warning",
"discord.Streaming",
"datetime.timedelta",
"shinden.search_users",
"datetime.datetime.now",
"urbandictionary.random",
"shinden.search_titles",
"json.dump",
"discord.Colour",
"sys.exit",
"json.load",
"logging.debug",
"logging.basicConfig",
"logging.info",
"covid19.Covid_data"
] |
[((225, 349), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(levelname)s - %(message)s"""', 'datefmt': '"""%d/%m/%Y %H:%M:%S"""', 'level': 'logging.INFO'}), "(format='%(asctime)s - %(levelname)s - %(message)s',\n datefmt='%d/%m/%Y %H:%M:%S', level=logging.INFO)\n", (244, 349), False, 'import logging\n'), ((352, 372), 'covid19.Covid_data', 'covid19.Covid_data', ([], {}), '()\n', (370, 372), False, 'import covid19\n'), ((377, 390), 'timer.Timer', 'timer.Timer', ([], {}), '()\n', (388, 390), False, 'import timer\n'), ((1186, 1240), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': 'prefix', 'help_command': 'None'}), '(command_prefix=prefix, help_command=None)\n', (1198, 1240), False, 'from discord.ext import commands\n'), ((1267, 1304), 'languages.Language', 'languages.Language', (['starting_language'], {}), '(starting_language)\n', (1285, 1304), False, 'import languages\n'), ((1823, 1872), 'logging.debug', 'logging.debug', (['f"""Guild Members:\n - {members}"""'], {}), '(f"""Guild Members:\n - {members}""")\n', (1836, 1872), False, 'import logging\n'), ((1983, 2083), 'discord.Streaming', 'discord.Streaming', ([], {'name': "(prefix + 'helperino')", 'url': '"""https://www.youtube.com/watch?v=dQw4w9WgXcQ"""'}), "(name=prefix + 'helperino', url=\n 'https://www.youtube.com/watch?v=dQw4w9WgXcQ')\n", (2000, 2083), False, 'import discord\n'), ((2171, 2210), 'logging.info', 'logging.info', (['"""------ | Ready | ------"""'], {}), "('------ | Ready | ------')\n", (2183, 2210), False, 'import logging\n'), ((2343, 2367), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (2357, 2367), False, 'import discord\n'), ((6729, 6745), 'urbandictionary.define', 'ud.define', (['words'], {}), '(words)\n', (6738, 6745), True, 'import urbandictionary as ud\n'), ((6761, 6879), 'discord.Embed', 'discord.Embed', ([], {'title': 'words', 'type': '"""rich"""', 'description': '"""Results for maximum 10 first results from 
Urban Dictionary"""'}), "(title=words, type='rich', description=\n 'Results for maximum 10 first results from Urban Dictionary')\n", (6774, 6879), False, 'import discord\n'), ((14009, 14032), 'shinden.search_titles', 'sh.search_titles', (['title'], {}), '(title)\n', (14025, 14032), True, 'import shinden as sh\n'), ((14139, 14163), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (14153, 14163), False, 'import discord\n'), ((15266, 15313), 'shinden.search_titles', 'sh.search_titles', (['title'], {'anime_or_manga': '"""manga"""'}), "(title, anime_or_manga='manga')\n", (15282, 15313), True, 'import shinden as sh\n'), ((15419, 15443), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (15433, 15443), False, 'import discord\n'), ((20488, 20537), 'shinden.search_characters', 'sh.search_characters', (['name'], {'get_description': '(False)'}), '(name, get_description=False)\n', (20508, 20537), True, 'import shinden as sh\n'), ((20652, 20676), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (20666, 20676), False, 'import discord\n'), ((25408, 25453), 'shinden.search_users', 'sh.search_users', (['nickname'], {'detailed_info': '(True)'}), '(nickname, detailed_info=True)\n', (25423, 25453), True, 'import shinden as sh\n'), ((25558, 25582), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (25572, 25582), False, 'import discord\n'), ((27008, 27032), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (27022, 27032), False, 'import discord\n'), ((28223, 28255), 'discord.Embed', 'discord.Embed', ([], {'title': '"""The truth"""'}), "(title='The truth')\n", (28236, 28255), False, 'import discord\n'), ((538, 550), 'json.load', 'json.load', (['f'], {}), '(f)\n', (547, 550), False, 'import json\n'), ((668, 754), 'logging.warning', 'logging.warning', (['"""Proper settings.json file wasn\'t found, creating a default one"""'], {}), '(\n "Proper settings.json file 
wasn\'t found, creating a default one")\n', (683, 754), False, 'import logging\n'), ((1061, 1148), 'logging.debug', 'logging.debug', (['"""Program will now shutdown, please apply settings in settings.json"""'], {}), "(\n 'Program will now shutdown, please apply settings in settings.json')\n", (1074, 1148), False, 'import logging\n'), ((1148, 1158), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1156, 1158), False, 'import sys\n'), ((5048, 5064), 'urbandictionary.define', 'ud.define', (['words'], {}), '(words)\n', (5057, 5064), True, 'import urbandictionary as ud\n'), ((5625, 5641), 'urbandictionary.define', 'ud.define', (['words'], {}), '(words)\n', (5634, 5641), True, 'import urbandictionary as ud\n'), ((7933, 7944), 'urbandictionary.random', 'ud.random', ([], {}), '()\n', (7942, 7944), True, 'import urbandictionary as ud\n'), ((8924, 8947), 'shinden.search_titles', 'sh.search_titles', (['title'], {}), '(title)\n', (8940, 8947), True, 'import shinden as sh\n'), ((9290, 9314), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (9304, 9314), False, 'import discord\n'), ((9993, 10016), 'shinden.search_titles', 'sh.search_titles', (['title'], {}), '(title)\n', (10009, 10016), True, 'import shinden as sh\n'), ((10242, 10266), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (10256, 10266), False, 'import discord\n'), ((11632, 11679), 'shinden.search_titles', 'sh.search_titles', (['title'], {'anime_or_manga': '"""manga"""'}), "(title, anime_or_manga='manga')\n", (11648, 11679), True, 'import shinden as sh\n'), ((11958, 11982), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (11972, 11982), False, 'import discord\n'), ((12586, 12633), 'shinden.search_titles', 'sh.search_titles', (['title'], {'anime_or_manga': '"""manga"""'}), "(title, anime_or_manga='manga')\n", (12602, 12633), True, 'import shinden as sh\n'), ((12900, 12924), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', 
(12914, 12924), False, 'import discord\n'), ((16753, 16779), 'shinden.search_characters', 'sh.search_characters', (['name'], {}), '(name)\n', (16773, 16779), True, 'import shinden as sh\n'), ((17081, 17105), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (17095, 17105), False, 'import discord\n'), ((18272, 18298), 'shinden.search_characters', 'sh.search_characters', (['name'], {}), '(name)\n', (18292, 18298), True, 'import shinden as sh\n'), ((18595, 18619), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (18609, 18619), False, 'import discord\n'), ((22079, 22104), 'shinden.search_users', 'sh.search_users', (['nickname'], {}), '(nickname)\n', (22094, 22104), True, 'import shinden as sh\n'), ((22376, 22400), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (22390, 22400), False, 'import discord\n'), ((23529, 23554), 'shinden.search_users', 'sh.search_users', (['nickname'], {}), '(nickname)\n', (23544, 23554), True, 'import shinden as sh\n'), ((23807, 23831), 'discord.Colour', 'discord.Colour', (['(16777215)'], {}), '(16777215)\n', (23821, 23831), False, 'import discord\n'), ((886, 926), 'json.dump', 'json.dump', (['default_settings', 'f'], {'indent': '(4)'}), '(default_settings, f, indent=4)\n', (895, 926), False, 'import json\n'), ((26743, 26762), 'datetime.timedelta', 'timedelta', ([], {'hours': '(12)'}), '(hours=12)\n', (26752, 26762), False, 'from datetime import datetime, timedelta\n'), ((26708, 26722), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (26720, 26722), False, 'from datetime import datetime, timedelta\n')]
|
#import what we need
import pandas as pd
from sqlalchemy import create_engine
#Read in the Titanic df
df = pd.read_csv('module2-sql-for-analysis/titanic.csv')
#make sure we got it
df.head()
#See what the datatypes are
df.dtypes
df.info()
# Create engine for DF insertion
engine = create_engine('postgres://ensbdkiv:<EMAIL>:5432/ensbdkiv')
df.to_sql('titanic_dataset', engine)
|
[
"pandas.read_csv",
"sqlalchemy.create_engine"
] |
[((113, 164), 'pandas.read_csv', 'pd.read_csv', (['"""module2-sql-for-analysis/titanic.csv"""'], {}), "('module2-sql-for-analysis/titanic.csv')\n", (124, 164), True, 'import pandas as pd\n'), ((296, 354), 'sqlalchemy.create_engine', 'create_engine', (['"""postgres://ensbdkiv:<EMAIL>:5432/ensbdkiv"""'], {}), "('postgres://ensbdkiv:<EMAIL>:5432/ensbdkiv')\n", (309, 354), False, 'from sqlalchemy import create_engine\n')]
|
###/usr/bin/env python
### coding: utf-8
import pandas as pd
import yfinance as yf
import investpy
import numpy as np
df_main = pd.read_excel(r'RawData.xlsx')
df_main = df_main [:-2] #remove last 2 rows so that data is able to update even when there are no new rows. This is ensure the code runs when there is a transfer from GEM to main board.
### Gather data & clean from IPO
page="http://www.aastocks.com/en/stocks/market/ipo/listedipo.aspx?s=3&o=0&page=" + str (1)
dfs = pd.read_html(page)
df = dfs [16]
df = df [:-3]
df = df.iloc [:,1:]
name = df.columns [0]
df2 = df [name]
df2 = df2.map(lambda x: x.rstrip('Sink Below Listing Price'))
df_code = df2.map(lambda x: x[-7:])
df_name = df2.map(lambda x: x[:-8])
df [name] = df_code
df.insert(0, 'Name', df_name)
df = df.rename(columns = {name:'Code'})
df_IPO = df[~df['Code'].isin(df_main['Code'])]
### Gather sponsor data
page= 'http://www.aastocks.com/en/stocks/market/ipo/sponsor.aspx?s=1&o=0&s2=0&o2=0&f1=&f2=&page=' + str(1) + '#sponsor'
dfs = pd.read_html(page)
df = dfs [17]
df = df.iloc[:-2,0:7]
df ['Name▼ / Code▼'] = df_code
df.insert(0, 'Name', df_name)
df = df.rename(columns = {'Name▼ / Code▼':'Code'})
df_sponsor = df[df['Code'].isin(df_IPO['Code'])]
df_sponsor = df_sponsor.drop(columns = ['Name/Code', 'List Date', 'Acc. % Chg.▼', '% Chg. onDebut1▼', 'Name' ],axis = 1)
### merge newly gathered data
df_new = df_IPO.merge(df_sponsor, on = ['Code'], how = 'left')
df_new = df_new.rename( columns={'Industry':'AA Stocks Industry'})
### gather Chinese name data
page="http://www.aastocks.com/sc/stocks/market/ipo/listedipo.aspx?s=3&o=0&page=" + str (1)
dfs = pd.read_html(page)
df = dfs [19]
df = df [:-3]
df = df.iloc [:,1:]
name = df.columns [0]
df2 = df [name]
df2 = df2.map(lambda x: x.rstrip('跌穿上市价'))
df_code = df2.map(lambda x: x[-7:])
df_name = df2.map(lambda x: x[:-8])
df [name] = df_code
df.insert(0, 'Name CN', df_name)
df = df.rename(columns = {name:'Code'})
df = df[~df['Code'].isin(df_main['Code'])]
df = df.iloc [:,0:2]
df_new = df.merge(df_new, on = ['Code'], how = 'left') #merge data
def cleanpercent (df_main, column):
df_main[column]= df_main[column].astype (str)
df_main[column]= df_main[column].str.replace('%', '', regex=True)
df_main[column]= df_main[column].str.replace('+', '', regex=True)
df_main[column]= df_main[column].astype (float)
df_main[column]= df_main[column]/100
return df_main
df_new = cleanpercent (df_new, '% Chg. on2Debut▼')
df_new = cleanpercent (df_new, 'Gray Market (%)2')
df_new = cleanpercent (df_new, 'One LotSuccess Rate▼')
### Add Yahoo Industries/Sector Data
df2 = df_new ['Code']
df8 = []
for data in df2:
try:
DC = yf.Ticker(data)
df3 = DC.info ['industry']
df4 = DC.info ['sector']
df6 = DC.info ['longBusinessSummary']
df5 = [df3, df4, df6, data]
df8.append (df5)
except KeyError:
df5 = ['na', 'na', 'na', 'na']
df8.append (df5)
df8 = pd.DataFrame(df8, columns=['Industry', 'Sector', 'Business Summary', 'Code'])
### merge newly gathered data
df_new = df_new.merge(df8, on = ['Code'], how = 'left')
### clean new Leads data
df2 = df_new['Sponsor'].str.replace(', Limited', ' Limited,', regex=True)
df2 = df2.str.split (',', expand = True)
def trim_all_columns(df):
"""
Trim whitespace from ends of each value across all series in dataframe
"""
trim_strings = lambda x: x.strip() if isinstance(x, str) else x
return df.applymap(trim_strings)
df2 = trim_all_columns(df2)
for col in df2.columns:
df2 = df2.rename(columns = {col:'Lead '+ str(col + 1)})
### merge newly gathered data
df_new = pd.concat([df_new,df2],axis = 1)
### Splitting Market Cap and Offer price to 2 columns
def split_name(df, df_column):
"""
Splits columns with a - in the data and returns a column with the lower and upper bound
"""
df2 = df [df_column]
df2 = df2.str.split ('-', expand = True)
df2 = df2.astype(float)
df2 = df2.rename(columns = {0:'Lower '+ df_column, 1: 'Upper ' + df_column})
df [df_column] = df2.iloc [:,0]
df = df.rename(columns = {df_column:'Lower ' + df_column})
try:
df.insert(df.columns.get_loc('Lower ' + df_column) +1, 'Upper ' + df_column, value = df2.iloc [:,1])
except IndexError: ##catches error where there is no upper for market cap and offer price
df2 ['Upper ' + df_column] = np.nan
df.insert(df.columns.get_loc('Lower ' + df_column) +1, 'Upper ' + df_column, value = df2.iloc [:,1])
return df
df_new = split_name(df_new, 'Market Cap(B)')
df_new = split_name(df_new, 'Offer Price')
### calculating Market cap
mktcap = df_new['Listing Price'].astype(float) / df_new['Upper Offer Price'].astype(float) * df_new[ 'Upper Market Cap(B)'].astype(float)
df_new.insert (6, 'Market Cap(B)', value = mktcap)
### cleaning data before merging
df_new = df_new.drop(columns = ['Last1','Acc.% Chg.▼' ],axis = 1)
### code for adding a count as ipo? column for new data
df_countipo = df_new['Name']
abdc = df_countipo.str [-3:]
abdc = abdc.str.replace('-SW', '0', regex=False)
abdc = abdc.str [-2:]
abdc = abdc.str.replace('-S','0', regex = False)
abdc1 = []
for ends in abdc:
if ends != '0':
ends = '1'
abdc1.append(ends)
abdc1 = list(map(int, abdc1))
abdc1 = pd.DataFrame(abdc1, columns =['Count as IPO?'])
df_new = pd.concat([df_new,abdc1], axis=1, ignore_index = False)
df_new = df_new.iloc [::-1]
## concat new data with old data
df_main = pd.concat([df_main,df_new], axis=0)
## gather yahoo trading data and calculating return
### initialize data
df2 = df_main ['Code']
df_date = df_main ['Listing Date▼']
df_date = pd.to_datetime(df_date, infer_datetime_format=True)
df_date = pd.concat ([df_date, df2], axis=1)
df_date = df_date.set_index('Code')
df = yf.download('^HSI', start="2018-01-01") ['Close']
today = pd.to_datetime('today').strftime('%d/%m/%Y')
df_invest = investpy.get_index_historical_data(index='hs healthcare',
country='hong kong',
from_date='01/01/2018',
to_date=today)
df_invest = df_invest ['Close']
df_trading8 = []
df_HSI8 = []
df_HSH8 =[]
df2 = df2.values.tolist()
df3 = yf.download(df2, period="max") ['Close']
def calc_data (init_df,date_df,trading_days):
df_HSI = []
for day in trading_days:
try:
df_day = date_df[day]
except (IndexError, TypeError):
df_day = 'NAN'
df_HSI.append(df_day)
df_HSI.append (ticker)
init_df.append(df_HSI)
return init_df
for ticker in df2:
df4 = df3 [ticker]
df4 = df4.dropna()
start_date = df_date.loc [ticker] ### use listing date from AA stocks
start_date = start_date.values[0]
trading_days = [0, -1, 80, 100, 120, 140, 160, 252, 372] ###! THIS CAN BE ADJUSTED AS NECESSARY but others need to be adjusted if different from 9 variables
try:
end_date = df4.index [-1]
### filtering by date
df5 = df.loc[start_date:end_date]
df4 = df4.loc [start_date:end_date]
### filtering by date for investpy
end_dateinv = df_invest.index [-1]
df6 = df_invest.loc[start_date:end_dateinv]
except IndexError:
df5 = np.NAN
df4 = np.NAN
df6 = np.NAN
calc_data(df_HSI8,df5, trading_days)
calc_data(df_trading8, df4,trading_days)
calc_data(df_HSH8, df6,trading_days)
###making dataframe then merging
def make_df (ending_term, df_data):
HSI_days = [str(i)+ ending_term for i in trading_days]
HSI_days.append('Code')
df = pd.DataFrame(df_data, columns = HSI_days)
return df
df9 = make_df (' HSI Days', df_HSI8)
df10 = make_df (' HSH Days', df_HSH8)
df8 = make_df(' Trading Days', df_trading8)
### preparing data for division
df_trading = df8.iloc [:,:-1]
df_HSI = df9.iloc [:,:-1]
df_HSH = df10.iloc [:,:-1]
df_trading = df_trading.astype(float)
df_HSI = df_HSI.astype(float)
df_HSH = df_HSH.astype(float)
df_listprice = df_main ['Listing Price']
df_listprice = df_listprice.reset_index(drop=True)
df_listprice = df_listprice.astype(float)
df_code = df8 ['Code']
#Trading return: dividing returns by list price. Need to review some Matrix Division hahaha
def ret (numerator,denominator,code):
"""
Divides two dataframes by transposing and using linear algebra rules
"""
df_tradingret = numerator.T / denominator -1
df_tradingret = df_tradingret.T
df_tradingret = pd.concat ([df_tradingret, code], axis=1)
return df_tradingret
df_tradingret = ret(df_trading, df_listprice, df_code)
# Index return: divide HSI by rest of column
df_HSIPO = df_HSI.iloc [:,0]
df_HSI = df_HSI.iloc[:,1:]
df_HSHPO = df_HSH.iloc [:,0]
df_HSH = df_HSH.iloc[:,1:]
df_HSHret = ret(df_HSH,df_HSHPO,df_code)
df_HSIret = ret(df_HSI,df_HSIPO,df_code)
### merge Trading and HSI and HSH return
df_yfret = df_tradingret.merge(df_HSIret, on = ['Code'], how = 'left')
df_yfret = df_yfret.merge(df_HSHret, on = ['Code'], how = 'left')
### removing old data then adding new data
df_end = df_main.iloc [:,-3:] ##!! Key piece of code!!!
df_end = df_end.reset_index(drop=True)
df_main = df_main.drop(df_main.columns[27:], axis = 1)
df_main = df_main.merge(df_yfret, on = ['Code'], how = 'left')
df_main = pd.concat ([df_main, df_end], axis=1)
### cleaning df_main
df_main ['Listing Price'] = df_main ['Listing Price'].astype (str)
df_main ['Listing Price'] = df_main ['Listing Price'].astype (float)
df_main ['Applied lotsfor 1 lot▼'] = df_main ['Applied lotsfor 1 lot▼'].astype (str)
df_main ['Applied lotsfor 1 lot▼'] = df_main ['Applied lotsfor 1 lot▼'].str.replace(' lot', '', regex=True)
df_main ['Applied lotsfor 1 lot▼'] = df_main ['Applied lotsfor 1 lot▼'].astype (float)
df_main ['Over-sub.Rate▼'] = df_main ['Over-sub.Rate▼'].astype (str)
df_main ['Over-sub.Rate▼'] = df_main ['Over-sub.Rate▼'].str.replace ('Under-Sub.', '-1000', regex=True)
df_main ['Over-sub.Rate▼'] = df_main ['Over-sub.Rate▼'].astype (float)
df_main ['Lot Size'] = df_main ['Lot Size'].astype (str)
df_main ['Lot Size'] = df_main ['Lot Size'].astype (float)
df_main ['Listing Date▼'] = df_main ['Listing Date▼'].astype (str)
df_main ['Listing Date▼'] = df_main ['Listing Date▼'].str.replace (' 00:00:00','', regex = True)
## removing discrepancy and calculating new discrepancy
discrepancy = abs(df_main ['% Chg. on2Debut▼'] - df_main['0 Trading Days'])
discrepancy = discrepancy.round(2)
df_main ['Discrepancy'] = discrepancy
df_main.to_excel('RawData.xlsx', index = False)
|
[
"pandas.read_html",
"pandas.DataFrame",
"yfinance.download",
"pandas.read_excel",
"pandas.to_datetime",
"investpy.get_index_historical_data",
"yfinance.Ticker",
"pandas.concat"
] |
[((131, 160), 'pandas.read_excel', 'pd.read_excel', (['"""RawData.xlsx"""'], {}), "('RawData.xlsx')\n", (144, 160), True, 'import pandas as pd\n'), ((479, 497), 'pandas.read_html', 'pd.read_html', (['page'], {}), '(page)\n', (491, 497), True, 'import pandas as pd\n'), ((1008, 1026), 'pandas.read_html', 'pd.read_html', (['page'], {}), '(page)\n', (1020, 1026), True, 'import pandas as pd\n'), ((1634, 1652), 'pandas.read_html', 'pd.read_html', (['page'], {}), '(page)\n', (1646, 1652), True, 'import pandas as pd\n'), ((2980, 3057), 'pandas.DataFrame', 'pd.DataFrame', (['df8'], {'columns': "['Industry', 'Sector', 'Business Summary', 'Code']"}), "(df8, columns=['Industry', 'Sector', 'Business Summary', 'Code'])\n", (2992, 3057), True, 'import pandas as pd\n'), ((3663, 3695), 'pandas.concat', 'pd.concat', (['[df_new, df2]'], {'axis': '(1)'}), '([df_new, df2], axis=1)\n', (3672, 3695), True, 'import pandas as pd\n'), ((5339, 5385), 'pandas.DataFrame', 'pd.DataFrame', (['abdc1'], {'columns': "['Count as IPO?']"}), "(abdc1, columns=['Count as IPO?'])\n", (5351, 5385), True, 'import pandas as pd\n'), ((5396, 5450), 'pandas.concat', 'pd.concat', (['[df_new, abdc1]'], {'axis': '(1)', 'ignore_index': '(False)'}), '([df_new, abdc1], axis=1, ignore_index=False)\n', (5405, 5450), True, 'import pandas as pd\n'), ((5525, 5561), 'pandas.concat', 'pd.concat', (['[df_main, df_new]'], {'axis': '(0)'}), '([df_main, df_new], axis=0)\n', (5534, 5561), True, 'import pandas as pd\n'), ((5708, 5759), 'pandas.to_datetime', 'pd.to_datetime', (['df_date'], {'infer_datetime_format': '(True)'}), '(df_date, infer_datetime_format=True)\n', (5722, 5759), True, 'import pandas as pd\n'), ((5770, 5803), 'pandas.concat', 'pd.concat', (['[df_date, df2]'], {'axis': '(1)'}), '([df_date, df2], axis=1)\n', (5779, 5803), True, 'import pandas as pd\n'), ((5966, 6088), 'investpy.get_index_historical_data', 'investpy.get_index_historical_data', ([], {'index': '"""hs healthcare"""', 'country': '"""hong kong"""', 
'from_date': '"""01/01/2018"""', 'to_date': 'today'}), "(index='hs healthcare', country=\n 'hong kong', from_date='01/01/2018', to_date=today)\n", (6000, 6088), False, 'import investpy\n'), ((9439, 9475), 'pandas.concat', 'pd.concat', (['[df_main, df_end]'], {'axis': '(1)'}), '([df_main, df_end], axis=1)\n', (9448, 9475), True, 'import pandas as pd\n'), ((5847, 5886), 'yfinance.download', 'yf.download', (['"""^HSI"""'], {'start': '"""2018-01-01"""'}), "('^HSI', start='2018-01-01')\n", (5858, 5886), True, 'import yfinance as yf\n'), ((6312, 6342), 'yfinance.download', 'yf.download', (['df2'], {'period': '"""max"""'}), "(df2, period='max')\n", (6323, 6342), True, 'import yfinance as yf\n'), ((7735, 7774), 'pandas.DataFrame', 'pd.DataFrame', (['df_data'], {'columns': 'HSI_days'}), '(df_data, columns=HSI_days)\n', (7747, 7774), True, 'import pandas as pd\n'), ((8620, 8660), 'pandas.concat', 'pd.concat', (['[df_tradingret, code]'], {'axis': '(1)'}), '([df_tradingret, code], axis=1)\n', (8629, 8660), True, 'import pandas as pd\n'), ((2687, 2702), 'yfinance.Ticker', 'yf.Ticker', (['data'], {}), '(data)\n', (2696, 2702), True, 'import yfinance as yf\n'), ((5909, 5932), 'pandas.to_datetime', 'pd.to_datetime', (['"""today"""'], {}), "('today')\n", (5923, 5932), True, 'import pandas as pd\n')]
|
import os
from PIL import Image
import numpy as np
path='faces/faces_4/an2i'
trainx=[]
trainy=[]
for filename in os.listdir(path):
pixel=[]
im=Image.open(path+'/'+filename)
for i in range(im.size[0]):
row=[]
for j in range(im.size[1]):
row.append(im.getpixel((i,j)))
pixel.append(row)
trainx.append(pixel)
director=filename.split('_')[1]
if director=='left':
trainy.append([1,0,0,0])
elif director=='right':
trainy.append([0,1,0,0])
elif director=='straight':
trainy.append([0,0,1,0])
elif director=='up':
trainy.append([0,0,0,1])
trainx=np.array(trainx)
trainy=np.array(trainy)
trainx=np.transpose(trainx.reshape((-1,32*30))-128)/256.0
trainy=np.transpose(trainy)
|
[
"numpy.transpose",
"numpy.array",
"os.listdir",
"PIL.Image.open"
] |
[((114, 130), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (124, 130), False, 'import os\n'), ((565, 581), 'numpy.array', 'np.array', (['trainx'], {}), '(trainx)\n', (573, 581), True, 'import numpy as np\n'), ((590, 606), 'numpy.array', 'np.array', (['trainy'], {}), '(trainy)\n', (598, 606), True, 'import numpy as np\n'), ((673, 693), 'numpy.transpose', 'np.transpose', (['trainy'], {}), '(trainy)\n', (685, 693), True, 'import numpy as np\n'), ((146, 179), 'PIL.Image.open', 'Image.open', (["(path + '/' + filename)"], {}), "(path + '/' + filename)\n", (156, 179), False, 'from PIL import Image\n')]
|
from pathlib import Path
from loguru import logger
import numpy as np
import torch
from torch.utils.data import Dataset
import ganslate
from ganslate.utils import sitk_utils
from ganslate.data.utils.normalization import min_max_normalize, min_max_denormalize
from ganslate.data.utils.body_mask import apply_body_mask
# Config imports
from typing import Tuple
from dataclasses import dataclass
from omegaconf import MISSING
from ganslate import configs
@dataclass
class CBCTtoCTInferenceDatasetConfig(configs.base.BaseDatasetConfig):
hounsfield_units_range: Tuple[int, int] = (-1000, 2000)
class CBCTtoCTInferenceDataset(Dataset):
def __init__(self, conf):
self.root_path = Path(conf.infer.dataset.root).resolve()
self.paths_CBCT = []
for CT_CBCT_pair_dir in self.root_path.iterdir():
CT_CBCT_pair_dir = self.root_path / CT_CBCT_pair_dir
CBCT = list(CT_CBCT_pair_dir.rglob('CBCT.nrrd'))[0]
self.paths_CBCT.append(CBCT)
# Min and max HU values for clipping and normalization
self.hu_min, self.hu_max = conf.infer.dataset.hounsfield_units_range
def __getitem__(self, index):
path_CBCT = self.paths_CBCT[index]
CBCT = sitk_utils.load(path_CBCT)
metadata = {
'path': str(path_CBCT),
'size': CBCT.GetSize(),
'origin': CBCT.GetOrigin(),
'spacing': CBCT.GetSpacing(),
'direction': CBCT.GetDirection(),
'dtype': sitk_utils.get_npy_dtype(CBCT)
}
CBCT = apply_body_mask(sitk_utils.get_npy(CBCT),
apply_mask=True,
masking_value=self.hu_min,
hu_threshold=-800)
CBCT = torch.tensor(CBCT)
# Limits the lowest and highest HU unit
CBCT = torch.clamp(CBCT, self.hu_min, self.hu_max)
# Normalize Hounsfield units to range [-1,1]
CBCT = min_max_normalize(CBCT, self.hu_min, self.hu_max)
# Add channel dimension (1 = grayscale)
CBCT = CBCT.unsqueeze(0)
return {'input': CBCT, "metadata": metadata}
def __len__(self):
return len(self.paths_CBCT)
def save(self, tensor, save_dir, metadata):
tensor = tensor.squeeze().cpu()
tensor = min_max_denormalize(tensor, self.hu_min, self.hu_max)
sitk_image = sitk_utils.tensor_to_sitk_image(tensor, metadata['origin'],
metadata['spacing'], metadata['direction'],
metadata['dtype'])
# Dataset used has a directory per each datapoint, the name of each
# datapoint's dir is used to save the output
datapoint_path = Path(str(metadata['path']))
save_path = datapoint_path.relative_to(self.root_path)
save_path = Path(save_dir) / save_path
save_path.parent.mkdir(exist_ok=True, parents=True)
sitk_utils.write(sitk_image, save_path)
|
[
"ganslate.data.utils.normalization.min_max_normalize",
"ganslate.utils.sitk_utils.tensor_to_sitk_image",
"ganslate.data.utils.normalization.min_max_denormalize",
"ganslate.utils.sitk_utils.write",
"ganslate.utils.sitk_utils.get_npy",
"pathlib.Path",
"torch.clamp",
"ganslate.utils.sitk_utils.get_npy_dtype",
"ganslate.utils.sitk_utils.load",
"torch.tensor"
] |
[((1229, 1255), 'ganslate.utils.sitk_utils.load', 'sitk_utils.load', (['path_CBCT'], {}), '(path_CBCT)\n', (1244, 1255), False, 'from ganslate.utils import sitk_utils\n'), ((1770, 1788), 'torch.tensor', 'torch.tensor', (['CBCT'], {}), '(CBCT)\n', (1782, 1788), False, 'import torch\n'), ((1853, 1896), 'torch.clamp', 'torch.clamp', (['CBCT', 'self.hu_min', 'self.hu_max'], {}), '(CBCT, self.hu_min, self.hu_max)\n', (1864, 1896), False, 'import torch\n'), ((1966, 2015), 'ganslate.data.utils.normalization.min_max_normalize', 'min_max_normalize', (['CBCT', 'self.hu_min', 'self.hu_max'], {}), '(CBCT, self.hu_min, self.hu_max)\n', (1983, 2015), False, 'from ganslate.data.utils.normalization import min_max_normalize, min_max_denormalize\n'), ((2318, 2371), 'ganslate.data.utils.normalization.min_max_denormalize', 'min_max_denormalize', (['tensor', 'self.hu_min', 'self.hu_max'], {}), '(tensor, self.hu_min, self.hu_max)\n', (2337, 2371), False, 'from ganslate.data.utils.normalization import min_max_normalize, min_max_denormalize\n'), ((2394, 2521), 'ganslate.utils.sitk_utils.tensor_to_sitk_image', 'sitk_utils.tensor_to_sitk_image', (['tensor', "metadata['origin']", "metadata['spacing']", "metadata['direction']", "metadata['dtype']"], {}), "(tensor, metadata['origin'], metadata[\n 'spacing'], metadata['direction'], metadata['dtype'])\n", (2425, 2521), False, 'from ganslate.utils import sitk_utils\n'), ((2988, 3027), 'ganslate.utils.sitk_utils.write', 'sitk_utils.write', (['sitk_image', 'save_path'], {}), '(sitk_image, save_path)\n', (3004, 3027), False, 'from ganslate.utils import sitk_utils\n'), ((1499, 1529), 'ganslate.utils.sitk_utils.get_npy_dtype', 'sitk_utils.get_npy_dtype', (['CBCT'], {}), '(CBCT)\n', (1523, 1529), False, 'from ganslate.utils import sitk_utils\n'), ((1572, 1596), 'ganslate.utils.sitk_utils.get_npy', 'sitk_utils.get_npy', (['CBCT'], {}), '(CBCT)\n', (1590, 1596), False, 'from ganslate.utils import sitk_utils\n'), ((2891, 2905), 'pathlib.Path', 'Path', 
(['save_dir'], {}), '(save_dir)\n', (2895, 2905), False, 'from pathlib import Path\n'), ((696, 725), 'pathlib.Path', 'Path', (['conf.infer.dataset.root'], {}), '(conf.infer.dataset.root)\n', (700, 725), False, 'from pathlib import Path\n')]
|
from gpio import Gpio
from time import sleep
class Wh1602:
def __init__(self):
self.reserve_gpios()
self.rw.set_value(0)
sleep(0.05)
def __del__(self):
pass
def reserve_gpios(self):
self.rs = Gpio(2, "out")
self.rw = Gpio(3, "out")
self.e = Gpio(4, "out")
self.d = [Gpio(17, "out"), Gpio(27, "out"),
Gpio(22, "out"), Gpio(23, "out")]
def lcd_write_nibble(self, val):
for i, p in enumerate(self.d):
p.set_value(0 if (val & 1 << i) == 0 else 1)
self.e.set_value(1)
sleep(0.02)
self.e.set_value(0)
def lcd_write_data(self, data):
self.lcd_write_nibble(data >> 4)
self.lcd_write_nibble(data & 0xF)
def init_lcd(self):
self.rs.set_value(0)
sleep(0.2)
self.lcd_write_nibble(0x03)
sleep(0.05)
self.lcd_write_nibble(0x03)
sleep(0.05)
self.lcd_write_nibble(0x02)
sleep(0.02)
self.lcd_write_data(0x08)
sleep(0.02)
self.lcd_write_data(0x01)
sleep(0.02)
self.lcd_write_data(0x06)
sleep(0.02)
self.lcd_write_data(0x0D)
sleep(0.02)
self.rs.set_value(1)
def lcd_write_string(self, str):
for s in str:
self.lcd_write_data(s)
|
[
"gpio.Gpio",
"time.sleep"
] |
[((150, 161), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (155, 161), False, 'from time import sleep\n'), ((247, 261), 'gpio.Gpio', 'Gpio', (['(2)', '"""out"""'], {}), "(2, 'out')\n", (251, 261), False, 'from gpio import Gpio\n'), ((280, 294), 'gpio.Gpio', 'Gpio', (['(3)', '"""out"""'], {}), "(3, 'out')\n", (284, 294), False, 'from gpio import Gpio\n'), ((312, 326), 'gpio.Gpio', 'Gpio', (['(4)', '"""out"""'], {}), "(4, 'out')\n", (316, 326), False, 'from gpio import Gpio\n'), ((602, 613), 'time.sleep', 'sleep', (['(0.02)'], {}), '(0.02)\n', (607, 613), False, 'from time import sleep\n'), ((824, 834), 'time.sleep', 'sleep', (['(0.2)'], {}), '(0.2)\n', (829, 834), False, 'from time import sleep\n'), ((880, 891), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (885, 891), False, 'from time import sleep\n'), ((937, 948), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (942, 948), False, 'from time import sleep\n'), ((994, 1005), 'time.sleep', 'sleep', (['(0.02)'], {}), '(0.02)\n', (999, 1005), False, 'from time import sleep\n'), ((1049, 1060), 'time.sleep', 'sleep', (['(0.02)'], {}), '(0.02)\n', (1054, 1060), False, 'from time import sleep\n'), ((1104, 1115), 'time.sleep', 'sleep', (['(0.02)'], {}), '(0.02)\n', (1109, 1115), False, 'from time import sleep\n'), ((1159, 1170), 'time.sleep', 'sleep', (['(0.02)'], {}), '(0.02)\n', (1164, 1170), False, 'from time import sleep\n'), ((1214, 1225), 'time.sleep', 'sleep', (['(0.02)'], {}), '(0.02)\n', (1219, 1225), False, 'from time import sleep\n'), ((345, 360), 'gpio.Gpio', 'Gpio', (['(17)', '"""out"""'], {}), "(17, 'out')\n", (349, 360), False, 'from gpio import Gpio\n'), ((362, 377), 'gpio.Gpio', 'Gpio', (['(27)', '"""out"""'], {}), "(27, 'out')\n", (366, 377), False, 'from gpio import Gpio\n'), ((397, 412), 'gpio.Gpio', 'Gpio', (['(22)', '"""out"""'], {}), "(22, 'out')\n", (401, 412), False, 'from gpio import Gpio\n'), ((414, 429), 'gpio.Gpio', 'Gpio', (['(23)', '"""out"""'], {}), "(23, 'out')\n", (418, 
429), False, 'from gpio import Gpio\n')]
|
# -*- coding: utf-8 -*-
import scrapy
import hashlib
from bitco_in_forum.items import BitcoInForumItem
class BitcoInforumSpider(scrapy.Spider):
name = 'Bitco_inForum'
allowed_domains = ['bitco.in/forum/']
start_urls = ['http://bitco.in/forum//']
def parse(self, response):
subforums = response.css("div.nodeText h3.nodeTitle a::attr(href)").extract()
for board in subforums:
yield scrapy.Request(url=board,
callback=self.parse_topics)
def parse_topics(self, response):
topic_links = response.css("h3.title a::attr(href)").extract()
try:
page_list = response.css("div.PageNav a::text").extract()
counter = 0
# index finden
for element in page_list:
if page_list[counter] != "Next >":
counter = counter + 1
continue
else:
break
next_url = response.css("div.PageNav a::attr(href)").extract()[counter]
if next_url is not None:
yield scrapy.Request(url=next_url,
callback=self.parse_topics)
for topic_link in topic_links:
yield scrapy.Request(url=topic_link,
callback=self.parse_posts)
except:
# no text
pass
def parse_posts(self, response):
post = BitcoInForumItem()
author_list = response.css("div.uix_usernameWrapper a::text").extract()
topic_list = response.css("div.titleBar h1::text").extract()
dates_list = response.css("span.DateTime::text").extract()
posttext_list = response.css("blockquote.messageText::text")
for idx, item in enumerate(author_list):
try:
post['author'] = author_list[idx]
except:
post['author'] = "None"
try:
post['datetime'] = dates_list[idx]
except:
post['datetime'] = "None"
try:
post['posttext'] = posttext_list[idx]
except:
post['posttext'] = "None"
try:
post['topic'] = topic_list[idx]
except:
post['topic'] = "None"
tohash = str(post['author']) + str(post['datetime']) + str(post['posttext']) + str(post['topic'])
hobject = hashlib.sha256(tohash.encode())
hash_string = str(hobject.hexdigest())
post['identityhash'] = hash_string
yield post
# Pagination
try:
next_page_array = response.css("div.PageNav a::text").extract()
counter = 0
# index finden
for element in next_page_array:
if next_page_array[counter] != "Next >":
counter = counter + 1
continue
else:
break
next_url = response.css("div.PageNav a::attr(href)").extract()[counter]
except:
next_url = None
if next_url is not None:
yield scrapy.Request(url=next_url,
callback=self.parse_posts)
|
[
"bitco_in_forum.items.BitcoInForumItem",
"scrapy.Request"
] |
[((1478, 1496), 'bitco_in_forum.items.BitcoInForumItem', 'BitcoInForumItem', ([], {}), '()\n', (1494, 1496), False, 'from bitco_in_forum.items import BitcoInForumItem\n'), ((428, 481), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'board', 'callback': 'self.parse_topics'}), '(url=board, callback=self.parse_topics)\n', (442, 481), False, 'import scrapy\n'), ((3202, 3257), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'next_url', 'callback': 'self.parse_posts'}), '(url=next_url, callback=self.parse_posts)\n', (3216, 3257), False, 'import scrapy\n'), ((1115, 1171), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'next_url', 'callback': 'self.parse_topics'}), '(url=next_url, callback=self.parse_topics)\n', (1129, 1171), False, 'import scrapy\n'), ((1275, 1332), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'topic_link', 'callback': 'self.parse_posts'}), '(url=topic_link, callback=self.parse_posts)\n', (1289, 1332), False, 'import scrapy\n')]
|
"""
Collections of Fermion-to-Qubit encodings known to tequila
Most are Interfaces to OpenFermion
"""
from tequila.circuit.circuit import QCircuit
from tequila.circuit.gates import X
from tequila.hamiltonian.qubit_hamiltonian import QubitHamiltonian
import openfermion
def known_encodings():
# convenience for testing and I/O
encodings= {
"JordanWigner":JordanWigner,
"BravyiKitaev":BravyiKitaev,
"BravyiKitaevFast": BravyiKitaevFast,
"BravyiKitaevTree": BravyiKitaevTree,
"TaperedBravyiKitaev": TaperedBravyKitaev
}
# aliases
encodings = {**encodings,
"ReorderedJordanWigner": lambda **kwargs: JordanWigner(up_then_down=True, **kwargs),
"ReorderedBravyiKitaev": lambda **kwargs: BravyiKitaev(up_then_down=True, **kwargs),
"ReorderedBravyiKitaevTree": lambda **kwargs: BravyiKitaevTree(up_then_down=True, **kwargs),
}
return {k.replace("_","").replace("-","").upper():v for k,v in encodings.items()}
class EncodingBase:
@property
def name(self):
prefix=""
if self.up_then_down:
prefix="Reordered"
if hasattr(self, "_name"):
return prefix+self._name
else:
return prefix+type(self).__name__
def __init__(self, n_electrons, n_orbitals, up_then_down=False, *args, **kwargs):
self.n_electrons = n_electrons
self.n_orbitals = n_orbitals
self.up_then_down = up_then_down
def __call__(self, fermion_operator:openfermion.FermionOperator, *args, **kwargs) -> QubitHamiltonian:
"""
:param fermion_operator:
an openfermion FermionOperator
:return:
The openfermion QubitOperator of this class ecoding
"""
if self.up_then_down:
op = openfermion.reorder(operator=fermion_operator, order_function=openfermion.up_then_down, num_modes=2*self.n_orbitals)
else:
op = fermion_operator
fop = self.do_transform(fermion_operator=op, *args, **kwargs)
fop.compress()
return self.post_processing(QubitHamiltonian.from_openfermion(fop))
def post_processing(self, op, *args, **kwargs):
return op
def up(self, i):
if self.up_then_down:
return i
else:
return 2*i
def down(self, i):
if self.up_then_down:
return i+self.n_orbitals
else:
return 2*i+1
def do_transform(self, fermion_operator:openfermion.FermionOperator, *args, **kwargs) -> openfermion.QubitOperator:
raise Exception("{}::do_transform: called base class".format(type(self).__name__))
def map_state(self, state:list, *args, **kwargs) -> list:
"""
Expects a state in spin-orbital ordering
Returns the corresponding qubit state in the class encoding
:param state:
basis-state as occupation number vector in spin orbitals
sorted as: [0_up, 0_down, 1_up, 1_down, ... N_up, N_down]
with N being the number of spatial orbitals
:return:
basis-state as qubit state in the corresponding mapping
"""
"""Does a really lazy workaround ... but it works
:return: Hartree-Fock Reference as binary-number
Parameters
----------
reference_orbitals: list:
give list of doubly occupied orbitals
default is None which leads to automatic list of the
first n_electron/2 orbitals
Returns
-------
"""
# default is a lazy workaround, but it workds
n_qubits = 2 * self.n_orbitals
spin_orbitals = sorted([i for i,x in enumerate(state) if int(x)==1])
string = "1.0 ["
for i in spin_orbitals:
string += str(i) + "^ "
string += "]"
fop = openfermion.FermionOperator(string, 1.0)
op = self(fop)
from tequila.wavefunction.qubit_wavefunction import QubitWaveFunction
wfn = QubitWaveFunction.from_int(0, n_qubits=n_qubits)
wfn = wfn.apply_qubitoperator(operator=op)
assert (len(wfn.keys()) == 1)
key = list(wfn.keys())[0].array
return key
def hcb_to_me(self, *args, **kwargs):
return None
def __str__(self):
return type(self).__name__
class JordanWigner(EncodingBase):
"""
OpenFermion::jordan_wigner
"""
def do_transform(self, fermion_operator:openfermion.FermionOperator, *args, **kwargs) -> openfermion.QubitOperator:
return openfermion.jordan_wigner(fermion_operator, *args, **kwargs)
def map_state(self, state:list, *args, **kwargs):
state = state + [0]*(self.n_orbitals-len(state))
result = [0]*len(state)
if self.up_then_down:
return [state[2*i] for i in range(self.n_orbitals)] + [state[2*i+1] for i in range(self.n_orbitals)]
else:
return state
def hcb_to_me(self, *args, **kwargs):
U = QCircuit()
for i in range(self.n_orbitals):
U += X(target=self.down(i), control=self.up(i))
return U
class BravyiKitaev(EncodingBase):
"""
Uses OpenFermion::bravyi_kitaev
"""
def do_transform(self, fermion_operator:openfermion.FermionOperator, *args, **kwargs) -> openfermion.QubitOperator:
return openfermion.bravyi_kitaev(fermion_operator, n_qubits=self.n_orbitals*2)
class BravyiKitaevTree(EncodingBase):
"""
Uses OpenFermion::bravyi_kitaev_tree
"""
def do_transform(self, fermion_operator:openfermion.FermionOperator, *args, **kwargs) -> openfermion.QubitOperator:
return openfermion.bravyi_kitaev_tree(fermion_operator, n_qubits=self.n_orbitals*2)
class BravyiKitaevFast(EncodingBase):
"""
Uses OpenFermion::bravyi_kitaev_tree
"""
def do_transform(self, fermion_operator:openfermion.FermionOperator, *args, **kwargs) -> openfermion.QubitOperator:
n_qubits = openfermion.count_qubits(fermion_operator)
if n_qubits != self.n_orbitals*2:
raise Exception("BravyiKitaevFast transformation currently only possible for full Hamiltonians (no UCC generators).\nfermion_operator was {}".format(fermion_operator))
op = openfermion.get_interaction_operator(fermion_operator)
return openfermion.bravyi_kitaev_fast(op)
class TaperedBravyKitaev(EncodingBase):
"""
Uses OpenFermion::symmetry_conserving_bravyi_kitaev (tapered bravyi_kitaev_tree arxiv:1701.07072)
Reduces Hamiltonian by 2 qubits
See OpenFermion Documentation for more
Does not work for UCC generators yet
"""
def __init__(self, n_electrons, n_orbitals, active_fermions=None, active_orbitals=None, *args, **kwargs):
if active_fermions is None:
self.active_fermions = n_electrons
else:
self.active_fermions = active_fermions
if active_orbitals is None:
self.active_orbitals = n_orbitals*2 # in openfermion those are spin-orbitals
else:
self.active_orbitals = active_orbitals
if "up_then_down" in kwargs:
raise Exception("Don't pass up_then_down argument to {}, it can't be changed".format(type(self).__name__))
super().__init__(n_orbitals=n_orbitals, n_electrons=n_electrons, up_then_down=False, *args, **kwargs)
def do_transform(self, fermion_operator:openfermion.FermionOperator, *args, **kwargs) -> openfermion.QubitOperator:
if openfermion.count_qubits(fermion_operator) != self.n_orbitals*2:
raise Exception("TaperedBravyiKitaev not ready for UCC generators yet")
return openfermion.symmetry_conserving_bravyi_kitaev(fermion_operator, active_orbitals=self.active_orbitals, active_fermions=self.active_fermions)
def map_state(self, state:list, *args, **kwargs):
non_tapered_trafo = BravyiKitaevTree(up_then_down=True, n_electrons=self.n_electrons, n_orbitals=self.n_orbitals)
key = non_tapered_trafo.map_state(state=state, *args, **kwargs)
n_qubits = self.n_orbitals*2
active_qubits = [i for i in range(n_qubits) if i not in [n_qubits - 1, n_qubits // 2 - 1]]
key = [key[i] for i in active_qubits]
return key
|
[
"openfermion.get_interaction_operator",
"openfermion.FermionOperator",
"tequila.wavefunction.qubit_wavefunction.QubitWaveFunction.from_int",
"tequila.circuit.circuit.QCircuit",
"openfermion.bravyi_kitaev",
"openfermion.symmetry_conserving_bravyi_kitaev",
"openfermion.bravyi_kitaev_fast",
"openfermion.jordan_wigner",
"openfermion.count_qubits",
"openfermion.reorder",
"openfermion.bravyi_kitaev_tree",
"tequila.hamiltonian.qubit_hamiltonian.QubitHamiltonian.from_openfermion"
] |
[((3895, 3935), 'openfermion.FermionOperator', 'openfermion.FermionOperator', (['string', '(1.0)'], {}), '(string, 1.0)\n', (3922, 3935), False, 'import openfermion\n'), ((4051, 4099), 'tequila.wavefunction.qubit_wavefunction.QubitWaveFunction.from_int', 'QubitWaveFunction.from_int', (['(0)'], {'n_qubits': 'n_qubits'}), '(0, n_qubits=n_qubits)\n', (4077, 4099), False, 'from tequila.wavefunction.qubit_wavefunction import QubitWaveFunction\n'), ((4588, 4648), 'openfermion.jordan_wigner', 'openfermion.jordan_wigner', (['fermion_operator', '*args'], {}), '(fermion_operator, *args, **kwargs)\n', (4613, 4648), False, 'import openfermion\n'), ((5030, 5040), 'tequila.circuit.circuit.QCircuit', 'QCircuit', ([], {}), '()\n', (5038, 5040), False, 'from tequila.circuit.circuit import QCircuit\n'), ((5382, 5455), 'openfermion.bravyi_kitaev', 'openfermion.bravyi_kitaev', (['fermion_operator'], {'n_qubits': '(self.n_orbitals * 2)'}), '(fermion_operator, n_qubits=self.n_orbitals * 2)\n', (5407, 5455), False, 'import openfermion\n'), ((5687, 5765), 'openfermion.bravyi_kitaev_tree', 'openfermion.bravyi_kitaev_tree', (['fermion_operator'], {'n_qubits': '(self.n_orbitals * 2)'}), '(fermion_operator, n_qubits=self.n_orbitals * 2)\n', (5717, 5765), False, 'import openfermion\n'), ((6000, 6042), 'openfermion.count_qubits', 'openfermion.count_qubits', (['fermion_operator'], {}), '(fermion_operator)\n', (6024, 6042), False, 'import openfermion\n'), ((6278, 6332), 'openfermion.get_interaction_operator', 'openfermion.get_interaction_operator', (['fermion_operator'], {}), '(fermion_operator)\n', (6314, 6332), False, 'import openfermion\n'), ((6348, 6382), 'openfermion.bravyi_kitaev_fast', 'openfermion.bravyi_kitaev_fast', (['op'], {}), '(op)\n', (6378, 6382), False, 'import openfermion\n'), ((7674, 7817), 'openfermion.symmetry_conserving_bravyi_kitaev', 'openfermion.symmetry_conserving_bravyi_kitaev', (['fermion_operator'], {'active_orbitals': 'self.active_orbitals', 'active_fermions': 
'self.active_fermions'}), '(fermion_operator,\n active_orbitals=self.active_orbitals, active_fermions=self.active_fermions)\n', (7719, 7817), False, 'import openfermion\n'), ((1840, 1963), 'openfermion.reorder', 'openfermion.reorder', ([], {'operator': 'fermion_operator', 'order_function': 'openfermion.up_then_down', 'num_modes': '(2 * self.n_orbitals)'}), '(operator=fermion_operator, order_function=openfermion.\n up_then_down, num_modes=2 * self.n_orbitals)\n', (1859, 1963), False, 'import openfermion\n'), ((2135, 2173), 'tequila.hamiltonian.qubit_hamiltonian.QubitHamiltonian.from_openfermion', 'QubitHamiltonian.from_openfermion', (['fop'], {}), '(fop)\n', (2168, 2173), False, 'from tequila.hamiltonian.qubit_hamiltonian import QubitHamiltonian\n'), ((7510, 7552), 'openfermion.count_qubits', 'openfermion.count_qubits', (['fermion_operator'], {}), '(fermion_operator)\n', (7534, 7552), False, 'import openfermion\n')]
|
import math
s = input()
s1 = input().split()
s1_n = []
for i in range(0,int(s),1):
s1_n.append(int(s1[i]))
s1_n.sort(reverse=True)
gcd_now = s1_n[0]
for i in range(1, len(s1_n), 1):
gcd_now = math.gcd(gcd_now,s1_n[i])
print(gcd_now)
|
[
"math.gcd"
] |
[((204, 230), 'math.gcd', 'math.gcd', (['gcd_now', 's1_n[i]'], {}), '(gcd_now, s1_n[i])\n', (212, 230), False, 'import math\n')]
|
import glob
import os
import torch
from torch.utils.data import Dataset, DataLoader
import numpy as np
import matplotlib.image as mpimg
import pandas as pd
import cv2
class FacialKeypointsDataset(Dataset):
"""Face Landmarks dataset."""
def __init__(self, csv_file, root_dir, transform=None):
"""
Args:
csv_file (string): Path to the csv file with annotations.
root_dir (string): Directory with all the images.
transform (callable, optional): Optional transform to be applied
on a sample.
"""
self.key_pts_frame = pd.read_csv(csv_file)
self.root_dir = root_dir
self.transform = transform
def __len__(self):
return len(self.key_pts_frame)
def __getitem__(self, idx):
image_name = os.path.join(self.root_dir,
self.key_pts_frame.iloc[idx, 0])
image = mpimg.imread(image_name)
# if image has an alpha color channel, get rid of it
if(image.shape[2] == 4):
image = image[:,:,0:3]
key_pts = self.key_pts_frame.iloc[idx, 1:].as_matrix()
key_pts = key_pts.astype('float').reshape(-1, 2)
sample = {'image': image, 'keypoints': key_pts}
if self.transform:
sample = self.transform(sample)
return sample
# tranforms
import torch
from torchvision import transforms, utils
# tranforms
class GrayScale(object):
def __call__(self, sample):
image, keypoints = sample["image"], sample["keypoints"]
image_copy = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY).reshape(image.shape[0], image.shape[1], 1)
print(image_copy.shape)
return {"image": image_copy, "keypoints": keypoints}
class Normalize(object):
"""Convert a color image to grayscale and normalize the color range to [0,1]."""
def __call__(self, sample):
image, key_pts = sample['image'], sample['keypoints']
image_copy = np.copy(image)
key_pts_copy = np.copy(key_pts)
# convert image to grayscale
#image_copy = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
# scale color range from [0, 255] to [0, 1]
image_copy= image_copy/255.0
# scale keypoints to be centered around 0 with a range of [-1, 1]
# mean = 100, sqrt = 50, so, pts should be (pts - 100)/50
key_pts_copy = (key_pts_copy - 100)/50.0
return {'image': image_copy, 'keypoints': key_pts_copy}
class Rescale(object):
"""Rescale the image in a sample to a given size.
Args:
output_size (tuple or int): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same.
"""
def __init__(self, output_size):
assert isinstance(output_size, (int, tuple))
self.output_size = output_size
def __call__(self, sample):
image, key_pts = sample['image'], sample['keypoints']
h, w = image.shape[:2]
#keep the aspec ratio
if isinstance(self.output_size, int):
if h > w:
new_h, new_w = self.output_size * h / w, self.output_size
else:
new_h, new_w = self.output_size, self.output_size * w / h
else:
new_h, new_w = self.output_size
new_h, new_w = int(new_h), int(new_w)
img = cv2.resize(image, (new_w, new_h))
# scale the pts, too
key_pts = key_pts * [new_w / w, new_h / h]
return {'image': img, 'keypoints': key_pts}
class RandomCrop(object):
"""Crop randomly the image in a sample.
Args:
output_size (tuple or int): Desired output size. If int, square crop
is made.
"""
def __init__(self, output_size):
assert isinstance(output_size, (int, tuple))
if isinstance(output_size, int):
self.output_size = (output_size, output_size)
else:
assert len(output_size) == 2
self.output_size = output_size
def __call__(self, sample):
image, key_pts = sample['image'], sample['keypoints']
h, w = image.shape[:2]
new_h, new_w = self.output_size
top = np.random.randint(0, h - new_h)
left = np.random.randint(0, w - new_w)
image = image[top: top + new_h,
left: left + new_w]
key_pts = key_pts - [left, top]
return {'image': image, 'keypoints': key_pts}
class ToTensor(object):
"""Convert ndarrays in sample to Tensors."""
def __call__(self, sample):
image, key_pts = sample['image'], sample['keypoints']
# if image has no grayscale color channel, add one
if(len(image.shape) == 2):
# add that third color dim
image = image.reshape(image.shape[0], image.shape[1], 1)
# swap color axis because
# numpy image: H x W x C
# torch image: C X H X W
image = image.transpose((2, 0, 1))
return {'image': torch.from_numpy(image),
'keypoints': torch.from_numpy(key_pts)}
class RandomRotation(object):
"""Rotate randomly an image in a sample
Args:
min_rotation_angle (int)
max_rotation_angle (int)
"""
def __init__(self, range_angle=45, range_scale=0.1):
self.range_angle = range_angle
self.range_scale = range_scale
def __call__(self, sample):
image, keypoints = sample['image'], sample['keypoints']
random_angle = -self.range_angle + np.random.random()*2*self.range_angle
random_scale = 1-self.range_scale + np.random.random()*2*self.range_scale
(h, w) = image.shape[:2]
(cX, cY) = (w // 2, h // 2)
rotation = cv2.getRotationMatrix2D((cX, cY), random_angle, random_scale)
keypoints_copy = np.hstack([keypoints, np.ones((keypoints.shape[0], 1))])
#print("keypoints_shape : ", keypoints_copy.T)
keypoints_copy = np.matmul(rotation, keypoints_copy.T).T
rotated_image = cv2.warpAffine(image, rotation,(h, w))
print("rotated_image_shape : ", rotated_image.shape)
return {'image': rotated_image,
'keypoints': keypoints_copy}
class RandomHorizontalFlip(object):
"""Rotate randomly an image in a sample
Args:
"""
def __init__(self):
self.flip_indices = [(21, 22), (20, 23), (19, 24), (18, 25), (17, 26), #eye brow
(36, 45), (37, 44), (38, 43), (39, 42), (41, 46), (40, 47), # eyes
(0, 16), (1, 15), (2, 14), (3, 13), (4, 12), (5, 11), (6, 10), (7, 9), #chin
(48, 54), (49, 54), (50, 53), (58, 56), (59, 55), (60, 64), (61, 64), (67, 65), #mouth
(31, 35), (32, 34) # nose
]
def __call__(self, sample):
flip = np.random.random() > 0.5
image, keypoints = sample['image'], sample['keypoints']
keypoints_copy = keypoints[:,:]
(h, w) = image.shape[:2]
if flip:
# change the coordinates of the keypoints
keypoints_copy[:,0] = w - keypoints_copy[:,0]
#and inverse their position in keypoints as well
for i, j in self.flip_indices:
temp = [keypoints_copy[i,0],keypoints_copy[i,1]]
keypoints_copy[i,0] = keypoints_copy[j,0]
keypoints_copy[i,1] = keypoints_copy[j,1]
keypoints_copy[j,0] = temp[0]
keypoints_copy[j,1] = temp[1]
#flip the image
image = image[:,-1:0:-1,:]
return {'image': image,
'keypoints': keypoints_copy}
|
[
"matplotlib.image.imread",
"numpy.copy",
"pandas.read_csv",
"cv2.cvtColor",
"numpy.ones",
"cv2.warpAffine",
"numpy.random.randint",
"numpy.random.random",
"numpy.matmul",
"os.path.join",
"cv2.getRotationMatrix2D",
"cv2.resize",
"torch.from_numpy"
] |
[((608, 629), 'pandas.read_csv', 'pd.read_csv', (['csv_file'], {}), '(csv_file)\n', (619, 629), True, 'import pandas as pd\n'), ((815, 875), 'os.path.join', 'os.path.join', (['self.root_dir', 'self.key_pts_frame.iloc[idx, 0]'], {}), '(self.root_dir, self.key_pts_frame.iloc[idx, 0])\n', (827, 875), False, 'import os\n'), ((933, 957), 'matplotlib.image.imread', 'mpimg.imread', (['image_name'], {}), '(image_name)\n', (945, 957), True, 'import matplotlib.image as mpimg\n'), ((2062, 2076), 'numpy.copy', 'np.copy', (['image'], {}), '(image)\n', (2069, 2076), True, 'import numpy as np\n'), ((2100, 2116), 'numpy.copy', 'np.copy', (['key_pts'], {}), '(key_pts)\n', (2107, 2116), True, 'import numpy as np\n'), ((3533, 3566), 'cv2.resize', 'cv2.resize', (['image', '(new_w, new_h)'], {}), '(image, (new_w, new_h))\n', (3543, 3566), False, 'import cv2\n'), ((4368, 4399), 'numpy.random.randint', 'np.random.randint', (['(0)', '(h - new_h)'], {}), '(0, h - new_h)\n', (4385, 4399), True, 'import numpy as np\n'), ((4415, 4446), 'numpy.random.randint', 'np.random.randint', (['(0)', '(w - new_w)'], {}), '(0, w - new_w)\n', (4432, 4446), True, 'import numpy as np\n'), ((5962, 6023), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['(cX, cY)', 'random_angle', 'random_scale'], {}), '((cX, cY), random_angle, random_scale)\n', (5985, 6023), False, 'import cv2\n'), ((6253, 6292), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'rotation', '(h, w)'], {}), '(image, rotation, (h, w))\n', (6267, 6292), False, 'import cv2\n'), ((5198, 5221), 'torch.from_numpy', 'torch.from_numpy', (['image'], {}), '(image)\n', (5214, 5221), False, 'import torch\n'), ((5252, 5277), 'torch.from_numpy', 'torch.from_numpy', (['key_pts'], {}), '(key_pts)\n', (5268, 5277), False, 'import torch\n'), ((6187, 6224), 'numpy.matmul', 'np.matmul', (['rotation', 'keypoints_copy.T'], {}), '(rotation, keypoints_copy.T)\n', (6196, 6224), True, 'import numpy as np\n'), ((7105, 7123), 'numpy.random.random', 
'np.random.random', ([], {}), '()\n', (7121, 7123), True, 'import numpy as np\n'), ((1633, 1672), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_RGB2GRAY'], {}), '(image, cv2.COLOR_RGB2GRAY)\n', (1645, 1672), False, 'import cv2\n'), ((6072, 6104), 'numpy.ones', 'np.ones', (['(keypoints.shape[0], 1)'], {}), '((keypoints.shape[0], 1))\n', (6079, 6104), True, 'import numpy as np\n'), ((5752, 5770), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (5768, 5770), True, 'import numpy as np\n'), ((5834, 5852), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (5850, 5852), True, 'import numpy as np\n')]
|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from smartfields import fields
from django.utils import timezone
from django.conf import settings
# Create your models here.
class Hood(models.Model):
name = models.CharField(max_length=40, null=True)
image = models.ImageField(upload_to = 'images/')
description = models.TextField(null=True)
link = models.URLField()
user = models.ForeignKey(User, null=True, on_delete = models.CASCADE)
location = models.CharField(max_length = 40, null = True)
occupants = models.IntegerField(null=True)
objects = models.Manager()
def __str__(self):
return self.name
def save_hood(self):
return self.save()
def delete_hood(self):
return self.delete()
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
dob = models.DateTimeField(blank=True, null = True)
bio = models.CharField(max_length = 250, null=True)
avatar = fields.ImageField(upload_to = 'uploads/')
hood = models.CharField(max_length = 30, blank=True, null=True)
hobby = models.CharField(max_length = 200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
|
[
"django.db.models.TextField",
"django.db.models.URLField",
"django.db.models.OneToOneField",
"smartfields.fields.ImageField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.Manager",
"django.db.models.ImageField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField",
"django.db.models.signals.post_save.connect"
] |
[((284, 326), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'null': '(True)'}), '(max_length=40, null=True)\n', (300, 326), False, 'from django.db import models\n'), ((339, 377), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""images/"""'}), "(upload_to='images/')\n", (356, 377), False, 'from django.db import models\n'), ((398, 425), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (414, 425), False, 'from django.db import models\n'), ((437, 454), 'django.db.models.URLField', 'models.URLField', ([], {}), '()\n', (452, 454), False, 'from django.db import models\n'), ((466, 526), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'null': '(True)', 'on_delete': 'models.CASCADE'}), '(User, null=True, on_delete=models.CASCADE)\n', (483, 526), False, 'from django.db import models\n'), ((544, 586), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'null': '(True)'}), '(max_length=40, null=True)\n', (560, 586), False, 'from django.db import models\n'), ((607, 637), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (626, 637), False, 'from django.db import models\n'), ((652, 668), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (666, 668), False, 'from django.db import models\n'), ((871, 917), 'django.db.models.OneToOneField', 'models.OneToOneField', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (891, 917), False, 'from django.db import models\n'), ((928, 971), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (948, 971), False, 'from django.db import models\n'), ((984, 1027), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'null': '(True)'}), '(max_length=250, null=True)\n', (1000, 1027), False, 'from django.db import models\n'), 
((1043, 1082), 'smartfields.fields.ImageField', 'fields.ImageField', ([], {'upload_to': '"""uploads/"""'}), "(upload_to='uploads/')\n", (1060, 1082), False, 'from smartfields import fields\n'), ((1096, 1150), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'blank': '(True)', 'null': '(True)'}), '(max_length=30, blank=True, null=True)\n', (1112, 1150), False, 'from django.db import models\n'), ((1165, 1220), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)', 'null': '(True)'}), '(max_length=200, blank=True, null=True)\n', (1181, 1220), False, 'from django.db import models\n'), ((1376, 1427), 'django.db.models.signals.post_save.connect', 'post_save.connect', (['create_user_profile'], {'sender': 'User'}), '(create_user_profile, sender=User)\n', (1393, 1427), False, 'from django.db.models.signals import post_save\n')]
|
# Generated by Django 3.1.3 on 2021-03-29 07:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('social_media_api', '0005_auto_20210329_0621'),
]
operations = [
migrations.CreateModel(
name='CanadaPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='canada', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Canada Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='ChinaPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='China', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'China Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='ConflictNewsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='ConflictNews', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'ConflictNews Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='ConservativePosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='Conservative', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Conservative Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='CryptoCurrencyPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='HongKong', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'CryptoCurrency Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='DemocratsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='democrats', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Democrats Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='EconomicsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='Economics', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Economics Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='EnergyPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='energy', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Energy Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='HongKongPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='HongKong', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'HongKong Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='IndiaPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='india', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'India Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='IsraelPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='Israel', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Israel Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='JapanPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='japan', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Japan Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='KoreaPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='korea', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Korea Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='LiberalPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='Liberal', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Liberal Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='LibertarianPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='Libertarian', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Libertarian Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='MalaysiaPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='malaysia', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Malaysia Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='MiddleEastNewsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='MiddleEastNews', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'MiddleEastNews Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='NeutralPoliticsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='NeutralPolitics', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'NeutralPolitics Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='NewsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='news', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'News Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='NorthKoreaNewsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='NorthKoreaNews', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'NorthKoreaNews Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='NorthKoreaPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='northkorea', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'North Korea Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='PakistanPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='pakistan', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Pakistan Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='PalestinePosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='Palestine', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Palestine Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='PoliticsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='politics', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Politics Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='ProgressivePosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='progressive', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Progressive Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='RealTechPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='realtech', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Realtech Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='SingaporePosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='singapore', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Singapore Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='SocialismPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='socialism', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Socialism Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='SpacePosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='HongKong', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Space Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='TaiwanPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='taiwan', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Taiwan Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='TechnologyPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='technology', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Technology Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='TechPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='tech', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Tech Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='ThailandPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='Thailand', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'Thailand Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.CreateModel(
name='UKPoliticsPosts',
fields=[
('id', models.CharField(db_index=True, max_length=20, primary_key=True, serialize=False)),
('title', models.CharField(max_length=300)),
('content', models.TextField(null=True)),
('upvote_ratio', models.FloatField(null=True)),
('score', models.IntegerField(null=True)),
('num_comments', models.IntegerField(null=True)),
('created_on', models.DateTimeField()),
('stickied', models.BooleanField(null=True)),
('over_18', models.BooleanField(null=True)),
('spoiler', models.BooleanField(null=True)),
('author_is_gold', models.BooleanField(null=True)),
('author_mod', models.BooleanField(null=True)),
('author_has_verified_email', models.BooleanField(null=True)),
('permalink', models.CharField(max_length=300, null=True)),
('author', models.CharField(max_length=300)),
('author_created', models.DateTimeField()),
('comment_karma', models.IntegerField(null=True)),
('subreddit', models.CharField(default='ukpolitics', editable=False, max_length=200)),
],
options={
'verbose_name_plural': 'UK Politics Subreddit Posts',
'ordering': ['created_on'],
'abstract': False,
},
),
migrations.RenameModel(
old_name='SubredditPostModel',
new_name='WorldNewsPosts',
),
migrations.AlterModelOptions(
name='worldnewsposts',
options={'ordering': ['created_on'], 'verbose_name_plural': 'World News Subreddit Posts'},
),
]
|
[
"django.db.models.TextField",
"django.db.migrations.RenameModel",
"django.db.models.CharField",
"django.db.models.FloatField",
"django.db.models.BooleanField",
"django.db.models.IntegerField",
"django.db.migrations.AlterModelOptions",
"django.db.models.DateTimeField"
] |
[((52249, 52334), 'django.db.migrations.RenameModel', 'migrations.RenameModel', ([], {'old_name': '"""SubredditPostModel"""', 'new_name': '"""WorldNewsPosts"""'}), "(old_name='SubredditPostModel', new_name='WorldNewsPosts'\n )\n", (52271, 52334), False, 'from django.db import migrations, models\n'), ((52374, 52521), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""worldnewsposts"""', 'options': "{'ordering': ['created_on'], 'verbose_name_plural':\n 'World News Subreddit Posts'}"}), "(name='worldnewsposts', options={'ordering': [\n 'created_on'], 'verbose_name_plural': 'World News Subreddit Posts'})\n", (52402, 52521), False, 'from django.db import migrations, models\n'), ((344, 430), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (360, 430), False, 'from django.db import migrations, models\n'), ((454, 486), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (470, 486), False, 'from django.db import migrations, models\n'), ((517, 544), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (533, 544), False, 'from django.db import migrations, models\n'), ((580, 608), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (597, 608), False, 'from django.db import migrations, models\n'), ((637, 667), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (656, 667), False, 'from django.db import migrations, models\n'), ((703, 733), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (722, 733), False, 'from django.db import migrations, models\n'), ((767, 789), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', 
(787, 789), False, 'from django.db import migrations, models\n'), ((821, 851), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (840, 851), False, 'from django.db import migrations, models\n'), ((882, 912), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (901, 912), False, 'from django.db import migrations, models\n'), ((943, 973), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (962, 973), False, 'from django.db import migrations, models\n'), ((1011, 1041), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (1030, 1041), False, 'from django.db import migrations, models\n'), ((1075, 1105), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (1094, 1105), False, 'from django.db import migrations, models\n'), ((1154, 1184), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (1173, 1184), False, 'from django.db import migrations, models\n'), ((1217, 1260), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (1233, 1260), False, 'from django.db import migrations, models\n'), ((1290, 1322), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (1306, 1322), False, 'from django.db import migrations, models\n'), ((1360, 1382), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (1380, 1382), False, 'from django.db import migrations, models\n'), ((1419, 1449), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (1438, 1449), False, 'from django.db import migrations, models\n'), ((1482, 1548), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""canada"""', 'editable': 
'(False)', 'max_length': '(200)'}), "(default='canada', editable=False, max_length=200)\n", (1498, 1548), False, 'from django.db import migrations, models\n'), ((1865, 1951), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (1881, 1951), False, 'from django.db import migrations, models\n'), ((1975, 2007), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (1991, 2007), False, 'from django.db import migrations, models\n'), ((2038, 2065), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (2054, 2065), False, 'from django.db import migrations, models\n'), ((2101, 2129), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (2118, 2129), False, 'from django.db import migrations, models\n'), ((2158, 2188), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2177, 2188), False, 'from django.db import migrations, models\n'), ((2224, 2254), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2243, 2254), False, 'from django.db import migrations, models\n'), ((2288, 2310), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (2308, 2310), False, 'from django.db import migrations, models\n'), ((2342, 2372), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (2361, 2372), False, 'from django.db import migrations, models\n'), ((2403, 2433), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (2422, 2433), False, 'from django.db import migrations, models\n'), ((2464, 2494), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), 
'(null=True)\n', (2483, 2494), False, 'from django.db import migrations, models\n'), ((2532, 2562), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (2551, 2562), False, 'from django.db import migrations, models\n'), ((2596, 2626), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (2615, 2626), False, 'from django.db import migrations, models\n'), ((2675, 2705), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (2694, 2705), False, 'from django.db import migrations, models\n'), ((2738, 2781), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (2754, 2781), False, 'from django.db import migrations, models\n'), ((2811, 2843), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (2827, 2843), False, 'from django.db import migrations, models\n'), ((2881, 2903), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (2901, 2903), False, 'from django.db import migrations, models\n'), ((2940, 2970), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2959, 2970), False, 'from django.db import migrations, models\n'), ((3003, 3068), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""China"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='China', editable=False, max_length=200)\n", (3019, 3068), False, 'from django.db import migrations, models\n'), ((3391, 3477), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (3407, 3477), False, 'from django.db import migrations, models\n'), ((3501, 3533), 'django.db.models.CharField', 
'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (3517, 3533), False, 'from django.db import migrations, models\n'), ((3564, 3591), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (3580, 3591), False, 'from django.db import migrations, models\n'), ((3627, 3655), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (3644, 3655), False, 'from django.db import migrations, models\n'), ((3684, 3714), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (3703, 3714), False, 'from django.db import migrations, models\n'), ((3750, 3780), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (3769, 3780), False, 'from django.db import migrations, models\n'), ((3814, 3836), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (3834, 3836), False, 'from django.db import migrations, models\n'), ((3868, 3898), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (3887, 3898), False, 'from django.db import migrations, models\n'), ((3929, 3959), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (3948, 3959), False, 'from django.db import migrations, models\n'), ((3990, 4020), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (4009, 4020), False, 'from django.db import migrations, models\n'), ((4058, 4088), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (4077, 4088), False, 'from django.db import migrations, models\n'), ((4122, 4152), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (4141, 4152), False, 'from django.db import migrations, models\n'), ((4201, 4231), 'django.db.models.BooleanField', 'models.BooleanField', 
([], {'null': '(True)'}), '(null=True)\n', (4220, 4231), False, 'from django.db import migrations, models\n'), ((4264, 4307), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (4280, 4307), False, 'from django.db import migrations, models\n'), ((4337, 4369), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (4353, 4369), False, 'from django.db import migrations, models\n'), ((4407, 4429), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (4427, 4429), False, 'from django.db import migrations, models\n'), ((4466, 4496), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (4485, 4496), False, 'from django.db import migrations, models\n'), ((4529, 4601), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""ConflictNews"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='ConflictNews', editable=False, max_length=200)\n", (4545, 4601), False, 'from django.db import migrations, models\n'), ((4931, 5017), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (4947, 5017), False, 'from django.db import migrations, models\n'), ((5041, 5073), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (5057, 5073), False, 'from django.db import migrations, models\n'), ((5104, 5131), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (5120, 5131), False, 'from django.db import migrations, models\n'), ((5167, 5195), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (5184, 5195), False, 'from django.db import migrations, models\n'), ((5224, 5254), 
'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (5243, 5254), False, 'from django.db import migrations, models\n'), ((5290, 5320), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (5309, 5320), False, 'from django.db import migrations, models\n'), ((5354, 5376), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (5374, 5376), False, 'from django.db import migrations, models\n'), ((5408, 5438), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (5427, 5438), False, 'from django.db import migrations, models\n'), ((5469, 5499), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (5488, 5499), False, 'from django.db import migrations, models\n'), ((5530, 5560), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (5549, 5560), False, 'from django.db import migrations, models\n'), ((5598, 5628), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (5617, 5628), False, 'from django.db import migrations, models\n'), ((5662, 5692), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (5681, 5692), False, 'from django.db import migrations, models\n'), ((5741, 5771), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (5760, 5771), False, 'from django.db import migrations, models\n'), ((5804, 5847), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (5820, 5847), False, 'from django.db import migrations, models\n'), ((5877, 5909), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (5893, 5909), False, 'from django.db import migrations, 
models\n'), ((5947, 5969), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (5967, 5969), False, 'from django.db import migrations, models\n'), ((6006, 6036), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (6025, 6036), False, 'from django.db import migrations, models\n'), ((6069, 6141), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Conservative"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='Conservative', editable=False, max_length=200)\n", (6085, 6141), False, 'from django.db import migrations, models\n'), ((6473, 6559), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (6489, 6559), False, 'from django.db import migrations, models\n'), ((6583, 6615), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (6599, 6615), False, 'from django.db import migrations, models\n'), ((6646, 6673), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (6662, 6673), False, 'from django.db import migrations, models\n'), ((6709, 6737), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (6726, 6737), False, 'from django.db import migrations, models\n'), ((6766, 6796), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (6785, 6796), False, 'from django.db import migrations, models\n'), ((6832, 6862), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (6851, 6862), False, 'from django.db import migrations, models\n'), ((6896, 6918), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (6916, 6918), False, 'from django.db import migrations, 
models\n'), ((6950, 6980), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (6969, 6980), False, 'from django.db import migrations, models\n'), ((7011, 7041), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (7030, 7041), False, 'from django.db import migrations, models\n'), ((7072, 7102), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (7091, 7102), False, 'from django.db import migrations, models\n'), ((7140, 7170), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (7159, 7170), False, 'from django.db import migrations, models\n'), ((7204, 7234), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (7223, 7234), False, 'from django.db import migrations, models\n'), ((7283, 7313), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (7302, 7313), False, 'from django.db import migrations, models\n'), ((7346, 7389), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (7362, 7389), False, 'from django.db import migrations, models\n'), ((7419, 7451), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (7435, 7451), False, 'from django.db import migrations, models\n'), ((7489, 7511), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (7509, 7511), False, 'from django.db import migrations, models\n'), ((7548, 7578), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (7567, 7578), False, 'from django.db import migrations, models\n'), ((7611, 7679), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""HongKong"""', 'editable': '(False)', 'max_length': '(200)'}), 
"(default='HongKong', editable=False, max_length=200)\n", (7627, 7679), False, 'from django.db import migrations, models\n'), ((8008, 8094), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (8024, 8094), False, 'from django.db import migrations, models\n'), ((8118, 8150), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (8134, 8150), False, 'from django.db import migrations, models\n'), ((8181, 8208), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (8197, 8208), False, 'from django.db import migrations, models\n'), ((8244, 8272), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (8261, 8272), False, 'from django.db import migrations, models\n'), ((8301, 8331), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8320, 8331), False, 'from django.db import migrations, models\n'), ((8367, 8397), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (8386, 8397), False, 'from django.db import migrations, models\n'), ((8431, 8453), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (8451, 8453), False, 'from django.db import migrations, models\n'), ((8485, 8515), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (8504, 8515), False, 'from django.db import migrations, models\n'), ((8546, 8576), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (8565, 8576), False, 'from django.db import migrations, models\n'), ((8607, 8637), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (8626, 8637), False, 
'from django.db import migrations, models\n'), ((8675, 8705), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (8694, 8705), False, 'from django.db import migrations, models\n'), ((8739, 8769), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (8758, 8769), False, 'from django.db import migrations, models\n'), ((8818, 8848), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (8837, 8848), False, 'from django.db import migrations, models\n'), ((8881, 8924), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (8897, 8924), False, 'from django.db import migrations, models\n'), ((8954, 8986), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (8970, 8986), False, 'from django.db import migrations, models\n'), ((9024, 9046), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (9044, 9046), False, 'from django.db import migrations, models\n'), ((9083, 9113), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (9102, 9113), False, 'from django.db import migrations, models\n'), ((9146, 9215), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""democrats"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='democrats', editable=False, max_length=200)\n", (9162, 9215), False, 'from django.db import migrations, models\n'), ((9539, 9625), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (9555, 9625), False, 'from django.db import migrations, models\n'), ((9649, 9681), 'django.db.models.CharField', 'models.CharField', ([], 
{'max_length': '(300)'}), '(max_length=300)\n', (9665, 9681), False, 'from django.db import migrations, models\n'), ((9712, 9739), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (9728, 9739), False, 'from django.db import migrations, models\n'), ((9775, 9803), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (9792, 9803), False, 'from django.db import migrations, models\n'), ((9832, 9862), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (9851, 9862), False, 'from django.db import migrations, models\n'), ((9898, 9928), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (9917, 9928), False, 'from django.db import migrations, models\n'), ((9962, 9984), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (9982, 9984), False, 'from django.db import migrations, models\n'), ((10016, 10046), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (10035, 10046), False, 'from django.db import migrations, models\n'), ((10077, 10107), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (10096, 10107), False, 'from django.db import migrations, models\n'), ((10138, 10168), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (10157, 10168), False, 'from django.db import migrations, models\n'), ((10206, 10236), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (10225, 10236), False, 'from django.db import migrations, models\n'), ((10270, 10300), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (10289, 10300), False, 'from django.db import migrations, models\n'), ((10349, 10379), 'django.db.models.BooleanField', 'models.BooleanField', 
([], {'null': '(True)'}), '(null=True)\n', (10368, 10379), False, 'from django.db import migrations, models\n'), ((10412, 10455), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (10428, 10455), False, 'from django.db import migrations, models\n'), ((10485, 10517), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (10501, 10517), False, 'from django.db import migrations, models\n'), ((10555, 10577), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (10575, 10577), False, 'from django.db import migrations, models\n'), ((10614, 10644), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (10633, 10644), False, 'from django.db import migrations, models\n'), ((10677, 10746), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Economics"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='Economics', editable=False, max_length=200)\n", (10693, 10746), False, 'from django.db import migrations, models\n'), ((11067, 11153), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (11083, 11153), False, 'from django.db import migrations, models\n'), ((11177, 11209), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (11193, 11209), False, 'from django.db import migrations, models\n'), ((11240, 11267), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (11256, 11267), False, 'from django.db import migrations, models\n'), ((11303, 11331), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (11320, 11331), False, 'from django.db import 
migrations, models\n'), ((11360, 11390), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (11379, 11390), False, 'from django.db import migrations, models\n'), ((11426, 11456), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (11445, 11456), False, 'from django.db import migrations, models\n'), ((11490, 11512), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (11510, 11512), False, 'from django.db import migrations, models\n'), ((11544, 11574), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (11563, 11574), False, 'from django.db import migrations, models\n'), ((11605, 11635), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (11624, 11635), False, 'from django.db import migrations, models\n'), ((11666, 11696), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (11685, 11696), False, 'from django.db import migrations, models\n'), ((11734, 11764), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (11753, 11764), False, 'from django.db import migrations, models\n'), ((11798, 11828), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (11817, 11828), False, 'from django.db import migrations, models\n'), ((11877, 11907), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (11896, 11907), False, 'from django.db import migrations, models\n'), ((11940, 11983), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (11956, 11983), False, 'from django.db import migrations, models\n'), ((12013, 12045), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), 
'(max_length=300)\n', (12029, 12045), False, 'from django.db import migrations, models\n'), ((12083, 12105), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (12103, 12105), False, 'from django.db import migrations, models\n'), ((12142, 12172), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (12161, 12172), False, 'from django.db import migrations, models\n'), ((12205, 12271), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""energy"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='energy', editable=False, max_length=200)\n", (12221, 12271), False, 'from django.db import migrations, models\n'), ((12591, 12677), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (12607, 12677), False, 'from django.db import migrations, models\n'), ((12701, 12733), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (12717, 12733), False, 'from django.db import migrations, models\n'), ((12764, 12791), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (12780, 12791), False, 'from django.db import migrations, models\n'), ((12827, 12855), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (12844, 12855), False, 'from django.db import migrations, models\n'), ((12884, 12914), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (12903, 12914), False, 'from django.db import migrations, models\n'), ((12950, 12980), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (12969, 12980), False, 'from django.db import migrations, models\n'), ((13014, 13036), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (13034, 13036), False, 'from django.db import migrations, models\n'), ((13068, 13098), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (13087, 13098), False, 'from django.db import migrations, models\n'), ((13129, 13159), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (13148, 13159), False, 'from django.db import migrations, models\n'), ((13190, 13220), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (13209, 13220), False, 'from django.db import migrations, models\n'), ((13258, 13288), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (13277, 13288), False, 'from django.db import migrations, models\n'), ((13322, 13352), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (13341, 13352), False, 'from django.db import migrations, models\n'), ((13401, 13431), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (13420, 13431), False, 'from django.db import migrations, models\n'), ((13464, 13507), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (13480, 13507), False, 'from django.db import migrations, models\n'), ((13537, 13569), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (13553, 13569), False, 'from django.db import migrations, models\n'), ((13607, 13629), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (13627, 13629), False, 'from django.db import migrations, models\n'), ((13666, 13696), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (13685, 13696), False, 'from django.db import 
migrations, models\n'), ((13729, 13797), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""HongKong"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='HongKong', editable=False, max_length=200)\n", (13745, 13797), False, 'from django.db import migrations, models\n'), ((14116, 14202), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (14132, 14202), False, 'from django.db import migrations, models\n'), ((14226, 14258), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (14242, 14258), False, 'from django.db import migrations, models\n'), ((14289, 14316), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (14305, 14316), False, 'from django.db import migrations, models\n'), ((14352, 14380), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (14369, 14380), False, 'from django.db import migrations, models\n'), ((14409, 14439), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (14428, 14439), False, 'from django.db import migrations, models\n'), ((14475, 14505), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (14494, 14505), False, 'from django.db import migrations, models\n'), ((14539, 14561), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (14559, 14561), False, 'from django.db import migrations, models\n'), ((14593, 14623), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (14612, 14623), False, 'from django.db import migrations, models\n'), ((14654, 14684), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), 
'(null=True)\n', (14673, 14684), False, 'from django.db import migrations, models\n'), ((14715, 14745), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (14734, 14745), False, 'from django.db import migrations, models\n'), ((14783, 14813), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (14802, 14813), False, 'from django.db import migrations, models\n'), ((14847, 14877), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (14866, 14877), False, 'from django.db import migrations, models\n'), ((14926, 14956), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (14945, 14956), False, 'from django.db import migrations, models\n'), ((14989, 15032), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (15005, 15032), False, 'from django.db import migrations, models\n'), ((15062, 15094), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (15078, 15094), False, 'from django.db import migrations, models\n'), ((15132, 15154), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (15152, 15154), False, 'from django.db import migrations, models\n'), ((15191, 15221), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (15210, 15221), False, 'from django.db import migrations, models\n'), ((15254, 15319), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""india"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='india', editable=False, max_length=200)\n", (15270, 15319), False, 'from django.db import migrations, models\n'), ((15636, 15722), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': 
'(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (15652, 15722), False, 'from django.db import migrations, models\n'), ((15746, 15778), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (15762, 15778), False, 'from django.db import migrations, models\n'), ((15809, 15836), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (15825, 15836), False, 'from django.db import migrations, models\n'), ((15872, 15900), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (15889, 15900), False, 'from django.db import migrations, models\n'), ((15929, 15959), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (15948, 15959), False, 'from django.db import migrations, models\n'), ((15995, 16025), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (16014, 16025), False, 'from django.db import migrations, models\n'), ((16059, 16081), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (16079, 16081), False, 'from django.db import migrations, models\n'), ((16113, 16143), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (16132, 16143), False, 'from django.db import migrations, models\n'), ((16174, 16204), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (16193, 16204), False, 'from django.db import migrations, models\n'), ((16235, 16265), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (16254, 16265), False, 'from django.db import migrations, models\n'), ((16303, 16333), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (16322, 16333), False, 'from django.db import migrations, 
models\n'), ((16367, 16397), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (16386, 16397), False, 'from django.db import migrations, models\n'), ((16446, 16476), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (16465, 16476), False, 'from django.db import migrations, models\n'), ((16509, 16552), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (16525, 16552), False, 'from django.db import migrations, models\n'), ((16582, 16614), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (16598, 16614), False, 'from django.db import migrations, models\n'), ((16652, 16674), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (16672, 16674), False, 'from django.db import migrations, models\n'), ((16711, 16741), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (16730, 16741), False, 'from django.db import migrations, models\n'), ((16774, 16840), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Israel"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='Israel', editable=False, max_length=200)\n", (16790, 16840), False, 'from django.db import migrations, models\n'), ((17157, 17243), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (17173, 17243), False, 'from django.db import migrations, models\n'), ((17267, 17299), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (17283, 17299), False, 'from django.db import migrations, models\n'), ((17330, 17357), 'django.db.models.TextField', 'models.TextField', ([], {'null': 
'(True)'}), '(null=True)\n', (17346, 17357), False, 'from django.db import migrations, models\n'), ((17393, 17421), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (17410, 17421), False, 'from django.db import migrations, models\n'), ((17450, 17480), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (17469, 17480), False, 'from django.db import migrations, models\n'), ((17516, 17546), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (17535, 17546), False, 'from django.db import migrations, models\n'), ((17580, 17602), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (17600, 17602), False, 'from django.db import migrations, models\n'), ((17634, 17664), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (17653, 17664), False, 'from django.db import migrations, models\n'), ((17695, 17725), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (17714, 17725), False, 'from django.db import migrations, models\n'), ((17756, 17786), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (17775, 17786), False, 'from django.db import migrations, models\n'), ((17824, 17854), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (17843, 17854), False, 'from django.db import migrations, models\n'), ((17888, 17918), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (17907, 17918), False, 'from django.db import migrations, models\n'), ((17967, 17997), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (17986, 17997), False, 'from django.db import migrations, models\n'), ((18030, 18073), 'django.db.models.CharField', 'models.CharField', 
([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (18046, 18073), False, 'from django.db import migrations, models\n'), ((18103, 18135), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (18119, 18135), False, 'from django.db import migrations, models\n'), ((18173, 18195), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (18193, 18195), False, 'from django.db import migrations, models\n'), ((18232, 18262), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (18251, 18262), False, 'from django.db import migrations, models\n'), ((18295, 18360), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""japan"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='japan', editable=False, max_length=200)\n", (18311, 18360), False, 'from django.db import migrations, models\n'), ((18676, 18762), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (18692, 18762), False, 'from django.db import migrations, models\n'), ((18786, 18818), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (18802, 18818), False, 'from django.db import migrations, models\n'), ((18849, 18876), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (18865, 18876), False, 'from django.db import migrations, models\n'), ((18912, 18940), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (18929, 18940), False, 'from django.db import migrations, models\n'), ((18969, 18999), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (18988, 18999), False, 'from django.db import 
migrations, models\n'), ((19035, 19065), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (19054, 19065), False, 'from django.db import migrations, models\n'), ((19099, 19121), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (19119, 19121), False, 'from django.db import migrations, models\n'), ((19153, 19183), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (19172, 19183), False, 'from django.db import migrations, models\n'), ((19214, 19244), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (19233, 19244), False, 'from django.db import migrations, models\n'), ((19275, 19305), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (19294, 19305), False, 'from django.db import migrations, models\n'), ((19343, 19373), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (19362, 19373), False, 'from django.db import migrations, models\n'), ((19407, 19437), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (19426, 19437), False, 'from django.db import migrations, models\n'), ((19486, 19516), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (19505, 19516), False, 'from django.db import migrations, models\n'), ((19549, 19592), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (19565, 19592), False, 'from django.db import migrations, models\n'), ((19622, 19654), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (19638, 19654), False, 'from django.db import migrations, models\n'), ((19692, 19714), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (19712, 
19714), False, 'from django.db import migrations, models\n'), ((19751, 19781), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (19770, 19781), False, 'from django.db import migrations, models\n'), ((19814, 19879), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""korea"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='korea', editable=False, max_length=200)\n", (19830, 19879), False, 'from django.db import migrations, models\n'), ((20197, 20283), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (20213, 20283), False, 'from django.db import migrations, models\n'), ((20307, 20339), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (20323, 20339), False, 'from django.db import migrations, models\n'), ((20370, 20397), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (20386, 20397), False, 'from django.db import migrations, models\n'), ((20433, 20461), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (20450, 20461), False, 'from django.db import migrations, models\n'), ((20490, 20520), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (20509, 20520), False, 'from django.db import migrations, models\n'), ((20556, 20586), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (20575, 20586), False, 'from django.db import migrations, models\n'), ((20620, 20642), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (20640, 20642), False, 'from django.db import migrations, models\n'), ((20674, 20704), 'django.db.models.BooleanField', 'models.BooleanField', ([], 
{'null': '(True)'}), '(null=True)\n', (20693, 20704), False, 'from django.db import migrations, models\n'), ((20735, 20765), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (20754, 20765), False, 'from django.db import migrations, models\n'), ((20796, 20826), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (20815, 20826), False, 'from django.db import migrations, models\n'), ((20864, 20894), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (20883, 20894), False, 'from django.db import migrations, models\n'), ((20928, 20958), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (20947, 20958), False, 'from django.db import migrations, models\n'), ((21007, 21037), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (21026, 21037), False, 'from django.db import migrations, models\n'), ((21070, 21113), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (21086, 21113), False, 'from django.db import migrations, models\n'), ((21143, 21175), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (21159, 21175), False, 'from django.db import migrations, models\n'), ((21213, 21235), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (21233, 21235), False, 'from django.db import migrations, models\n'), ((21272, 21302), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (21291, 21302), False, 'from django.db import migrations, models\n'), ((21335, 21402), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Liberal"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='Liberal', editable=False, 
max_length=200)\n", (21351, 21402), False, 'from django.db import migrations, models\n'), ((21726, 21812), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (21742, 21812), False, 'from django.db import migrations, models\n'), ((21836, 21868), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (21852, 21868), False, 'from django.db import migrations, models\n'), ((21899, 21926), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (21915, 21926), False, 'from django.db import migrations, models\n'), ((21962, 21990), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (21979, 21990), False, 'from django.db import migrations, models\n'), ((22019, 22049), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (22038, 22049), False, 'from django.db import migrations, models\n'), ((22085, 22115), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (22104, 22115), False, 'from django.db import migrations, models\n'), ((22149, 22171), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (22169, 22171), False, 'from django.db import migrations, models\n'), ((22203, 22233), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (22222, 22233), False, 'from django.db import migrations, models\n'), ((22264, 22294), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (22283, 22294), False, 'from django.db import migrations, models\n'), ((22325, 22355), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (22344, 22355), False, 
'from django.db import migrations, models\n'), ((22393, 22423), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (22412, 22423), False, 'from django.db import migrations, models\n'), ((22457, 22487), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (22476, 22487), False, 'from django.db import migrations, models\n'), ((22536, 22566), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (22555, 22566), False, 'from django.db import migrations, models\n'), ((22599, 22642), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (22615, 22642), False, 'from django.db import migrations, models\n'), ((22672, 22704), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (22688, 22704), False, 'from django.db import migrations, models\n'), ((22742, 22764), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (22762, 22764), False, 'from django.db import migrations, models\n'), ((22801, 22831), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (22820, 22831), False, 'from django.db import migrations, models\n'), ((22864, 22935), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Libertarian"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='Libertarian', editable=False, max_length=200)\n", (22880, 22935), False, 'from django.db import migrations, models\n'), ((23260, 23346), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (23276, 23346), False, 'from django.db import migrations, models\n'), ((23370, 23402), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (23386, 23402), False, 'from django.db import migrations, models\n'), ((23433, 23460), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (23449, 23460), False, 'from django.db import migrations, models\n'), ((23496, 23524), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (23513, 23524), False, 'from django.db import migrations, models\n'), ((23553, 23583), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (23572, 23583), False, 'from django.db import migrations, models\n'), ((23619, 23649), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (23638, 23649), False, 'from django.db import migrations, models\n'), ((23683, 23705), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (23703, 23705), False, 'from django.db import migrations, models\n'), ((23737, 23767), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (23756, 23767), False, 'from django.db import migrations, models\n'), ((23798, 23828), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (23817, 23828), False, 'from django.db import migrations, models\n'), ((23859, 23889), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (23878, 23889), False, 'from django.db import migrations, models\n'), ((23927, 23957), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (23946, 23957), False, 'from django.db import migrations, models\n'), ((23991, 24021), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (24010, 24021), False, 'from django.db import migrations, models\n'), 
((24070, 24100), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (24089, 24100), False, 'from django.db import migrations, models\n'), ((24133, 24176), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (24149, 24176), False, 'from django.db import migrations, models\n'), ((24206, 24238), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (24222, 24238), False, 'from django.db import migrations, models\n'), ((24276, 24298), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (24296, 24298), False, 'from django.db import migrations, models\n'), ((24335, 24365), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (24354, 24365), False, 'from django.db import migrations, models\n'), ((24398, 24466), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""malaysia"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='malaysia', editable=False, max_length=200)\n", (24414, 24466), False, 'from django.db import migrations, models\n'), ((24794, 24880), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (24810, 24880), False, 'from django.db import migrations, models\n'), ((24904, 24936), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (24920, 24936), False, 'from django.db import migrations, models\n'), ((24967, 24994), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (24983, 24994), False, 'from django.db import migrations, models\n'), ((25030, 25058), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), 
'(null=True)\n', (25047, 25058), False, 'from django.db import migrations, models\n'), ((25087, 25117), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (25106, 25117), False, 'from django.db import migrations, models\n'), ((25153, 25183), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (25172, 25183), False, 'from django.db import migrations, models\n'), ((25217, 25239), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (25237, 25239), False, 'from django.db import migrations, models\n'), ((25271, 25301), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (25290, 25301), False, 'from django.db import migrations, models\n'), ((25332, 25362), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (25351, 25362), False, 'from django.db import migrations, models\n'), ((25393, 25423), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (25412, 25423), False, 'from django.db import migrations, models\n'), ((25461, 25491), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (25480, 25491), False, 'from django.db import migrations, models\n'), ((25525, 25555), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (25544, 25555), False, 'from django.db import migrations, models\n'), ((25604, 25634), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (25623, 25634), False, 'from django.db import migrations, models\n'), ((25667, 25710), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (25683, 25710), False, 'from django.db import migrations, models\n'), ((25740, 25772), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (25756, 25772), False, 'from django.db import migrations, models\n'), ((25810, 25832), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (25830, 25832), False, 'from django.db import migrations, models\n'), ((25869, 25899), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (25888, 25899), False, 'from django.db import migrations, models\n'), ((25932, 26006), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""MiddleEastNews"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='MiddleEastNews', editable=False, max_length=200)\n", (25948, 26006), False, 'from django.db import migrations, models\n'), ((26341, 26427), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (26357, 26427), False, 'from django.db import migrations, models\n'), ((26451, 26483), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (26467, 26483), False, 'from django.db import migrations, models\n'), ((26514, 26541), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (26530, 26541), False, 'from django.db import migrations, models\n'), ((26577, 26605), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (26594, 26605), False, 'from django.db import migrations, models\n'), ((26634, 26664), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (26653, 26664), False, 'from django.db import migrations, models\n'), ((26700, 26730), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (26719, 26730), 
False, 'from django.db import migrations, models\n'), ((26764, 26786), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (26784, 26786), False, 'from django.db import migrations, models\n'), ((26818, 26848), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (26837, 26848), False, 'from django.db import migrations, models\n'), ((26879, 26909), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (26898, 26909), False, 'from django.db import migrations, models\n'), ((26940, 26970), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (26959, 26970), False, 'from django.db import migrations, models\n'), ((27008, 27038), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (27027, 27038), False, 'from django.db import migrations, models\n'), ((27072, 27102), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (27091, 27102), False, 'from django.db import migrations, models\n'), ((27151, 27181), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (27170, 27181), False, 'from django.db import migrations, models\n'), ((27214, 27257), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (27230, 27257), False, 'from django.db import migrations, models\n'), ((27287, 27319), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (27303, 27319), False, 'from django.db import migrations, models\n'), ((27357, 27379), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (27377, 27379), False, 'from django.db import migrations, models\n'), ((27416, 27446), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': 
'(True)'}), '(null=True)\n', (27435, 27446), False, 'from django.db import migrations, models\n'), ((27479, 27554), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""NeutralPolitics"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='NeutralPolitics', editable=False, max_length=200)\n", (27495, 27554), False, 'from django.db import migrations, models\n'), ((27879, 27965), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (27895, 27965), False, 'from django.db import migrations, models\n'), ((27989, 28021), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (28005, 28021), False, 'from django.db import migrations, models\n'), ((28052, 28079), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (28068, 28079), False, 'from django.db import migrations, models\n'), ((28115, 28143), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (28132, 28143), False, 'from django.db import migrations, models\n'), ((28172, 28202), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (28191, 28202), False, 'from django.db import migrations, models\n'), ((28238, 28268), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (28257, 28268), False, 'from django.db import migrations, models\n'), ((28302, 28324), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (28322, 28324), False, 'from django.db import migrations, models\n'), ((28356, 28386), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (28375, 28386), False, 'from django.db import migrations, models\n'), ((28417, 28447), 
'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (28436, 28447), False, 'from django.db import migrations, models\n'), ((28478, 28508), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (28497, 28508), False, 'from django.db import migrations, models\n'), ((28546, 28576), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (28565, 28576), False, 'from django.db import migrations, models\n'), ((28610, 28640), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (28629, 28640), False, 'from django.db import migrations, models\n'), ((28689, 28719), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (28708, 28719), False, 'from django.db import migrations, models\n'), ((28752, 28795), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (28768, 28795), False, 'from django.db import migrations, models\n'), ((28825, 28857), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (28841, 28857), False, 'from django.db import migrations, models\n'), ((28895, 28917), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (28915, 28917), False, 'from django.db import migrations, models\n'), ((28954, 28984), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (28973, 28984), False, 'from django.db import migrations, models\n'), ((29017, 29081), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""news"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='news', editable=False, max_length=200)\n", (29033, 29081), False, 'from django.db import migrations, models\n'), ((29405, 29491), 'django.db.models.CharField', 
'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (29421, 29491), False, 'from django.db import migrations, models\n'), ((29515, 29547), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (29531, 29547), False, 'from django.db import migrations, models\n'), ((29578, 29605), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (29594, 29605), False, 'from django.db import migrations, models\n'), ((29641, 29669), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (29658, 29669), False, 'from django.db import migrations, models\n'), ((29698, 29728), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (29717, 29728), False, 'from django.db import migrations, models\n'), ((29764, 29794), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (29783, 29794), False, 'from django.db import migrations, models\n'), ((29828, 29850), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (29848, 29850), False, 'from django.db import migrations, models\n'), ((29882, 29912), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (29901, 29912), False, 'from django.db import migrations, models\n'), ((29943, 29973), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (29962, 29973), False, 'from django.db import migrations, models\n'), ((30004, 30034), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (30023, 30034), False, 'from django.db import migrations, models\n'), ((30072, 30102), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': 
'(True)'}), '(null=True)\n', (30091, 30102), False, 'from django.db import migrations, models\n'), ((30136, 30166), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (30155, 30166), False, 'from django.db import migrations, models\n'), ((30215, 30245), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (30234, 30245), False, 'from django.db import migrations, models\n'), ((30278, 30321), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (30294, 30321), False, 'from django.db import migrations, models\n'), ((30351, 30383), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (30367, 30383), False, 'from django.db import migrations, models\n'), ((30421, 30443), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (30441, 30443), False, 'from django.db import migrations, models\n'), ((30480, 30510), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (30499, 30510), False, 'from django.db import migrations, models\n'), ((30543, 30617), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""NorthKoreaNews"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='NorthKoreaNews', editable=False, max_length=200)\n", (30559, 30617), False, 'from django.db import migrations, models\n'), ((30947, 31033), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (30963, 31033), False, 'from django.db import migrations, models\n'), ((31057, 31089), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (31073, 31089), False, 'from django.db import 
migrations, models\n'), ((31120, 31147), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (31136, 31147), False, 'from django.db import migrations, models\n'), ((31183, 31211), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (31200, 31211), False, 'from django.db import migrations, models\n'), ((31240, 31270), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (31259, 31270), False, 'from django.db import migrations, models\n'), ((31306, 31336), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (31325, 31336), False, 'from django.db import migrations, models\n'), ((31370, 31392), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (31390, 31392), False, 'from django.db import migrations, models\n'), ((31424, 31454), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (31443, 31454), False, 'from django.db import migrations, models\n'), ((31485, 31515), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (31504, 31515), False, 'from django.db import migrations, models\n'), ((31546, 31576), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (31565, 31576), False, 'from django.db import migrations, models\n'), ((31614, 31644), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (31633, 31644), False, 'from django.db import migrations, models\n'), ((31678, 31708), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (31697, 31708), False, 'from django.db import migrations, models\n'), ((31757, 31787), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (31776, 31787), False, 'from 
django.db import migrations, models\n'), ((31820, 31863), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (31836, 31863), False, 'from django.db import migrations, models\n'), ((31893, 31925), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (31909, 31925), False, 'from django.db import migrations, models\n'), ((31963, 31985), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (31983, 31985), False, 'from django.db import migrations, models\n'), ((32022, 32052), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (32041, 32052), False, 'from django.db import migrations, models\n'), ((32085, 32155), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""northkorea"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='northkorea', editable=False, max_length=200)\n", (32101, 32155), False, 'from django.db import migrations, models\n'), ((32480, 32566), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (32496, 32566), False, 'from django.db import migrations, models\n'), ((32590, 32622), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (32606, 32622), False, 'from django.db import migrations, models\n'), ((32653, 32680), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (32669, 32680), False, 'from django.db import migrations, models\n'), ((32716, 32744), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (32733, 32744), False, 'from django.db import migrations, models\n'), ((32773, 32803), 'django.db.models.IntegerField', 
'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (32792, 32803), False, 'from django.db import migrations, models\n'), ((32839, 32869), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (32858, 32869), False, 'from django.db import migrations, models\n'), ((32903, 32925), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (32923, 32925), False, 'from django.db import migrations, models\n'), ((32957, 32987), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (32976, 32987), False, 'from django.db import migrations, models\n'), ((33018, 33048), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (33037, 33048), False, 'from django.db import migrations, models\n'), ((33079, 33109), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (33098, 33109), False, 'from django.db import migrations, models\n'), ((33147, 33177), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (33166, 33177), False, 'from django.db import migrations, models\n'), ((33211, 33241), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (33230, 33241), False, 'from django.db import migrations, models\n'), ((33290, 33320), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (33309, 33320), False, 'from django.db import migrations, models\n'), ((33353, 33396), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (33369, 33396), False, 'from django.db import migrations, models\n'), ((33426, 33458), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (33442, 33458), False, 'from django.db import migrations, 
models\n'), ((33496, 33518), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (33516, 33518), False, 'from django.db import migrations, models\n'), ((33555, 33585), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (33574, 33585), False, 'from django.db import migrations, models\n'), ((33618, 33686), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""pakistan"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='pakistan', editable=False, max_length=200)\n", (33634, 33686), False, 'from django.db import migrations, models\n'), ((34009, 34095), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (34025, 34095), False, 'from django.db import migrations, models\n'), ((34119, 34151), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (34135, 34151), False, 'from django.db import migrations, models\n'), ((34182, 34209), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (34198, 34209), False, 'from django.db import migrations, models\n'), ((34245, 34273), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (34262, 34273), False, 'from django.db import migrations, models\n'), ((34302, 34332), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (34321, 34332), False, 'from django.db import migrations, models\n'), ((34368, 34398), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (34387, 34398), False, 'from django.db import migrations, models\n'), ((34432, 34454), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (34452, 34454), False, 'from 
django.db import migrations, models\n'), ((34486, 34516), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (34505, 34516), False, 'from django.db import migrations, models\n'), ((34547, 34577), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (34566, 34577), False, 'from django.db import migrations, models\n'), ((34608, 34638), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (34627, 34638), False, 'from django.db import migrations, models\n'), ((34676, 34706), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (34695, 34706), False, 'from django.db import migrations, models\n'), ((34740, 34770), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (34759, 34770), False, 'from django.db import migrations, models\n'), ((34819, 34849), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (34838, 34849), False, 'from django.db import migrations, models\n'), ((34882, 34925), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (34898, 34925), False, 'from django.db import migrations, models\n'), ((34955, 34987), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (34971, 34987), False, 'from django.db import migrations, models\n'), ((35025, 35047), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (35045, 35047), False, 'from django.db import migrations, models\n'), ((35084, 35114), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (35103, 35114), False, 'from django.db import migrations, models\n'), ((35147, 35216), 'django.db.models.CharField', 'models.CharField', ([], {'default': 
'"""Palestine"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='Palestine', editable=False, max_length=200)\n", (35163, 35216), False, 'from django.db import migrations, models\n'), ((35539, 35625), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (35555, 35625), False, 'from django.db import migrations, models\n'), ((35649, 35681), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (35665, 35681), False, 'from django.db import migrations, models\n'), ((35712, 35739), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (35728, 35739), False, 'from django.db import migrations, models\n'), ((35775, 35803), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (35792, 35803), False, 'from django.db import migrations, models\n'), ((35832, 35862), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (35851, 35862), False, 'from django.db import migrations, models\n'), ((35898, 35928), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (35917, 35928), False, 'from django.db import migrations, models\n'), ((35962, 35984), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (35982, 35984), False, 'from django.db import migrations, models\n'), ((36016, 36046), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (36035, 36046), False, 'from django.db import migrations, models\n'), ((36077, 36107), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (36096, 36107), False, 'from django.db import migrations, models\n'), ((36138, 36168), 
'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (36157, 36168), False, 'from django.db import migrations, models\n'), ((36206, 36236), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (36225, 36236), False, 'from django.db import migrations, models\n'), ((36270, 36300), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (36289, 36300), False, 'from django.db import migrations, models\n'), ((36349, 36379), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (36368, 36379), False, 'from django.db import migrations, models\n'), ((36412, 36455), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (36428, 36455), False, 'from django.db import migrations, models\n'), ((36485, 36517), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (36501, 36517), False, 'from django.db import migrations, models\n'), ((36555, 36577), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (36575, 36577), False, 'from django.db import migrations, models\n'), ((36614, 36644), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (36633, 36644), False, 'from django.db import migrations, models\n'), ((36677, 36745), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""politics"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='politics', editable=False, max_length=200)\n", (36693, 36745), False, 'from django.db import migrations, models\n'), ((37070, 37156), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n 
False)\n', (37086, 37156), False, 'from django.db import migrations, models\n'), ((37180, 37212), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (37196, 37212), False, 'from django.db import migrations, models\n'), ((37243, 37270), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (37259, 37270), False, 'from django.db import migrations, models\n'), ((37306, 37334), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (37323, 37334), False, 'from django.db import migrations, models\n'), ((37363, 37393), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (37382, 37393), False, 'from django.db import migrations, models\n'), ((37429, 37459), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (37448, 37459), False, 'from django.db import migrations, models\n'), ((37493, 37515), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (37513, 37515), False, 'from django.db import migrations, models\n'), ((37547, 37577), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (37566, 37577), False, 'from django.db import migrations, models\n'), ((37608, 37638), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (37627, 37638), False, 'from django.db import migrations, models\n'), ((37669, 37699), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (37688, 37699), False, 'from django.db import migrations, models\n'), ((37737, 37767), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (37756, 37767), False, 'from django.db import migrations, models\n'), ((37801, 37831), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': 
'(True)'}), '(null=True)\n', (37820, 37831), False, 'from django.db import migrations, models\n'), ((37880, 37910), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (37899, 37910), False, 'from django.db import migrations, models\n'), ((37943, 37986), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (37959, 37986), False, 'from django.db import migrations, models\n'), ((38016, 38048), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (38032, 38048), False, 'from django.db import migrations, models\n'), ((38086, 38108), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (38106, 38108), False, 'from django.db import migrations, models\n'), ((38145, 38175), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (38164, 38175), False, 'from django.db import migrations, models\n'), ((38208, 38279), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""progressive"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='progressive', editable=False, max_length=200)\n", (38224, 38279), False, 'from django.db import migrations, models\n'), ((38604, 38690), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (38620, 38690), False, 'from django.db import migrations, models\n'), ((38714, 38746), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (38730, 38746), False, 'from django.db import migrations, models\n'), ((38777, 38804), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (38793, 38804), False, 'from django.db import migrations, 
models\n'), ((38840, 38868), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (38857, 38868), False, 'from django.db import migrations, models\n'), ((38897, 38927), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (38916, 38927), False, 'from django.db import migrations, models\n'), ((38963, 38993), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (38982, 38993), False, 'from django.db import migrations, models\n'), ((39027, 39049), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (39047, 39049), False, 'from django.db import migrations, models\n'), ((39081, 39111), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (39100, 39111), False, 'from django.db import migrations, models\n'), ((39142, 39172), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (39161, 39172), False, 'from django.db import migrations, models\n'), ((39203, 39233), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (39222, 39233), False, 'from django.db import migrations, models\n'), ((39271, 39301), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (39290, 39301), False, 'from django.db import migrations, models\n'), ((39335, 39365), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (39354, 39365), False, 'from django.db import migrations, models\n'), ((39414, 39444), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (39433, 39444), False, 'from django.db import migrations, models\n'), ((39477, 39520), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', 
(39493, 39520), False, 'from django.db import migrations, models\n'), ((39550, 39582), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (39566, 39582), False, 'from django.db import migrations, models\n'), ((39620, 39642), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (39640, 39642), False, 'from django.db import migrations, models\n'), ((39679, 39709), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (39698, 39709), False, 'from django.db import migrations, models\n'), ((39742, 39810), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""realtech"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='realtech', editable=False, max_length=200)\n", (39758, 39810), False, 'from django.db import migrations, models\n'), ((40133, 40219), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (40149, 40219), False, 'from django.db import migrations, models\n'), ((40243, 40275), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (40259, 40275), False, 'from django.db import migrations, models\n'), ((40306, 40333), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (40322, 40333), False, 'from django.db import migrations, models\n'), ((40369, 40397), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (40386, 40397), False, 'from django.db import migrations, models\n'), ((40426, 40456), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (40445, 40456), False, 'from django.db import migrations, models\n'), ((40492, 40522), 'django.db.models.IntegerField', 
'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (40511, 40522), False, 'from django.db import migrations, models\n'), ((40556, 40578), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (40576, 40578), False, 'from django.db import migrations, models\n'), ((40610, 40640), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (40629, 40640), False, 'from django.db import migrations, models\n'), ((40671, 40701), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (40690, 40701), False, 'from django.db import migrations, models\n'), ((40732, 40762), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (40751, 40762), False, 'from django.db import migrations, models\n'), ((40800, 40830), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (40819, 40830), False, 'from django.db import migrations, models\n'), ((40864, 40894), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (40883, 40894), False, 'from django.db import migrations, models\n'), ((40943, 40973), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (40962, 40973), False, 'from django.db import migrations, models\n'), ((41006, 41049), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (41022, 41049), False, 'from django.db import migrations, models\n'), ((41079, 41111), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (41095, 41111), False, 'from django.db import migrations, models\n'), ((41149, 41171), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (41169, 41171), False, 'from django.db import migrations, models\n'), ((41208, 
41238), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (41227, 41238), False, 'from django.db import migrations, models\n'), ((41271, 41340), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""singapore"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='singapore', editable=False, max_length=200)\n", (41287, 41340), False, 'from django.db import migrations, models\n'), ((41664, 41750), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (41680, 41750), False, 'from django.db import migrations, models\n'), ((41774, 41806), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (41790, 41806), False, 'from django.db import migrations, models\n'), ((41837, 41864), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (41853, 41864), False, 'from django.db import migrations, models\n'), ((41900, 41928), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (41917, 41928), False, 'from django.db import migrations, models\n'), ((41957, 41987), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (41976, 41987), False, 'from django.db import migrations, models\n'), ((42023, 42053), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (42042, 42053), False, 'from django.db import migrations, models\n'), ((42087, 42109), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (42107, 42109), False, 'from django.db import migrations, models\n'), ((42141, 42171), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (42160, 42171), False, 
'from django.db import migrations, models\n'), ((42202, 42232), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (42221, 42232), False, 'from django.db import migrations, models\n'), ((42263, 42293), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (42282, 42293), False, 'from django.db import migrations, models\n'), ((42331, 42361), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (42350, 42361), False, 'from django.db import migrations, models\n'), ((42395, 42425), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (42414, 42425), False, 'from django.db import migrations, models\n'), ((42474, 42504), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (42493, 42504), False, 'from django.db import migrations, models\n'), ((42537, 42580), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (42553, 42580), False, 'from django.db import migrations, models\n'), ((42610, 42642), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (42626, 42642), False, 'from django.db import migrations, models\n'), ((42680, 42702), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (42700, 42702), False, 'from django.db import migrations, models\n'), ((42739, 42769), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (42758, 42769), False, 'from django.db import migrations, models\n'), ((42802, 42871), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""socialism"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='socialism', editable=False, max_length=200)\n", (42818, 42871), False, 'from django.db import 
migrations, models\n'), ((43191, 43277), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (43207, 43277), False, 'from django.db import migrations, models\n'), ((43301, 43333), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (43317, 43333), False, 'from django.db import migrations, models\n'), ((43364, 43391), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (43380, 43391), False, 'from django.db import migrations, models\n'), ((43427, 43455), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (43444, 43455), False, 'from django.db import migrations, models\n'), ((43484, 43514), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (43503, 43514), False, 'from django.db import migrations, models\n'), ((43550, 43580), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (43569, 43580), False, 'from django.db import migrations, models\n'), ((43614, 43636), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (43634, 43636), False, 'from django.db import migrations, models\n'), ((43668, 43698), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (43687, 43698), False, 'from django.db import migrations, models\n'), ((43729, 43759), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (43748, 43759), False, 'from django.db import migrations, models\n'), ((43790, 43820), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (43809, 43820), False, 'from django.db import migrations, models\n'), ((43858, 43888), 
'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (43877, 43888), False, 'from django.db import migrations, models\n'), ((43922, 43952), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (43941, 43952), False, 'from django.db import migrations, models\n'), ((44001, 44031), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (44020, 44031), False, 'from django.db import migrations, models\n'), ((44064, 44107), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (44080, 44107), False, 'from django.db import migrations, models\n'), ((44137, 44169), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (44153, 44169), False, 'from django.db import migrations, models\n'), ((44207, 44229), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (44227, 44229), False, 'from django.db import migrations, models\n'), ((44266, 44296), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (44285, 44296), False, 'from django.db import migrations, models\n'), ((44329, 44397), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""HongKong"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='HongKong', editable=False, max_length=200)\n", (44345, 44397), False, 'from django.db import migrations, models\n'), ((44714, 44800), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (44730, 44800), False, 'from django.db import migrations, models\n'), ((44824, 44856), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), 
'(max_length=300)\n', (44840, 44856), False, 'from django.db import migrations, models\n'), ((44887, 44914), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (44903, 44914), False, 'from django.db import migrations, models\n'), ((44950, 44978), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (44967, 44978), False, 'from django.db import migrations, models\n'), ((45007, 45037), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (45026, 45037), False, 'from django.db import migrations, models\n'), ((45073, 45103), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (45092, 45103), False, 'from django.db import migrations, models\n'), ((45137, 45159), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (45157, 45159), False, 'from django.db import migrations, models\n'), ((45191, 45221), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (45210, 45221), False, 'from django.db import migrations, models\n'), ((45252, 45282), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (45271, 45282), False, 'from django.db import migrations, models\n'), ((45313, 45343), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (45332, 45343), False, 'from django.db import migrations, models\n'), ((45381, 45411), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (45400, 45411), False, 'from django.db import migrations, models\n'), ((45445, 45475), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (45464, 45475), False, 'from django.db import migrations, models\n'), ((45524, 45554), 'django.db.models.BooleanField', 'models.BooleanField', ([], 
{'null': '(True)'}), '(null=True)\n', (45543, 45554), False, 'from django.db import migrations, models\n'), ((45587, 45630), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (45603, 45630), False, 'from django.db import migrations, models\n'), ((45660, 45692), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (45676, 45692), False, 'from django.db import migrations, models\n'), ((45730, 45752), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (45750, 45752), False, 'from django.db import migrations, models\n'), ((45789, 45819), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (45808, 45819), False, 'from django.db import migrations, models\n'), ((45852, 45918), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""taiwan"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='taiwan', editable=False, max_length=200)\n", (45868, 45918), False, 'from django.db import migrations, models\n'), ((46240, 46326), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (46256, 46326), False, 'from django.db import migrations, models\n'), ((46350, 46382), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (46366, 46382), False, 'from django.db import migrations, models\n'), ((46413, 46440), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (46429, 46440), False, 'from django.db import migrations, models\n'), ((46476, 46504), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (46493, 46504), False, 'from django.db import migrations, 
models\n'), ((46533, 46563), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (46552, 46563), False, 'from django.db import migrations, models\n'), ((46599, 46629), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (46618, 46629), False, 'from django.db import migrations, models\n'), ((46663, 46685), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (46683, 46685), False, 'from django.db import migrations, models\n'), ((46717, 46747), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (46736, 46747), False, 'from django.db import migrations, models\n'), ((46778, 46808), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (46797, 46808), False, 'from django.db import migrations, models\n'), ((46839, 46869), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (46858, 46869), False, 'from django.db import migrations, models\n'), ((46907, 46937), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (46926, 46937), False, 'from django.db import migrations, models\n'), ((46971, 47001), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (46990, 47001), False, 'from django.db import migrations, models\n'), ((47050, 47080), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (47069, 47080), False, 'from django.db import migrations, models\n'), ((47113, 47156), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (47129, 47156), False, 'from django.db import migrations, models\n'), ((47186, 47218), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), 
'(max_length=300)\n', (47202, 47218), False, 'from django.db import migrations, models\n'), ((47256, 47278), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (47276, 47278), False, 'from django.db import migrations, models\n'), ((47315, 47345), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (47334, 47345), False, 'from django.db import migrations, models\n'), ((47378, 47448), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""technology"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='technology', editable=False, max_length=200)\n", (47394, 47448), False, 'from django.db import migrations, models\n'), ((47768, 47854), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (47784, 47854), False, 'from django.db import migrations, models\n'), ((47878, 47910), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (47894, 47910), False, 'from django.db import migrations, models\n'), ((47941, 47968), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (47957, 47968), False, 'from django.db import migrations, models\n'), ((48004, 48032), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (48021, 48032), False, 'from django.db import migrations, models\n'), ((48061, 48091), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (48080, 48091), False, 'from django.db import migrations, models\n'), ((48127, 48157), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (48146, 48157), False, 'from django.db import migrations, models\n'), ((48191, 48213), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (48211, 48213), False, 'from django.db import migrations, models\n'), ((48245, 48275), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (48264, 48275), False, 'from django.db import migrations, models\n'), ((48306, 48336), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (48325, 48336), False, 'from django.db import migrations, models\n'), ((48367, 48397), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (48386, 48397), False, 'from django.db import migrations, models\n'), ((48435, 48465), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (48454, 48465), False, 'from django.db import migrations, models\n'), ((48499, 48529), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (48518, 48529), False, 'from django.db import migrations, models\n'), ((48578, 48608), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (48597, 48608), False, 'from django.db import migrations, models\n'), ((48641, 48684), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (48657, 48684), False, 'from django.db import migrations, models\n'), ((48714, 48746), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (48730, 48746), False, 'from django.db import migrations, models\n'), ((48784, 48806), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (48804, 48806), False, 'from django.db import migrations, models\n'), ((48843, 48873), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (48862, 48873), False, 'from django.db import 
migrations, models\n'), ((48906, 48970), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""tech"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='tech', editable=False, max_length=200)\n", (48922, 48970), False, 'from django.db import migrations, models\n'), ((49288, 49374), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (49304, 49374), False, 'from django.db import migrations, models\n'), ((49398, 49430), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (49414, 49430), False, 'from django.db import migrations, models\n'), ((49461, 49488), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (49477, 49488), False, 'from django.db import migrations, models\n'), ((49524, 49552), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (49541, 49552), False, 'from django.db import migrations, models\n'), ((49581, 49611), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (49600, 49611), False, 'from django.db import migrations, models\n'), ((49647, 49677), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (49666, 49677), False, 'from django.db import migrations, models\n'), ((49711, 49733), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (49731, 49733), False, 'from django.db import migrations, models\n'), ((49765, 49795), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (49784, 49795), False, 'from django.db import migrations, models\n'), ((49826, 49856), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', 
(49845, 49856), False, 'from django.db import migrations, models\n'), ((49887, 49917), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (49906, 49917), False, 'from django.db import migrations, models\n'), ((49955, 49985), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (49974, 49985), False, 'from django.db import migrations, models\n'), ((50019, 50049), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (50038, 50049), False, 'from django.db import migrations, models\n'), ((50098, 50128), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (50117, 50128), False, 'from django.db import migrations, models\n'), ((50161, 50204), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (50177, 50204), False, 'from django.db import migrations, models\n'), ((50234, 50266), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (50250, 50266), False, 'from django.db import migrations, models\n'), ((50304, 50326), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (50324, 50326), False, 'from django.db import migrations, models\n'), ((50363, 50393), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (50382, 50393), False, 'from django.db import migrations, models\n'), ((50426, 50494), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Thailand"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='Thailand', editable=False, max_length=200)\n", (50442, 50494), False, 'from django.db import migrations, models\n'), ((50818, 50904), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(20)', 'primary_key': '(True)', 
'serialize': '(False)'}), '(db_index=True, max_length=20, primary_key=True, serialize=\n False)\n', (50834, 50904), False, 'from django.db import migrations, models\n'), ((50928, 50960), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (50944, 50960), False, 'from django.db import migrations, models\n'), ((50991, 51018), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (51007, 51018), False, 'from django.db import migrations, models\n'), ((51054, 51082), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (51071, 51082), False, 'from django.db import migrations, models\n'), ((51111, 51141), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (51130, 51141), False, 'from django.db import migrations, models\n'), ((51177, 51207), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (51196, 51207), False, 'from django.db import migrations, models\n'), ((51241, 51263), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (51261, 51263), False, 'from django.db import migrations, models\n'), ((51295, 51325), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (51314, 51325), False, 'from django.db import migrations, models\n'), ((51356, 51386), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (51375, 51386), False, 'from django.db import migrations, models\n'), ((51417, 51447), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (51436, 51447), False, 'from django.db import migrations, models\n'), ((51485, 51515), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (51504, 51515), False, 'from django.db import migrations, 
models\n'), ((51549, 51579), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (51568, 51579), False, 'from django.db import migrations, models\n'), ((51628, 51658), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)'}), '(null=True)\n', (51647, 51658), False, 'from django.db import migrations, models\n'), ((51691, 51734), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'null': '(True)'}), '(max_length=300, null=True)\n', (51707, 51734), False, 'from django.db import migrations, models\n'), ((51764, 51796), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (51780, 51796), False, 'from django.db import migrations, models\n'), ((51834, 51856), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (51854, 51856), False, 'from django.db import migrations, models\n'), ((51893, 51923), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (51912, 51923), False, 'from django.db import migrations, models\n'), ((51956, 52026), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""ukpolitics"""', 'editable': '(False)', 'max_length': '(200)'}), "(default='ukpolitics', editable=False, max_length=200)\n", (51972, 52026), False, 'from django.db import migrations, models\n')]
|
# Pull the 'Plex' folder from put.io down to the NAS, then let filebot
# sort whatever arrived.
import putiopy
import os

KB = 1024
MB = 1024 * KB

OAUTH_TOKEN = 'xxx'
CLIENT_SECRET = 'xxx'
CLIENT_ID = 1

# Read and write operations are limited to this chunk size; a bigger
# chunk makes a noticeable difference on large files.
CHUNK_SIZE = 256 * KB * 10

# Destination folder on the NAS.
dest = '/volume1/homes/Phillip/Media/Unsorted'

# This opens a browser at the authentication URL; once authenticated the
# oauth_token shows up in the address bar.
helper = putiopy.AuthHelper(CLIENT_ID, CLIENT_SECRET, '', type='token')
helper.open_authentication_url()

client = putiopy.Client(OAUTH_TOKEN)

# Scan every remote file for the 'Plex' folder.
files = client.File.list()
for entry in files:
    if entry.name != 'Plex':
        continue
    # Only download (and run filebot) when the folder actually holds data.
    if entry.size > 0:
        # Keep the remote copies; put.io does not delete directories anyway.
        delete_after_download = False
        download = entry.download(dest, delete_after_download, CHUNK_SIZE)
        FILEBOTSCRIPT = "filebot -script fn:amc --log INFO --output \"/volume1/Media\" --action move --conflict auto -non-strict \"/volume1/homes/Phillip/Media/Unsorted\" --filter \"(n != \'American Dad!\' || s == 15)\" --log-file amc.log --def clean=y music=y subtitles=en musicFormat=\"{plex}\" movieFormat=\"{plex}\" seriesFormat=\"{plex}\" minFileSize=4"
        os.system(FILEBOTSCRIPT)
## Test filebot command
#echo $'filebot -script fn:amc --log INFO --output \"/volume1/Media\" --action move --conflict auto -non-strict \"/volume1/homes/Phillip/Media/Unsorted\" --filter \"(n != \'American Dad!\' || s == 15)\" --log-file amc.log --def clean=y music=y subtitles=en musicFormat=\"{plex}\" movieFormat=\"{plex}\" seriesFormat=\"{plex}\" minFileSize=4'
|
[
"os.system",
"putiopy.Client",
"putiopy.AuthHelper"
] |
[((480, 542), 'putiopy.AuthHelper', 'putiopy.AuthHelper', (['CLIENT_ID', 'CLIENT_SECRET', '""""""'], {'type': '"""token"""'}), "(CLIENT_ID, CLIENT_SECRET, '', type='token')\n", (498, 542), False, 'import putiopy\n'), ((585, 612), 'putiopy.Client', 'putiopy.Client', (['OAUTH_TOKEN'], {}), '(OAUTH_TOKEN)\n', (599, 612), False, 'import putiopy\n'), ((1411, 1435), 'os.system', 'os.system', (['FILEBOTSCRIPT'], {}), '(FILEBOTSCRIPT)\n', (1420, 1435), False, 'import os\n')]
|
from __future__ import unicode_literals
import posixpath
import frappe
from models import (OpencartCategory,
OpencartProductOption,
OpencartProductOptionExt,
OpencartStore,
OpencartCustomerGroup,
OpencartOrder)
from utils import oc_request, oc_upload_file
def get(site_name, api_base_url=None, use_pure_rest_api=False):
    """Build an :class:`OpencartApi` from an "Opencart Site" document.

    The auth header pair is read from the site document — the plain REST
    key when ``use_pure_rest_api`` is true, the Admin API key otherwise —
    and the document's ``server_base_url`` is used unless an explicit
    ``api_base_url`` is given. This method assumes the Opencart REST
    Admin API is in use.
    """
    site_doc = frappe.get_doc("Opencart Site", site_name)
    if use_pure_rest_api:
        header_key = site_doc.get('opencart_header_key')
        header_value = site_doc.get('opencart_header_value')
    else:
        header_key = site_doc.get('opencart_admin_header_key')
        header_value = site_doc.get('opencart_admin_header_value')
    base_url = api_base_url if api_base_url else site_doc.get('server_base_url')
    return OpencartApi(base_url,
                       use_pure_rest_api=use_pure_rest_api,
                       headers={header_key: header_value})
class OpencartApi(object):
    """Thin wrapper around the Opencart REST (Admin) API.

    Every call goes through ``oc_request``/``oc_upload_file`` and carries
    the authentication headers supplied at construction time. ``get_*``
    collection methods are generators that page through results; most
    single-item methods return a ``(success, payload)`` tuple.
    """

    def __init__(self, base_api_url, use_pure_rest_api=False, headers=None):
        # BUGFIX: ``headers`` previously defaulted to a shared mutable ``{}``,
        # so mutating one instance's headers (exposed via the ``headers``
        # property) leaked into every instance created with the default.
        # A fresh dict is now created per call.
        if headers is None:
            headers = {}
        self._url = posixpath.join(base_api_url, 'api', 'rest')
        self.use_pure_rest_api = use_pure_rest_api
        self._headers = headers
        # self._headers.update({'Accept': 'application/json'})
        # text/plain; charset=UTF-8
        # self._headers.update({'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:11.0) Gecko/20100101 Firefox/11.0'})

    def __str__(self):
        return "OpencartApi at %s" % (self._url,)

    @property
    def url(self):
        """Base REST endpoint URL (``<server>/api/rest``)."""
        return self._url

    @property
    def headers(self):
        """Headers (including auth) sent with every request."""
        return self._headers

    def get_categories_by_level(self, level=1):
        """Yield ``OpencartCategory`` objects at the given tree *level*."""
        success, resp = oc_request(self.url + '/categories/level/%s' % str(level), headers=self.headers)
        if not success:
            return
        categories_resp = resp.get('data').get('categories', {})
        for c in categories_resp:
            yield OpencartCategory(self, categories_resp.get(c)[0], level=level)

    def get_categories_by_parent(self, parent_id):
        """Yield ``OpencartCategory`` objects that are children of *parent_id*."""
        success, resp = oc_request(self.url + '/categories/parent/%s' % parent_id, headers=self.headers)
        if not success:
            return
        categories_resp = resp.get('data').get('categories', {})
        for c in categories_resp:
            yield OpencartCategory(self, categories_resp.get(c)[0], parent=parent_id)

    def get_all_categories(self, parent_id=None):
        """Yield the whole category tree depth-first, starting below *parent_id*
        (or from the top level when *parent_id* is falsy)."""
        if parent_id:
            categories = self.get_categories_by_parent(parent_id)
        else:
            categories = self.get_categories_by_level()
        for category in categories:
            yield category
            for sub_category in self.get_all_categories(category.category_id):
                yield sub_category

    def get_all_products(self, limit=100, page=1):
        """Yield raw product dicts, paging *limit* at a time until exhausted."""
        while True:
            success, resp = oc_request(self.url + '/products/limit/%s/page/%s' % (str(limit), str(page)), headers=self.headers)
            if not success or not resp.get('data'):
                break
            page += 1
            for c in resp.get('data', {}):
                yield c

    def get_product_options(self, limit=100, page=1):
        """Yield ``OpencartProductOption`` objects, paging until exhausted."""
        while True:
            success, resp = oc_request(self.url + '/product_options/limit/%s/page/%s' % (str(limit), str(page)), headers=self.headers)
            if not success or not resp.get('data'):
                break
            page += 1
            for po in resp.get('data'):
                yield OpencartProductOption(self, po)

    def get_products_by_category(self, category_id, limit=100, page=1):
        """Yield product dicts for *category_id*, flattening the first
        product description into top-level keys and wrapping options in
        ``OpencartProductOptionExt``."""
        while True:
            success, resp = oc_request(self.url + '/products/category/%s/limit/%s/page/%s' % (category_id, str(limit), str(page)), headers=self.headers)
            if not success or not resp.get('data'):
                break
            page += 1
            for c in resp.get('data', {}):
                c['category_id'] = category_id
                c['product_id'] = c.get('product_id') or c.get('id')
                # 'product_description' is either keyed by language id (digit
                # keys, take the first entry) or is itself the description dict.
                for i, first_description in c.get('product_description').items():
                    if isinstance(first_description, list):
                        for k, val in first_description[0].items():
                            c[k] = val
                        break
                    else:
                        for k, val in first_description.items():
                            c[k] = val
                        if i.isdigit():
                            break
                        else:
                            c[i] = first_description
                c['options_list'] = [OpencartProductOptionExt(self, o) for o in c.get('options')] if c.get('options') else []
                yield c

    def get_product(self, product_id):
        """Return ``(success, product_dict)`` with id, category and the first
        description flattened onto the dict."""
        success, resp = oc_request(self.url + '/products/%s' % product_id, headers=self.headers)
        product = resp.get('data', {})
        product['product_id'] = product.get('product_id') or product.get('id')
        # resolving category_id
        if isinstance(product.get('category'), dict):
            for category_id, first_category in product.get('category').items():
                product['category_id'] = category_id
                break
        # see get_products_by_category for the description-flattening logic
        for i, first_description in product.get('product_description').items():
            if isinstance(first_description, list):
                for k, val in first_description[0].items():
                    product[k] = val
                break
            else:
                for k, val in first_description.items():
                    product[k] = val
                if i.isdigit():
                    break
                else:
                    product[i] = first_description
        return (success, product)

    def create_product(self, data):
        """POST a new product; return ``(success, product_id_as_str)``."""
        success, resp = oc_request(self.url + '/products', headers=self.headers, method='POST', data=data)
        return (success, str(resp.get('product_id', '')))

    def update_product(self, product_id, data):
        """PUT updated fields for *product_id*; return success flag."""
        success, resp = oc_request(self.url + '/products/%s' % product_id, headers=self.headers, method='PUT', data=data)
        return success

    def delete_product(self, product_id):
        """DELETE *product_id*; return success flag."""
        success, resp = oc_request(self.url + '/products/%s' % product_id, headers=self.headers, method='DELETE')
        return success

    def set_product_image(self, product_id, file_path):
        """Upload the image at *file_path* for *product_id*; return success flag."""
        success, resp = oc_upload_file(self.url + '/products/%s/images' % product_id, file_path, headers=self.headers)
        return success

    def get_stores(self):
        """Yield ``OpencartStore`` objects (the raw payload first, then one
        per digit-keyed entry)."""
        success, resp = oc_request(self.url + '/stores', headers=self.headers)
        if not success:
            return
        stores = resp.get('data')
        yield OpencartStore(self, stores)
        for i in stores:
            if i.isdigit():
                yield OpencartStore(self, stores.get(i))

    def get_customer_groups(self, limit=100, page=1):
        """Yield ``OpencartCustomerGroup`` objects, paging until exhausted."""
        while True:
            success, resp = oc_request(self.url + '/customergroups/limit/%s/page/%s' % (str(limit), str(page)), headers=self.headers)
            customer_groups = resp.get('data')
            if not success or not customer_groups:
                break
            page += 1
            for g in customer_groups:
                yield OpencartCustomerGroup(self, customer_groups.get(g)[0])

    def get_customer_group_json(self, limit=100, page=1, language_id=1):
        """Yield raw customer-group dicts, paging until exhausted."""
        # for now just ignoring language_id
        while True:
            success, resp = oc_request(self.url + '/customergroups/limit/%s/page/%s' % (str(limit), str(page)), headers=self.headers)
            customer_groups = resp.get('data', {})
            if not success or not customer_groups:
                break
            page += 1
            for g in customer_groups:
                yield customer_groups.get(g)[0]

    def get_customer_group(self, customer_group_id):
        """Return ``(True, group_dict)`` for *customer_group_id* or ``(False, {})``.
        Note: linear scan over all groups."""
        for customer_group in self.get_customer_group_json():
            if customer_group.get('customer_group_id') == customer_group_id:
                return (True, customer_group)
        return (False, {})

    def get_customer(self, customer_id):
        """Return ``(success, customer_dict)`` for *customer_id*."""
        success, resp = oc_request(self.url + '/customers/%s' % customer_id, headers=self.headers)
        return (success, resp.get('data', {}))

    def create_customer(self, data):
        """POST a new customer; return ``(success, raw_response)``."""
        success, resp = oc_request(self.url + '/customers', headers=self.headers, method='POST', data=data)
        return (success, resp)

    def update_customer(self, customer_id, data):
        """PUT updated fields for *customer_id*; return ``(success, raw_response)``."""
        success, resp = oc_request(self.url + '/customers/%s' % customer_id, headers=self.headers, method='PUT', data=data)
        return (success, resp)

    def delete_customer(self, customer_id):
        """DELETE *customer_id*; return ``(success, raw_response)``."""
        data = {'customers': [customer_id]}
        success, resp = oc_request(self.url + '/customers', headers=self.headers, method='DELETE', data=data)
        return (success, resp)

    def get_customers(self, limit=100, page=1):
        """Yield ``(success, customer_dict)`` tuples for every customer,
        fetching each one individually."""
        while True:
            success, resp = oc_request(self.url + '/customers/limit/%s/page/%s' % (str(limit), str(page)), headers=self.headers)
            if not success or not resp.get('data'):
                break
            page += 1
            for c in resp.get('data'):
                yield self.get_customer(c.get('customer_id'))

    def get_order(self, order_id):
        """Return an ``OpencartOrder`` for *order_id*, or ``None`` when the
        response carries no data."""
        success, resp = oc_request(self.url + '/orders/%s' % order_id, headers=self.headers)
        order = resp.get('data')
        return OpencartOrder(self, order) if order else None

    def get_order_json(self, order_id):
        """Return ``(success, raw_order_dict)`` for *order_id*."""
        success, resp = oc_request(self.url + '/orders/%s' % order_id, headers=self.headers)
        return (success, resp.get('data', {}))

    def create_order(self, data):
        """POST a new order via the admin endpoint; return ``(success, raw_response)``."""
        success, resp = oc_request(self.url + '/orderadmin', headers=self.headers, method='POST', data=data)
        return (success, resp)

    def update_order(self, order_id, data):
        """PUT updated fields for *order_id*; return ``(success, raw_response)``."""
        success, resp = oc_request(self.url + '/orders/%s' % order_id, headers=self.headers, method='PUT', data=data)
        return (success, resp)

    def delete_order(self, order_id):
        """DELETE *order_id*; return ``(success, raw_response)``."""
        data = {'orders': [order_id]}
        success, resp = oc_request(self.url + '/orders/%s' % order_id, headers=self.headers, method='DELETE', data=data)
        return (success, resp)

    def get_orders_by_customer(self, customer_id):
        """Yield ``OpencartOrder`` objects (or ``None``) for *customer_id*."""
        success, resp = oc_request(self.url + '/orders/user/%s' % customer_id, headers=self.headers)
        if not success:
            return
        for o in resp.get('data'):
            yield self.get_order(o.get('order_id'))

    def get_orders_modified_from_to(self, modified_from, modified_to):
        """Return ``(success, order_list)`` for orders modified in the range."""
        success, resp = oc_request(self.url + '/orders/details/modified_from/%s/modified_to/%s' % (modified_from, modified_to), headers=self.headers)
        return (success, resp.get('data', []) if success else [])

    def get_orders_addeed_from_to(self, added_from, added_to):
        """Return ``(success, order_list)`` for orders added in the range.
        (Name keeps the historical 'addeed' typo for caller compatibility.)"""
        success, resp = oc_request(self.url + '/orders/details/added_from/%s/added_to/%s' % (added_from, added_to), headers=self.headers)
        return (success, resp.get('data', []) if success else [])

    def get_orders(self):
        """Yield ``OpencartOrder`` objects (or ``None``) for every order."""
        success, resp = oc_request(self.url + '/orders', headers=self.headers)
        if not success:
            return
        for o in resp.get('data'):
            yield self.get_order(o.get('order_id'))

    def check_connection(self):
        """Ping a cheap endpoint; return ``(success, error_message)``."""
        if self.use_pure_rest_api:
            success, resp = oc_request(self.url + '/languages', headers=self.headers)
        else:
            success, resp = oc_request(self.url + '/countries', headers=self.headers)
        return (success, resp.get('error', ''))

    def get_init(self):
        """Return ``(success, init_payload_dict)``."""
        success, resp = oc_request(self.url + '/init', headers=self.headers)
        return (success, resp.get('data', {}) if success else {})

    def get_order_statuses(self):
        """Return ``(success, status_list)``."""
        success, resp = oc_request(self.url + '/order_statuses', headers=self.headers)
        return (success, resp.get('data', []) if success else [])

    def get_shipping_methods(self):
        """Return ``(success, shipping_method_list)``."""
        success, resp = oc_request(self.url + '/shippingmethods', headers=self.headers)
        return (success, resp.get('data', []) if success else [])

    def get_payment_methods(self):
        """Return ``(success, payment_method_list)``."""
        success, resp = oc_request(self.url + '/paymentmethods', headers=self.headers)
        return (success, resp.get('data', []) if success else [])

    def get_all_manufacturers(self, limit=100, page=1):
        """Yield raw manufacturer dicts, paging until exhausted."""
        while True:
            success, resp = oc_request(self.url + '/manufacturers/limit/%s/page/%s' % (str(limit), str(page)), headers=self.headers)
            if not success or not resp.get('data'):
                break
            page += 1
            for c in resp.get('data', []):
                yield c
|
[
"models.OpencartProductOption",
"models.OpencartOrder",
"posixpath.join",
"models.OpencartStore",
"frappe.get_doc",
"utils.oc_request",
"models.OpencartProductOptionExt",
"utils.oc_upload_file"
] |
[((506, 548), 'frappe.get_doc', 'frappe.get_doc', (['"""Opencart Site"""', 'site_name'], {}), "('Opencart Site', site_name)\n", (520, 548), False, 'import frappe\n'), ((1182, 1225), 'posixpath.join', 'posixpath.join', (['base_api_url', '"""api"""', '"""rest"""'], {}), "(base_api_url, 'api', 'rest')\n", (1196, 1225), False, 'import posixpath\n'), ((2186, 2271), 'utils.oc_request', 'oc_request', (["(self.url + '/categories/parent/%s' % parent_id)"], {'headers': 'self.headers'}), "(self.url + '/categories/parent/%s' % parent_id, headers=self.headers\n )\n", (2196, 2271), False, 'from utils import oc_request, oc_upload_file\n'), ((4909, 4981), 'utils.oc_request', 'oc_request', (["(self.url + '/products/%s' % product_id)"], {'headers': 'self.headers'}), "(self.url + '/products/%s' % product_id, headers=self.headers)\n", (4919, 4981), False, 'from utils import oc_request, oc_upload_file\n'), ((5916, 6002), 'utils.oc_request', 'oc_request', (["(self.url + '/products')"], {'headers': 'self.headers', 'method': '"""POST"""', 'data': 'data'}), "(self.url + '/products', headers=self.headers, method='POST',\n data=data)\n", (5926, 6002), False, 'from utils import oc_request, oc_upload_file\n'), ((6130, 6231), 'utils.oc_request', 'oc_request', (["(self.url + '/products/%s' % product_id)"], {'headers': 'self.headers', 'method': '"""PUT"""', 'data': 'data'}), "(self.url + '/products/%s' % product_id, headers=self.headers,\n method='PUT', data=data)\n", (6140, 6231), False, 'from utils import oc_request, oc_upload_file\n'), ((6318, 6411), 'utils.oc_request', 'oc_request', (["(self.url + '/products/%s' % product_id)"], {'headers': 'self.headers', 'method': '"""DELETE"""'}), "(self.url + '/products/%s' % product_id, headers=self.headers,\n method='DELETE')\n", (6328, 6411), False, 'from utils import oc_request, oc_upload_file\n'), ((6512, 6610), 'utils.oc_upload_file', 'oc_upload_file', (["(self.url + '/products/%s/images' % product_id)", 'file_path'], {'headers': 'self.headers'}), 
"(self.url + '/products/%s/images' % product_id, file_path,\n headers=self.headers)\n", (6526, 6610), False, 'from utils import oc_request, oc_upload_file\n'), ((6681, 6735), 'utils.oc_request', 'oc_request', (["(self.url + '/stores')"], {'headers': 'self.headers'}), "(self.url + '/stores', headers=self.headers)\n", (6691, 6735), False, 'from utils import oc_request, oc_upload_file\n'), ((8267, 8341), 'utils.oc_request', 'oc_request', (["(self.url + '/customers/%s' % customer_id)"], {'headers': 'self.headers'}), "(self.url + '/customers/%s' % customer_id, headers=self.headers)\n", (8277, 8341), False, 'from utils import oc_request, oc_upload_file\n'), ((8451, 8538), 'utils.oc_request', 'oc_request', (["(self.url + '/customers')"], {'headers': 'self.headers', 'method': '"""POST"""', 'data': 'data'}), "(self.url + '/customers', headers=self.headers, method='POST',\n data=data)\n", (8461, 8538), False, 'from utils import oc_request, oc_upload_file\n'), ((8641, 8744), 'utils.oc_request', 'oc_request', (["(self.url + '/customers/%s' % customer_id)"], {'headers': 'self.headers', 'method': '"""PUT"""', 'data': 'data'}), "(self.url + '/customers/%s' % customer_id, headers=self.headers,\n method='PUT', data=data)\n", (8651, 8744), False, 'from utils import oc_request, oc_upload_file\n'), ((8885, 8974), 'utils.oc_request', 'oc_request', (["(self.url + '/customers')"], {'headers': 'self.headers', 'method': '"""DELETE"""', 'data': 'data'}), "(self.url + '/customers', headers=self.headers, method='DELETE',\n data=data)\n", (8895, 8974), False, 'from utils import oc_request, oc_upload_file\n'), ((9457, 9525), 'utils.oc_request', 'oc_request', (["(self.url + '/orders/%s' % order_id)"], {'headers': 'self.headers'}), "(self.url + '/orders/%s' % order_id, headers=self.headers)\n", (9467, 9525), False, 'from utils import oc_request, oc_upload_file\n'), ((9685, 9753), 'utils.oc_request', 'oc_request', (["(self.url + '/orders/%s' % order_id)"], {'headers': 'self.headers'}), "(self.url 
+ '/orders/%s' % order_id, headers=self.headers)\n", (9695, 9753), False, 'from utils import oc_request, oc_upload_file\n'), ((9860, 9948), 'utils.oc_request', 'oc_request', (["(self.url + '/orderadmin')"], {'headers': 'self.headers', 'method': '"""POST"""', 'data': 'data'}), "(self.url + '/orderadmin', headers=self.headers, method='POST',\n data=data)\n", (9870, 9948), False, 'from utils import oc_request, oc_upload_file\n'), ((10045, 10143), 'utils.oc_request', 'oc_request', (["(self.url + '/orders/%s' % order_id)"], {'headers': 'self.headers', 'method': '"""PUT"""', 'data': 'data'}), "(self.url + '/orders/%s' % order_id, headers=self.headers, method\n ='PUT', data=data)\n", (10055, 10143), False, 'from utils import oc_request, oc_upload_file\n'), ((10271, 10372), 'utils.oc_request', 'oc_request', (["(self.url + '/orders/%s' % order_id)"], {'headers': 'self.headers', 'method': '"""DELETE"""', 'data': 'data'}), "(self.url + '/orders/%s' % order_id, headers=self.headers, method\n ='DELETE', data=data)\n", (10281, 10372), False, 'from utils import oc_request, oc_upload_file\n'), ((10475, 10551), 'utils.oc_request', 'oc_request', (["(self.url + '/orders/user/%s' % customer_id)"], {'headers': 'self.headers'}), "(self.url + '/orders/user/%s' % customer_id, headers=self.headers)\n", (10485, 10551), False, 'from utils import oc_request, oc_upload_file\n'), ((10778, 10908), 'utils.oc_request', 'oc_request', (["(self.url + '/orders/details/modified_from/%s/modified_to/%s' % (\n modified_from, modified_to))"], {'headers': 'self.headers'}), "(self.url + '/orders/details/modified_from/%s/modified_to/%s' % (\n modified_from, modified_to), headers=self.headers)\n", (10788, 10908), False, 'from utils import oc_request, oc_upload_file\n'), ((11058, 11176), 'utils.oc_request', 'oc_request', (["(self.url + '/orders/details/added_from/%s/added_to/%s' % (added_from,\n added_to))"], {'headers': 'self.headers'}), "(self.url + '/orders/details/added_from/%s/added_to/%s' % (\n 
added_from, added_to), headers=self.headers)\n", (11068, 11176), False, 'from utils import oc_request, oc_upload_file\n'), ((11289, 11343), 'utils.oc_request', 'oc_request', (["(self.url + '/orders')"], {'headers': 'self.headers'}), "(self.url + '/orders', headers=self.headers)\n", (11299, 11343), False, 'from utils import oc_request, oc_upload_file\n'), ((11825, 11877), 'utils.oc_request', 'oc_request', (["(self.url + '/init')"], {'headers': 'self.headers'}), "(self.url + '/init', headers=self.headers)\n", (11835, 11877), False, 'from utils import oc_request, oc_upload_file\n'), ((12003, 12065), 'utils.oc_request', 'oc_request', (["(self.url + '/order_statuses')"], {'headers': 'self.headers'}), "(self.url + '/order_statuses', headers=self.headers)\n", (12013, 12065), False, 'from utils import oc_request, oc_upload_file\n'), ((12193, 12256), 'utils.oc_request', 'oc_request', (["(self.url + '/shippingmethods')"], {'headers': 'self.headers'}), "(self.url + '/shippingmethods', headers=self.headers)\n", (12203, 12256), False, 'from utils import oc_request, oc_upload_file\n'), ((12383, 12445), 'utils.oc_request', 'oc_request', (["(self.url + '/paymentmethods')"], {'headers': 'self.headers'}), "(self.url + '/paymentmethods', headers=self.headers)\n", (12393, 12445), False, 'from utils import oc_request, oc_upload_file\n'), ((6827, 6854), 'models.OpencartStore', 'OpencartStore', (['self', 'stores'], {}), '(self, stores)\n', (6840, 6854), False, 'from models import OpencartCategory, OpencartProductOption, OpencartProductOptionExt, OpencartStore, OpencartCustomerGroup, OpencartOrder\n'), ((9574, 9600), 'models.OpencartOrder', 'OpencartOrder', (['self', 'order'], {}), '(self, order)\n', (9587, 9600), False, 'from models import OpencartCategory, OpencartProductOption, OpencartProductOptionExt, OpencartStore, OpencartCustomerGroup, OpencartOrder\n'), ((11570, 11627), 'utils.oc_request', 'oc_request', (["(self.url + '/languages')"], {'headers': 'self.headers'}), "(self.url + 
'/languages', headers=self.headers)\n", (11580, 11627), False, 'from utils import oc_request, oc_upload_file\n'), ((11670, 11727), 'utils.oc_request', 'oc_request', (["(self.url + '/countries')"], {'headers': 'self.headers'}), "(self.url + '/countries', headers=self.headers)\n", (11680, 11727), False, 'from utils import oc_request, oc_upload_file\n'), ((3612, 3643), 'models.OpencartProductOption', 'OpencartProductOption', (['self', 'po'], {}), '(self, po)\n', (3633, 3643), False, 'from models import OpencartCategory, OpencartProductOption, OpencartProductOptionExt, OpencartStore, OpencartCustomerGroup, OpencartOrder\n'), ((4732, 4765), 'models.OpencartProductOptionExt', 'OpencartProductOptionExt', (['self', 'o'], {}), '(self, o)\n', (4756, 4765), False, 'from models import OpencartCategory, OpencartProductOption, OpencartProductOptionExt, OpencartStore, OpencartCustomerGroup, OpencartOrder\n')]
|
import sys
from PySide2.QtWidgets import QApplication
import view.mainwindow
import controller.controller
import model.memory
memory = model.memory.MainMemory()
controller = controller.controller.Controller(memory)
app = QApplication(sys.argv)
mw = view.mainwindow.MainWindow(controller)
mw.show()
sys.exit(app.exec_())
|
[
"PySide2.QtWidgets.QApplication"
] |
[((226, 248), 'PySide2.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (238, 248), False, 'from PySide2.QtWidgets import QApplication\n')]
|
# coding: utf-8
import logging
import sys
from collections import Mapping, Set
from functools import wraps
from PyQt5.QtCore import QAbstractItemModel, QModelIndex, Qt
from PyQt5.QtWidgets import QApplication, QMainWindow, QTreeView
from mhw_armor_edit.utils import is_sequence
data = {
"Gravity": {
"DefaultGravity": 9.18,
"DefaultSpeedRate": 21,
},
"StageAdjust": {
"DefaultCapsuleOfs": {
"x": 0, "y": 0, "z": 0
},
"DefaultCapsuleHeight": 0,
"DefaultCapsuleRadius": 0
},
"Stamina": {
"InitValue": 0,
"AddOnValue": 0,
"MinValue": 0,
"TiredValue": 0,
"AutoRecover": 0,
"AutoMaxReduce": 0,
"AutoMaxReduceTime": 0,
"EscapeDashRate": 0,
"NoBattleRate": 0,
"ReduceRateLimit_Trg": 0,
"ReduceRateLimit_Time": 0,
},
"tags": {
"foo", "bar", "baz", "wusch"
}
}
class TreeNode:
class Unacceptable(Exception):
pass
ADAPTERS = []
def __init__(self, key, value, parent=None, row=0):
self.key = key
self.value = self.init(value)
self.parent = parent
self.row = row
@property
def has_children(self):
return is_sequence(self.value)
def __len__(self):
if self.has_children:
return len(self.value)
return 1
def __getitem__(self, idx):
if self.has_children:
return self.value[idx]
def init(self, value):
for adapter in self.ADAPTERS:
try:
return adapter(self, value)
except TreeNode.Unacceptable:
continue
return value
def data(self, col):
if col == 0:
return self.key
elif col == 1:
return self.value
@classmethod
def adapter(cls, accept):
def fn_collector(fn):
@wraps(fn)
def adapter_wrapper(parent, value):
if not accept(value):
raise TreeNode.Unacceptable()
return fn(cls, parent, value)
cls.ADAPTERS.append(adapter_wrapper)
return fn
return fn_collector
@TreeNode.adapter(is_sequence)
def sequence_adapter(cls, parent, value):
return [cls(i, value, parent, i)
for i, value in enumerate(value)]
@TreeNode.adapter(lambda value: isinstance(value, Mapping))
def mapping_adapter(cls, parent, value):
return [cls(key, value, parent, i)
for i, (key, value) in enumerate(value.items())]
@TreeNode.adapter(lambda value: isinstance(value, Set))
def set_adapter(cls, parent, value):
return [cls(i, value, parent, i)
for i, value in enumerate(value)]
class TreeModel(QAbstractItemModel):
COLUMN_HEADERS = ("Key", "Value")
def __init__(self, data, parent=None):
super().__init__(parent)
self.root = TreeNode("__root__", data, None)
def check_for_root(self, parent: QModelIndex):
return self.root if not parent.isValid() else parent.internalPointer()
def columnCount(self, parent: QModelIndex=None):
return 2
def headerData(self, section, orient, role=None):
if orient == Qt.Horizontal and role == Qt.DisplayRole:
return self.COLUMN_HEADERS[section]
def rowCount(self, parent: QModelIndex):
node = self.check_for_root(parent)
return len(node)
def index(self, row, col, parent: QModelIndex):
node = self.check_for_root(parent)
child = node[row]
return self.createIndex(row, col, child) if child else QModelIndex()
def parent(self, index: QModelIndex):
if not index.isValid():
return QModelIndex()
child = index.internalPointer()
parent = child.parent
if parent is self.root:
return QModelIndex()
return self.createIndex(parent.row, 0, parent)
def hasChildren(self, parent: QModelIndex):
node = self.check_for_root(parent)
return node is not None and node.has_children
def data(self, index: QModelIndex, role):
if index.isValid() and role == Qt.DisplayRole:
node = index.internalPointer()
return node.data(index.column())
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.setWindowTitle("Treeview for nested dict/list")
self.setGeometry(300, 300, 600, 800)
tree_view = QTreeView()
tree_view.setModel(TreeModel(data))
self.setCentralWidget(tree_view)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG,
format="%(levelname)s %(message)s")
app = QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
|
[
"PyQt5.QtCore.QModelIndex",
"logging.basicConfig",
"mhw_armor_edit.utils.is_sequence",
"PyQt5.QtWidgets.QTreeView",
"functools.wraps",
"PyQt5.QtWidgets.QApplication"
] |
[((4610, 4686), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(levelname)s %(message)s"""'}), "(level=logging.DEBUG, format='%(levelname)s %(message)s')\n", (4629, 4686), False, 'import logging\n'), ((4721, 4743), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (4733, 4743), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QTreeView\n'), ((1256, 1279), 'mhw_armor_edit.utils.is_sequence', 'is_sequence', (['self.value'], {}), '(self.value)\n', (1267, 1279), False, 'from mhw_armor_edit.utils import is_sequence\n'), ((4480, 4491), 'PyQt5.QtWidgets.QTreeView', 'QTreeView', ([], {}), '()\n', (4489, 4491), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QTreeView\n'), ((1918, 1927), 'functools.wraps', 'wraps', (['fn'], {}), '(fn)\n', (1923, 1927), False, 'from functools import wraps\n'), ((3622, 3635), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (3633, 3635), False, 'from PyQt5.QtCore import QAbstractItemModel, QModelIndex, Qt\n'), ((3730, 3743), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (3741, 3743), False, 'from PyQt5.QtCore import QAbstractItemModel, QModelIndex, Qt\n'), ((3865, 3878), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (3876, 3878), False, 'from PyQt5.QtCore import QAbstractItemModel, QModelIndex, Qt\n')]
|
from abc import ABCMeta
from datetime import datetime
from typing import NewType
from openstacktenantcleaner.external.hgicommon.models import Model
OpenstackIdentifier = NewType("OpenstackIdentifier", str)
class OpenstackCredentials(Model):
"""
Credentials used to login to OpenStack.
"""
def __init__(self, auth_url: str, tenant: str, username: str, password: str):
"""
TODO
:param auth_url:
:param username:
:param password:
"""
self.auth_url = auth_url
self.tenant = tenant
self.username = username
self.password = password
class Timestamped(Model, metaclass=ABCMeta):
"""
Timestamps.
"""
def __init__(self, created_at: datetime=None, updated_at: datetime=None):
self.created_at = created_at
self.updated_at = updated_at
class OpenstackItem(Model, metaclass=ABCMeta):
"""
An item in OpenStack.
"""
def __init__(self, identifier: OpenstackIdentifier=None, name: str=None, **kwargs):
super().__init__(**kwargs)
self.identifier = identifier
self.name = name
class OpenstackKeypair(OpenstackItem):
"""
A key-pair in OpenStack.
"""
def __init__(self, fingerprint: str=None, **kwargs):
super().__init__(**kwargs)
self.fingerprint = fingerprint
class OpenstackInstance(OpenstackItem, Timestamped):
"""
An instance on OpenStack.
"""
def __init__(self, image: str=None, key_name: str=None, **kwargs):
super().__init__(**kwargs)
self.image = image
self.key_name = key_name
class OpenstackImage(OpenstackItem, Timestamped):
"""
An image on OpenStack.
"""
def __init__(self, protected: bool=None, **kwargs):
super().__init__(**kwargs)
self.protected = protected
|
[
"typing.NewType"
] |
[((172, 207), 'typing.NewType', 'NewType', (['"""OpenstackIdentifier"""', 'str'], {}), "('OpenstackIdentifier', str)\n", (179, 207), False, 'from typing import NewType\n')]
|
import pandas as pd
# variable holding the amount of rows you want
amount = 10
# create a Pandas DataFrame from Sunspots.csv
sunspots_df = pd.read_csv("Sunspots.csv")
# get the most recent data by selecting the last 10 rows
recent_sunspots = sunspots_df.tail(amount)
# calculate the mean
avg = sum(recent_sunspots["Monthly Mean Total Sunspot Number"]) / len(recent_sunspots["Monthly Mean Total Sunspot Number"])
# use DataFrame method to produce mean
total_mean = recent_sunspots["Monthly Mean Total Sunspot Number"].mean()
# create lists for adding a mean and level column to the DataFrame
mean_list = []
level = []
# iterate through list and append mean
for i in range(amount):
mean_list.append(total_mean)
# iterate through "Monthly Mean Total Sunspot Number" column of the DataFrame
for x in recent_sunspots["Monthly Mean Total Sunspot Number"]:
# add conditionals
if x > total_mean:
level.append("High")
if x < total_mean:
level.append("Low")
if x == total_mean:
level.append("Flat")
# insert lists as columns into the DataFrame
recent_sunspots.insert(3, "level", level)
recent_sunspots.insert(4, "mean", mean_list)
# print results
print(recent_sunspots)
# get min and max values
print(recent_sunspots["Monthly Mean Total Sunspot Number"].min())
print(recent_sunspots["Monthly Mean Total Sunspot Number"].max())
# convert to dictionary and assign it to a variable
recent_sunspots_dict = recent_sunspots.to_dict()
print(recent_sunspots_dict)
# inspect the DataFrame more
print(recent_sunspots.info())
|
[
"pandas.read_csv"
] |
[((141, 168), 'pandas.read_csv', 'pd.read_csv', (['"""Sunspots.csv"""'], {}), "('Sunspots.csv')\n", (152, 168), True, 'import pandas as pd\n')]
|
#!/home/ivan/.virtualenvs/yagolabelfetcher/bin/python
import re
def _split_camelcase(string):
_camel_case_regex = re.compile(r"([A-Z])")
_token_list = _camel_case_regex.split(string)
_word_list = [_token_list[0]]
for i, _ in enumerate(_token_list):
if i % 2 == 1:
word = "".join([_token_list[i], _token_list[i + 1]])
_word_list.append(word)
return " ".join(_word_list)
def _split_underscore(string):
return " ".join(string.split("_")[1:])
def _generate_label(string):
label = _split_underscore(string)
if label == "":
label = _split_camelcase(string)
if label == "":
return string
return label
subjects = []
with open("subjects.txt", "rU") as f:
subjects = f.read().splitlines()
labels = []
with open("labels-complete", "rU") as f:
labels = f.read().splitlines()
objects = []
with open("objects-no-namespace.txt", "rU") as f:
objects = f.read().splitlines()
subjects_length = len(subjects)
for i in range(0, subjects_length):
if labels[i] == "":
labels[i] = _generate_label(objects[i])
for i in range(0, subjects_length):
csv_string = '%s;separator;%s' % (subjects[i], labels[i])
print(csv_string)
|
[
"re.compile"
] |
[((247, 268), 're.compile', 're.compile', (['"""([A-Z])"""'], {}), "('([A-Z])')\n", (257, 268), False, 'import re\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import pandas as pd
import click
from sklearn.preprocessing import StandardScaler
@click.command()
@click.option("--training", type=float, default=0.7)
@click.option("--validation", type=float, default=0.2)
@click.option("--test", type=float, default=0.1)
def main(training: float, validation: float, test: float):
if training + validation + test == 1:
raise Exception("Dataset ratio error!")
# read dataset
ifn = '../../data/processed/climate_1hour.parquet'
if not os.path.isfile(ifn):
raise Exception(f"There is no file: {ifn}")
df = pd.read_parquet(ifn)
# divide dataset
columns_indices = {name: i for i, name in enumerate(df.columns)}
n = df.shape[0]
train_df = df.iloc[:int(n*training)]
valid_df = df.iloc[int(n*training):int(n*(training+validation))]
test_df = df.iloc[int(n*(training+validation)):]
num_features = df.shape[1]
# normalize data
scaler = StandardScaler()
scaler.fit(train_df)
normalized_train_df = train_df.copy()
normalized_train_df.loc[:, :] = scaler.transform(train_df)
normalized_valid_df = valid_df.copy()
normalized_valid_df.loc[:, :] = scaler.transform(valid_df)
normalized_test_df = test_df.copy()
normalized_test_df.loc[:, :] = scaler.transform(test_df)
ofn = '../../data/processed/train.parquet'
normalized_train_df.to_parquet(ofn)
ofn = '../../data/processed/valid.parquet'
normalized_valid_df.to_parquet(ofn)
ofn = '../../data/processed/test.parquet'
normalized_test_df.to_parquet(ofn)
if __name__ == '__main__':
main()
|
[
"sklearn.preprocessing.StandardScaler",
"click.option",
"click.command",
"os.path.isfile",
"pandas.read_parquet"
] |
[((142, 157), 'click.command', 'click.command', ([], {}), '()\n', (155, 157), False, 'import click\n'), ((159, 210), 'click.option', 'click.option', (['"""--training"""'], {'type': 'float', 'default': '(0.7)'}), "('--training', type=float, default=0.7)\n", (171, 210), False, 'import click\n'), ((212, 265), 'click.option', 'click.option', (['"""--validation"""'], {'type': 'float', 'default': '(0.2)'}), "('--validation', type=float, default=0.2)\n", (224, 265), False, 'import click\n'), ((267, 314), 'click.option', 'click.option', (['"""--test"""'], {'type': 'float', 'default': '(0.1)'}), "('--test', type=float, default=0.1)\n", (279, 314), False, 'import click\n'), ((632, 652), 'pandas.read_parquet', 'pd.read_parquet', (['ifn'], {}), '(ifn)\n', (647, 652), True, 'import pandas as pd\n'), ((996, 1012), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1010, 1012), False, 'from sklearn.preprocessing import StandardScaler\n'), ((550, 569), 'os.path.isfile', 'os.path.isfile', (['ifn'], {}), '(ifn)\n', (564, 569), False, 'import os\n')]
|
from django.contrib import admin
from .models import *
class ParticipantesInline(admin.TabularInline):
model = Participantes
extra = 1
class NivelConocimientoInline(admin.TabularInline):
model = NivelConocimiento
extra = 1
class FotosMediosInline(admin.TabularInline):
model = FotosMedios
extra = 1
class MediosFortalecimientoAdmin(admin.ModelAdmin):
filter_horizontal = ['tipo_medio','temas','grupos_metas','papel_simas']
inlines = [ParticipantesInline,NivelConocimientoInline,
FotosMediosInline]
list_display = ['nombre','get_medios','get_temas']
list_filter = ['papel_simas','grupos_metas']
search_fields = ['nombre']
admin.site.register(MediosFortalecimiento, MediosFortalecimientoAdmin)
admin.site.register(TiposMedios)
admin.site.register(TemasAbordan)
admin.site.register(GruposMetas)
admin.site.register(PapelSimas)
admin.site.register(PersonasResultados)
admin.site.register(Participantes)
admin.site.register(NivelConocimiento)
admin.site.register(FotosMedios)
|
[
"django.contrib.admin.site.register"
] |
[((642, 712), 'django.contrib.admin.site.register', 'admin.site.register', (['MediosFortalecimiento', 'MediosFortalecimientoAdmin'], {}), '(MediosFortalecimiento, MediosFortalecimientoAdmin)\n', (661, 712), False, 'from django.contrib import admin\n'), ((713, 745), 'django.contrib.admin.site.register', 'admin.site.register', (['TiposMedios'], {}), '(TiposMedios)\n', (732, 745), False, 'from django.contrib import admin\n'), ((746, 779), 'django.contrib.admin.site.register', 'admin.site.register', (['TemasAbordan'], {}), '(TemasAbordan)\n', (765, 779), False, 'from django.contrib import admin\n'), ((780, 812), 'django.contrib.admin.site.register', 'admin.site.register', (['GruposMetas'], {}), '(GruposMetas)\n', (799, 812), False, 'from django.contrib import admin\n'), ((813, 844), 'django.contrib.admin.site.register', 'admin.site.register', (['PapelSimas'], {}), '(PapelSimas)\n', (832, 844), False, 'from django.contrib import admin\n'), ((845, 884), 'django.contrib.admin.site.register', 'admin.site.register', (['PersonasResultados'], {}), '(PersonasResultados)\n', (864, 884), False, 'from django.contrib import admin\n'), ((885, 919), 'django.contrib.admin.site.register', 'admin.site.register', (['Participantes'], {}), '(Participantes)\n', (904, 919), False, 'from django.contrib import admin\n'), ((920, 958), 'django.contrib.admin.site.register', 'admin.site.register', (['NivelConocimiento'], {}), '(NivelConocimiento)\n', (939, 958), False, 'from django.contrib import admin\n'), ((959, 991), 'django.contrib.admin.site.register', 'admin.site.register', (['FotosMedios'], {}), '(FotosMedios)\n', (978, 991), False, 'from django.contrib import admin\n')]
|
# from vimba import *
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWidgets import QLabel
from PyQt5.QtWidgets import QWidget
from PyQt5 import uic
qtcreator_file = "C:\\Users\\Andrew\\Documents\\PhDSantiago\\VimbaCameraGUI\\GUI_vimbaCamera.ui" # Enter file here.
# Ui_MainWindow, QtBaseClass = uic.loadUiType(qtcreator_file)
'''From the example below'''
app = QApplication(sys.argv)
window = QWidget()
window.setWindowTitle('PyQt5 App')
window.setGeometry(100, 100, 280, 80)
window.move(60, 15)
helloMsg = QLabel('<h1>Hello World!</h1>', parent=window)
helloMsg.move(60, 15)
window.show()
sys.exit(app.exec_())
'''This is an outline of code for the QT designer. Just have to put the file path below to the ui.'''
# import sys
# from PyQt5 import QtWidgets, uic
# qtcreator_file = "C:\\Users\\Andrew\\Documents\\PhDSantiago\\VimbaCameraGUI\\GUI_vimbaCamera.ui" # Enter file here.
# Ui_MainWindow, QtBaseClass = uic.loadUiType(qtcreator_file)
# class MyApp(QtWidgets.QMainWindow, Ui_MainWindow):
# def __init__(self):
# QtWidgets.QMainWindow.__init__(self)
# Ui_MainWindow.__init__(self)
# self.setupUi(self)
# if __name__ == "__main__":
# app = QtWidgets.QApplication(sys.argv)
# window = MyWindow()
# window.show()
# sys.exit(app.exec_())
|
[
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QWidget"
] |
[((385, 407), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (397, 407), False, 'from PyQt5.QtWidgets import QApplication\n'), ((418, 427), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (425, 427), False, 'from PyQt5.QtWidgets import QWidget\n'), ((532, 578), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""<h1>Hello World!</h1>"""'], {'parent': 'window'}), "('<h1>Hello World!</h1>', parent=window)\n", (538, 578), False, 'from PyQt5.QtWidgets import QLabel\n')]
|
import codecs
# Extract wordclass type from HTML file
f = codecs.open('KENCOLLO2', 'r', 'utf-8')
categories = []
for line in f:
start = line.find("【")
if start > 0:
end = line.rfind("】")
category = line[start:end+1]
if category not in categories:
print(category)
categories.append(category)
f.close()
print(categories)
|
[
"codecs.open"
] |
[((59, 97), 'codecs.open', 'codecs.open', (['"""KENCOLLO2"""', '"""r"""', '"""utf-8"""'], {}), "('KENCOLLO2', 'r', 'utf-8')\n", (70, 97), False, 'import codecs\n')]
|
import os
import shutil
import subprocess
if __name__ == "__main__":
root = os.path.dirname(os.path.abspath(__file__))
shutil.rmtree(os.path.join(root,"bin"), ignore_errors=True)
shutil.rmtree(os.path.join(root,"include"), ignore_errors=True)
if os.path.exists(os.path.join(root,".built")):
os.remove(os.path.join(root,".built"))
|
[
"os.path.abspath",
"os.path.join"
] |
[((97, 122), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (112, 122), False, 'import os\n'), ((142, 167), 'os.path.join', 'os.path.join', (['root', '"""bin"""'], {}), "(root, 'bin')\n", (154, 167), False, 'import os\n'), ((206, 235), 'os.path.join', 'os.path.join', (['root', '"""include"""'], {}), "(root, 'include')\n", (218, 235), False, 'import os\n'), ((278, 306), 'os.path.join', 'os.path.join', (['root', '""".built"""'], {}), "(root, '.built')\n", (290, 306), False, 'import os\n'), ((326, 354), 'os.path.join', 'os.path.join', (['root', '""".built"""'], {}), "(root, '.built')\n", (338, 354), False, 'import os\n')]
|
#
# error.py
# CloudKitPy
#
# Created by <NAME> on 01/05/2016.
# Copyright (c) 2016 <NAME> - Pig on a Hill Productions.
#
# !/usr/bin/env python
from datatypes import ZoneID
from helpers import parse
class CKError:
is_error = False
ck_error_code = None
is_server_error = False
server_error_code = None
reason = None
retry_after = None
uuid = None
redirect_url = None
record_name = None
subscription_id = None
zone_id = None
payload = None
status_code = None
status_code_reason = None
request = None
def __init__(self, json, status_code, request, payload):
if json is not None:
self.ck_error_code = parse(json, 'ckErrorCode')
self.is_error = self.ck_error_code is not None
self.server_error_code = parse(json, 'serverErrorCode')
self.is_server_error = self.server_error_code is not None
self.reason = parse(json, 'reason')
self.retry_after = parse(json, 'retryAfter')
self.uuid = parse(json, 'uuid')
self.redirect_url = parse(json, 'redirectURL')
self.record_name = parse(json, 'recordName')
self.subscription_id = parse(json, 'subscriptionID')
zone_id_json = parse(json, 'zoneID')
if zone_id_json is not None:
zone_id = ZoneID(zone_id_json)
self.zone_id = zone_id
self.payload = payload
self.status_code = status_code
self.status_code_reason = self.__error_reason_from_satus_code(
status_code
)
if status_code != 200 and self.server_error_code is None:
self.server_error_code = self.__error_from_satus_code(status_code)
if self.reason is None:
self.reason = self.status_code_reason
self.request = request
def __error_from_satus_code(self, status_code): # noqa
if status_code is None:
return None
elif status_code == 403:
return CKError.ACCESS_DENIED
elif status_code == 400:
return CKError.BAD_REQUEST
elif status_code == 401:
return CKError.AUTHENTICATION_FAILED
elif status_code == 404:
return CKError.NOT_FOUND
elif status_code == 409:
return CKError.EXISTS
elif status_code == 412:
return CKError.VALIDATING_REFERENCE_ERROR
elif status_code == 413:
return CKError.QUOTA_EXCEEDED
elif status_code == 421:
return CKError.AUTHENTICATION_REQUIRED
elif status_code == 429:
return CKError.THROTTLED
elif status_code == 500:
return CKError.INTERNAL_ERROR
elif status_code == 503:
return CKError.TRY_AGAIN_LATER
else:
return None
def __error_reason_from_satus_code(self, status_code): # noqa
if status_code is None:
return None
elif status_code == 403:
return CKError.ACCESS_DENIED_REASON
elif status_code == 400:
return CKError.BAD_REQUEST_REASON
elif status_code == 401:
return CKError.AUTHENTICATION_FAILED_REASON
elif status_code == 404:
return CKError.NOT_FOUND_REASON
elif status_code == 409:
return CKError.EXISTS_REASON
elif status_code == 412:
return CKError.VALIDATING_REFERENCE_ERROR_REASON
elif status_code == 413:
return CKError.QUOTA_EXCEEDED_REASON
elif status_code == 421:
return CKError.AUTHENTICATION_REQUIRED_REASON
elif status_code == 429:
return CKError.THROTTLED_REASON
elif status_code == 500:
return CKError.INTERNAL_ERROR_REASON
elif status_code == 503:
return CKError.TRY_AGAIN_LATER_REASON
else:
return None
ACCESS_DENIED = 'ACCESS_DENIED'
ATOMIC_ERROR = 'ATOMIC_ERROR'
AUTHENTICATION_FAILED = 'AUTHENTICATION_FAILED'
AUTHENTICATION_REQUIRED = 'AUTHENTICATION_REQUIRED'
BAD_REQUEST = 'BAD_REQUEST'
CONFLICT = 'CONFLICT'
EXISTS = 'EXISTS'
INTERNAL_ERROR = 'INTERNAL_ERROR'
NOT_FOUND = 'NOT_FOUND'
QUOTA_EXCEEDED = 'QUOTA_EXCEEDED'
SIGN_IN_FAILED = 'SIGN_IN_FAILED'
THROTTLED = 'THROTTLED'
TRY_AGAIN_LATER = 'TRY_AGAIN_LATER'
VALIDATING_REFERENCE_ERROR = 'VALIDATING_REFERENCE_ERROR'
UNIQUE_FIELD_ERROR = 'UNIQUE_FIELD_ERROR'
ZONE_NOT_FOUND = 'ZONE_NOT_FOUND'
UNKNOWN_ERROR = 'UNKNOWN_ERROR'
NETWORK_ERROR = 'NETWORK_ERROR'
SERVICE_UNAVAILABLE = 'SERVICE_UNAVAILABLE'
INVALID_ARGUMENTS = 'INVALID_ARGUMENTS'
UNEXPECTED_SERVER_RESPONSE = 'UNEXPECTED_SERVER_RESPONSE'
CONFIGURATION_ERROR = 'CONFIGURATION_ERROR'
ACCESS_DENIED_REASON = 'You don\'t have permission to access the endpoint, record, zone, or database.' # noqa
ATOMIC_ERROR_REASON = 'An atomic batch operation failed.'
AUTHENTICATION_FAILED_REASON = 'Authentication was rejected.'
AUTHENTICATION_REQUIRED_REASON = 'The request requires authentication but none was provided.' # noqa
BAD_REQUEST_REASON = 'The request was not valid.'
CONFLICT_REASON = 'The recordChangeTag value expired. (Retry the request with the latest tag.)' # noqa
EXISTS_REASON = 'The resource that you attempted to create already exists.'
INTERNAL_ERROR_REASON = 'An internal error occurred.'
NOT_FOUND_REASON = 'The resource was not found.'
QUOTA_EXCEEDED_REASON = 'If accessing the public database, you exceeded the app\'s quota. If accessing the private database, you exceeded the user\'s iCloud quota.' # noqa
THROTTLED_REASON = 'The request was throttled. Try the request again later.' # noqa
TRY_AGAIN_LATER_REASON = 'An internal error occurred. Try the request again.' # noqa
VALIDATING_REFERENCE_ERROR_REASON = 'The request violates a validating reference constraint.' # noqa
ZONE_NOT_FOUND_REASON = 'The zone specified in the request was not found.'
|
[
"helpers.parse",
"datatypes.ZoneID"
] |
[((688, 714), 'helpers.parse', 'parse', (['json', '"""ckErrorCode"""'], {}), "(json, 'ckErrorCode')\n", (693, 714), False, 'from helpers import parse\n'), ((811, 841), 'helpers.parse', 'parse', (['json', '"""serverErrorCode"""'], {}), "(json, 'serverErrorCode')\n", (816, 841), False, 'from helpers import parse\n'), ((938, 959), 'helpers.parse', 'parse', (['json', '"""reason"""'], {}), "(json, 'reason')\n", (943, 959), False, 'from helpers import parse\n'), ((991, 1016), 'helpers.parse', 'parse', (['json', '"""retryAfter"""'], {}), "(json, 'retryAfter')\n", (996, 1016), False, 'from helpers import parse\n'), ((1041, 1060), 'helpers.parse', 'parse', (['json', '"""uuid"""'], {}), "(json, 'uuid')\n", (1046, 1060), False, 'from helpers import parse\n'), ((1093, 1119), 'helpers.parse', 'parse', (['json', '"""redirectURL"""'], {}), "(json, 'redirectURL')\n", (1098, 1119), False, 'from helpers import parse\n'), ((1151, 1176), 'helpers.parse', 'parse', (['json', '"""recordName"""'], {}), "(json, 'recordName')\n", (1156, 1176), False, 'from helpers import parse\n'), ((1212, 1241), 'helpers.parse', 'parse', (['json', '"""subscriptionID"""'], {}), "(json, 'subscriptionID')\n", (1217, 1241), False, 'from helpers import parse\n'), ((1269, 1290), 'helpers.parse', 'parse', (['json', '"""zoneID"""'], {}), "(json, 'zoneID')\n", (1274, 1290), False, 'from helpers import parse\n'), ((1358, 1378), 'datatypes.ZoneID', 'ZoneID', (['zone_id_json'], {}), '(zone_id_json)\n', (1364, 1378), False, 'from datatypes import ZoneID\n')]
|
import numpy as np
import megengine as mge
import megengine.functional as F
from common import se3, so3
def compute_losses(data_batch, endpoints, params):
loss = {}
# compute losses
if params.loss_type == "omnet":
num_iter = len(endpoints["all_pose_pair"])
for i in range(num_iter):
# mask loss
src_cls_pair, ref_cls_pair = endpoints["all_src_cls_pair"][i], endpoints["all_ref_cls_pair"][i]
src_cls = F.nn.frequency_weighted_cross_entropy(src_cls_pair[1], src_cls_pair[0], weight=mge.tensor([0.7, 0.3]))
ref_cls = F.nn.frequency_weighted_cross_entropy(ref_cls_pair[1], ref_cls_pair[0], weight=mge.tensor([0.7, 0.3]))
loss["cls_{}".format(i)] = (src_cls + ref_cls) / 2.0
# reg loss
pose_pair = endpoints["all_pose_pair"][i]
loss["quat_{}".format(i)] = F.nn.l1_loss(pose_pair[1][:, :4], pose_pair[0][:, :4]) * params.loss_alpha1
loss["translate_{}".format(i)] = F.nn.square_loss(pose_pair[1][:, 4:], pose_pair[0][:, 4:]) * params.loss_alpha2
# total loss
total_losses = []
for k in loss:
total_losses.append(loss[k])
loss["total"] = F.sum(F.concat(total_losses))
else:
raise NotImplementedError
return loss
def compute_metrics(data_batch, endpoints, params):
metrics = {}
gt_transforms = endpoints["transform_pair"][0]
pred_transforms = endpoints["transform_pair"][1]
# Euler angles, Individual translation errors (Deep Closest Point convention)
if "prnet" in params.transform_type:
r_gt_euler_deg = so3.mge_dcm2euler(gt_transforms[:, :3, :3], seq="zyx")
r_pred_euler_deg = so3.mge_dcm2euler(pred_transforms[:, :3, :3], seq="zyx")
else:
r_gt_euler_deg = so3.mge_dcm2euler(gt_transforms[:, :3, :3], seq="xyz")
r_pred_euler_deg = so3.mge_dcm2euler(pred_transforms[:, :3, :3], seq="xyz")
t_gt = gt_transforms[:, :3, 3]
t_pred = pred_transforms[:, :3, 3]
r_mse = F.mean((r_gt_euler_deg - r_pred_euler_deg)**2, axis=1)
r_mae = F.mean(F.abs(r_gt_euler_deg - r_pred_euler_deg), axis=1)
t_mse = F.mean((t_gt - t_pred)**2, axis=1)
t_mae = F.mean(F.abs(t_gt - t_pred), axis=1)
r_mse = F.mean(r_mse)
t_mse = F.mean(t_mse)
r_mae = F.mean(r_mae)
t_mae = F.mean(t_mae)
# Rotation, translation errors (isotropic, i.e. doesn"t depend on error
# direction, which is more representative of the actual error)
concatenated = se3.mge_concatenate(se3.mge_inverse(gt_transforms), pred_transforms)
rot_trace = concatenated[:, 0, 0] + concatenated[:, 1, 1] + concatenated[:, 2, 2]
residual_rotdeg = F.acos(F.clip(0.5 * (rot_trace - 1), -1.0, 1.0)) * 180.0 / np.pi
residual_transmag = F.norm(concatenated[:, :, 3], axis=-1)
err_r = F.mean(residual_rotdeg)
err_t = F.mean(residual_transmag)
# weighted score of isotropic errors
score = err_r * 0.01 + err_t
metrics = {"R_MSE": r_mse, "R_MAE": r_mae, "t_MSE": t_mse, "t_MAE": t_mae, "Err_R": err_r, "Err_t": err_t, "score": score}
# metrics = utils.tensor_mge(metrics, check_on=False)
return metrics
|
[
"megengine.tensor",
"megengine.functional.nn.l1_loss",
"megengine.functional.clip",
"common.se3.mge_inverse",
"megengine.functional.nn.square_loss",
"megengine.functional.mean",
"megengine.functional.norm",
"megengine.functional.abs",
"common.so3.mge_dcm2euler",
"megengine.functional.concat"
] |
[((2027, 2083), 'megengine.functional.mean', 'F.mean', (['((r_gt_euler_deg - r_pred_euler_deg) ** 2)'], {'axis': '(1)'}), '((r_gt_euler_deg - r_pred_euler_deg) ** 2, axis=1)\n', (2033, 2083), True, 'import megengine.functional as F\n'), ((2163, 2199), 'megengine.functional.mean', 'F.mean', (['((t_gt - t_pred) ** 2)'], {'axis': '(1)'}), '((t_gt - t_pred) ** 2, axis=1)\n', (2169, 2199), True, 'import megengine.functional as F\n'), ((2260, 2273), 'megengine.functional.mean', 'F.mean', (['r_mse'], {}), '(r_mse)\n', (2266, 2273), True, 'import megengine.functional as F\n'), ((2286, 2299), 'megengine.functional.mean', 'F.mean', (['t_mse'], {}), '(t_mse)\n', (2292, 2299), True, 'import megengine.functional as F\n'), ((2312, 2325), 'megengine.functional.mean', 'F.mean', (['r_mae'], {}), '(r_mae)\n', (2318, 2325), True, 'import megengine.functional as F\n'), ((2338, 2351), 'megengine.functional.mean', 'F.mean', (['t_mae'], {}), '(t_mae)\n', (2344, 2351), True, 'import megengine.functional as F\n'), ((2781, 2819), 'megengine.functional.norm', 'F.norm', (['concatenated[:, :, 3]'], {'axis': '(-1)'}), '(concatenated[:, :, 3], axis=-1)\n', (2787, 2819), True, 'import megengine.functional as F\n'), ((2832, 2855), 'megengine.functional.mean', 'F.mean', (['residual_rotdeg'], {}), '(residual_rotdeg)\n', (2838, 2855), True, 'import megengine.functional as F\n'), ((2868, 2893), 'megengine.functional.mean', 'F.mean', (['residual_transmag'], {}), '(residual_transmag)\n', (2874, 2893), True, 'import megengine.functional as F\n'), ((1627, 1681), 'common.so3.mge_dcm2euler', 'so3.mge_dcm2euler', (['gt_transforms[:, :3, :3]'], {'seq': '"""zyx"""'}), "(gt_transforms[:, :3, :3], seq='zyx')\n", (1644, 1681), False, 'from common import se3, so3\n'), ((1709, 1765), 'common.so3.mge_dcm2euler', 'so3.mge_dcm2euler', (['pred_transforms[:, :3, :3]'], {'seq': '"""zyx"""'}), "(pred_transforms[:, :3, :3], seq='zyx')\n", (1726, 1765), False, 'from common import se3, so3\n'), ((1801, 1855), 
'common.so3.mge_dcm2euler', 'so3.mge_dcm2euler', (['gt_transforms[:, :3, :3]'], {'seq': '"""xyz"""'}), "(gt_transforms[:, :3, :3], seq='xyz')\n", (1818, 1855), False, 'from common import se3, so3\n'), ((1883, 1939), 'common.so3.mge_dcm2euler', 'so3.mge_dcm2euler', (['pred_transforms[:, :3, :3]'], {'seq': '"""xyz"""'}), "(pred_transforms[:, :3, :3], seq='xyz')\n", (1900, 1939), False, 'from common import se3, so3\n'), ((2101, 2141), 'megengine.functional.abs', 'F.abs', (['(r_gt_euler_deg - r_pred_euler_deg)'], {}), '(r_gt_euler_deg - r_pred_euler_deg)\n', (2106, 2141), True, 'import megengine.functional as F\n'), ((2217, 2237), 'megengine.functional.abs', 'F.abs', (['(t_gt - t_pred)'], {}), '(t_gt - t_pred)\n', (2222, 2237), True, 'import megengine.functional as F\n'), ((2535, 2565), 'common.se3.mge_inverse', 'se3.mge_inverse', (['gt_transforms'], {}), '(gt_transforms)\n', (2550, 2565), False, 'from common import se3, so3\n'), ((1219, 1241), 'megengine.functional.concat', 'F.concat', (['total_losses'], {}), '(total_losses)\n', (1227, 1241), True, 'import megengine.functional as F\n'), ((877, 931), 'megengine.functional.nn.l1_loss', 'F.nn.l1_loss', (['pose_pair[1][:, :4]', 'pose_pair[0][:, :4]'], {}), '(pose_pair[1][:, :4], pose_pair[0][:, :4])\n', (889, 931), True, 'import megengine.functional as F\n'), ((998, 1056), 'megengine.functional.nn.square_loss', 'F.nn.square_loss', (['pose_pair[1][:, 4:]', 'pose_pair[0][:, 4:]'], {}), '(pose_pair[1][:, 4:], pose_pair[0][:, 4:])\n', (1014, 1056), True, 'import megengine.functional as F\n'), ((2699, 2739), 'megengine.functional.clip', 'F.clip', (['(0.5 * (rot_trace - 1))', '(-1.0)', '(1.0)'], {}), '(0.5 * (rot_trace - 1), -1.0, 1.0)\n', (2705, 2739), True, 'import megengine.functional as F\n'), ((546, 568), 'megengine.tensor', 'mge.tensor', (['[0.7, 0.3]'], {}), '([0.7, 0.3])\n', (556, 568), True, 'import megengine as mge\n'), ((671, 693), 'megengine.tensor', 'mge.tensor', (['[0.7, 0.3]'], {}), '([0.7, 0.3])\n', (681, 693), 
True, 'import megengine as mge\n')]
|
import json
import logging
from eth_utils.hexadecimal import is_hex
import base64
from service_client.generic import GenericServiceClient
from tcf_connector.work_order_interface import WorkOrderInterface
from tcf_connector.utils import create_jrpc_response
from utils.tcf_types import JsonRpcErrorCode
# Configure root logging once at import time; all messages from this module
# (validation failures below) use this timestamped format.
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO)
class WorkOrderJRPCImpl(WorkOrderInterface):
    """JSON-RPC client implementation of the work order interface.

    Validates work-order requests locally (field presence, hex and base64
    formats) and forwards well-formed requests to the TCF JSON-RPC listener.
    """

    def __init__(self, config):
        # HTTP client bound to the TCF listener's JSON-RPC URI.
        self.__uri_client = GenericServiceClient(config["tcf"]["json_rpc_uri"])
        # Field name -> mandatory flag. True means the field must be present
        # in the request params, False means it is optional.
        self.__param_key_map = {
            "responseTimeoutMSecs": True,
            "payloadFormat": False,
            "resultUri": False,
            "notifyUri": False,
            "workOrderId": True,
            "workerId": True,
            "workloadId": True,
            "requesterId": True,
            "workerEncryptionKey": False,
            "dataEncryptionAlgorithm": False,
            "encryptedSessionKey": True,
            "sessionKeyIv": False,
            "requesterNonce": True,
            "encryptedRequestHash": True,
            "requesterSignature": False,
            "verifyingKey": True
        }
        # Same convention for each element of inData / outData.
        self.__data_key_map = {
            "index": True,
            "dataHash": False,
            "data": True,
            "encryptedDataEncryptionKey": True,
            "iv": True
        }

    @staticmethod
    def __check_fields(entry, key_map, label):
        """Check one dict against *key_map*.

        Returns an error string when *entry* contains an unknown field or is
        missing a mandatory one, otherwise None. *label* ("", "in data " or
        "out data ") is spliced into the error message.
        """
        present = set(entry.keys())
        for key in present:
            if key not in key_map:
                message = "Invalid {}parameter {}".format(label, key)
                logging.error(message)
                return message
        for field, mandatory in key_map.items():
            if mandatory and field not in present:
                message = "Missing {}parameter {}".format(label, field)
                logging.error(message)
                return message
        return None

    def __validate_parameters(self, params, in_data, out_data):
        """Validate params / in_data / out_data for unknown fields and for
        missing mandatory fields; return an error string or None."""
        error = self.__check_fields(params, self.__param_key_map, "")
        if error is not None:
            return error
        for data in in_data:
            error = self.__check_fields(data, self.__data_key_map, "in data ")
            if error is not None:
                return error
        for data in out_data:
            error = self.__check_fields(data, self.__data_key_map, "out data ")
            if error is not None:
                return error
        return None

    def __error_response(self, request_id, message):
        """Log *message* and wrap it in an INVALID_PARAMETER JSON-RPC error."""
        logging.error(message)
        return create_jrpc_response(
            request_id, JsonRpcErrorCode.INVALID_PARAMETER, message)

    def __validate_data_items(self, data_list, direction, request_id):
        """Format-check one inData/outData list (hex fields, base64 payload).

        *direction* is "in" or "out" (used only in error messages). Returns
        an error response for the first bad field, otherwise None.
        """
        for data in data_list:
            # dataHash is optional per __data_key_map; the previous version
            # indexed it unconditionally and raised KeyError when omitted.
            if "dataHash" in data and not is_hex(data["dataHash"]):
                return self.__error_response(
                    request_id,
                    "Invalid data hash of {} data".format(direction))
            if not is_hex(data["encryptedDataEncryptionKey"]):
                return self.__error_response(
                    request_id,
                    "Invalid Encryption key of {} data".format(direction))
            if not is_hex(data["iv"]):
                return self.__error_response(
                    request_id,
                    "Invalid initialization vector of {} data".format(direction))
            try:
                base64.b64decode(data["data"])
            except Exception:
                return self.__error_response(
                    request_id,
                    "Invalid base64 format of {} data".format(direction))
        return None

    def work_order_submit(self, params, in_data, out_data, id=None):
        """Validate and submit a WorkOrderSubmit JSON-RPC request.

        Returns the listener's response, or a JSON-RPC error response when
        any parameter fails validation.
        """
        is_valid = self.__validate_parameters(params, in_data, out_data)
        if is_valid is not None:
            return create_jrpc_response(
                id, JsonRpcErrorCode.INVALID_PARAMETER, is_valid)
        if not is_hex(params["workOrderId"]):
            return self.__error_response(id, "Invalid work order id")
        if not is_hex(params["workloadId"]):
            return self.__error_response(id, "Invalid work load id")
        if not is_hex(params["requesterId"]):
            return self.__error_response(id, "Invalid requester id")
        # workerEncryptionKey is optional; only validate it when supplied.
        # The previous version indexed it unconditionally (KeyError).
        if "workerEncryptionKey" in params and \
                not is_hex(params["workerEncryptionKey"]):
            return self.__error_response(id, "Invalid worker encryption key")
        for data_list, direction in ((in_data, "in"), (out_data, "out")):
            error = self.__validate_data_items(data_list, direction, id)
            if error is not None:
                return error
        json_rpc_request = {
            "jsonrpc": "2.0",
            "method": "WorkOrderSubmit",
            "id": id,
            "params":
            {
                "responseTimeoutMSecs": params["responseTimeoutMSecs"],
                # Optional fields use .get() so a missing key serializes as
                # null instead of raising KeyError.
                "payloadFormat": params.get("payloadFormat"),
                "resultUri": params.get("resultUri"),
                "notifyUri": params.get("notifyUri"),
                "workOrderId": params["workOrderId"],
                "workerId": params["workerId"],
                "workloadId": params["workloadId"],
                "requesterId": params["requesterId"],
                "workerEncryptionKey": params.get("workerEncryptionKey"),
                "dataEncryptionAlgorithm": params.get("dataEncryptionAlgorithm"),
                "encryptedSessionKey": params["encryptedSessionKey"],
                "sessionKeyIv": params.get("sessionKeyIv"),
                "requesterNonce": params["requesterNonce"],
                "encryptedRequestHash": params["encryptedRequestHash"],
                "requesterSignature": params.get("requesterSignature"),
                "inData": in_data,
                "outData": out_data
            }
        }
        return self.__uri_client._postmsg(json.dumps(json_rpc_request))

    def work_order_get_result(self, work_order_id, id=None):
        """Build and send a WorkOrderGetResult JSON-RPC request for
        *work_order_id*; returns the listener's response."""
        if not is_hex(work_order_id):
            return self.__error_response(id, "Invalid workOrder Id")
        json_rpc_request = {
            "jsonrpc": "2.0",
            "method": "WorkOrderGetResult",
            "id": id,
            "params": {
                "workOrderId": work_order_id
            }
        }
        return self.__uri_client._postmsg(json.dumps(json_rpc_request))
|
[
"logging.error",
"eth_utils.hexadecimal.is_hex",
"logging.basicConfig",
"json.dumps",
"base64.b64decode",
"tcf_connector.utils.create_jrpc_response",
"service_client.generic.GenericServiceClient"
] |
[((303, 398), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(levelname)s - %(message)s"""', 'level': 'logging.INFO'}), "(format='%(asctime)s - %(levelname)s - %(message)s',\n level=logging.INFO)\n", (322, 398), False, 'import logging\n'), ((501, 552), 'service_client.generic.GenericServiceClient', 'GenericServiceClient', (["config['tcf']['json_rpc_uri']"], {}), "(config['tcf']['json_rpc_uri'])\n", (521, 552), False, 'from service_client.generic import GenericServiceClient\n'), ((3766, 3836), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', 'is_valid'], {}), '(id, JsonRpcErrorCode.INVALID_PARAMETER, is_valid)\n', (3786, 3836), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((3872, 3901), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["params['workOrderId']"], {}), "(params['workOrderId'])\n", (3878, 3901), False, 'from eth_utils.hexadecimal import is_hex\n'), ((3915, 3953), 'logging.error', 'logging.error', (['"""Invalid work order id"""'], {}), "('Invalid work order id')\n", (3928, 3953), False, 'import logging\n'), ((3973, 4062), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid work order id"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid work order id')\n", (3993, 4062), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((4090, 4118), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["params['workloadId']"], {}), "(params['workloadId'])\n", (4096, 4118), False, 'from eth_utils.hexadecimal import is_hex\n'), ((4132, 4169), 'logging.error', 'logging.error', (['"""Invalid work load id"""'], {}), "('Invalid work load id')\n", (4145, 4169), False, 'import logging\n'), ((4189, 4277), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid work load id"""'], {}), "(id, 
JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid work load id')\n", (4209, 4277), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((4305, 4334), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["params['requesterId']"], {}), "(params['requesterId'])\n", (4311, 4334), False, 'from eth_utils.hexadecimal import is_hex\n'), ((4348, 4388), 'logging.error', 'logging.error', (['"""Invalid requester id id"""'], {}), "('Invalid requester id id')\n", (4361, 4388), False, 'import logging\n'), ((4408, 4499), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid requester id id"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid requester id id')\n", (4428, 4499), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((4527, 4564), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["params['workerEncryptionKey']"], {}), "(params['workerEncryptionKey'])\n", (4533, 4564), False, 'from eth_utils.hexadecimal import is_hex\n'), ((4578, 4624), 'logging.error', 'logging.error', (['"""Invalid worker encryption key"""'], {}), "('Invalid worker encryption key')\n", (4591, 4624), False, 'import logging\n'), ((4644, 4741), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid worker encryption key"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid worker encryption key')\n", (4664, 4741), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((8203, 8231), 'json.dumps', 'json.dumps', (['json_rpc_request'], {}), '(json_rpc_request)\n', (8213, 8231), False, 'import json\n'), ((8334, 8355), 'eth_utils.hexadecimal.is_hex', 'is_hex', (['work_order_id'], {}), '(work_order_id)\n', (8340, 8355), False, 'from eth_utils.hexadecimal import is_hex\n'), ((8369, 8406), 'logging.error', 'logging.error', (['"""Invalid workOrder Id"""'], {}), "('Invalid workOrder Id')\n", (8382, 8406), False, 'import logging\n'), 
((8426, 8514), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid workOrder Id"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid workOrder Id')\n", (8446, 8514), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((8792, 8820), 'json.dumps', 'json.dumps', (['json_rpc_request'], {}), '(json_rpc_request)\n', (8802, 8820), False, 'import json\n'), ((1833, 1875), 'logging.error', 'logging.error', (['"""Invalid parameter %s"""', 'key'], {}), "('Invalid parameter %s', key)\n", (1846, 1875), False, 'import logging\n'), ((2102, 2142), 'logging.error', 'logging.error', (['"""Missing parameter %s"""', 'k'], {}), "('Missing parameter %s', k)\n", (2115, 2142), False, 'import logging\n'), ((4802, 4826), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["data['dataHash']"], {}), "(data['dataHash'])\n", (4808, 4826), False, 'from eth_utils.hexadecimal import is_hex\n'), ((4844, 4889), 'logging.error', 'logging.error', (['"""Invalid data hash of in data"""'], {}), "('Invalid data hash of in data')\n", (4857, 4889), False, 'import logging\n'), ((4913, 5009), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid data hash of in data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid data hash of in data')\n", (4933, 5009), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((5045, 5087), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["data['encryptedDataEncryptionKey']"], {}), "(data['encryptedDataEncryptionKey'])\n", (5051, 5087), False, 'from eth_utils.hexadecimal import is_hex\n'), ((5105, 5155), 'logging.error', 'logging.error', (['"""Invalid Encryption key of in data"""'], {}), "('Invalid Encryption key of in data')\n", (5118, 5155), False, 'import logging\n'), ((5179, 5280), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 
'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid Encryption key of in data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid Encryption key of in data')\n", (5199, 5280), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((5316, 5334), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["data['iv']"], {}), "(data['iv'])\n", (5322, 5334), False, 'from eth_utils.hexadecimal import is_hex\n'), ((5352, 5409), 'logging.error', 'logging.error', (['"""Invalid initialization vector of in data"""'], {}), "('Invalid initialization vector of in data')\n", (5365, 5409), False, 'import logging\n'), ((5433, 5541), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid initialization vector of in data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid initialization vector of in data')\n", (5453, 5541), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((5591, 5621), 'base64.b64decode', 'base64.b64decode', (["data['data']"], {}), "(data['data'])\n", (5607, 5621), False, 'import base64\n'), ((5921, 5945), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["data['dataHash']"], {}), "(data['dataHash'])\n", (5927, 5945), False, 'from eth_utils.hexadecimal import is_hex\n'), ((5963, 6009), 'logging.error', 'logging.error', (['"""Invalid data hash of out data"""'], {}), "('Invalid data hash of out data')\n", (5976, 6009), False, 'import logging\n'), ((6033, 6130), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid data hash of out data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid data hash of out data')\n", (6053, 6130), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((6166, 6208), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["data['encryptedDataEncryptionKey']"], {}), "(data['encryptedDataEncryptionKey'])\n", (6172, 6208), False, 'from eth_utils.hexadecimal 
import is_hex\n'), ((6226, 6277), 'logging.error', 'logging.error', (['"""Invalid Encryption key of out data"""'], {}), "('Invalid Encryption key of out data')\n", (6239, 6277), False, 'import logging\n'), ((6301, 6403), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid Encryption key of out data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid Encryption key of out data')\n", (6321, 6403), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((6439, 6457), 'eth_utils.hexadecimal.is_hex', 'is_hex', (["data['iv']"], {}), "(data['iv'])\n", (6445, 6457), False, 'from eth_utils.hexadecimal import is_hex\n'), ((6475, 6533), 'logging.error', 'logging.error', (['"""Invalid initialization vector of out data"""'], {}), "('Invalid initialization vector of out data')\n", (6488, 6533), False, 'import logging\n'), ((6557, 6666), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid initialization vector of out data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid initialization vector of out data')\n", (6577, 6666), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((6716, 6746), 'base64.b64decode', 'base64.b64decode', (["data['data']"], {}), "(data['data'])\n", (6732, 6746), False, 'import base64\n'), ((2490, 2540), 'logging.error', 'logging.error', (['"""Invalid in data parameter %s"""', 'key'], {}), "('Invalid in data parameter %s', key)\n", (2503, 2540), False, 'import logging\n'), ((2806, 2854), 'logging.error', 'logging.error', (['"""Missing in data parameter %s"""', 'k'], {}), "('Missing in data parameter %s', k)\n", (2819, 2854), False, 'import logging\n'), ((3107, 3158), 'logging.error', 'logging.error', (['"""Invalid out data parameter %s"""', 'key'], {}), "('Invalid out data parameter %s', key)\n", (3120, 3158), False, 'import logging\n'), ((3427, 3476), 
'logging.error', 'logging.error', (['"""Missing out data parameter %s"""', 'k'], {}), "('Missing out data parameter %s', k)\n", (3440, 3476), False, 'import logging\n'), ((5673, 5722), 'logging.error', 'logging.error', (['"""Invalid base64 format of in data"""'], {}), "('Invalid base64 format of in data')\n", (5686, 5722), False, 'import logging\n'), ((5746, 5846), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid base64 format of in data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid base64 format of in data')\n", (5766, 5846), False, 'from tcf_connector.utils import create_jrpc_response\n'), ((6798, 6848), 'logging.error', 'logging.error', (['"""Invalid base64 format of out data"""'], {}), "('Invalid base64 format of out data')\n", (6811, 6848), False, 'import logging\n'), ((6872, 6973), 'tcf_connector.utils.create_jrpc_response', 'create_jrpc_response', (['id', 'JsonRpcErrorCode.INVALID_PARAMETER', '"""Invalid base64 format of out data"""'], {}), "(id, JsonRpcErrorCode.INVALID_PARAMETER,\n 'Invalid base64 format of out data')\n", (6892, 6973), False, 'from tcf_connector.utils import create_jrpc_response\n')]
|
# class Event(object):
# _observers = []
#
# def __init__(self, webscraper, item):
# self.webscraper = webscraper
# self.item = item
#
# def __repr__(self):
# return self.__class__.__name__
#
# @classmethod
# def register(cls, observer):
# if observer not in cls._observers:
# cls._observers.append(observer)
#
# @classmethod
# def unregister(cls, observer):
# if observer in cls._observers:
# cls._observers.remove(observer)
#
# @classmethod
# def notify(cls, subject, item):
# event = cls(subject, item)
# # print(cls,'-', subject,'-', event)
# print(cls._observers, end="\n")
# for observer in cls._observers:
# # print(observer, end="\n")
# observer(event)
#
# ##### 2. Ignore method repr ###
# class MagnetRequestEvent(Event):
# def repr(self):
# pass
#
# # class MagnetNotifierEvent(Event):
# # def repr(self):
# # pass
#
# def log_add_magnet(event):
# print('{0} magnet has been added: {1}'.format(event.webscraper, event.item))
#
# class Announcer():
# def __call__(self, event):
# print('Announcer Magnet Has Been Added {0}'.format(event.item))
#
# MagnetRequestEvent.register(log_add_magnet)
# MagnetRequestEvent.register(Announcer())
# MagnetRequestEvent.notify('MejorTorrentScraper', 'magnet:aferqwejklrq12')
#
# # def log(event):
# # print('{} was written'.format(event.subject))
# #
# # class AnotherObserver():
# # def __call__(self, event):
# # print('Yeah {} told me !'.format(event))
# #
# # WriteEvent.register(log)
# # WriteEvent.register(AnotherObserver())
# # WriteEvent.notify('a given file', '')
# #
# # class AnotherEvent(Event):
# # def repr(self):
# # pass
# # # AnotherEvent = Event(subject = 'test2')
# # AnotherEvent.register(log)
# # AnotherEvent.register(AnotherObserver())
# # AnotherEvent.notify('second file', '')
import numpy as np
# Scratch calculation that appears to prototype the progress-bar helpers
# below: an 80-unit bar split equally between scrapers, then subdivided
# per magnet — TODO confirm against the callers of these helpers.
webscraper_list = [1,2,3,4,5,6,7]  # placeholder list of 7 scrapers
c = 80/len(webscraper_list)  # bar units allotted to one scraper
magnet_counter = 30
f = c/30  # bar units per magnet inside one scraper's share
for i in np.arange(0, c, f):
    print(int(i))
def _calculate_progress_bar(webscraper_list, counter):
base = 80/len(webscraper_list)
chunk = base/len(counter)
return base, chunk
def _update_progress_bar(analized, chunk):
return int(analized * chunk)
|
[
"numpy.arange"
] |
[((2095, 2113), 'numpy.arange', 'np.arange', (['(0)', 'c', 'f'], {}), '(0, c, f)\n', (2104, 2113), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# Copyright 2007 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import webapp2
import jinja2
from google.appengine.ext import ndb
from google.appengine.api import users
import datetime
#Jinja Loader
import logging
# Jinja2 environment that resolves template paths relative to the process
# working directory (paths like 'templates/projects/...' below).
template_env = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.getcwd()))
class ProjectFeatures(ndb.Expando):
    """Datastore entity for a feature suggestion submitted for a project.

    Each write* method validates its input and returns True on success and
    False otherwise, so callers can check the result before put().
    """
    strProjectName = ndb.StringProperty()
    strNames = ndb.StringProperty()
    strSurname = ndb.StringProperty()
    strCell = ndb.StringProperty()
    strEmail = ndb.StringProperty()
    strWebApp = ndb.StringProperty()
    strSuggestedFeature = ndb.StringProperty()
    strDate = ndb.DateProperty()
    strTime = ndb.TimeProperty()

    def _writeString(self, strAttribute, strinput):
        """Store str(strinput) in the named property; reject None.

        Bug fix: the previous write* methods converted with str() *before*
        the None check, so passing None stored the literal string "None"
        and the guard could never fire.
        """
        if strinput is None:
            return False
        try:
            setattr(self, strAttribute, str(strinput))
            return True
        except Exception:
            return False

    def _writeTyped(self, strAttribute, strinput, expectedType):
        """Store strinput only when it is an instance of expectedType."""
        try:
            if isinstance(strinput, expectedType):
                setattr(self, strAttribute, strinput)
                return True
            return False
        except Exception:
            return False

    def writeProjectName(self, strinput):
        # Added for consistency with BugReports, which exposes the same
        # strProjectName field through a writer.
        return self._writeString('strProjectName', strinput)

    def writeNames(self, strinput):
        return self._writeString('strNames', strinput)

    def writeSurname(self, strinput):
        return self._writeString('strSurname', strinput)

    def writeCell(self, strinput):
        return self._writeString('strCell', strinput)

    def writeEmail(self, strinput):
        return self._writeString('strEmail', strinput)

    def writeWebApp(self, strinput):
        return self._writeString('strWebApp', strinput)

    def writeSuggestedFeature(self, strinput):
        return self._writeString('strSuggestedFeature', strinput)

    def writeDate(self, strinput):
        return self._writeTyped('strDate', strinput, datetime.date)

    def writeTime(self, strinput):
        return self._writeTyped('strTime', strinput, datetime.time)
class BugReports(ndb.Expando):
    """Datastore entity for a bug report filed against one of the web apps.

    Each write* method validates its input and returns True on success and
    False otherwise, so callers can check the result before put().
    """
    strProjectName = ndb.StringProperty()
    strNames = ndb.StringProperty()
    strSurname = ndb.StringProperty()
    strCell = ndb.StringProperty()
    strEmail = ndb.StringProperty()
    strWebApp = ndb.StringProperty()
    strBugDescription = ndb.StringProperty()
    strDate = ndb.DateProperty()
    strTime = ndb.TimeProperty()

    def _writeString(self, strAttribute, strinput):
        """Store str(strinput) in the named property; reject None.

        Bug fix: the previous write* methods converted with str() *before*
        the None check, so passing None stored the literal string "None"
        and the guard could never fire.
        """
        if strinput is None:
            return False
        try:
            setattr(self, strAttribute, str(strinput))
            return True
        except Exception:
            return False

    def _writeTyped(self, strAttribute, strinput, expectedType):
        """Store strinput only when it is an instance of expectedType."""
        try:
            if isinstance(strinput, expectedType):
                setattr(self, strAttribute, strinput)
                return True
            return False
        except Exception:
            return False

    def writeProjectName(self, strinput):
        return self._writeString('strProjectName', strinput)

    def writeNames(self, strinput):
        return self._writeString('strNames', strinput)

    def writeSurname(self, strinput):
        return self._writeString('strSurname', strinput)

    def writeCell(self, strinput):
        return self._writeString('strCell', strinput)

    def writeEmail(self, strinput):
        return self._writeString('strEmail', strinput)

    def writeWebApp(self, strinput):
        return self._writeString('strWebApp', strinput)

    def writeBugDescription(self, strinput):
        return self._writeString('strBugDescription', strinput)

    def writeDate(self, strinput):
        return self._writeTyped('strDate', strinput, datetime.date)

    def writeTime(self, strinput):
        return self._writeTyped('strTime', strinput, datetime.time)
class ProjectsHomeHandler(webapp2.RequestHandler):
    """Serves the projects landing page and dispatches project POST actions."""

    # Maps the posted vstrPath value to the project detail template.
    # NOTE: 'church-admin' deliberately maps to the 'churchadmin' directory
    # (no hyphen) — that irregularity exists on disk.
    _PROJECT_TEMPLATES = {
        "church-admin": 'templates/projects/projects/churchadmin/project.html',
        "cover-manager": 'templates/projects/projects/cover-manager/project.html',
        "hotel-manager": 'templates/projects/projects/hotel-manager/project.html',
        "sms-messaging": 'templates/projects/projects/sms-messaging/project.html',
        "hr-systems": 'templates/projects/projects/hr-systems/project.html',
        "school-management": 'templates/projects/projects/school-management/project.html',
        "loans-management": 'templates/projects/projects/loans-management/project.html',
        "client-trace": 'templates/projects/projects/client-trace/project.html',
        "p2p-traders": 'templates/projects/projects/p2p-traders/project.html',
        "job-cloud": 'templates/projects/projects/job-cloud/project.html',
        "freelancing-solutions": 'templates/projects/projects/freelancing-solutions/project.html',
        "bus-admin": 'templates/projects/projects/bus-admin/project.html',
    }

    def get(self):
        """Render the projects overview page."""
        template = template_env.get_template('templates/projects/projects.html')
        context = {}
        self.response.write(template.render(context))

    def post(self):
        """Render a project detail page or save a feature/bug submission,
        depending on the posted vstrPath value."""
        vstrPath = self.request.get('vstrPath')
        if vstrPath in self._PROJECT_TEMPLATES:
            template = template_env.get_template(self._PROJECT_TEMPLATES[vstrPath])
            context = {}
            self.response.write(template.render(context))
        elif vstrPath == "submit-feature":
            self._save_feature_suggestion()
        elif vstrPath == "submit-bug":
            self._save_bug_report()

    def _save_feature_suggestion(self):
        """Persist a ProjectFeatures entity from the posted form fields."""
        thisProjectFeature = ProjectFeatures()
        thisProjectFeature.writeNames(strinput=self.request.get('vstrNames'))
        thisProjectFeature.writeSurname(strinput=self.request.get('vstrSurname'))
        thisProjectFeature.writeCell(strinput=self.request.get('vstrCell'))
        thisProjectFeature.writeEmail(strinput=self.request.get('vstrEmail'))
        thisProjectFeature.writeWebApp(strinput=self.request.get('vstrAppSelect'))
        thisProjectFeature.writeSuggestedFeature(strinput=self.request.get('vstrFeature'))
        thisProjectFeature.put()
        self.response.write(
            "Successfully saved project feature suggestion "
            "our team will get back to you soon")

    def _save_bug_report(self):
        """Persist a BugReports entity from the posted form fields."""
        thisBugReport = BugReports()
        thisBugReport.writeWebApp(strinput=self.request.get('vstrBugAppSelect'))
        thisBugReport.writeBugDescription(strinput=self.request.get('vstrBugDescription'))
        thisBugReport.writeNames(strinput=self.request.get('vstrBugNames'))
        thisBugReport.writeSurname(strinput=self.request.get('vstrBugSurname'))
        thisBugReport.writeCell(strinput=self.request.get('vstrBugCell'))
        # Bug fix: the original called self.request.ger('vstrBugEmail'), an
        # AttributeError that aborted every bug-report submission.
        thisBugReport.writeEmail(strinput=self.request.get('vstrBugEmail'))
        thisBugReport.put()
        self.response.write("Successfully saved project Bug Report")
class ProjectsRouterHandler(webapp2.RequestHandler):
    """Catch-all handler for /projects/* paths; shows the projects page."""

    def get(self):
        """Render the projects overview template with an empty context."""
        page = template_env.get_template('templates/projects/projects.html')
        self.response.write(page.render({}))
# WSGI entry point: exact /projects page plus a catch-all router for any
# /projects/ sub-path.
app = webapp2.WSGIApplication([
    ('/projects', ProjectsHomeHandler),
    ('/projects/.*', ProjectsRouterHandler),
    ], debug=True)
|
[
"os.getcwd",
"google.appengine.ext.ndb.StringProperty",
"google.appengine.ext.ndb.DateProperty",
"webapp2.WSGIApplication",
"google.appengine.ext.ndb.TimeProperty"
] |
[((11494, 11613), 'webapp2.WSGIApplication', 'webapp2.WSGIApplication', (["[('/projects', ProjectsHomeHandler), ('/projects/.*', ProjectsRouterHandler)]"], {'debug': '(True)'}), "([('/projects', ProjectsHomeHandler), (\n '/projects/.*', ProjectsRouterHandler)], debug=True)\n", (11517, 11613), False, 'import webapp2\n'), ((896, 916), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (914, 916), False, 'from google.appengine.ext import ndb\n'), ((932, 952), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (950, 952), False, 'from google.appengine.ext import ndb\n'), ((970, 990), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (988, 990), False, 'from google.appengine.ext import ndb\n'), ((1005, 1025), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (1023, 1025), False, 'from google.appengine.ext import ndb\n'), ((1041, 1061), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (1059, 1061), False, 'from google.appengine.ext import ndb\n'), ((1078, 1098), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (1096, 1098), False, 'from google.appengine.ext import ndb\n'), ((1125, 1145), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (1143, 1145), False, 'from google.appengine.ext import ndb\n'), ((1160, 1178), 'google.appengine.ext.ndb.DateProperty', 'ndb.DateProperty', ([], {}), '()\n', (1176, 1178), False, 'from google.appengine.ext import ndb\n'), ((1193, 1211), 'google.appengine.ext.ndb.TimeProperty', 'ndb.TimeProperty', ([], {}), '()\n', (1209, 1211), False, 'from google.appengine.ext import ndb\n'), ((3455, 3475), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (3473, 3475), False, 'from google.appengine.ext import ndb\n'), ((3491, 3511), 'google.appengine.ext.ndb.StringProperty', 
'ndb.StringProperty', ([], {}), '()\n', (3509, 3511), False, 'from google.appengine.ext import ndb\n'), ((3529, 3549), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (3547, 3549), False, 'from google.appengine.ext import ndb\n'), ((3564, 3584), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (3582, 3584), False, 'from google.appengine.ext import ndb\n'), ((3600, 3620), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (3618, 3620), False, 'from google.appengine.ext import ndb\n'), ((3637, 3657), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (3655, 3657), False, 'from google.appengine.ext import ndb\n'), ((3682, 3702), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (3700, 3702), False, 'from google.appengine.ext import ndb\n'), ((3717, 3735), 'google.appengine.ext.ndb.DateProperty', 'ndb.DateProperty', ([], {}), '()\n', (3733, 3735), False, 'from google.appengine.ext import ndb\n'), ((3750, 3768), 'google.appengine.ext.ndb.TimeProperty', 'ndb.TimeProperty', ([], {}), '()\n', (3766, 3768), False, 'from google.appengine.ext import ndb\n'), ((823, 834), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (832, 834), False, 'import os\n')]
|
from copy import copy
import time
import os
import numpy as np
import numpy.linalg as linalg
import gym
from gym import spaces
from gym.utils import seeding
from roboball2d.physics import B2World
from roboball2d.robot import DefaultRobotConfig
from roboball2d.robot import DefaultRobotState
from roboball2d.ball import BallConfig
from roboball2d.ball_gun import DefaultBallGun
from roboball2d.utils import Box
class Tennis2DEnv(gym.GoalEnv):
    """2D Toy Robotic Tennis Environment.

    Task: a 2D robot with 3 degrees of freedom has to return the tennis ball
    to a given goal landing point by hitting it appropriately. A sparse reward
    is given only if the ball lands within the goal region.
    """
    metadata = {"render.mode": ["human"]}
    # This version of the environment uses a sparse reward
    dense_reward = False
    # goal properties
    _goal_min = 3.0
    _goal_max = 6.0
    _goal_diameter = 0.8
    _goal_color = (0.2, 0.7, 0.0)
    # variables for reward design
    _hit_reward = 2.0
    # maximum episode length in seconds
    _max_episode_length_sec = 5.0
    # divide by this attribute to normalize angles
    _angle_normalization = 0.5*np.pi
    # maximum angular velocity
    _max_angular_vel = 8.0
    # safety factor (for joint limits because solver
    # can't ensure that they are always satisfied)
    _safety_factor = 1.3
    # simulation steps per second
    _steps_per_sec = 100
    _arrow_width = 0.02
    _arrow_head_size = 0.06
    _arrow_scaling = 0.3

    def __init__(self, slow_motion_factor = 2.0):
        """
        :param slow_motion_factor: factor by which rendering is slowed down
            relative to the simulated time step (purely visual).
        """
        super().__init__()

        self._subgoals = []
        self._timed_subgoals = []
        self._tolerances = None
        self._subgoal_colors = []

        # maximum episode length in steps
        self.max_episode_length = int(self._max_episode_length_sec*self._steps_per_sec)

        self.seed()
        self.verbose = 0

        self._slow_motion_factor = slow_motion_factor

        self._renderer = None
        self._callbacks = []

        #####################################
        # Physics simulation using Roboball2D
        #####################################
        # robot and ball configuration
        self._robot_config = DefaultRobotConfig()
        self._robot_config.linear_damping = 0.1
        self._robot_config.angular_damping = 4.4
        self._ball_configs = [BallConfig()]
        self._ball_configs[0].color = (0.3, 0.3, 0.3)
        self._ball_configs[0].line_color = (0.8, 0.8, 0.8)

        # safety factors for joint angles to avoid giving observations out of interval
        self._joint_factor = []
        for index in range(3):
            if index in [0, 1]:
                factor = self._robot_config.rod_joint_limit*self._safety_factor
            else:
                factor = self._robot_config.racket_joint_limit*self._safety_factor
            self._joint_factor.append(factor)

        self._visible_area_width = 6.0
        self._visual_height = 0.05

        # physics simulation
        self._world = B2World(
                robot_configs = self._robot_config,
                ball_configs = self._ball_configs,
                visible_area_width = self._visible_area_width,
                steps_per_sec = self._steps_per_sec
                )

        # ball gun : specifies the reset of
        # the ball (by shooting a new one)
        self._ball_guns = [DefaultBallGun(self._ball_configs[0])]

        # robot init : specifies the reinit of the robot
        # (e.g. angles of the rods and rackets, etc)
        self._robot_init_state = DefaultRobotState(
                robot_config = self._robot_config,
                #generalized_coordinates = [0., -0.5*np.pi, 0.],
                generalized_coordinates = [0.25*np.pi, -0.5*np.pi, 0.],
                generalized_velocities = [0., 0., 0.])

        ###################
        # Observation space
        ###################
        obs_space_dict = {}
        bounded_space = spaces.Box(low = -1.0, high = 1.0, shape= (1,), dtype = np.float32)
        unbounded_space = spaces.Box(low = -np.inf, high = np.inf, shape= (1,), dtype = np.float32)
        unit_interval = spaces.Box(low = 0.0, high = 1.0, shape= (1,), dtype = np.float32)
        for index in [0, 1, 2]:
            obs_space_dict["joint_" + str(index) + "_angle"] = bounded_space
        for index in [0, 1, 2]:
            obs_space_dict["joint_" + str(index) + "_angular_vel"] = bounded_space
        obs_space_dict["ball_pos_x"] = unbounded_space
        obs_space_dict["ball_pos_y"] = unbounded_space
        obs_space_dict["ball_vel_x"] = unbounded_space
        obs_space_dict["ball_vel_y"] = unbounded_space
        obs_space_dict["ball_anguler_vel"] = unbounded_space
        obs_space_dict["ball_bounced_at_least_once"] = unit_interval
        obs_space_dict["ball_bouncing_second_time"] = unit_interval
        obs_space_dict["ball_bounced_at_least_twice"] = unit_interval
        if self.dense_reward == True:
            # in case of dense reward have to include (first component of) desired goal into observation space
            # (second and third component are always one and therefore not useful as observation)
            obs_space_dict["desired_landing_pos_x"] = bounded_space
        # partial observation space (without goal)
        self._preliminary_obs_space = spaces.Dict(obs_space_dict)
        # Note: Observations are scaled versions of corresponding quantities
        # in physics simulation.

        # in sparse reward case, also have to specifiy desired and achieved
        # goal spaces
        if self.dense_reward == False:
            # goal space has components
            # 1. ball position x
            # 2. bool indicating whether ball bounced at least once
            # 3. bool indicating whether ball is bouncing for the second time
            #    in this time step
            # 4. bool indicating whether ball bounced at least twice
            desired_goal_space = spaces.Box(
                    low = np.array([-np.inf, 0., 0., 0.]),
                    high = np.array([np.inf, 1., 1., 1.]),
                    dtype = np.float32)
            achieved_goal_space = desired_goal_space

            # observation space consists of dictionary of subspaces
            # corresponding to observation, desired goal and achieved
            # goal spaces
            self.observation_space = spaces.Dict({
                "observation": self._preliminary_obs_space,
                "desired_goal": desired_goal_space,
                "achieved_goal": achieved_goal_space
                })
        # in dense reward case, observation space is simply preliminary
        # observation space
        else:
            self.observation_space = self._preliminary_obs_space

        ###################
        # Action space
        ###################
        # action space consists of torques applied to the three joints
        # Note: Actions are scaled versions of torques in physics simulation.
        act_space_dict = {}
        for index in range(3):
            act_space_dict["joint_" + str(index) + "_torque"] = bounded_space
        self.action_space = spaces.Dict(act_space_dict)

        # reset to make sure environment is not used without resetting it first
        self.reset()

    def step(self, action):
        """Advance the physics simulation by one step.

        :param action: dict mapping "joint_<i>_torque" to a one-element
            sequence containing the (relative) torque for joint i.
        :return: tuple (observation, reward, done, info).
        """
        ####################
        # Physics simulation
        ####################
        action_keys = sorted(action.keys())
        torques = [action[key][0] for key in action_keys]
        # perform one step of physics simulation, receive new world state
        self._world_state = self._world.step(torques, relative_torques = True)
        # clip angular velocities to make sure they are in a bounded interval
        for joint in self._world_state.robots[0].joints:
            joint.angular_velocity = np.clip(joint.angular_velocity, -self._max_angular_vel,
                    self._max_angular_vel)
        ####################
        # Reward calculation
        ####################
        reward = 0
        info = {}
        # check whether the ball is bouncing off the floor in this time step
        self._ball_bouncing_second_time = False
        if self._world_state.ball_hits_floor:
            self._n_ball_bounces += 1
            if self._n_ball_bounces == 2:
                self._ball_bouncing_second_time = True
        # set achieved goal
        achieved_goal = self._get_achieved_goal()
        # dense reward case
        if self.dense_reward == True:
            # reward for hitting ball with racket (only the first hit is rewarded)
            if self._world_state.balls_hits_racket[0]:
                self._n_hits_ball_racket += 1
                if self._n_hits_ball_racket == 1:
                    reward += self._hit_reward
            # reward for bouncing off ground in goal area
            goal_reward = self.compute_reward(achieved_goal, self._desired_goal, info)
            reward += goal_reward
            # The dense landing reward is awarded exactly in the step in which
            # the ball bounces for the second time, so that event terminates
            # the episode. (Bug fix: the original code assigned a dead local
            # variable `done` -- and with an inverted condition, since the
            # dense compute_reward returns 0. on every NON-terminal step --
            # so episodes never ended early.)
            if self._ball_bouncing_second_time:
                self.done = True
        # sparse reward case
        else:
            goal_reward = self.compute_reward(achieved_goal, self._desired_goal, info)
            reward += goal_reward
            # The sparse compute_reward returns 0. exactly on success, so the
            # episode is over. (Bug fix: the original code assigned a dead
            # local variable `done` instead of `self.done`, so success never
            # terminated the episode.)
            if goal_reward == 0.:
                self.done = True
        # end episode after some time
        if self._world_state.t >= self._max_episode_length_sec:
            self.done = True

        return self.get_observation(), reward, self.done, info

    def _get_achieved_goal(self):
        """Return the achieved goal as a list:
        [normalized ball x position, bounced >= once, bouncing 2nd time now,
        bounced >= twice]."""
        return [(self._world_state.balls[0].position[0] - self._goal_min)/(self._goal_max - self._goal_min),
            int(self._n_ball_bounces >= 1),
            int(self._ball_bouncing_second_time),
            int(self._n_ball_bounces >= 2)]

    def update_subgoals(self, subgoals, tolerances = None):
        """Set the subgoals (and optional tolerances) to be rendered."""
        self._subgoals = subgoals
        self._tolerances = tolerances

    def update_timed_subgoals(self, timed_subgoals, tolerances = None):
        """Set the timed subgoals (and optional tolerances) to be rendered.
        `None` entries are skipped for the goal rendering but kept in the
        timed list (the render callbacks check for them)."""
        self._subgoals = [tsg.goal for tsg in timed_subgoals if tsg is not None]
        self._timed_subgoals = timed_subgoals
        self._tolerances = tolerances

    def reset(self):
        """Reset simulation, reward bookkeeping and desired goal.

        :return: initial observation.
        """
        self.t = 0
        # check for consistency with GoalEnv
        if self.dense_reward == False:
            super().reset()
        self.done = False
        # reset physics simulation
        self._world_state = self._world.reset(self._robot_init_state, self._ball_guns)
        # reset variables necessary for computation of reward
        self._n_ball_bounces = 0
        self._ball_bouncing_second_time = False
        self._n_hits_ball_racket = 0
        # sample goal position (last three components indicate that ball bounced for
        # the second time in this time step)
        self._desired_goal = np.array([self.np_random.uniform(0., 1.), 1., 1., 1.])
        return self.get_observation()

    def seed(self, seed=None):
        """Seed the environment's random number generator."""
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def render(self, mode = "human", close = False):
        """Render the current world state, the goal region and any subgoals."""
        # have to import renderer here to avoid problems when running training without display
        # server
        from roboball2d.rendering import PygletRenderer
        from roboball2d.rendering import RenderingConfig
        import roboball2d.rendering.pyglet_utils as pyglet_utils
        import pyglet.gl as gl
        from ..utils.graphics_utils import get_default_subgoal_colors

        # render callback method which draws arrow for velocity of racket
        def render_racket_vel_callback(ws):
            scaled_vector = [self._arrow_scaling*x for x in ws.robot.racket.linear_velocity]
            pyglet_utils.draw_vector(
                    initial_point = ws.robot.racket.position,
                    vector = scaled_vector,
                    width = self._arrow_width,
                    arrow_head_size = self._arrow_head_size,
                    color = (0.8, 0.8, 0.8))

        # callback function for rendering of subgoals
        def render_subgoal_callback(ws):
            z = -0.01
            for sg, color in zip(self._subgoals, self._subgoal_colors):
                # robot
                generalized_coordinates = [sg[f"joint_{i}_angle"]*self._joint_factor[i] for i in range(3)]
                generalized_velocities = [sg[f"joint_{i}_angular_vel"]*self._max_angular_vel for i in range(3)]
                robot_state = DefaultRobotState(
                        robot_config = self._robot_config,
                        generalized_coordinates = generalized_coordinates,
                        generalized_velocities = generalized_velocities)
                robot_state.render(
                        color = color,
                        z_coordinate = z)
                # racket velocity
                scaled_vector = [self._arrow_scaling*x for x in robot_state.racket.linear_velocity]
                gl.glPushMatrix()
                gl.glTranslatef(0., 0., z)
                pyglet_utils.draw_vector(
                    initial_point = robot_state.racket.position,
                    vector = scaled_vector,
                    width = self._arrow_width,
                    arrow_head_size = self._arrow_head_size,
                    color = color)
                gl.glPopMatrix()
                z += -0.01

        def render_time_bars_callback(ws):
            y_pos = 2.5
            for tsg, color in zip(self._timed_subgoals, self._subgoal_colors):
                if tsg is not None:
                    width = tsg.delta_t_ach*0.01
                    pyglet_utils.draw_box((0.16 + 0.5*width, y_pos), width, 0.1, 0., color)
                y_pos -= 0.1

        ########################
        # Renderer from Tennis2D
        ########################
        # lazily created on the first render() call
        if self._renderer is None:
            self._subgoal_colors = get_default_subgoal_colors()
            self._callbacks.append(render_racket_vel_callback)
            self._callbacks.append(render_subgoal_callback)
            self._callbacks.append(render_time_bars_callback)
            renderer_config = RenderingConfig(self._visible_area_width,
                                     self._visual_height)
            renderer_config.window.width = 1920
            renderer_config.window.height = 960
            renderer_config.background_color = (1.0, 1.0, 1.0, 1.0)
            renderer_config.ground_color = (0.702, 0.612, 0.51)
            self._renderer = PygletRenderer(renderer_config,
                              self._robot_config,
                              self._ball_configs,
                              self._callbacks)

        # render based on the information provided by
        # the physics simulation and the desired goal
        goals = [(
            self._desired_goal[0]*(self._goal_max - self._goal_min) \
                    + self._goal_min - 0.5*self._goal_diameter,
            self._desired_goal[0]*(self._goal_max - self._goal_min) \
                    + self._goal_min + 0.5*self._goal_diameter,
            self._goal_color
            )]
        self._renderer.render(
                world_state = self._world_state,
                goals = goals,
                time_step = self._slow_motion_factor*self._world_state.applied_time_step)

    def compute_reward(self, achieved_goal, desired_goal, info):
        """Reward as a function of achieved and desired goal (GoalEnv API).

        Sparse case: 0. if the ball is bouncing for the second time within the
        goal region, -1. otherwise.
        Dense case: 0. while the bounce components of the goals do not match;
        otherwise a reward that is shifted so it is larger the closer the
        landing point is to the desired one.
        """
        if self.dense_reward == False:
            if np.all(achieved_goal[1:] == desired_goal[1:]):
                if abs((desired_goal[0] - achieved_goal[0])*(self._goal_max - self._goal_min)) <= 0.5*self._goal_diameter:
                    return 0.
            return -1.
        else:
            if np.all(achieved_goal[1:] == desired_goal[1:]):
                return (-min(abs((desired_goal[0] - achieved_goal[0])*(self._goal_max - self._goal_min)),
                    self._goal_max + self._goal_diameter - self._robot_config.position) +
                    self._goal_max + self._goal_diameter - self._robot_config.position)
            return 0.

    # part of observation depending only on env state and not on goal
    def _get_env_observation(self):
        """Return dict of normalized observations derived from the current
        world state (joint angles/velocities, ball state, bounce flags)."""
        ws = self._world_state
        env_observation = {}
        for index, joint in enumerate(ws.robots[0].joints):
            env_observation["joint_" + str(index) + "_angle"] = np.clip([joint.angle/self._joint_factor[index]], -1., 1.)
            env_observation["joint_" + str(index) + "_angular_vel"] = [joint.angular_velocity/self._max_angular_vel]
        ball = ws.balls[0]
        env_observation["ball_pos_x"] = [ball.position[0]/self._ball_guns[0].initial_pos_x]
        env_observation["ball_pos_y"] = [ball.position[1]/self._ball_guns[0].initial_pos_x]
        env_observation["ball_vel_x"] = [ball.linear_velocity[0]/self._ball_guns[0].speed_mean]
        env_observation["ball_vel_y"] = [ball.linear_velocity[1]/self._ball_guns[0].speed_mean]
        env_observation["ball_anguler_vel"] = [ball.angular_velocity/self._ball_guns[0].spin_std]
        env_observation["ball_bounced_at_least_once"] = [int(self._n_ball_bounces >= 1)]
        env_observation["ball_bouncing_second_time"] = [int(self._ball_bouncing_second_time)]
        env_observation["ball_bounced_at_least_twice"] = [int(self._n_ball_bounces >= 2)]
        for key, value in env_observation.items():
            env_observation[key] = np.array(value)
        return env_observation

    def get_observation(self):
        """Return the current observation.

        Sparse case: dict with keys "observation", "achieved_goal" and
        "desired_goal" (GoalEnv layout). Dense case: flat observation dict
        augmented with "desired_landing_pos_x".
        """
        observation = self._get_env_observation()
        if self.dense_reward == False:
            result = {
                    "observation": observation,
                    "achieved_goal": np.array(self._get_achieved_goal()),
                    "desired_goal" : np.array(self._desired_goal)
                    }
            return result
        else:
            observation["desired_landing_pos_x"] = self._desired_goal[0]
            return observation

    def map_to_achieved_goal(self, partial_obs):
        """Map a (partial) observation dict to the corresponding achieved
        goal vector (same layout as `_get_achieved_goal`)."""
        pos_x = partial_obs["ball_pos_x"][0]*self._ball_guns[0].initial_pos_x
        achieved_goal = [
                [(pos_x - self._goal_min)/(self._goal_max - self._goal_min)],
                partial_obs["ball_bounced_at_least_once"],
                partial_obs["ball_bouncing_second_time"],
                partial_obs["ball_bounced_at_least_twice"]]
        return np.concatenate(achieved_goal)

    def _get_robot_conf_and_vel(self, robot_state):
        # NOTE(review): intentionally left unimplemented in the original;
        # kept as a no-op placeholder.
        pass
class Tennis2DDenseRewardEnv(Tennis2DEnv):
    """Dense reward version of the 2D robotic toy tennis environment.

    In contrast to the sparse reward version, the dense reward version of the
    environment gives a constant reward to the agent when it hits the ball
    and another one when the ball bounces on the ground for the second time
    after being hit by the racket. The latter reward is proportional to the
    negative distance to the goal landing point."""
    # switches the reward computation and observation layout in Tennis2DEnv
    dense_reward = True
|
[
"roboball2d.rendering.pyglet_utils.draw_vector",
"numpy.clip",
"roboball2d.rendering.pyglet_utils.draw_box",
"roboball2d.physics.B2World",
"gym.utils.seeding.np_random",
"roboball2d.robot.DefaultRobotState",
"roboball2d.ball.BallConfig",
"pyglet.gl.glTranslatef",
"roboball2d.ball_gun.DefaultBallGun",
"roboball2d.rendering.RenderingConfig",
"pyglet.gl.glPushMatrix",
"numpy.all",
"pyglet.gl.glPopMatrix",
"numpy.concatenate",
"gym.spaces.Dict",
"roboball2d.robot.DefaultRobotConfig",
"roboball2d.rendering.PygletRenderer",
"numpy.array",
"gym.spaces.Box"
] |
[((2207, 2227), 'roboball2d.robot.DefaultRobotConfig', 'DefaultRobotConfig', ([], {}), '()\n', (2225, 2227), False, 'from roboball2d.robot import DefaultRobotConfig\n'), ((3020, 3183), 'roboball2d.physics.B2World', 'B2World', ([], {'robot_configs': 'self._robot_config', 'ball_configs': 'self._ball_configs', 'visible_area_width': 'self._visible_area_width', 'steps_per_sec': 'self._steps_per_sec'}), '(robot_configs=self._robot_config, ball_configs=self._ball_configs,\n visible_area_width=self._visible_area_width, steps_per_sec=self.\n _steps_per_sec)\n', (3027, 3183), False, 'from roboball2d.physics import B2World\n'), ((3563, 3717), 'roboball2d.robot.DefaultRobotState', 'DefaultRobotState', ([], {'robot_config': 'self._robot_config', 'generalized_coordinates': '[0.25 * np.pi, -0.5 * np.pi, 0.0]', 'generalized_velocities': '[0.0, 0.0, 0.0]'}), '(robot_config=self._robot_config, generalized_coordinates=\n [0.25 * np.pi, -0.5 * np.pi, 0.0], generalized_velocities=[0.0, 0.0, 0.0])\n', (3580, 3717), False, 'from roboball2d.robot import DefaultRobotState\n'), ((3964, 4024), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-1.0)', 'high': '(1.0)', 'shape': '(1,)', 'dtype': 'np.float32'}), '(low=-1.0, high=1.0, shape=(1,), dtype=np.float32)\n', (3974, 4024), False, 'from gym import spaces\n'), ((4058, 4124), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-np.inf)', 'high': 'np.inf', 'shape': '(1,)', 'dtype': 'np.float32'}), '(low=-np.inf, high=np.inf, shape=(1,), dtype=np.float32)\n', (4068, 4124), False, 'from gym import spaces\n'), ((4156, 4215), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0.0)', 'high': '(1.0)', 'shape': '(1,)', 'dtype': 'np.float32'}), '(low=0.0, high=1.0, shape=(1,), dtype=np.float32)\n', (4166, 4215), False, 'from gym import spaces\n'), ((5341, 5368), 'gym.spaces.Dict', 'spaces.Dict', (['obs_space_dict'], {}), '(obs_space_dict)\n', (5352, 5368), False, 'from gym import spaces\n'), ((7174, 7201), 'gym.spaces.Dict', 'spaces.Dict', (['act_space_dict'], 
{}), '(act_space_dict)\n', (7185, 7201), False, 'from gym import spaces\n'), ((10915, 10938), 'gym.utils.seeding.np_random', 'seeding.np_random', (['seed'], {}), '(seed)\n', (10932, 10938), False, 'from gym.utils import seeding\n'), ((18353, 18382), 'numpy.concatenate', 'np.concatenate', (['achieved_goal'], {}), '(achieved_goal)\n', (18367, 18382), True, 'import numpy as np\n'), ((2355, 2367), 'roboball2d.ball.BallConfig', 'BallConfig', ([], {}), '()\n', (2365, 2367), False, 'from roboball2d.ball import BallConfig\n'), ((3380, 3417), 'roboball2d.ball_gun.DefaultBallGun', 'DefaultBallGun', (['self._ball_configs[0]'], {}), '(self._ball_configs[0])\n', (3394, 3417), False, 'from roboball2d.ball_gun import DefaultBallGun\n'), ((6400, 6535), 'gym.spaces.Dict', 'spaces.Dict', (["{'observation': self._preliminary_obs_space, 'desired_goal':\n desired_goal_space, 'achieved_goal': achieved_goal_space}"], {}), "({'observation': self._preliminary_obs_space, 'desired_goal':\n desired_goal_space, 'achieved_goal': achieved_goal_space})\n", (6411, 6535), False, 'from gym import spaces\n'), ((7851, 7929), 'numpy.clip', 'np.clip', (['joint.angular_velocity', '(-self._max_angular_vel)', 'self._max_angular_vel'], {}), '(joint.angular_velocity, -self._max_angular_vel, self._max_angular_vel)\n', (7858, 7929), True, 'import numpy as np\n'), ((11630, 11813), 'roboball2d.rendering.pyglet_utils.draw_vector', 'pyglet_utils.draw_vector', ([], {'initial_point': 'ws.robot.racket.position', 'vector': 'scaled_vector', 'width': 'self._arrow_width', 'arrow_head_size': 'self._arrow_head_size', 'color': '(0.8, 0.8, 0.8)'}), '(initial_point=ws.robot.racket.position, vector=\n scaled_vector, width=self._arrow_width, arrow_head_size=self.\n _arrow_head_size, color=(0.8, 0.8, 0.8))\n', (11654, 11813), True, 'import roboball2d.rendering.pyglet_utils as pyglet_utils\n'), ((14087, 14149), 'roboball2d.rendering.RenderingConfig', 'RenderingConfig', (['self._visible_area_width', 'self._visual_height'], {}), 
'(self._visible_area_width, self._visual_height)\n', (14102, 14149), False, 'from roboball2d.rendering import RenderingConfig\n'), ((14454, 14546), 'roboball2d.rendering.PygletRenderer', 'PygletRenderer', (['renderer_config', 'self._robot_config', 'self._ball_configs', 'self._callbacks'], {}), '(renderer_config, self._robot_config, self._ball_configs,\n self._callbacks)\n', (14468, 14546), False, 'from roboball2d.rendering import PygletRenderer\n'), ((15434, 15479), 'numpy.all', 'np.all', (['(achieved_goal[1:] == desired_goal[1:])'], {}), '(achieved_goal[1:] == desired_goal[1:])\n', (15440, 15479), True, 'import numpy as np\n'), ((15686, 15731), 'numpy.all', 'np.all', (['(achieved_goal[1:] == desired_goal[1:])'], {}), '(achieved_goal[1:] == desired_goal[1:])\n', (15692, 15731), True, 'import numpy as np\n'), ((16341, 16402), 'numpy.clip', 'np.clip', (['[joint.angle / self._joint_factor[index]]', '(-1.0)', '(1.0)'], {}), '([joint.angle / self._joint_factor[index]], -1.0, 1.0)\n', (16348, 16402), True, 'import numpy as np\n'), ((17378, 17393), 'numpy.array', 'np.array', (['value'], {}), '(value)\n', (17386, 17393), True, 'import numpy as np\n'), ((12382, 12533), 'roboball2d.robot.DefaultRobotState', 'DefaultRobotState', ([], {'robot_config': 'self._robot_config', 'generalized_coordinates': 'generalized_coordinates', 'generalized_velocities': 'generalized_velocities'}), '(robot_config=self._robot_config, generalized_coordinates=\n generalized_coordinates, generalized_velocities=generalized_velocities)\n', (12399, 12533), False, 'from roboball2d.robot import DefaultRobotState\n'), ((12876, 12893), 'pyglet.gl.glPushMatrix', 'gl.glPushMatrix', ([], {}), '()\n', (12891, 12893), True, 'import pyglet.gl as gl\n'), ((12910, 12938), 'pyglet.gl.glTranslatef', 'gl.glTranslatef', (['(0.0)', '(0.0)', 'z'], {}), '(0.0, 0.0, z)\n', (12925, 12938), True, 'import pyglet.gl as gl\n'), ((12953, 13129), 'roboball2d.rendering.pyglet_utils.draw_vector', 'pyglet_utils.draw_vector', ([], 
{'initial_point': 'robot_state.racket.position', 'vector': 'scaled_vector', 'width': 'self._arrow_width', 'arrow_head_size': 'self._arrow_head_size', 'color': 'color'}), '(initial_point=robot_state.racket.position, vector=\n scaled_vector, width=self._arrow_width, arrow_head_size=self.\n _arrow_head_size, color=color)\n', (12977, 13129), True, 'import roboball2d.rendering.pyglet_utils as pyglet_utils\n'), ((13271, 13287), 'pyglet.gl.glPopMatrix', 'gl.glPopMatrix', ([], {}), '()\n', (13285, 13287), True, 'import pyglet.gl as gl\n'), ((17734, 17762), 'numpy.array', 'np.array', (['self._desired_goal'], {}), '(self._desired_goal)\n', (17742, 17762), True, 'import numpy as np\n'), ((6012, 6046), 'numpy.array', 'np.array', (['[-np.inf, 0.0, 0.0, 0.0]'], {}), '([-np.inf, 0.0, 0.0, 0.0])\n', (6020, 6046), True, 'import numpy as np\n'), ((6073, 6106), 'numpy.array', 'np.array', (['[np.inf, 1.0, 1.0, 1.0]'], {}), '([np.inf, 1.0, 1.0, 1.0])\n', (6081, 6106), True, 'import numpy as np\n'), ((13567, 13641), 'roboball2d.rendering.pyglet_utils.draw_box', 'pyglet_utils.draw_box', (['(0.16 + 0.5 * width, y_pos)', 'width', '(0.1)', '(0.0)', 'color'], {}), '((0.16 + 0.5 * width, y_pos), width, 0.1, 0.0, color)\n', (13588, 13641), True, 'import roboball2d.rendering.pyglet_utils as pyglet_utils\n')]
|
import os
import shutil
import tempfile
import subprocess
import pytest
from lightning import LightningRpc
from bitcoin import BitcoinRPC
from .utils import TailableProc, wait_for
# Paths to the binaries under test, taken from the environment so the suite
# can be pointed at arbitrary builds.
bitcoind_bin = os.getenv("BITCOIND")
lightningd_bin = os.getenv("LIGHTNINGD")
bitcoin_cli_bin = os.getenv("BITCOIN_CLI")
@pytest.fixture
def bitcoin_dir():
    """Provide a throwaway bitcoind data directory, removed on teardown."""
    datadir = tempfile.mkdtemp(prefix="bitcoin.")
    yield datadir
    shutil.rmtree(datadir)
@pytest.fixture
def lightning_dirs():
    """Provide two throwaway lightningd data directories, removed on teardown."""
    dirs = [tempfile.mkdtemp(prefix=f"lightning-{tag}.") for tag in ("a", "b")]
    yield dirs
    for path in dirs:
        shutil.rmtree(path)
@pytest.fixture
def bitcoind(bitcoin_dir):
    """Start a regtest bitcoind, mine 101 blocks and yield (proc, rpc).

    The daemon's RPC credentials must match the ones the BitcoinRPC client
    below connects with (rpcuser/rpcpassword); the password here had been
    replaced by a `<PASSWORD>` redaction placeholder, which broke the RPC
    handshake.
    """
    proc = TailableProc(
        "{bitcoind_bin} -regtest -datadir={dir} -server -printtoconsole -logtimestamps -nolisten -rpcport=10287 -rpcuser=rpcuser -rpcpassword=rpcpassword".format(
            bitcoind_bin=bitcoind_bin, dir=bitcoin_dir
        ),
        verbose=False,
        procname="bitcoind",
    )
    proc.start()
    proc.wait_for_log("Done loading")
    rpc = BitcoinRPC("http://127.0.0.1:10287/", "rpcuser", "rpcpassword")
    # 101 blocks so that the first coinbase output is mature and spendable
    rpc.generate(101)
    yield proc, rpc
    proc.stop()
@pytest.fixture
def lightnings(bitcoin_dir, bitcoind, lightning_dirs):
    """Start two lightningd nodes, fund them, open and balance a channel
    between them, and yield (procs, rpcs).

    Teardown stops both daemons; stopping via RPC is best-effort since a
    node may already have exited.
    """
    procs = []
    for i, dir in enumerate(lightning_dirs):
        proc = TailableProc(
            "{lightningd_bin} --network regtest --bitcoin-cli {bitcoin_cli_bin} --bitcoin-rpcport=10287 --bitcoin-datadir {bitcoin_dir} --bitcoin-rpcuser rpcuser --bitcoin-rpcpassword rpcpassword --lightning-dir {dir} --bind-addr 127.0.0.1:987{i}".format(
                lightningd_bin=lightningd_bin,
                bitcoin_cli_bin=bitcoin_cli_bin,
                bitcoin_dir=bitcoin_dir,
                dir=dir,
                i=i,
            ),
            verbose=False,
            procname="lightningd-{}".format(i),
        )
        proc.start()
        proc.wait_for_log("Server started with public key")
        procs.append(proc)

    # make rpc clients
    rpcs = []
    for dir in lightning_dirs:
        rpc = LightningRpc(os.path.join(dir, "lightning-rpc"))
        rpcs.append(rpc)

    # get nodes funded
    _, bitcoin_rpc = bitcoind
    for rpc in rpcs:
        addr = rpc.newaddr()["address"]
        bitcoin_rpc.sendtoaddress(addr, 15)
    bitcoin_rpc.generate(1)
    for rpc in rpcs:
        wait_for(lambda: len(rpc.listfunds()["outputs"]) == 1, timeout=60)

    # make a channel between the two
    t = rpcs[0]
    f = rpcs[1]
    tinfo = t.getinfo()
    f.connect(tinfo["id"], tinfo["binding"][0]["address"], tinfo["binding"][0]["port"])
    num_tx = len(bitcoin_rpc.getrawmempool())
    f.fundchannel(tinfo["id"], 10000000)
    # wait until the funding transaction hits the mempool, then confirm it
    wait_for(lambda: len(bitcoin_rpc.getrawmempool()) == num_tx + 1)
    bitcoin_rpc.generate(1)

    # wait for channels
    for proc in procs:
        proc.wait_for_log("to CHANNELD_NORMAL", timeout=60)
    for rpc in rpcs:
        wait_for(lambda: len(rpc.listfunds()["channels"]) > 0, timeout=60)

    # send some money just to open space at the channel
    f.pay(t.invoice(1000000000, "open", "nada")["bolt11"])
    t.waitinvoice("open")

    yield procs, rpcs

    # stop nodes
    for proc, rpc in zip(procs, rpcs):
        try:
            rpc.stop()
        # bug fix: the bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; keep teardown best-effort but only for real errors
        except Exception:
            pass
        proc.proc.wait(5)
        proc.stop()
@pytest.fixture
def init_db():
    """Reset the test postgres database: drop all tables, re-create the
    schema from postgres.sql and insert a single test account."""
    db_url = os.getenv("DATABASE_URL")
    # refuse to touch anything that doesn't look like the local test database
    if "@localhost" not in db_url or "test" not in db_url:
        raise Exception("Use the test postgres database, please.")

    # destroy db
    completed = subprocess.run(
        f"psql {db_url} -c 'drop table if exists withdrawals cascade; drop table if exists internal_transfers cascade; drop table if exists calls cascade; drop table if exists contracts cascade; drop table if exists accounts cascade;'",
        shell=True,
        capture_output=True,
    )
    print("db destroy stdout: " + completed.stdout.decode("utf-8"))
    print("db destroy stderr: " + completed.stderr.decode("utf-8"))

    # rebuild db
    completed = subprocess.run(
        f"psql {db_url} -f postgres.sql", shell=True, capture_output=True
    )
    print("db creation stdout: " + completed.stdout.decode("utf-8"))
    print("db creation stderr: " + completed.stderr.decode("utf-8"))

    # create an account
    subprocess.run(
        f"""psql {db_url} -c "insert into accounts values ('account1', 'xxx')"
        """,
        shell=True,
        capture_output=True,
    )
@pytest.fixture
def flush_redis():
    """Wipe the test redis database before the test runs."""
    redis_url = os.getenv("REDIS_URL")
    # refuse to touch anything that doesn't look like a local test instance
    if "localhost" not in redis_url:
        raise Exception("Use the test redis database, please.")

    # delete everything
    completed = subprocess.run("redis-cli flushdb", shell=True, capture_output=True)
    print("redis destroy stdout: " + completed.stdout.decode("utf-8"))
    print("redis destroy stderr: " + completed.stderr.decode("utf-8"))
@pytest.fixture
def etleneum(init_db, flush_redis, lightning_dirs, lightnings):
    """Run the etleneum daemon against the first lightning node and yield
    (process, service_url). SERVICE_URL is expected to be set in the
    environment already."""
    node_dir = lightning_dirs[0]
    env = os.environ.copy()
    env["SOCKET_PATH"] = os.path.join(node_dir, "lightning-rpc")
    proc = TailableProc("./etleneum", env=env, procname="etleneum")
    proc.start()
    proc.wait_for_log("listening.")
    yield proc, env["SERVICE_URL"]
    proc.stop()
|
[
"subprocess.run",
"os.environ.copy",
"bitcoin.BitcoinRPC",
"tempfile.mkdtemp",
"shutil.rmtree",
"os.path.join",
"os.getenv"
] |
[((198, 219), 'os.getenv', 'os.getenv', (['"""BITCOIND"""'], {}), "('BITCOIND')\n", (207, 219), False, 'import os\n'), ((237, 260), 'os.getenv', 'os.getenv', (['"""LIGHTNINGD"""'], {}), "('LIGHTNINGD')\n", (246, 260), False, 'import os\n'), ((279, 303), 'os.getenv', 'os.getenv', (['"""BITCOIN_CLI"""'], {}), "('BITCOIN_CLI')\n", (288, 303), False, 'import os\n'), ((355, 390), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""bitcoin."""'}), "(prefix='bitcoin.')\n", (371, 390), False, 'import tempfile\n'), ((413, 435), 'shutil.rmtree', 'shutil.rmtree', (['bitcoin'], {}), '(bitcoin)\n', (426, 435), False, 'import shutil\n'), ((494, 533), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""lightning-a."""'}), "(prefix='lightning-a.')\n", (510, 533), False, 'import tempfile\n'), ((552, 591), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""lightning-b."""'}), "(prefix='lightning-b.')\n", (568, 591), False, 'import tempfile\n'), ((633, 659), 'shutil.rmtree', 'shutil.rmtree', (['lightning_a'], {}), '(lightning_a)\n', (646, 659), False, 'import shutil\n'), ((664, 690), 'shutil.rmtree', 'shutil.rmtree', (['lightning_b'], {}), '(lightning_b)\n', (677, 690), False, 'import shutil\n'), ((1113, 1176), 'bitcoin.BitcoinRPC', 'BitcoinRPC', (['"""http://127.0.0.1:10287/"""', '"""rpcuser"""', '"""rpcpassword"""'], {}), "('http://127.0.0.1:10287/', 'rpcuser', 'rpcpassword')\n", (1123, 1176), False, 'from bitcoin import BitcoinRPC\n'), ((3441, 3466), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (3450, 3466), False, 'import os\n'), ((4609, 4631), 'os.getenv', 'os.getenv', (['"""REDIS_URL"""'], {}), "('REDIS_URL')\n", (4618, 4631), False, 'import os\n'), ((4760, 4828), 'subprocess.run', 'subprocess.run', (['"""redis-cli flushdb"""'], {'shell': '(True)', 'capture_output': '(True)'}), "('redis-cli flushdb', shell=True, capture_output=True)\n", (4774, 4828), False, 'import subprocess\n'), ((5081, 5098), 'os.environ.copy', 
'os.environ.copy', ([], {}), '()\n', (5096, 5098), False, 'import os\n'), ((2142, 2176), 'os.path.join', 'os.path.join', (['dir', '"""lightning-rpc"""'], {}), "(dir, 'lightning-rpc')\n", (2154, 2176), False, 'import os\n'), ((5130, 5166), 'os.path.join', 'os.path.join', (['dir_a', '"""lightning-rpc"""'], {}), "(dir_a, 'lightning-rpc')\n", (5142, 5166), False, 'import os\n')]
|
from typing import Optional, Union, List, Tuple
import os
import cv2 as cv
import numpy as np
from PySide6.QtWidgets import QLayout, QLabel, QWidget, QGridLayout
from PySide6.QtGui import QImage, QMouseEvent, QCloseEvent, QResizeEvent, QMoveEvent, QPixmap
from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal
from .ui_editor_window import Ui_EditorWindow
from .preview_window import PreviewWindow
from .cluster_image_entry import ClusterImageEntry
from .layer_image_entry import LayerImageEntry
from .layer_data import LayerData
from .utils import load_image, array2d_to_pixmap, fit_to_frame, create_cluster
class CLusterPreviewWindow(QWidget):
    """
    Extends QWidget. Borderless floating window shown next to ClusterEditor
    that displays the current state of the cluster being edited.
    """

    def __init__(self, parent: Optional[QWidget] = None, size: QSize = QSize(600, 600), image: Optional[QImage] = None):
        super().__init__(parent)
        # frameless floating window
        self.setWindowFlags(Qt.Window | Qt.FramelessWindowHint)
        self.resize(size)
        self.imageLabel = QLabel("Cluster Preview", self)
        self.imageLabel.setAlignment(Qt.AlignCenter)
        grid = QGridLayout(self)
        grid.addWidget(self.imageLabel)
        if image is not None:
            self.__update_cluster_preview(QPixmap.fromImage(image))

    def __update_cluster_preview(self, pixmap: QPixmap) -> None:
        # scale the pixmap to the current window dimensions
        frame = QSize(self.width(), self.height())
        self.imageLabel.setPixmap(fit_to_frame(pixmap, frame))

    def update_cluster_preview(self, image: Union[np.ndarray, str]) -> None:
        """
        Load an image from a string or an array and update the cluster preview.
        :param image: Can be both a numpy array and a string.
        :raises ValueError: if the image is neither an array nor a string.
        """
        if isinstance(image, np.ndarray):
            pixmap = array2d_to_pixmap(image, normalize=True, colormap=cv.COLORMAP_JET)
        elif isinstance(image, str):
            pixmap = QPixmap.fromImage(QImage(image))
        else:
            raise ValueError("Invalid image type: {}".format(type(image)))
        self.__update_cluster_preview(pixmap)
class ClusterEditor(PreviewWindow):
    """
    Extends PreviewWindow. The window that allows editing of the clusters.
    """
    # Emitted when the user commits pending mergers globally; the payload is
    # one list of layer indices per pending merge (see apply_to_all).
    applied_to_all = Signal(list)

    def __init__(self, parent: Optional[QWidget], calling_image_entry: ClusterImageEntry):
        """
        Build the editor for the given cluster entry: wire up the toolbar
        buttons, load every layer of the cluster as a LayerImageEntry and
        create the floating cluster preview window.

        :param parent: Parent widget, may be None.
        :param calling_image_entry: The cluster whose layers are edited.
        """
        super(ClusterEditor, self).__init__(parent)
        self.ui = Ui_EditorWindow()
        self.ui.setupUi(self)
        self.ui.mergeButton.clicked.connect(self.merge)
        self.ui.applyButton.clicked.connect(self.apply_to_all)
        self.ui.resetButton.clicked.connect(self.reset)
        # self.ui.unmergeButton.clicked.connect(self.unmerge)
        self.ui.undoButton.clicked.connect(self.undo)

        # Layer entries currently shown in the editor, and the one selected
        # for preview (None until the user clicks a layer thumbnail).
        self._source_image_entries: List[LayerImageEntry] = []
        self._selected_image_entry: Optional[LayerImageEntry] = None
        self.__cluster_image_entry: ClusterImageEntry = calling_image_entry
        # Undo bookkeeping: the three lists below are kept in lockstep — for
        # each uncommitted merge we store the merged indices, the merged
        # entry, and the entries it replaced.
        self.__pending_mergers: List[List[int]] = []
        self.__pending_ime: List[LayerImageEntry] = []
        self.__old_entries: List[List[LayerImageEntry]] = []
        self.__cluster_array: np.ndarray = np.load(self.__cluster_image_entry.array_path)

        # Square preview window sized to the editor's client-area height.
        side_length = self.height() - self.menuBar().height()
        self.__cluster_preview_window = CLusterPreviewWindow(self, QSize(side_length, side_length),
                                                              load_image(self.__cluster_image_entry.image_path))
        # self.cluster_preview_window.show()

        # first = True
        # Load one thumbnail entry per layer of the cluster.
        for i in range(self.__cluster_image_entry.layer_count()):
            layer_data = self.__cluster_image_entry.get_layer_data(i)
            array = np.load(layer_data.array_path)
            qim: QImage = load_image(layer_data.image_path)
            ime = LayerImageEntry(self, qim, array, layer_data.name(), is_merger=layer_data.is_merger,
                                  layer_index=layer_data.layer_index, parent_layers=layer_data.parent_layers)
            ime.mouse_pressed.connect(self.image_entry_click_handler)
            ime.state_changed.connect(self.change_merge_button_state)
            self.add_source_image_entry(ime)
            # if first:
            #     self.set_preview_image(qim, ime)
            #     first = False

    def source_layout(self) -> QLayout:
        """Layout hosting the layer thumbnails (PreviewWindow hook)."""
        return self.ui.scrollAreaLayersContents.layout()

    def image_preview(self) -> QLabel:
        """Label used to display the selected layer (PreviewWindow hook)."""
        return self.ui.imageLabel

    @Slot(LayerImageEntry, QMouseEvent)
    def image_entry_click_handler(self, sender: LayerImageEntry, event: QMouseEvent) -> None:
        """Show the clicked layer in the preview label."""
        assert type(sender) == LayerImageEntry
        self.set_preview_image(array2d_to_pixmap(sender.array, normalize=True).toImage(), sender)

    def resizeEvent(self, event: QResizeEvent) -> None:
        # Re-render the selected layer so it fits the new preview size.
        if self._selected_image_entry is None:
            return
        self.draw_preview_image(array2d_to_pixmap(self._selected_image_entry.array, normalize=True).toImage())

    def moveEvent(self, event: QMoveEvent) -> None:
        # Keep the cluster preview window docked to the editor's left edge,
        # and reveal it the first time the editor is moved/shown.
        position = event.pos()
        self.__cluster_preview_window.move(position - QPoint(self.__cluster_preview_window.width(), 0))
        if self.__cluster_preview_window.isHidden():
            self.__cluster_preview_window.show()

    def closeEvent(self, event: QCloseEvent) -> None:
        # The frameless preview window has no close button of its own;
        # close it together with the editor.
        self.__cluster_preview_window.close()
        self.__cluster_preview_window = None

    def __pending_add(self, mergers_idx: List[int], ime: LayerImageEntry, old_entries: List[LayerImageEntry]) -> None:
        """
        Add the result of a merge to the pending list, and store the merged layers to be able to undo the merge.

        :param mergers_idx: Indices of the layers to merge.
        :type mergers_idx: list of int
        :param LayerImageEntry ime: The newly merged image entry.
        :param old_entries: A list of the layers used to generate the merged layer.
        :type old_entries: list of LayerImageEntry
        """
        if not self.ui.undoButton.isEnabled():
            self.ui.undoButton.setEnabled(True)
        self.__pending_mergers.append(mergers_idx)
        self.__pending_ime.append(ime)
        self.__old_entries.append(old_entries)

    def __pending_clear(self) -> None:
        """
        Deletes the mergers that haven't been applied yet.
        """
        self.ui.undoButton.setEnabled(False)
        self.__pending_mergers.clear()
        self.__pending_ime.clear()
        self.__old_entries.clear()

    def __pending_count(self) -> int:
        # The three pending lists must always stay in lockstep.
        n = len(self.__pending_mergers)
        assert n == len(self.__pending_ime) == len(self.__old_entries)
        return n

    def __pending_pop(self) -> Tuple[List[int], LayerImageEntry, List[LayerImageEntry]]:
        """
        Gives a tuple of the last merged indices, image entry of the merged layers and the list of image entries
        used to generate the merger.

        :rtype: (list of int, LayerImageEntry, list of LayerImageEntry)
        """
        # Disable undo when popping the last remaining pending merge.
        if self.__pending_count() == 1:
            self.ui.undoButton.setEnabled(False)
        return self.__pending_mergers.pop(), self.__pending_ime.pop(), self.__old_entries.pop()

    @Slot(int)
    def change_merge_button_state(self, state: int) -> None:
        # Disable merging when every layer is selected; merging all layers
        # into one would leave nothing left to edit.
        if not state == Qt.CheckState.Checked:
            self.ui.mergeButton.setEnabled(True)
            return
        if len(self.get_selected_entries()) == len(self._source_image_entries):
            self.ui.mergeButton.setEnabled(False)

    @Slot()
    def merge(self) -> None:
        """
        Merge the selected layers only in the current view. Update the cluster preview with the newly merged cluster.
        """
        if len(self.get_selected_entries()) < 2:
            return

        checked_indices: List[int] = []
        old_ime: List[LayerImageEntry] = []
        merger: Optional[np.ndarray] = None
        parent_layers: List[int] = []
        for index, ime in enumerate(self._source_image_entries):
            if not ime.isChecked():
                continue
            # Flatten mergers into their original layer indices so the new
            # entry records every ancestor layer.
            if ime.layer_data.is_merger:
                assert ime.layer_data.parent_layers is not None
                parent_layers.extend(ime.layer_data.parent_layers)
            else:
                assert ime.layer_data.layer_index is not None
                parent_layers.append(ime.layer_data.layer_index)

            checked_indices.append(index)
            old_ime.append(ime)
            # Layer arrays are combined with an element-wise OR.
            merger = self._source_image_entries[index].array if merger is None else merger | self._source_image_entries[
                index].array
            ime.setChecked(False)
            ime.close()

        # Pop highest indices first so earlier indices stay valid.
        for i in sorted(checked_indices, reverse=True):
            self._source_image_entries.pop(i)

        # NOTE(review): the preview cluster is rebuilt from the remaining
        # entries before the merged entry is appended — confirm this is the
        # intended preview content.
        self.__cluster_array = create_cluster([ime.array for ime in self._source_image_entries])
        self.__cluster_preview_window.update_cluster_preview(self.__cluster_array)

        qim: QImage = array2d_to_pixmap(merger, normalize=True).toImage()
        merged_ime = LayerImageEntry(self, qim, merger, f"m {LayerData.indices2str(parent_layers)}",
                                        is_merger=True, parent_layers=parent_layers)
        merged_ime.mouse_pressed.connect(self.image_entry_click_handler)
        merged_ime.state_changed.connect(self.change_merge_button_state)
        self.__pending_add(checked_indices, merged_ime, old_ime)
        self.set_preview_image(qim, merged_ime)
        self.add_source_image_entry(merged_ime)
        self.change_all_entries_check_state(False)

    @Slot()
    def apply_to_all(self) -> None:
        """
        Send a merge signal for each pending merge.
        """
        for merger in self.__pending_mergers:
            self.applied_to_all.emit(merger)
        self.__pending_clear()

    @Slot()
    def reset(self) -> None:
        """
        Removes all uncommitted changes done in the editor.
        """
        if len(self.__pending_mergers) == 0:
            return
        self.__pending_clear()

        # Drop every entry currently shown …
        for ime in self._source_image_entries:
            ime.close()
        self._source_image_entries.clear()

        # … then reload the cluster and its layers from disk.
        self.__cluster_array = np.load(self.__cluster_image_entry.array_path)
        self.__cluster_preview_window.update_cluster_preview(self.__cluster_array)

        for i in range(self.__cluster_image_entry.layer_count()):
            layer_data = self.__cluster_image_entry.get_layer_data(i)
            array = np.load(layer_data.array_path)
            qim: QImage = load_image(layer_data.image_path)
            ime = LayerImageEntry(self, qim, array, layer_data.name(), layer_data.is_merger, layer_data.layer_index,
                                  layer_data.parent_layers)
            ime.mouse_pressed.connect(self.image_entry_click_handler)
            ime.state_changed.connect(self.change_merge_button_state)
            self.add_source_image_entry(ime)
            # Preview the first layer by default.
            if i == 0:
                self.set_preview_image(load_image(layer_data.image_path), ime)
        self.change_all_entries_check_state(False)

    @Slot()
    def unmerge(self) -> None:
        """
        Unmerge the selected layers in the editor. Global behavior not implemented.
        """
        for index, ime in enumerate(self._source_image_entries):
            if not ime.isChecked() or not ime.layer_data.is_merger:
                continue
            self._source_image_entries.pop(index)
            assert ime.layer_data.parent_layers is not None
            # Recreate one entry per original parent layer from the files on
            # disk ("<basename>_layer_<index>.png/.npy" next to the cluster).
            for parent_layer_index in ime.layer_data.parent_layers.copy():
                directory = os.path.dirname(self.__cluster_image_entry.image_path)
                path_no_ext = os.path.join(directory,
                                            f"{self.__cluster_image_entry.basename}_layer_{parent_layer_index}")
                image_path = f"{path_no_ext}.png"
                array_path = f"{path_no_ext}.npy"
                parent_ime = LayerImageEntry(self, load_image(image_path), np.load(array_path), str(parent_layer_index),
                                              layer_index=parent_layer_index)
                parent_ime.mouse_pressed.connect(self.image_entry_click_handler)
                parent_ime.state_changed.connect(self.change_merge_button_state)
                self.add_source_image_entry(parent_ime)
            ime.close()

    @Slot()
    def undo(self) -> None:
        """
        Go one step back.
        """
        if self.__pending_count() == 0:
            return
        indices, pending_ime, old_ime = self.__pending_pop()
        # The merged entry is always the most recently appended one.
        self._source_image_entries.pop()
        pending_ime.close()
        # Restore the replaced entries at their original positions.
        for index, ime in zip(indices, old_ime):
            ime.setVisible(True)
            self.add_source_image_entry(ime, index)
        self.image_preview().setText("Layer")
        self.__cluster_preview_window.update_cluster_preview(self.__cluster_image_entry.image_path)
        self.change_all_entries_check_state(False)
|
[
"numpy.load",
"os.path.dirname",
"PySide6.QtWidgets.QLabel",
"PySide6.QtGui.QImage",
"PySide6.QtCore.Signal",
"PySide6.QtCore.QSize",
"PySide6.QtGui.QPixmap.fromImage",
"PySide6.QtCore.Slot",
"PySide6.QtWidgets.QGridLayout",
"os.path.join"
] |
[((2253, 2265), 'PySide6.QtCore.Signal', 'Signal', (['list'], {}), '(list)\n', (2259, 2265), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((4502, 4536), 'PySide6.QtCore.Slot', 'Slot', (['LayerImageEntry', 'QMouseEvent'], {}), '(LayerImageEntry, QMouseEvent)\n', (4506, 4536), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((7208, 7217), 'PySide6.QtCore.Slot', 'Slot', (['int'], {}), '(int)\n', (7212, 7217), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((7530, 7536), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (7534, 7536), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((9575, 9581), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (9579, 9581), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((9822, 9828), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (9826, 9828), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((11085, 11091), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (11089, 11091), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((12370, 12376), 'PySide6.QtCore.Slot', 'Slot', ([], {}), '()\n', (12374, 12376), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((839, 854), 'PySide6.QtCore.QSize', 'QSize', (['(600)', '(600)'], {}), '(600, 600)\n', (844, 854), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((1065, 1096), 'PySide6.QtWidgets.QLabel', 'QLabel', (['"""Cluster Preview"""', 'self'], {}), "('Cluster Preview', self)\n", (1071, 1096), False, 'from PySide6.QtWidgets import QLayout, QLabel, QWidget, QGridLayout\n'), ((1168, 1185), 'PySide6.QtWidgets.QGridLayout', 'QGridLayout', (['self'], {}), '(self)\n', (1179, 1185), False, 'from PySide6.QtWidgets import QLayout, QLabel, QWidget, QGridLayout\n'), ((3188, 3234), 'numpy.load', 'np.load', (['self.__cluster_image_entry.array_path'], {}), 
'(self.__cluster_image_entry.array_path)\n', (3195, 3234), True, 'import numpy as np\n'), ((10186, 10232), 'numpy.load', 'np.load', (['self.__cluster_image_entry.array_path'], {}), '(self.__cluster_image_entry.array_path)\n', (10193, 10232), True, 'import numpy as np\n'), ((3365, 3396), 'PySide6.QtCore.QSize', 'QSize', (['side_length', 'side_length'], {}), '(side_length, side_length)\n', (3370, 3396), False, 'from PySide6.QtCore import Slot, QSize, QPoint, Qt, Signal\n'), ((3735, 3765), 'numpy.load', 'np.load', (['layer_data.array_path'], {}), '(layer_data.array_path)\n', (3742, 3765), True, 'import numpy as np\n'), ((10473, 10503), 'numpy.load', 'np.load', (['layer_data.array_path'], {}), '(layer_data.array_path)\n', (10480, 10503), True, 'import numpy as np\n'), ((1301, 1325), 'PySide6.QtGui.QPixmap.fromImage', 'QPixmap.fromImage', (['image'], {}), '(image)\n', (1318, 1325), False, 'from PySide6.QtGui import QImage, QMouseEvent, QCloseEvent, QResizeEvent, QMoveEvent, QPixmap\n'), ((11603, 11657), 'os.path.dirname', 'os.path.dirname', (['self.__cluster_image_entry.image_path'], {}), '(self.__cluster_image_entry.image_path)\n', (11618, 11657), False, 'import os\n'), ((11688, 11784), 'os.path.join', 'os.path.join', (['directory', 'f"""{self.__cluster_image_entry.basename}_layer_{parent_layer_index}"""'], {}), "(directory,\n f'{self.__cluster_image_entry.basename}_layer_{parent_layer_index}')\n", (11700, 11784), False, 'import os\n'), ((1995, 2008), 'PySide6.QtGui.QImage', 'QImage', (['image'], {}), '(image)\n', (2001, 2008), False, 'from PySide6.QtGui import QImage, QMouseEvent, QCloseEvent, QResizeEvent, QMoveEvent, QPixmap\n'), ((11999, 12018), 'numpy.load', 'np.load', (['array_path'], {}), '(array_path)\n', (12006, 12018), True, 'import numpy as np\n')]
|
from app.models import ParentHood
from datetime import datetime
from flask import jsonify, request
from flask_jwt_extended import get_jwt_identity, jwt_required
from app import db
from app.api import bluePrint
from app.api.auth.auth_utils import jwt_roles_required
from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood,\
query_parent_students, query_student_sessions
from app.utils.utils import Roles, VALIDATIONS, datetime_string_to_utc
# -------------------Teachers Section--------------------------------------------------------
@bluePrint.route('/parent', methods=['POST'])
@jwt_roles_required(Roles.EVERYBODY)
def register_parent():
    """
    Record the calling user's personal details and promote them to the
    PARENT role. Returns 201 with a status message either way.
    """
    parent = query_existing_user(get_jwt_identity().get('id'))
    if not parent:
        return jsonify(message='User does not exist'), 201

    payload = request.json
    # Copy the optional profile fields straight from the request body.
    for field in ('phone', 'nick_name', 'real_name', 'gender', 'province', 'city', 'avatar'):
        setattr(parent, field, payload.get(field, None))
    parent.language = payload.get('language', 'CN')
    parent.roles = Roles.PARENT
    parent.register_time = datetime.utcnow()
    db.session.add(parent)
    db.session.commit()
    return jsonify(message="Parent created successfully"), 201
@bluePrint.route('/validate_parent', methods=['POST'])
@jwt_roles_required(Roles.TEACHER)  # Teacher and above can validate a parent
def validate_parent():
    """
    Store a teacher's approval decision for a parent account, recording
    who approved it and when.
    """
    target = query_existing_user(request.json.get('parent_id', None))
    if not target:
        return jsonify(message='User does not exist'), 201

    target.validated = request.json.get('decision', 0)
    target.approver_id = get_jwt_identity().get('id')
    target.approve_time = datetime.utcnow()
    db.session.add(target)
    db.session.commit()
    return jsonify(message="Parent validation updated"), 201
@bluePrint.route('/unvalidated_parents', methods=['GET'])
@jwt_roles_required(Roles.TEACHER)  # Teacher and above can get unvalidated parents.
def get_unvalidated_parents():
    """
    List every registered parent still waiting for validation.
    """
    pending = query_unvalidated_parents()
    # Guard against a None/empty query result before building the payload.
    payload = [entry.validate_info() for entry in pending] if pending else []
    return jsonify(message=payload), 201
@bluePrint.route('/bind_parents', methods=['POST'])
@jwt_required()
def bind_parents():
    """
    This API adds parent information into DB and binds a parent with a student.

    Updates the calling user's profile, promotes them to PARENT if needed,
    then creates or updates the parent/student relation. Always responds
    with status 201 and a message describing what happened.
    """
    parent_id = get_jwt_identity().get('id')
    student_id = request.json.get('student_id', None)
    teacher_id = request.json.get('teacher_id', None)
    relation = request.json.get('relation', None)

    # Existing relation record for this (parent, student) pair, if any.
    parent_hood = query_parent_hood(parent_id, student_id)

    # First, find the parent based on the parent_id
    parent = query_existing_user(parent_id)
    if parent:
        # If the parent is already logged in, then add the info into db
        parent.phone = request.json.get('phone', None)
        parent.real_name = request.json.get('real_name', None)
        # The following info can be get from wechat
        parent.nick_name = request.json.get('nick_name', None)
        parent.gender = request.json.get('gender', None)
        parent.language = request.json.get('language', 'CN')
        parent.province = request.json.get('province', None)
        parent.city = request.json.get('city', None)
        parent.avatar = request.json.get('avatar', None)

        if parent.roles <= Roles.PARENT:
            # If the user's role is no larger than PARENT
            # Then change the user's role and register_time, validated status, approve_time, approver_id
            # Else, the following information stay the same.
            parent.roles = Roles.PARENT
            parent.register_time = datetime.utcnow()
            parent.validated = VALIDATIONS.APPROVED
            parent.approve_time = datetime.utcnow()
            parent.approver_id = teacher_id

        db.session.add(parent)
        db.session.commit()

        if parent_hood:
            # Second, find if there is already a parenthood record in the DB
            # If so, update the original parenthood to a new value
            original_relation = parent_hood.relation
            parent_hood.relation = relation
            db.session.add(parent_hood)
            db.session.commit()
            return jsonify(message="modified relation from " + str(original_relation) + " to " + str(relation)), 201
        else:
            # If no such parenthood in the DB
            # Create a new one in the DB
            parent_hood = ParentHood()
            parent_hood.parent_id = parent_id
            parent_hood.student_id = student_id
            parent_hood.relation = relation
            db.session.add(parent_hood)
            db.session.commit()
            return jsonify(message="Successfully binded parent"), 201
    else:
        return jsonify(message="No such user"), 201
@bluePrint.route('/parent_students', methods=['GET'])
@jwt_roles_required(Roles.PARENT)
def get_parent_students():
    """
    Return the students bound to the calling parent.
    """
    caller_id = get_jwt_identity().get('id')
    if not query_existing_user(caller_id):
        return jsonify(message=[]), 201
    bindings = query_parent_students(caller_id)
    return jsonify(message=[child.to_dict() for child, _ in bindings]), 201
@bluePrint.route('/parent_students_sessions', methods=['POST'])
@jwt_roles_required(Roles.PARENT)
def get_parent_students_sessions():
    """
    For every student bound to the calling parent, list the class sessions
    that fall inside the requested time window.
    """
    caller_id = get_jwt_identity().get('id')
    window_start = datetime_string_to_utc(request.json.get('start_time', None))
    window_end = datetime_string_to_utc(request.json.get('end_time', None))

    result = []
    if query_existing_user(caller_id):
        for student, _ in query_parent_students(caller_id):
            entry = student.to_dict()
            entry['class_sessions'] = [
                session.to_dict()
                for session, _ in query_student_sessions(student.id, window_start, window_end)
            ]
            result.append(entry)
    # An unknown caller simply gets an empty list back.
    return jsonify(message=result), 201
|
[
"app.dbUtils.dbUtils.query_unvalidated_parents",
"flask_jwt_extended.get_jwt_identity",
"app.dbUtils.dbUtils.query_parent_students",
"app.api.bluePrint.route",
"app.dbUtils.dbUtils.query_parent_hood",
"flask_jwt_extended.jwt_required",
"app.dbUtils.dbUtils.query_existing_user",
"datetime.datetime.utcnow",
"flask.jsonify",
"app.models.ParentHood",
"app.db.session.add",
"app.db.session.commit",
"app.dbUtils.dbUtils.query_student_sessions",
"flask.request.json.get",
"app.api.auth.auth_utils.jwt_roles_required",
"app.utils.utils.datetime_string_to_utc"
] |
[((586, 630), 'app.api.bluePrint.route', 'bluePrint.route', (['"""/parent"""'], {'methods': "['POST']"}), "('/parent', methods=['POST'])\n", (601, 630), False, 'from app.api import bluePrint\n'), ((632, 667), 'app.api.auth.auth_utils.jwt_roles_required', 'jwt_roles_required', (['Roles.EVERYBODY'], {}), '(Roles.EVERYBODY)\n', (650, 667), False, 'from app.api.auth.auth_utils import jwt_roles_required\n'), ((1621, 1674), 'app.api.bluePrint.route', 'bluePrint.route', (['"""/validate_parent"""'], {'methods': "['POST']"}), "('/validate_parent', methods=['POST'])\n", (1636, 1674), False, 'from app.api import bluePrint\n'), ((1676, 1709), 'app.api.auth.auth_utils.jwt_roles_required', 'jwt_roles_required', (['Roles.TEACHER'], {}), '(Roles.TEACHER)\n', (1694, 1709), False, 'from app.api.auth.auth_utils import jwt_roles_required\n'), ((2332, 2388), 'app.api.bluePrint.route', 'bluePrint.route', (['"""/unvalidated_parents"""'], {'methods': "['GET']"}), "('/unvalidated_parents', methods=['GET'])\n", (2347, 2388), False, 'from app.api import bluePrint\n'), ((2390, 2423), 'app.api.auth.auth_utils.jwt_roles_required', 'jwt_roles_required', (['Roles.TEACHER'], {}), '(Roles.TEACHER)\n', (2408, 2423), False, 'from app.api.auth.auth_utils import jwt_roles_required\n'), ((2786, 2836), 'app.api.bluePrint.route', 'bluePrint.route', (['"""/bind_parents"""'], {'methods': "['POST']"}), "('/bind_parents', methods=['POST'])\n", (2801, 2836), False, 'from app.api import bluePrint\n'), ((2838, 2852), 'flask_jwt_extended.jwt_required', 'jwt_required', ([], {}), '()\n', (2850, 2852), False, 'from flask_jwt_extended import get_jwt_identity, jwt_required\n'), ((5443, 5495), 'app.api.bluePrint.route', 'bluePrint.route', (['"""/parent_students"""'], {'methods': "['GET']"}), "('/parent_students', methods=['GET'])\n", (5458, 5495), False, 'from app.api import bluePrint\n'), ((5497, 5529), 'app.api.auth.auth_utils.jwt_roles_required', 'jwt_roles_required', (['Roles.PARENT'], {}), '(Roles.PARENT)\n', 
(5515, 5529), False, 'from app.api.auth.auth_utils import jwt_roles_required\n'), ((5910, 5972), 'app.api.bluePrint.route', 'bluePrint.route', (['"""/parent_students_sessions"""'], {'methods': "['POST']"}), "('/parent_students_sessions', methods=['POST'])\n", (5925, 5972), False, 'from app.api import bluePrint\n'), ((5974, 6006), 'app.api.auth.auth_utils.jwt_roles_required', 'jwt_roles_required', (['Roles.PARENT'], {}), '(Roles.PARENT)\n', (5992, 6006), False, 'from app.api.auth.auth_utils import jwt_roles_required\n'), ((821, 851), 'app.dbUtils.dbUtils.query_existing_user', 'query_existing_user', (['parent_id'], {}), '(parent_id)\n', (840, 851), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((1851, 1886), 'flask.request.json.get', 'request.json.get', (['"""parent_id"""', 'None'], {}), "('parent_id', None)\n", (1867, 1886), False, 'from flask import jsonify, request\n'), ((1902, 1933), 'flask.request.json.get', 'request.json.get', (['"""decision"""', '(0)'], {}), "('decision', 0)\n", (1918, 1933), False, 'from flask import jsonify, request\n'), ((1947, 1977), 'app.dbUtils.dbUtils.query_existing_user', 'query_existing_user', (['parent_id'], {}), '(parent_id)\n', (1966, 1977), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((2611, 2638), 'app.dbUtils.dbUtils.query_unvalidated_parents', 'query_unvalidated_parents', ([], {}), '()\n', (2636, 2638), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((3031, 3067), 'flask.request.json.get', 'request.json.get', (['"""student_id"""', 'None'], {}), "('student_id', None)\n", (3047, 3067), False, 'from flask import jsonify, request\n'), ((3085, 3121), 'flask.request.json.get', 'request.json.get', 
(['"""teacher_id"""', 'None'], {}), "('teacher_id', None)\n", (3101, 3121), False, 'from flask import jsonify, request\n'), ((3137, 3171), 'flask.request.json.get', 'request.json.get', (['"""relation"""', 'None'], {}), "('relation', None)\n", (3153, 3171), False, 'from flask import jsonify, request\n'), ((3191, 3231), 'app.dbUtils.dbUtils.query_parent_hood', 'query_parent_hood', (['parent_id', 'student_id'], {}), '(parent_id, student_id)\n', (3208, 3231), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((3297, 3327), 'app.dbUtils.dbUtils.query_existing_user', 'query_existing_user', (['parent_id'], {}), '(parent_id)\n', (3316, 3327), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((5675, 5705), 'app.dbUtils.dbUtils.query_existing_user', 'query_existing_user', (['parent_id'], {}), '(parent_id)\n', (5694, 5705), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((6198, 6234), 'flask.request.json.get', 'request.json.get', (['"""start_time"""', 'None'], {}), "('start_time', None)\n", (6214, 6234), False, 'from flask import jsonify, request\n'), ((6250, 6284), 'flask.request.json.get', 'request.json.get', (['"""end_time"""', 'None'], {}), "('end_time', None)\n", (6266, 6284), False, 'from flask import jsonify, request\n'), ((6307, 6341), 'app.utils.utils.datetime_string_to_utc', 'datetime_string_to_utc', (['start_time'], {}), '(start_time)\n', (6329, 6341), False, 'from app.utils.utils import Roles, VALIDATIONS, datetime_string_to_utc\n'), ((6361, 6393), 'app.utils.utils.datetime_string_to_utc', 'datetime_string_to_utc', (['end_time'], {}), '(end_time)\n', (6383, 6393), False, 'from app.utils.utils import Roles, VALIDATIONS, 
datetime_string_to_utc\n'), ((6408, 6438), 'app.dbUtils.dbUtils.query_existing_user', 'query_existing_user', (['parent_id'], {}), '(parent_id)\n', (6427, 6438), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((891, 922), 'flask.request.json.get', 'request.json.get', (['"""phone"""', 'None'], {}), "('phone', None)\n", (907, 922), False, 'from flask import jsonify, request\n'), ((950, 985), 'flask.request.json.get', 'request.json.get', (['"""nick_name"""', 'None'], {}), "('nick_name', None)\n", (966, 985), False, 'from flask import jsonify, request\n'), ((1013, 1048), 'flask.request.json.get', 'request.json.get', (['"""real_name"""', 'None'], {}), "('real_name', None)\n", (1029, 1048), False, 'from flask import jsonify, request\n'), ((1073, 1105), 'flask.request.json.get', 'request.json.get', (['"""gender"""', 'None'], {}), "('gender', None)\n", (1089, 1105), False, 'from flask import jsonify, request\n'), ((1132, 1166), 'flask.request.json.get', 'request.json.get', (['"""language"""', '"""CN"""'], {}), "('language', 'CN')\n", (1148, 1166), False, 'from flask import jsonify, request\n'), ((1193, 1227), 'flask.request.json.get', 'request.json.get', (['"""province"""', 'None'], {}), "('province', None)\n", (1209, 1227), False, 'from flask import jsonify, request\n'), ((1250, 1280), 'flask.request.json.get', 'request.json.get', (['"""city"""', 'None'], {}), "('city', None)\n", (1266, 1280), False, 'from flask import jsonify, request\n'), ((1305, 1337), 'flask.request.json.get', 'request.json.get', (['"""avatar"""', 'None'], {}), "('avatar', None)\n", (1321, 1337), False, 'from flask import jsonify, request\n'), ((1405, 1422), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1420, 1422), False, 'from datetime import datetime\n'), ((1431, 1453), 'app.db.session.add', 'db.session.add', (['parent'], {}), '(parent)\n', (1445, 1453), False, 'from app 
import db\n'), ((1462, 1481), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1479, 1481), False, 'from app import db\n'), ((2118, 2135), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2133, 2135), False, 'from datetime import datetime\n'), ((2144, 2166), 'app.db.session.add', 'db.session.add', (['parent'], {}), '(parent)\n', (2158, 2166), False, 'from app import db\n'), ((2175, 2194), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2192, 2194), False, 'from app import db\n'), ((2754, 2777), 'flask.jsonify', 'jsonify', ([], {'message': 'result'}), '(message=result)\n', (2761, 2777), False, 'from flask import jsonify, request\n'), ((3438, 3469), 'flask.request.json.get', 'request.json.get', (['"""phone"""', 'None'], {}), "('phone', None)\n", (3454, 3469), False, 'from flask import jsonify, request\n'), ((3497, 3532), 'flask.request.json.get', 'request.json.get', (['"""real_name"""', 'None'], {}), "('real_name', None)\n", (3513, 3532), False, 'from flask import jsonify, request\n'), ((3613, 3648), 'flask.request.json.get', 'request.json.get', (['"""nick_name"""', 'None'], {}), "('nick_name', None)\n", (3629, 3648), False, 'from flask import jsonify, request\n'), ((3673, 3705), 'flask.request.json.get', 'request.json.get', (['"""gender"""', 'None'], {}), "('gender', None)\n", (3689, 3705), False, 'from flask import jsonify, request\n'), ((3732, 3766), 'flask.request.json.get', 'request.json.get', (['"""language"""', '"""CN"""'], {}), "('language', 'CN')\n", (3748, 3766), False, 'from flask import jsonify, request\n'), ((3793, 3827), 'flask.request.json.get', 'request.json.get', (['"""province"""', 'None'], {}), "('province', None)\n", (3809, 3827), False, 'from flask import jsonify, request\n'), ((3850, 3880), 'flask.request.json.get', 'request.json.get', (['"""city"""', 'None'], {}), "('city', None)\n", (3866, 3880), False, 'from flask import jsonify, request\n'), ((3905, 3937), 'flask.request.json.get', 
'request.json.get', (['"""avatar"""', 'None'], {}), "('avatar', None)\n", (3921, 3937), False, 'from flask import jsonify, request\n'), ((4452, 4474), 'app.db.session.add', 'db.session.add', (['parent'], {}), '(parent)\n', (4466, 4474), False, 'from app import db\n'), ((4483, 4502), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4500, 4502), False, 'from app import db\n'), ((5740, 5772), 'app.dbUtils.dbUtils.query_parent_students', 'query_parent_students', (['parent_id'], {}), '(parent_id)\n', (5761, 5772), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((6490, 6522), 'app.dbUtils.dbUtils.query_parent_students', 'query_parent_students', (['parent_id'], {}), '(parent_id)\n', (6511, 6522), False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((778, 796), 'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (794, 796), False, 'from flask_jwt_extended import get_jwt_identity, jwt_required\n'), ((1497, 1543), 'flask.jsonify', 'jsonify', ([], {'message': '"""Parent created successfully"""'}), "(message='Parent created successfully')\n", (1504, 1543), False, 'from flask import jsonify, request\n'), ((1574, 1612), 'flask.jsonify', 'jsonify', ([], {'message': '"""User does not exist"""'}), "(message='User does not exist')\n", (1581, 1612), False, 'from flask import jsonify, request\n'), ((2210, 2254), 'flask.jsonify', 'jsonify', ([], {'message': '"""Parent validation updated"""'}), "(message='Parent validation updated')\n", (2217, 2254), False, 'from flask import jsonify, request\n'), ((2285, 2323), 'flask.jsonify', 'jsonify', ([], {'message': '"""User does not exist"""'}), "(message='User does not exist')\n", (2292, 2323), False, 'from flask import jsonify, request\n'), ((2985, 3003), 
'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (3001, 3003), False, 'from flask_jwt_extended import get_jwt_identity, jwt_required\n'), ((4278, 4295), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4293, 4295), False, 'from datetime import datetime\n'), ((4382, 4399), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4397, 4399), False, 'from datetime import datetime\n'), ((4781, 4808), 'app.db.session.add', 'db.session.add', (['parent_hood'], {}), '(parent_hood)\n', (4795, 4808), False, 'from app import db\n'), ((4821, 4840), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4838, 4840), False, 'from app import db\n'), ((5085, 5097), 'app.models.ParentHood', 'ParentHood', ([], {}), '()\n', (5095, 5097), False, 'from app.models import ParentHood\n'), ((5248, 5275), 'app.db.session.add', 'db.session.add', (['parent_hood'], {}), '(parent_hood)\n', (5262, 5275), False, 'from app import db\n'), ((5288, 5307), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5305, 5307), False, 'from app import db\n'), ((5403, 5434), 'flask.jsonify', 'jsonify', ([], {'message': '"""No such user"""'}), "(message='No such user')\n", (5410, 5434), False, 'from flask import jsonify, request\n'), ((5633, 5651), 'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (5649, 5651), False, 'from flask_jwt_extended import get_jwt_identity, jwt_required\n'), ((5882, 5901), 'flask.jsonify', 'jsonify', ([], {'message': '[]'}), '(message=[])\n', (5889, 5901), False, 'from flask import jsonify, request\n'), ((6152, 6170), 'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (6168, 6170), False, 'from flask_jwt_extended import get_jwt_identity, jwt_required\n'), ((6588, 6652), 'app.dbUtils.dbUtils.query_student_sessions', 'query_student_sessions', (['student.id', 'start_time_utc', 'end_time_utc'], {}), '(student.id, start_time_utc, end_time_utc)\n', (6610, 6652), 
False, 'from app.dbUtils.dbUtils import query_existing_user, query_unvalidated_parents, query_parent_hood, query_parent_students, query_student_sessions\n'), ((6941, 6964), 'flask.jsonify', 'jsonify', ([], {'message': 'result'}), '(message=result)\n', (6948, 6964), False, 'from flask import jsonify, request\n'), ((6995, 7018), 'flask.jsonify', 'jsonify', ([], {'message': 'result'}), '(message=result)\n', (7002, 7018), False, 'from flask import jsonify, request\n'), ((2059, 2077), 'flask_jwt_extended.get_jwt_identity', 'get_jwt_identity', ([], {}), '()\n', (2075, 2077), False, 'from flask_jwt_extended import get_jwt_identity, jwt_required\n'), ((5327, 5372), 'flask.jsonify', 'jsonify', ([], {'message': '"""Successfully binded parent"""'}), "(message='Successfully binded parent')\n", (5334, 5372), False, 'from flask import jsonify, request\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
update-model-url-to-authorized-endpoint
"""
from yoyo import step
# This migration must run after the QA-F1 rename migration.
__depends__ = {"20210621_01_IRyiT-rename-qa-f1"}
# Each yoyo step is (apply_sql, rollback_sql): swap the model-endpoint
# host id embedded in every round's URL, with the inverse REPLACE as
# the rollback.
steps = [
    step(
        # apply: fhcxpbltv0 -> obws766r82 (the authorized endpoint)
        "UPDATE rounds SET url = REPLACE(url, 'fhcxpbltv0', 'obws766r82')",
        # rollback: obws766r82 -> fhcxpbltv0
        "UPDATE rounds SET url = REPLACE(url, 'obws766r82', 'fhcxpbltv0')",
    )
]
|
[
"yoyo.step"
] |
[((313, 457), 'yoyo.step', 'step', (['"""UPDATE rounds SET url = REPLACE(url, \'fhcxpbltv0\', \'obws766r82\')"""', '"""UPDATE rounds SET url = REPLACE(url, \'obws766r82\', \'fhcxpbltv0\')"""'], {}), '("UPDATE rounds SET url = REPLACE(url, \'fhcxpbltv0\', \'obws766r82\')",\n "UPDATE rounds SET url = REPLACE(url, \'obws766r82\', \'fhcxpbltv0\')")\n', (317, 457), False, 'from yoyo import step\n')]
|
import os
import __main__
import http.server
import socketserver
PORT = 8001
HOST = "0.0.0.0"

# Serve files from the "data" directory next to the parent directory of
# the entry script (derived from __main__, not from this module's path).
DIR = os.path.dirname(os.path.dirname(os.path.realpath(__main__.__file__)))
DIR = os.path.join(DIR, "data")
os.chdir(DIR)

Handler = http.server.SimpleHTTPRequestHandler


class _ReusableTCPServer(socketserver.TCPServer):
    # Avoid "Address already in use" on quick restarts while the old
    # socket lingers in TIME_WAIT.
    allow_reuse_address = True


# Fix: the original never closed the listening socket.  The context
# manager guarantees it is closed even when serve_forever() is
# interrupted (e.g. by KeyboardInterrupt).
with _ReusableTCPServer((HOST, PORT), Handler) as httpd:
    print("serving at port", PORT)
    httpd.serve_forever()
|
[
"os.path.realpath",
"socketserver.TCPServer",
"os.path.join",
"os.chdir"
] |
[((178, 203), 'os.path.join', 'os.path.join', (['DIR', '"""data"""'], {}), "(DIR, 'data')\n", (190, 203), False, 'import os\n'), ((205, 218), 'os.chdir', 'os.chdir', (['DIR'], {}), '(DIR)\n', (213, 218), False, 'import os\n'), ((276, 321), 'socketserver.TCPServer', 'socketserver.TCPServer', (['(HOST, PORT)', 'Handler'], {}), '((HOST, PORT), Handler)\n', (298, 321), False, 'import socketserver\n'), ((134, 169), 'os.path.realpath', 'os.path.realpath', (['__main__.__file__'], {}), '(__main__.__file__)\n', (150, 169), False, 'import os\n')]
|
import typing
from flask import url_for
from flask_babel import gettext
from flask_login import current_user
from web.table.table import lazy_join, DictValueMixin, custom_formatter_column, IbanColumn
from web.table.table import BootstrapTable, Column, SplittedTable, \
BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn
from web.template_filters import money_filter
@custom_formatter_column('table.coloredFormatter')
class ColoredColumn(DictValueMixin, Column):
    """Column rendered client-side by the JS ``table.coloredFormatter``.

    Per the stub below, cell values are dicts built from the raw value
    plus an ``is_positive`` flag (presumably used for red/green styling
    of amounts — confirm against the JS formatter).
    """
    if typing.TYPE_CHECKING:
        @classmethod
        def value(cls, value: str, is_positive: bool) -> dict: ...
class FinanceTable(BootstrapTable):
    """A server-paginated table of finance bookings with a saldo footer."""

    class Meta:
        table_args = {
            'data-side-pagination': 'server',
            # 'data-search': 'true',
            'data-sort-order': 'desc',
            # 'data-sort-name': 'valid_on',
            'data-page-list': '[5, 10, 25, 50, 100]'
        }

    def __init__(self, *a, saldo=None, user_id=None, inverted=False, **kw):
        """Init

        :param saldo: The balance shown in the footer, or ``None`` to
            render a “-” placeholder instead.
        :param int user_id: An optional user_id. If set, this causes
            a “details” button to be rendered in the toolbar
            referencing the user.
        :param bool inverted: An optional switch adding
            `style=inverted` to the given `data_url`
        """
        self.saldo = saldo
        if inverted:
            self._enforced_url_params = frozenset(
                {('style', 'inverted')}
                .union(self._enforced_url_params)
            )
            # Bugfix: ``saldo`` may legitimately be ``None`` (the footer
            # explicitly renders “-” in that case); unconditionally
            # negating it raised a ``TypeError`` before.
            self.saldo = -saldo if saldo is not None else None
        super().__init__(*a, **kw)
        self.user_id = user_id
        # Number of cells the “Saldo:” label spans in the footer row.
        self.table_footer_offset = 3

    posted_at = Column("Erstellt um")
    valid_on = Column("Gültig am")
    description = LinkColumn("Beschreibung")
    amount = ColoredColumn("Wert", cell_style='table.tdRelativeCellStyle')

    @property
    def toolbar(self):
        """Generate a toolbar with a details button

        If a user_id was passed in the constructor, this renders a
        “details” button reaching the finance overview of the user's account.
        """
        if self.user_id is None:
            return
        href = url_for("user.user_account", user_id=self.user_id)
        return button_toolbar("Details", href, icon="fa-chart-area")

    @property
    @lazy_join
    def table_footer(self):
        # Emitted as raw HTML; the saldo cell sits after
        # ``table_footer_offset`` label columns.
        yield "<tfoot>"
        yield "<tr>"
        yield f"<td colspan=\"{self.table_footer_offset}\" class=\"text-right\">"
        yield "<strong>Saldo:</strong>"
        yield "</td>"
        yield "<td>"
        yield "{}".format(money_filter(self.saldo)
                          if self.saldo is not None else "-")
        yield "</td>"
        yield "</tr>"
        yield "</tfoot>"
class FinanceTableSplitted(FinanceTable, SplittedTable):
    """Variant of :class:`FinanceTable` split into “Soll”/“Haben” halves."""
    class Meta:
        table_args = {
            'data-row-style': False,
            'data-sort-name': False,  # the "valid_on" col doesn't exist here
        }
    # Always request the splitted representation from the data endpoint.
    enforced_url_params = {'splitted': True}
    # (prefix, title) pairs naming the two halves of the table.
    splits = (('soll', "Soll"), ('haben', "Haben"))
    def __init__(self, *a, **kw):
        super().__init__(*a, **kw)
        # Both halves contribute columns, so the saldo label in the
        # footer must span more cells than in the plain table.
        self.table_footer_offset = 7
def no_finance_change():
    """Tell whether the current user is NOT allowed to change finances."""
    may_change = current_user.has_property('finance_change')
    return not may_change
class MembershipFeeTable(BootstrapTable):
    """A table for displaying the current membership fees"""
    name = Column("Name")
    regular_fee = Column("Regulär")
    payment_deadline = Column("Frist")
    payment_deadline_final = Column("Endgültige Frist")
    begins_on = DateColumn("Beginn")
    ends_on = DateColumn("Ende")
    actions = MultiBtnColumn("Aktionen")
    @property
    def toolbar(self):
        """An “add fee” button"""
        # The toolbar unconditionally offers creating a new fee.
        href = url_for(".membership_fee_create")
        return button_toolbar(gettext("Beitrag erstellen"), href)
class UsersDueTable(BootstrapTable):
    """A table for displaying users with due fee bookings.

    NOTE(review): the original docstring was truncated (“the users
    that ”); the summary above is inferred from the class name and
    columns — confirm against the view that feeds this table.
    """
    user_id = Column("Nutzer-ID")
    user = LinkColumn("Name")
    valid_on = Column("Gültig am")
    description = Column("Beschreibung")
    fee_account_id = LinkColumn("Beitragskonto")
    # Rendered by the JS coloredFormatter (sign-dependent styling).
    amount = Column("Betrag", formatter="table.coloredFormatter")
class BankAccountTable(BootstrapTable):
    """A table for displaying bank accounts

    :param bool create_account: An optional switch adding
        a “create bank account” button to the toolbar
    """
    name = Column("Name")
    bank = Column("Bank")
    iban = IbanColumn("IBAN")
    bic = Column("SWIFT-BIC")
    balance = Column("Saldo")
    last_imported_at = Column("Zuletzt importiert")
    kto = BtnColumn("Konto")
    def __init__(self, *a, create_account=False, **kw):
        # Remember whether the toolbar should offer account creation.
        self.create_account = create_account
        super().__init__(*a, **kw)
    @property
    def toolbar(self):
        """A “create bank account” button"""
        # Only rendered when explicitly requested via ``create_account``.
        if not self.create_account:
            return
        href = url_for(".bank_accounts_create")
        return button_toolbar(gettext("Neues Bankkonto anlegen"), href)
class BankAccountActivityTable(BootstrapTable):
    """A table for displaying bank account activities """
    bank_account = Column("Bankkonto", width=1)
    name = Column("Name", width=2)
    valid_on = DateColumn("Gültig am", width=1)
    imported_at = DateColumn("Importiert am", hide_if=lambda: True)
    reference = Column("Verwendungszweck")
    iban = Column("IBAN", hide_if=lambda: True)
    amount = Column("Betrag", width=1, formatter="table.euroFormatter")
    actions = MultiBtnColumn("Aktionen", width=1)

    def __init__(self, *a, **kw):
        # Merge our defaults into any caller-supplied ``table_args``
        # without clobbering explicit settings.
        defaults = (
            ('data-detail-view', "true"),
            ('data-row-style', "table.financeRowFormatter"),
            ('data-detail-formatter',
             "table.bankAccountActivitiesDetailFormatter"),
        )
        table_args = kw.pop('table_args', {})
        for option, value in defaults:
            table_args.setdefault(option, value)
        kw['table_args'] = table_args
        super().__init__(*a, **kw)

    class Meta:
        table_args = {
            'data-sort-order': 'desc',
            'data-sort-name': 'valid_on',
        }
class TransactionTable(BootstrapTable):
    """A table for displaying a transaction's rows (account and amount).

    NOTE(review): the original docstring read “bank account activities”,
    which looks copy-pasted from the activities table — confirm intent.
    """
    account = LinkColumn("Konto")
    amount = Column("Wert")
    class Meta:
        # Sign-dependent row coloring via the shared finance formatter.
        table_args = {
            'data-row-style': 'table.financeRowFormatter',
        }
class UnconfirmedTransactionsTable(BootstrapTable):
    """A table for displaying unconfirmed transactions """
    description = LinkColumn("Beschreibung")
    user = LinkColumn("Nutzer")
    room = Column("Wohnort")
    date = DateColumn("Datum")
    amount = Column("Wert")
    author = LinkColumn("Ersteller")
    # Per-row action buttons (presumably confirm/delete — verify views).
    actions = MultiBtnColumn("Aktionen")
class ImportErrorTable(BootstrapTable):
    """A table for displaying buggy mt940 imports"""
    name = Column("Bankkonto")
    imported_at = Column("Importiert am")
    # Button to retry/fix the failed import.
    fix = BtnColumn("Importieren")
|
[
"web.table.table.BtnColumn",
"web.table.table.DateColumn",
"web.template_filters.money_filter",
"web.table.table.custom_formatter_column",
"web.table.table.LinkColumn",
"web.table.table.Column",
"flask.url_for",
"web.table.table.IbanColumn",
"web.table.table.button_toolbar",
"flask_babel.gettext",
"flask_login.current_user.has_property",
"web.table.table.MultiBtnColumn"
] |
[((391, 440), 'web.table.table.custom_formatter_column', 'custom_formatter_column', (['"""table.coloredFormatter"""'], {}), "('table.coloredFormatter')\n", (414, 440), False, 'from web.table.table import lazy_join, DictValueMixin, custom_formatter_column, IbanColumn\n'), ((1649, 1670), 'web.table.table.Column', 'Column', (['"""Erstellt um"""'], {}), "('Erstellt um')\n", (1655, 1670), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((1686, 1705), 'web.table.table.Column', 'Column', (['"""Gültig am"""'], {}), "('Gültig am')\n", (1692, 1705), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((1724, 1750), 'web.table.table.LinkColumn', 'LinkColumn', (['"""Beschreibung"""'], {}), "('Beschreibung')\n", (1734, 1750), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3348, 3362), 'web.table.table.Column', 'Column', (['"""Name"""'], {}), "('Name')\n", (3354, 3362), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3381, 3398), 'web.table.table.Column', 'Column', (['"""Regulär"""'], {}), "('Regulär')\n", (3387, 3398), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3422, 3437), 'web.table.table.Column', 'Column', (['"""Frist"""'], {}), "('Frist')\n", (3428, 3437), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3467, 3493), 'web.table.table.Column', 'Column', (['"""Endgültige Frist"""'], {}), "('Endgültige Frist')\n", (3473, 3493), False, 'from web.table.table import BootstrapTable, Column, 
SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3510, 3530), 'web.table.table.DateColumn', 'DateColumn', (['"""Beginn"""'], {}), "('Beginn')\n", (3520, 3530), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3545, 3563), 'web.table.table.DateColumn', 'DateColumn', (['"""Ende"""'], {}), "('Ende')\n", (3555, 3563), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3578, 3604), 'web.table.table.MultiBtnColumn', 'MultiBtnColumn', (['"""Aktionen"""'], {}), "('Aktionen')\n", (3592, 3604), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3894, 3913), 'web.table.table.Column', 'Column', (['"""Nutzer-ID"""'], {}), "('Nutzer-ID')\n", (3900, 3913), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3925, 3943), 'web.table.table.LinkColumn', 'LinkColumn', (['"""Name"""'], {}), "('Name')\n", (3935, 3943), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3959, 3978), 'web.table.table.Column', 'Column', (['"""Gültig am"""'], {}), "('Gültig am')\n", (3965, 3978), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3997, 4019), 'web.table.table.Column', 'Column', (['"""Beschreibung"""'], {}), "('Beschreibung')\n", (4003, 4019), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((4041, 4068), 'web.table.table.LinkColumn', 'LinkColumn', (['"""Beitragskonto"""'], {}), 
"('Beitragskonto')\n", (4051, 4068), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((4082, 4134), 'web.table.table.Column', 'Column', (['"""Betrag"""'], {'formatter': '"""table.coloredFormatter"""'}), "('Betrag', formatter='table.coloredFormatter')\n", (4088, 4134), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((4353, 4367), 'web.table.table.Column', 'Column', (['"""Name"""'], {}), "('Name')\n", (4359, 4367), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((4379, 4393), 'web.table.table.Column', 'Column', (['"""Bank"""'], {}), "('Bank')\n", (4385, 4393), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((4405, 4423), 'web.table.table.IbanColumn', 'IbanColumn', (['"""IBAN"""'], {}), "('IBAN')\n", (4415, 4423), False, 'from web.table.table import lazy_join, DictValueMixin, custom_formatter_column, IbanColumn\n'), ((4434, 4453), 'web.table.table.Column', 'Column', (['"""SWIFT-BIC"""'], {}), "('SWIFT-BIC')\n", (4440, 4453), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((4468, 4483), 'web.table.table.Column', 'Column', (['"""Saldo"""'], {}), "('Saldo')\n", (4474, 4483), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((4507, 4535), 'web.table.table.Column', 'Column', (['"""Zuletzt importiert"""'], {}), "('Zuletzt importiert')\n", (4513, 4535), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, 
MultiBtnColumn\n'), ((4546, 4564), 'web.table.table.BtnColumn', 'BtnColumn', (['"""Konto"""'], {}), "('Konto')\n", (4555, 4564), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((5087, 5115), 'web.table.table.Column', 'Column', (['"""Bankkonto"""'], {'width': '(1)'}), "('Bankkonto', width=1)\n", (5093, 5115), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((5127, 5150), 'web.table.table.Column', 'Column', (['"""Name"""'], {'width': '(2)'}), "('Name', width=2)\n", (5133, 5150), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((5166, 5198), 'web.table.table.DateColumn', 'DateColumn', (['"""Gültig am"""'], {'width': '(1)'}), "('Gültig am', width=1)\n", (5176, 5198), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((5217, 5267), 'web.table.table.DateColumn', 'DateColumn', (['"""Importiert am"""'], {'hide_if': '(lambda : True)'}), "('Importiert am', hide_if=lambda : True)\n", (5227, 5267), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((5283, 5309), 'web.table.table.Column', 'Column', (['"""Verwendungszweck"""'], {}), "('Verwendungszweck')\n", (5289, 5309), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((5321, 5358), 'web.table.table.Column', 'Column', (['"""IBAN"""'], {'hide_if': '(lambda : True)'}), "('IBAN', hide_if=lambda : True)\n", (5327, 5358), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, 
MultiBtnColumn\n'), ((5371, 5429), 'web.table.table.Column', 'Column', (['"""Betrag"""'], {'width': '(1)', 'formatter': '"""table.euroFormatter"""'}), "('Betrag', width=1, formatter='table.euroFormatter')\n", (5377, 5429), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((5444, 5479), 'web.table.table.MultiBtnColumn', 'MultiBtnColumn', (['"""Aktionen"""'], {'width': '(1)'}), "('Aktionen', width=1)\n", (5458, 5479), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6116, 6135), 'web.table.table.LinkColumn', 'LinkColumn', (['"""Konto"""'], {}), "('Konto')\n", (6126, 6135), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6149, 6163), 'web.table.table.Column', 'Column', (['"""Wert"""'], {}), "('Wert')\n", (6155, 6163), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6404, 6430), 'web.table.table.LinkColumn', 'LinkColumn', (['"""Beschreibung"""'], {}), "('Beschreibung')\n", (6414, 6430), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6442, 6462), 'web.table.table.LinkColumn', 'LinkColumn', (['"""Nutzer"""'], {}), "('Nutzer')\n", (6452, 6462), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6474, 6491), 'web.table.table.Column', 'Column', (['"""Wohnort"""'], {}), "('Wohnort')\n", (6480, 6491), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6503, 6522), 'web.table.table.DateColumn', 
'DateColumn', (['"""Datum"""'], {}), "('Datum')\n", (6513, 6522), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6536, 6550), 'web.table.table.Column', 'Column', (['"""Wert"""'], {}), "('Wert')\n", (6542, 6550), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6564, 6587), 'web.table.table.LinkColumn', 'LinkColumn', (['"""Ersteller"""'], {}), "('Ersteller')\n", (6574, 6587), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6602, 6628), 'web.table.table.MultiBtnColumn', 'MultiBtnColumn', (['"""Aktionen"""'], {}), "('Aktionen')\n", (6616, 6628), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6735, 6754), 'web.table.table.Column', 'Column', (['"""Bankkonto"""'], {}), "('Bankkonto')\n", (6741, 6754), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6773, 6796), 'web.table.table.Column', 'Column', (['"""Importiert am"""'], {}), "('Importiert am')\n", (6779, 6796), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((6807, 6831), 'web.table.table.BtnColumn', 'BtnColumn', (['"""Importieren"""'], {}), "('Importieren')\n", (6816, 6831), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((2141, 2191), 'flask.url_for', 'url_for', (['"""user.user_account"""'], {'user_id': 'self.user_id'}), "('user.user_account', user_id=self.user_id)\n", (2148, 2191), False, 'from flask import url_for\n'), ((2207, 
2260), 'web.table.table.button_toolbar', 'button_toolbar', (['"""Details"""', 'href'], {'icon': '"""fa-chart-area"""'}), "('Details', href, icon='fa-chart-area')\n", (2221, 2260), False, 'from web.table.table import BootstrapTable, Column, SplittedTable, BtnColumn, LinkColumn, button_toolbar, DateColumn, MultiBtnColumn\n'), ((3188, 3231), 'flask_login.current_user.has_property', 'current_user.has_property', (['"""finance_change"""'], {}), "('finance_change')\n", (3213, 3231), False, 'from flask_login import current_user\n'), ((3692, 3725), 'flask.url_for', 'url_for', (['""".membership_fee_create"""'], {}), "('.membership_fee_create')\n", (3699, 3725), False, 'from flask import url_for\n'), ((4855, 4887), 'flask.url_for', 'url_for', (['""".bank_accounts_create"""'], {}), "('.bank_accounts_create')\n", (4862, 4887), False, 'from flask import url_for\n'), ((3756, 3784), 'flask_babel.gettext', 'gettext', (['"""Beitrag erstellen"""'], {}), "('Beitrag erstellen')\n", (3763, 3784), False, 'from flask_babel import gettext\n'), ((4918, 4952), 'flask_babel.gettext', 'gettext', (['"""Neues Bankkonto anlegen"""'], {}), "('Neues Bankkonto anlegen')\n", (4925, 4952), False, 'from flask_babel import gettext\n'), ((2557, 2581), 'web.template_filters.money_filter', 'money_filter', (['self.saldo'], {}), '(self.saldo)\n', (2569, 2581), False, 'from web.template_filters import money_filter\n')]
|
#!/usr/bin/env python3
# Copyright 2020-present NAVER Corp. Under BSD 3-clause license
import os.path as path
import path_to_kapture_localization # noqa: F401
import kapture_localization.utils.path_to_kapture # noqa: F401
from kapture.utils.paths import safe_remove_any_path
HERE_PATH = path.normpath(path.dirname(__file__))

# Generated output folders of the localization pipelines / benchmarks.
colmap_sfm_folder = path.join(HERE_PATH, 'colmap-sfm')
colmap_localization_folder = path.join(HERE_PATH, 'colmap-localization')
sift_colmap_vocab_tree_folder = path.join(HERE_PATH, 'sift_colmap_vocab_tree')
ir_bench_folder = path.join(HERE_PATH, 'image_retrieval_benchmark')
matches_no_gv_folder = path.join(HERE_PATH, 'local_features/r2d2_500/NN_no_gv')
matches_colmap_gv_folder = path.join(HERE_PATH, 'local_features/r2d2_500/NN_colmap_gv')

# Remove every output folder that exists.  The check/remove pair was
# previously copy-pasted six times; ``force=False`` is kept exactly as
# before (see kapture's safe_remove_any_path for its semantics).
for _folder in (colmap_sfm_folder,
                colmap_localization_folder,
                sift_colmap_vocab_tree_folder,
                ir_bench_folder,
                matches_no_gv_folder,
                matches_colmap_gv_folder):
    if path.isdir(_folder):
        safe_remove_any_path(_folder, force=False)
|
[
"os.path.isdir",
"os.path.dirname",
"kapture.utils.paths.safe_remove_any_path",
"os.path.join"
] |
[((348, 382), 'os.path.join', 'path.join', (['HERE_PATH', '"""colmap-sfm"""'], {}), "(HERE_PATH, 'colmap-sfm')\n", (357, 382), True, 'import os.path as path\n'), ((412, 455), 'os.path.join', 'path.join', (['HERE_PATH', '"""colmap-localization"""'], {}), "(HERE_PATH, 'colmap-localization')\n", (421, 455), True, 'import os.path as path\n'), ((488, 534), 'os.path.join', 'path.join', (['HERE_PATH', '"""sift_colmap_vocab_tree"""'], {}), "(HERE_PATH, 'sift_colmap_vocab_tree')\n", (497, 534), True, 'import os.path as path\n'), ((553, 602), 'os.path.join', 'path.join', (['HERE_PATH', '"""image_retrieval_benchmark"""'], {}), "(HERE_PATH, 'image_retrieval_benchmark')\n", (562, 602), True, 'import os.path as path\n'), ((607, 636), 'os.path.isdir', 'path.isdir', (['colmap_sfm_folder'], {}), '(colmap_sfm_folder)\n', (617, 636), True, 'import os.path as path\n'), ((698, 736), 'os.path.isdir', 'path.isdir', (['colmap_localization_folder'], {}), '(colmap_localization_folder)\n', (708, 736), True, 'import os.path as path\n'), ((807, 848), 'os.path.isdir', 'path.isdir', (['sift_colmap_vocab_tree_folder'], {}), '(sift_colmap_vocab_tree_folder)\n', (817, 848), True, 'import os.path as path\n'), ((922, 949), 'os.path.isdir', 'path.isdir', (['ir_bench_folder'], {}), '(ir_bench_folder)\n', (932, 949), True, 'import os.path as path\n'), ((1030, 1086), 'os.path.join', 'path.join', (['HERE_PATH', '"""local_features/r2d2_500/NN_no_gv"""'], {}), "(HERE_PATH, 'local_features/r2d2_500/NN_no_gv')\n", (1039, 1086), True, 'import os.path as path\n'), ((1090, 1122), 'os.path.isdir', 'path.isdir', (['matches_no_gv_folder'], {}), '(matches_no_gv_folder)\n', (1100, 1122), True, 'import os.path as path\n'), ((1212, 1272), 'os.path.join', 'path.join', (['HERE_PATH', '"""local_features/r2d2_500/NN_colmap_gv"""'], {}), "(HERE_PATH, 'local_features/r2d2_500/NN_colmap_gv')\n", (1221, 1272), True, 'import os.path as path\n'), ((1276, 1312), 'os.path.isdir', 'path.isdir', (['matches_colmap_gv_folder'], {}), 
'(matches_colmap_gv_folder)\n', (1286, 1312), True, 'import os.path as path\n'), ((304, 326), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (316, 326), True, 'import os.path as path\n'), ((642, 694), 'kapture.utils.paths.safe_remove_any_path', 'safe_remove_any_path', (['colmap_sfm_folder'], {'force': '(False)'}), '(colmap_sfm_folder, force=False)\n', (662, 694), False, 'from kapture.utils.paths import safe_remove_any_path\n'), ((742, 803), 'kapture.utils.paths.safe_remove_any_path', 'safe_remove_any_path', (['colmap_localization_folder'], {'force': '(False)'}), '(colmap_localization_folder, force=False)\n', (762, 803), False, 'from kapture.utils.paths import safe_remove_any_path\n'), ((854, 918), 'kapture.utils.paths.safe_remove_any_path', 'safe_remove_any_path', (['sift_colmap_vocab_tree_folder'], {'force': '(False)'}), '(sift_colmap_vocab_tree_folder, force=False)\n', (874, 918), False, 'from kapture.utils.paths import safe_remove_any_path\n'), ((955, 1005), 'kapture.utils.paths.safe_remove_any_path', 'safe_remove_any_path', (['ir_bench_folder'], {'force': '(False)'}), '(ir_bench_folder, force=False)\n', (975, 1005), False, 'from kapture.utils.paths import safe_remove_any_path\n'), ((1128, 1183), 'kapture.utils.paths.safe_remove_any_path', 'safe_remove_any_path', (['matches_no_gv_folder'], {'force': '(False)'}), '(matches_no_gv_folder, force=False)\n', (1148, 1183), False, 'from kapture.utils.paths import safe_remove_any_path\n'), ((1318, 1377), 'kapture.utils.paths.safe_remove_any_path', 'safe_remove_any_path', (['matches_colmap_gv_folder'], {'force': '(False)'}), '(matches_colmap_gv_folder, force=False)\n', (1338, 1377), False, 'from kapture.utils.paths import safe_remove_any_path\n')]
|
import numpy as np
import torch
from tensorboardX import SummaryWriter
from tqdm import tqdm
import argparse
import config
from data_gen import TextMelLoader, TextMelCollate
from taco2models.loss_function import Tacotron2Loss
from taco2models.models import Tacotron2
from taco2models.optimizer import Tacotron2Optimizer
from utils_1 import save_checkpoint, AverageMeter, get_logger, test
import os
def train_net(args):
    """Train a Tacotron2 model end to end.

    Builds a fresh model/optimizer or restores them from ``args.checkpoint``,
    then runs the epoch/step loop: forward + backward on each batch, periodic
    validation every ``config.validation_steps`` steps, TensorBoard logging,
    checkpoint saving and alignment-plot rendering.

    Args:
        args: parsed command-line options (see ``parse_args``).
    """
    # Fixed seeds for reproducibility across runs.
    torch.manual_seed(7)
    np.random.seed(7)
    checkpoint = args.checkpoint
    start_epoch = 0
    best_loss = float('inf')
    writer = SummaryWriter()
    steps_since_improvement = 0
    # Initialize / load checkpoint
    if checkpoint is None:
        print('Training from scratch ...')
        # model
        model = Tacotron2(config)
        print(model)
        # model = nn.DataParallel(model)
        # optimizer
        optimizer = Tacotron2Optimizer(
            torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.l2, betas=(0.9, 0.999), eps=1e-6))
    else:
        print('Loading model:{}'.format(checkpoint))
        load_mode = args.load_type
        if load_mode == 'dict':
            # A 'dict' checkpoint stores the full training state
            # (model, optimizer, epoch, step, best loss, ...).
            checkpoint = torch.load(checkpoint)
            model = checkpoint['model']
            start_epoch = checkpoint['epoch'] + 1
            # NOTE(review): 'step' is restored here but never used below.
            step = checkpoint['step'] + 1
            steps_since_improvement = checkpoint['steps_since_improvement']
            optimizer = checkpoint['optimizer']
            model = checkpoint['model']
            best_loss = checkpoint['loss']
            if best_loss < 0.4:
                # Keep best_loss from being so low that the running loss can
                # never drop under it again, which would prevent the best
                # checkpoint from ever being saved.
                best_loss = 0.4
        else:
            # Otherwise the checkpoint is a bare state_dict; rebuild the
            # model and optimizer from scratch and load the weights.
            checkpoint = torch.load(checkpoint)
            model = Tacotron2(config)
            model.load_state_dict(checkpoint)
            optimizer = Tacotron2Optimizer(
                torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.l2, betas=(0.9, 0.999), eps=1e-6))
            print(model)
    logger = get_logger()
    print(f'learning rate is',optimizer.lr)
    model = model.to(config.device)
    criterion = Tacotron2Loss()
    collate_fn = TextMelCollate(config.n_frames_per_step)
    # Custom dataloaders: 'biaobei' uses .txt filelists, everything else .json.
    if args.dataset == 'biaobei':
        training_files = args.dataset + '_filelist/' + args.dataset + '_audio_text_train_filelist.txt'
        validation_files = args.dataset + '_filelist/' + args.dataset + '_audio_text_valid_filelist.txt'
    else:
        training_files = args.dataset + '_filelist/' + args.dataset + '_audio_text_train_filelist.json'
        validation_files = args.dataset + '_filelist/' + args.dataset + '_audio_text_valid_filelist.json'
    train_dataset = TextMelLoader(training_files, config, dataset=args.dataset)
    print('batch size is ', args.batch_size)
    train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, collate_fn=collate_fn,
                                               pin_memory=True, shuffle=True, num_workers=args.num_workers)
    print(f'loaded dataset from {training_files}')
    valid_dataset = TextMelLoader(validation_files, config, dataset=args.dataset)
    valid_loader = torch.utils.data.DataLoader(valid_dataset, batch_size=args.batch_size, collate_fn=collate_fn,
                                               pin_memory=True, shuffle=False, num_workers=args.num_workers)
    print(f'loaded dataset from {validation_files}')
    # Epochs
    for epoch in range(start_epoch, args.epochs):
        # One epoch's training
        losses = AverageMeter()
        for i, batch in enumerate(train_loader):
            model.train()
            model.zero_grad()
            x, y = model.parse_batch(batch)
            # Forward prop.
            y_pred = model(x)
            # loss
            loss = criterion(y_pred, y)
            # Back prop.
            optimizer.zero_grad()
            loss.backward()
            # Update weights
            optimizer.step()
            # Keep track of metrics
            losses.update(loss.item())
            torch.cuda.empty_cache()
            writer.add_scalar('model/train_loss', losses.val, optimizer.step_num)
            # Print status
            if i % args.print_freq == 0:
                logger.info('Epoch: [{0}][{1}/{2}]\t'
                            'Loss {loss.val:.4f} ({loss.avg:.4f})'.format(epoch, i, len(train_loader), loss=losses))
            # validation (skipped at step 0 of every epoch)
            if i % config.validation_steps == 0 and i != 0:
                valid_losses = AverageMeter()
                model.eval()
                lr = optimizer.lr
                step_num = optimizer.step_num
                print('\nLearning rate: {}'.format(lr))
                writer.add_scalar('model/learning_rate', lr, step_num)
                print('Step num: {}\n'.format(step_num))
                # No gradients are needed while validating.
                with torch.no_grad():
                    for batch in valid_loader:
                        model.zero_grad()
                        x, y = model.parse_batch(batch)
                        # Forward prop.
                        y_pred = model(x)
                        loss = criterion(y_pred, y)
                        # Keep track of metrics
                        valid_losses.update(loss.item())
                valid_loss = valid_losses.avg
                writer.add_scalar('model/valid_loss', valid_loss, step_num)
                logger.info('Epoch: [{0}][{1}/{2}]\t'
                            'Validation Loss {loss:.4f}'.format(epoch, i, len(train_loader), loss=valid_loss))
                # Check if there was an improvement
                is_best = valid_loss < best_loss
                best_loss = min(valid_loss, best_loss)
                if not is_best:
                    steps_since_improvement += config.validation_steps
                    print("\nSteps since last improvement: %d\n" % (steps_since_improvement,))
                else:
                    steps_since_improvement = 0
                # saving checkpoint and update the best checkpoint
                save_checkpoint(epoch, step_num, steps_since_improvement, model, optimizer, best_loss, is_best, dataset=args.dataset,trial_type=args.trial_type)
                # drawing alignment
                img_align = test(model, step_num, valid_loss)
                writer.add_image('model/alignment', img_align, step_num, dataformats='HWC')
def parse_args():
    """Build the Tacotron2 training argument parser and parse ``sys.argv``.

    Returns:
        argparse.Namespace holding every training option.
    """
    arg_parser = argparse.ArgumentParser(description='Tacotron2')
    # training schedule
    arg_parser.add_argument('--epochs', default=10000, type=int)
    arg_parser.add_argument('--max_norm', default=1, type=float, help='Gradient norm threshold to clip')
    # trial type
    arg_parser.add_argument('--trial_type', type=str, default='new', help='new vaocal dict or old vaocal dict')
    arg_parser.add_argument('--load_type', type=str, default='dict', help='method to load model')
    # dataset
    arg_parser.add_argument('--dataset', type=str, default='aixia', help='name of dataset')
    # minibatch
    arg_parser.add_argument('--batch_size', default=16, type=int)
    arg_parser.add_argument('--num-workers', default=4, type=int, help='Number of workers to generate minibatch')
    # logging
    arg_parser.add_argument('--print_freq', default=10, type=int, help='Frequency of printing training information')
    # optimizer
    arg_parser.add_argument('--lr', default=1e-3, type=float, help='Init learning rate')
    arg_parser.add_argument('--l2', default=1e-6, type=float, help='weight decay (L2)')
    arg_parser.add_argument('--checkpoint', type=str, default='biaobei_checkpoints/biaobei.tar', help='checkpoint')
    return arg_parser.parse_args()
def main():
    """Entry point: parse command-line options and start training."""
    # 'args' is kept global because train_net's helpers may read it.
    global args
    args = parse_args()
    train_net(args)


if __name__ == '__main__':
    main()
|
[
"tensorboardX.SummaryWriter",
"numpy.random.seed",
"utils_1.get_logger",
"torch.utils.data.DataLoader",
"argparse.ArgumentParser",
"utils_1.AverageMeter",
"torch.manual_seed",
"torch.load",
"taco2models.loss_function.Tacotron2Loss",
"data_gen.TextMelLoader",
"torch.cuda.empty_cache",
"utils_1.save_checkpoint",
"taco2models.models.Tacotron2",
"utils_1.test",
"torch.no_grad",
"data_gen.TextMelCollate"
] |
[((426, 446), 'torch.manual_seed', 'torch.manual_seed', (['(7)'], {}), '(7)\n', (443, 446), False, 'import torch\n'), ((451, 468), 'numpy.random.seed', 'np.random.seed', (['(7)'], {}), '(7)\n', (465, 468), True, 'import numpy as np\n'), ((564, 579), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {}), '()\n', (577, 579), False, 'from tensorboardX import SummaryWriter\n'), ((2013, 2025), 'utils_1.get_logger', 'get_logger', ([], {}), '()\n', (2023, 2025), False, 'from utils_1 import save_checkpoint, AverageMeter, get_logger, test\n'), ((2124, 2139), 'taco2models.loss_function.Tacotron2Loss', 'Tacotron2Loss', ([], {}), '()\n', (2137, 2139), False, 'from taco2models.loss_function import Tacotron2Loss\n'), ((2158, 2198), 'data_gen.TextMelCollate', 'TextMelCollate', (['config.n_frames_per_step'], {}), '(config.n_frames_per_step)\n', (2172, 2198), False, 'from data_gen import TextMelLoader, TextMelCollate\n'), ((2712, 2771), 'data_gen.TextMelLoader', 'TextMelLoader', (['training_files', 'config'], {'dataset': 'args.dataset'}), '(training_files, config, dataset=args.dataset)\n', (2725, 2771), False, 'from data_gen import TextMelLoader, TextMelCollate\n'), ((2837, 3000), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_dataset'], {'batch_size': 'args.batch_size', 'collate_fn': 'collate_fn', 'pin_memory': '(True)', 'shuffle': '(True)', 'num_workers': 'args.num_workers'}), '(train_dataset, batch_size=args.batch_size,\n collate_fn=collate_fn, pin_memory=True, shuffle=True, num_workers=args.\n num_workers)\n', (2864, 3000), False, 'import torch\n'), ((3110, 3171), 'data_gen.TextMelLoader', 'TextMelLoader', (['validation_files', 'config'], {'dataset': 'args.dataset'}), '(validation_files, config, dataset=args.dataset)\n', (3123, 3171), False, 'from data_gen import TextMelLoader, TextMelCollate\n'), ((3191, 3355), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['valid_dataset'], {'batch_size': 'args.batch_size', 'collate_fn': 
'collate_fn', 'pin_memory': '(True)', 'shuffle': '(False)', 'num_workers': 'args.num_workers'}), '(valid_dataset, batch_size=args.batch_size,\n collate_fn=collate_fn, pin_memory=True, shuffle=False, num_workers=args\n .num_workers)\n', (3218, 3355), False, 'import torch\n'), ((6474, 6522), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Tacotron2"""'}), "(description='Tacotron2')\n", (6497, 6522), False, 'import argparse\n'), ((750, 767), 'taco2models.models.Tacotron2', 'Tacotron2', (['config'], {}), '(config)\n', (759, 767), False, 'from taco2models.models import Tacotron2\n'), ((3558, 3572), 'utils_1.AverageMeter', 'AverageMeter', ([], {}), '()\n', (3570, 3572), False, 'from utils_1 import save_checkpoint, AverageMeter, get_logger, test\n'), ((1160, 1182), 'torch.load', 'torch.load', (['checkpoint'], {}), '(checkpoint)\n', (1170, 1182), False, 'import torch\n'), ((1710, 1732), 'torch.load', 'torch.load', (['checkpoint'], {}), '(checkpoint)\n', (1720, 1732), False, 'import torch\n'), ((1753, 1770), 'taco2models.models.Tacotron2', 'Tacotron2', (['config'], {}), '(config)\n', (1762, 1770), False, 'from taco2models.models import Tacotron2\n'), ((4076, 4100), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (4098, 4100), False, 'import torch\n'), ((4552, 4566), 'utils_1.AverageMeter', 'AverageMeter', ([], {}), '()\n', (4564, 4566), False, 'from utils_1 import save_checkpoint, AverageMeter, get_logger, test\n'), ((6107, 6256), 'utils_1.save_checkpoint', 'save_checkpoint', (['epoch', 'step_num', 'steps_since_improvement', 'model', 'optimizer', 'best_loss', 'is_best'], {'dataset': 'args.dataset', 'trial_type': 'args.trial_type'}), '(epoch, step_num, steps_since_improvement, model, optimizer,\n best_loss, is_best, dataset=args.dataset, trial_type=args.trial_type)\n', (6122, 6256), False, 'from utils_1 import save_checkpoint, AverageMeter, get_logger, test\n'), ((6316, 6349), 'utils_1.test', 'test', (['model', 'step_num', 
'valid_loss'], {}), '(model, step_num, valid_loss)\n', (6320, 6349), False, 'from utils_1 import save_checkpoint, AverageMeter, get_logger, test\n'), ((4881, 4896), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4894, 4896), False, 'import torch\n')]
|
#!/usr/bin/env python3
import os
import unittest
from textwrap import dedent
from python_utils import import_vars, set_env_var, print_input_args, \
print_info_msg, print_err_msg_exit, cfg_to_yaml_str
from fill_jinja_template import fill_jinja_template
def create_diag_table_file(run_dir):
    """ Creates a diagnostic table file for each cycle to be run

    Fills the Jinja template pointed at by the DIAG_TABLE_TMPL_FP
    environment variable and writes the result into ``run_dir``.

    Args:
        run_dir: run directory
    Returns:
        Boolean: True on success, False if the template could not be filled
    """

    print_input_args(locals())

    # Import all environment variables (DIAG_TABLE_FN, DIAG_TABLE_TMPL_FP,
    # CDATE, CRES, VERBOSE, ...) into this function's namespace.
    import_vars()

    # Create a diagnostic table file within the specified run directory.
    print_info_msg(f'''
        Creating a diagnostics table file (\"{DIAG_TABLE_FN}\") in the specified
        run directory...

          run_dir = \"{run_dir}\"''', verbose=VERBOSE)

    diag_table_fp = os.path.join(run_dir, DIAG_TABLE_FN)

    print_info_msg(f'''

        Using the template diagnostics table file:

           diag_table_tmpl_fp = {DIAG_TABLE_TMPL_FP}

        to create:

           diag_table_fp = \"{diag_table_fp}\"''', verbose=VERBOSE)

    settings = {
        'starttime': CDATE,
        'cres': CRES
    }
    settings_str = cfg_to_yaml_str(settings)

    # Call fill_jinja_template; catch only real errors.  A bare "except:"
    # would also swallow SystemExit/KeyboardInterrupt, hiding aborts.
    try:
        fill_jinja_template(["-q", "-u", settings_str, "-t", DIAG_TABLE_TMPL_FP, "-o", diag_table_fp])
    except Exception:
        print_err_msg_exit(f'''
            !!!!!!!!!!!!!!!!!

            fill_jinja_template.py failed!

            !!!!!!!!!!!!!!!!!''')
        return False
    return True
class Testing(unittest.TestCase):
    """Unit test for ``create_diag_table_file``."""

    def test_create_diag_table_file(self):
        # USHDIR is exported to the environment by setUp(), which runs first.
        path = os.path.join(os.getenv('USHDIR'), "test_data")
        self.assertTrue(create_diag_table_file(run_dir=path))

    def setUp(self):
        """Export the environment variables the function under test reads."""
        USHDIR = os.path.dirname(os.path.abspath(__file__))
        DIAG_TABLE_FN="diag_table"
        DIAG_TABLE_TMPL_FP = os.path.join(USHDIR,"templates",f"{DIAG_TABLE_FN}.FV3_GFS_v15p2")
        set_env_var('DEBUG',True)
        set_env_var('VERBOSE',True)
        set_env_var("USHDIR",USHDIR)
        set_env_var("DIAG_TABLE_FN",DIAG_TABLE_FN)
        set_env_var("DIAG_TABLE_TMPL_FP",DIAG_TABLE_TMPL_FP)
        set_env_var("CRES","C48")
        set_env_var("CDATE","2021010106")
|
[
"python_utils.set_env_var",
"python_utils.print_info_msg",
"os.path.abspath",
"python_utils.cfg_to_yaml_str",
"fill_jinja_template.fill_jinja_template",
"python_utils.import_vars",
"python_utils.print_err_msg_exit",
"os.path.join",
"os.getenv"
] |
[((537, 550), 'python_utils.import_vars', 'import_vars', ([], {}), '()\n', (548, 550), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((631, 827), 'python_utils.print_info_msg', 'print_info_msg', (['f"""\n Creating a diagnostics table file ("{DIAG_TABLE_FN}") in the specified\n run directory...\n \n run_dir = "{run_dir}\\""""'], {'verbose': 'VERBOSE'}), '(\n f"""\n Creating a diagnostics table file ("{DIAG_TABLE_FN}") in the specified\n run directory...\n \n run_dir = "{run_dir}\\""""\n , verbose=VERBOSE)\n', (645, 827), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((842, 878), 'os.path.join', 'os.path.join', (['run_dir', 'DIAG_TABLE_FN'], {}), '(run_dir, DIAG_TABLE_FN)\n', (854, 878), False, 'import os\n'), ((884, 1141), 'python_utils.print_info_msg', 'print_info_msg', (['f"""\n \n Using the template diagnostics table file:\n \n diag_table_tmpl_fp = {DIAG_TABLE_TMPL_FP}\n \n to create:\n \n diag_table_fp = "{diag_table_fp}\\""""'], {'verbose': 'VERBOSE'}), '(\n f"""\n \n Using the template diagnostics table file:\n \n diag_table_tmpl_fp = {DIAG_TABLE_TMPL_FP}\n \n to create:\n \n diag_table_fp = "{diag_table_fp}\\""""\n , verbose=VERBOSE)\n', (898, 1141), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((1223, 1248), 'python_utils.cfg_to_yaml_str', 'cfg_to_yaml_str', (['settings'], {}), '(settings)\n', (1238, 1248), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((1288, 1386), 'fill_jinja_template.fill_jinja_template', 'fill_jinja_template', (["['-q', '-u', settings_str, '-t', DIAG_TABLE_TMPL_FP, '-o', diag_table_fp]"], {}), "(['-q', '-u', settings_str, '-t', DIAG_TABLE_TMPL_FP,\n '-o', diag_table_fp])\n", (1307, 1386), 
False, 'from fill_jinja_template import fill_jinja_template\n'), ((1944, 2011), 'os.path.join', 'os.path.join', (['USHDIR', '"""templates"""', 'f"""{DIAG_TABLE_FN}.FV3_GFS_v15p2"""'], {}), "(USHDIR, 'templates', f'{DIAG_TABLE_FN}.FV3_GFS_v15p2')\n", (1956, 2011), False, 'import os\n'), ((2018, 2044), 'python_utils.set_env_var', 'set_env_var', (['"""DEBUG"""', '(True)'], {}), "('DEBUG', True)\n", (2029, 2044), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((2052, 2080), 'python_utils.set_env_var', 'set_env_var', (['"""VERBOSE"""', '(True)'], {}), "('VERBOSE', True)\n", (2063, 2080), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((2088, 2117), 'python_utils.set_env_var', 'set_env_var', (['"""USHDIR"""', 'USHDIR'], {}), "('USHDIR', USHDIR)\n", (2099, 2117), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((2125, 2168), 'python_utils.set_env_var', 'set_env_var', (['"""DIAG_TABLE_FN"""', 'DIAG_TABLE_FN'], {}), "('DIAG_TABLE_FN', DIAG_TABLE_FN)\n", (2136, 2168), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((2176, 2229), 'python_utils.set_env_var', 'set_env_var', (['"""DIAG_TABLE_TMPL_FP"""', 'DIAG_TABLE_TMPL_FP'], {}), "('DIAG_TABLE_TMPL_FP', DIAG_TABLE_TMPL_FP)\n", (2187, 2229), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((2237, 2263), 'python_utils.set_env_var', 'set_env_var', (['"""CRES"""', '"""C48"""'], {}), "('CRES', 'C48')\n", (2248, 2263), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((2271, 2305), 'python_utils.set_env_var', 'set_env_var', 
(['"""CDATE"""', '"""2021010106"""'], {}), "('CDATE', '2021010106')\n", (2282, 2305), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((1403, 1569), 'python_utils.print_err_msg_exit', 'print_err_msg_exit', (['f"""\n !!!!!!!!!!!!!!!!!\n \n fill_jinja_template.py failed!\n \n !!!!!!!!!!!!!!!!!"""'], {}), '(\n f"""\n !!!!!!!!!!!!!!!!!\n \n fill_jinja_template.py failed!\n \n !!!!!!!!!!!!!!!!!"""\n )\n', (1421, 1569), False, 'from python_utils import import_vars, set_env_var, print_input_args, print_info_msg, print_err_msg_exit, cfg_to_yaml_str\n'), ((1703, 1722), 'os.getenv', 'os.getenv', (['"""USHDIR"""'], {}), "('USHDIR')\n", (1712, 1722), False, 'import os\n'), ((1853, 1878), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1868, 1878), False, 'import os\n')]
|
from src.Squad import Squad
import src
def test_squad_getters():
    """Exercise the basic accessors and mutators of a fresh Squad."""
    country = src.CountriesConstants.FRANCE
    army = src.Army.Army(country, None)
    sq = Squad(army)

    # Initial state straight after construction.
    assert sq.get_country() == country
    assert sq.get_army() == army
    assert sq.get_init_health() == src.GameplayParameters.INITIAL_SQUAD_HEALTH
    assert sq.get_init_protection() == src.GameplayParameters.INITIAL_SQUAD_PROTECTION
    assert sq.get_health() == 0
    assert sq.get_protection() == 0
    assert len(sq.get_units()) == 0

    # Mutators shift the initial stats by the given deltas.
    sq.update_init_health(10)
    sq.update_init_protection(50)
    assert sq.get_init_health() == src.GameplayParameters.INITIAL_SQUAD_HEALTH + 10
    assert sq.get_init_protection() == src.GameplayParameters.INITIAL_SQUAD_PROTECTION + 50

    sq.add_unit(None)
    assert sq.get_units() == (None,)
def test_squad_battle_related():
    """Check battle-parameter reset and attack initiation on a full squad."""
    country = src.CountriesConstants.FRANCE
    army = src.Army.Army(country, None)
    sq = Squad(army)

    archer_a = src.Archer.Archer(sq)
    archer_b = src.Archer.Archer(sq)
    swordsman = src.Swordsman.Swordsman(sq)
    for unit in (archer_a, archer_b, swordsman):
        sq.add_unit(unit)

    # Damaging a unit must not survive a battle-parameter reset.
    archer_a.update_health(-1000)
    sq.reset_battle_parameters()
    assert sq.get_health() == 3 * src.GameplayParameters.INITIAL_SQUAD_HEALTH

    attack = sq.attack_initiator(country)
    assert attack.attacking == country
    assert attack.damage == 2 * src.Archer.Archer.ATTACKING_DAMAGE + src.Swordsman.Swordsman.ATTACKING_DAMAGE
    assert attack.attacker == sq
|
[
"src.Archer.Archer",
"src.Swordsman.Swordsman",
"src.Squad.Squad",
"src.Army.Army"
] |
[((128, 156), 'src.Army.Army', 'src.Army.Army', (['country', 'None'], {}), '(country, None)\n', (141, 156), False, 'import src\n'), ((167, 178), 'src.Squad.Squad', 'Squad', (['army'], {}), '(army)\n', (172, 178), False, 'from src.Squad import Squad\n'), ((954, 982), 'src.Army.Army', 'src.Army.Army', (['country', 'None'], {}), '(country, None)\n', (967, 982), False, 'import src\n'), ((993, 1004), 'src.Squad.Squad', 'Squad', (['army'], {}), '(army)\n', (998, 1004), False, 'from src.Squad import Squad\n'), ((1019, 1040), 'src.Archer.Archer', 'src.Archer.Archer', (['sq'], {}), '(sq)\n', (1036, 1040), False, 'import src\n'), ((1042, 1063), 'src.Archer.Archer', 'src.Archer.Archer', (['sq'], {}), '(sq)\n', (1059, 1063), False, 'import src\n'), ((1065, 1092), 'src.Swordsman.Swordsman', 'src.Swordsman.Swordsman', (['sq'], {}), '(sq)\n', (1088, 1092), False, 'import src\n')]
|
from scipy.spatial import KDTree
def listOfClosest(list_sem, radius = 100):
    """
    Return the indices of elements to be removed because another
    still-remaining element lies within ``radius`` of them.

    For every point (in original order) the nearest *other* remaining
    point is looked up; if it is closer than ``radius`` the current point
    is excluded and removed from the pool, so of a close pair the earlier
    point is dropped and the later one survives.

    Fixes over the original implementation:
      * removed the undefined ``np`` reference (numpy was never imported);
      * the pool entry is now removed by value instead of by an index into
        the *original* list, which became stale after earlier deletions;
      * the KD-tree is rebuilt only after a removal, not on every query.

    :param list_sem: sequence of points; only the first two columns of
        each element are used (assumed to be x, y -- confirm with callers).
    :param radius: exclusion distance threshold.
    :return: list of indices (into ``list_sem``) to be removed.
    """
    points = [tuple(el[0:2]) for el in list_sem]
    remaining = list(points)
    list_excluded = []
    # With fewer than two points there is no "nearest other" to compare.
    if len(remaining) < 2:
        return list_excluded
    tree = KDTree(remaining)
    for ind, point in enumerate(points):
        # k=2: the closest hit is the point itself (distance 0); the
        # second one is the nearest *other* remaining point.
        distances_p, indexes = tree.query(point, k=2)
        if distances_p[1] < radius:
            list_excluded.append(ind)
            remaining.remove(point)
            if len(remaining) < 2:
                break
            tree = KDTree(remaining)
    return list_excluded
|
[
"scipy.spatial.KDTree"
] |
[((728, 745), 'scipy.spatial.KDTree', 'KDTree', (['tree_list'], {}), '(tree_list)\n', (734, 745), False, 'from scipy.spatial import KDTree\n')]
|
import rclpy
from rclpy.node import Node
import random
from geometry_msgs.msg import Twist
import time
class CmdPublisher(Node):
    """ROS2 node that publishes a constant forward velocity on /cmd_vel.

    Publishes once per second; after 10 messages it sends a zero command
    and raises KeyboardInterrupt to break out of ``rclpy.spin``.
    """

    def __init__(self):
        super().__init__('cmd_vel_node')
        # Queue depth 1: only the most recent command matters.
        self.publisher_ = self.create_publisher(Twist, '/cmd_vel', 1)
        timer_period = 1  # seconds between published commands
        self.timer = self.create_timer(timer_period, self.timer_callback)
        self.count = 0
        self.twist = Twist()

    def timer_callback(self):
        """Publish the current twist; stop the robot after 10 messages."""
        # self.twist.linear.x = (random.random()-0.5)*1
        self.twist.linear.x = 0.1
        self.twist.angular.z = 0.0
        self.publisher_.publish(self.twist)
        self.get_logger().info('Data: "%f"' % (self.twist.linear.x))
        self.count += 1
        if self.count ==10:
            print("종료")
            self.twist.linear.x = 0.
            self.publisher_.publish(self.twist)
            # Give the final (zero) command a moment to go out, then
            # escape rclpy.spin(); the caller's except block handles it.
            time.sleep(1)
            raise KeyboardInterrupt

    def stop(self):
        """Publish a zero linear velocity so the robot halts."""
        self.twist.linear.x = 0.
        self.publisher_.publish(self.twist)
        time.sleep(1)
time.sleep(1)
def main(args=None):
    """Initialise rclpy, create the publisher node and spin it.

    A KeyboardInterrupt (raised by the node itself after ten messages,
    or by Ctrl-C) triggers a final stop command; the node is always
    destroyed and rclpy shut down on exit.
    """
    rclpy.init(args=args)
    node = CmdPublisher()
    try:
        rclpy.spin(node)
    except KeyboardInterrupt:
        node.stop()
        print("finish")
    finally:
        node.destroy_node()
        rclpy.shutdown()


if __name__ == '__main__':
    main()
|
[
"rclpy.spin",
"rclpy.init",
"geometry_msgs.msg.Twist",
"time.sleep",
"rclpy.shutdown"
] |
[((1069, 1090), 'rclpy.init', 'rclpy.init', ([], {'args': 'args'}), '(args=args)\n', (1079, 1090), False, 'import rclpy\n'), ((424, 431), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (429, 431), False, 'from geometry_msgs.msg import Twist\n'), ((1029, 1042), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1039, 1042), False, 'import time\n'), ((1150, 1182), 'rclpy.spin', 'rclpy.spin', (['custom_msg_publisher'], {}), '(custom_msg_publisher)\n', (1160, 1182), False, 'import rclpy\n'), ((1338, 1354), 'rclpy.shutdown', 'rclpy.shutdown', ([], {}), '()\n', (1352, 1354), False, 'import rclpy\n'), ((874, 887), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (884, 887), False, 'import time\n')]
|
from doppelkopf.db import db
from datetime import datetime
from enum import Enum
class EventTypes(Enum):
    """Enumerated kinds of application events."""
    # Singleplayer game lifecycle.
    GAME_SINGLEPLAYER_START = 0
    GAME_SINGLEPLAYER_WIN = 1
    GAME_SINGLEPLAYER_LOSE = 2
    # Multiplayer events start at 100.
    GAME_MULTIPLAYER_START = 100
    # Maintenance/cron events start at 1000.
    CRON_DB_BACKUP = 1000
class Event(db.Model):
    """A single application event persisted with its type and timestamp."""

    id = db.Column(db.Integer, primary_key=True)
    event_type = db.Column(db.Enum(EventTypes), nullable=False, server_default="")
    # Pass the callable itself (not its result): with ``datetime.utcnow()``
    # the default was evaluated once at import time, stamping every row
    # with the same moment instead of its insertion time.
    created_at = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)

    def __repr__(self):
        # Fixed: the model has no ``type`` attribute; it is ``event_type``.
        return f"<Event: {self.event_type}>"
|
[
"doppelkopf.db.db.Enum",
"datetime.datetime.utcnow",
"doppelkopf.db.db.Column"
] |
[((295, 334), 'doppelkopf.db.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (304, 334), False, 'from doppelkopf.db import db\n'), ((362, 381), 'doppelkopf.db.db.Enum', 'db.Enum', (['EventTypes'], {}), '(EventTypes)\n', (369, 381), False, 'from doppelkopf.db import db\n'), ((482, 499), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (497, 499), False, 'from datetime import datetime\n')]
|
import ROOT as root
########################## Data of .C file, insert data underneath ######################################
# Auto-generated ROOT dump: rebuilds a 52x80 (col x row) TProfile2D named
# "qMap_Ag_C0_V0" bin by bin, then restores its drawing attributes.
qMap_Ag_C0_V0 = root.TProfile2D("qMap_Ag_C0_V0","qMap_Ag_C0 (V0)",52,0,52,80,0,80,0,0);
# Per-bin entry counts (global bin numbers).
qMap_Ag_C0_V0.SetBinEntries(2345,1);
qMap_Ag_C0_V0.SetBinEntries(2398,14628);
qMap_Ag_C0_V0.SetBinEntries(2399,14480);
qMap_Ag_C0_V0.SetBinEntries(2451,13146);
qMap_Ag_C0_V0.SetBinEntries(2452,56859);
qMap_Ag_C0_V0.SetBinEntries(2453,45921);
qMap_Ag_C0_V0.SetBinEntries(2454,10482);
qMap_Ag_C0_V0.SetBinEntries(2505,14645);
qMap_Ag_C0_V0.SetBinEntries(2506,144387);
qMap_Ag_C0_V0.SetBinEntries(2507,89747);
qMap_Ag_C0_V0.SetBinEntries(2508,14505);
qMap_Ag_C0_V0.SetBinEntries(2559,470);
qMap_Ag_C0_V0.SetBinEntries(2560,90404);
qMap_Ag_C0_V0.SetBinEntries(2561,63400);
qMap_Ag_C0_V0.SetBinEntries(2562,5074);
qMap_Ag_C0_V0.SetBinEntries(2614,14657);
qMap_Ag_C0_V0.SetBinEntries(2615,14285);
# Per-bin accumulated content (sum of the profiled quantity).
qMap_Ag_C0_V0.SetBinContent(2345,89);
qMap_Ag_C0_V0.SetBinContent(2398,1499022);
qMap_Ag_C0_V0.SetBinContent(2399,1279581);
qMap_Ag_C0_V0.SetBinContent(2451,776764);
qMap_Ag_C0_V0.SetBinContent(2452,9492774);
qMap_Ag_C0_V0.SetBinContent(2453,5036156);
qMap_Ag_C0_V0.SetBinContent(2454,916118);
qMap_Ag_C0_V0.SetBinContent(2505,1076333);
qMap_Ag_C0_V0.SetBinContent(2506,5.208083e+07);
qMap_Ag_C0_V0.SetBinContent(2507,4.197776e+07);
qMap_Ag_C0_V0.SetBinContent(2508,1462464);
qMap_Ag_C0_V0.SetBinContent(2559,39063);
qMap_Ag_C0_V0.SetBinContent(2560,1.088624e+07);
qMap_Ag_C0_V0.SetBinContent(2561,9917064);
qMap_Ag_C0_V0.SetBinContent(2562,397610);
qMap_Ag_C0_V0.SetBinContent(2614,1490885);
qMap_Ag_C0_V0.SetBinContent(2615,1202723);
# Per-bin errors.
qMap_Ag_C0_V0.SetBinError(2345,89);
qMap_Ag_C0_V0.SetBinError(2398,12484.77);
qMap_Ag_C0_V0.SetBinError(2399,10716.67);
qMap_Ag_C0_V0.SetBinError(2451,6927.153);
qMap_Ag_C0_V0.SetBinError(2452,394722.2);
qMap_Ag_C0_V0.SetBinError(2453,175124.6);
qMap_Ag_C0_V0.SetBinError(2454,9107.791);
qMap_Ag_C0_V0.SetBinError(2505,9074.18);
qMap_Ag_C0_V0.SetBinError(2506,1070245);
qMap_Ag_C0_V0.SetBinError(2507,1269670);
qMap_Ag_C0_V0.SetBinError(2508,12216.89);
qMap_Ag_C0_V0.SetBinError(2559,1829.499);
qMap_Ag_C0_V0.SetBinError(2560,240290);
qMap_Ag_C0_V0.SetBinError(2561,265107.6);
qMap_Ag_C0_V0.SetBinError(2562,5706.877);
qMap_Ag_C0_V0.SetBinError(2614,12370.39);
qMap_Ag_C0_V0.SetBinError(2615,10193.58);
# Global histogram attributes.
qMap_Ag_C0_V0.SetMinimum(0);
qMap_Ag_C0_V0.SetEntries(607091);
qMap_Ag_C0_V0.SetStats(0);
# 20 evenly spaced contour levels for the 2D drawing.
qMap_Ag_C0_V0.SetContour(20);
qMap_Ag_C0_V0.SetContourLevel(0,0);
qMap_Ag_C0_V0.SetContourLevel(1,23.38672);
qMap_Ag_C0_V0.SetContourLevel(2,46.77344);
qMap_Ag_C0_V0.SetContourLevel(3,70.16016);
qMap_Ag_C0_V0.SetContourLevel(4,93.54688);
qMap_Ag_C0_V0.SetContourLevel(5,116.9336);
qMap_Ag_C0_V0.SetContourLevel(6,140.3203);
qMap_Ag_C0_V0.SetContourLevel(7,163.707);
qMap_Ag_C0_V0.SetContourLevel(8,187.0938);
qMap_Ag_C0_V0.SetContourLevel(9,210.4805);
qMap_Ag_C0_V0.SetContourLevel(10,233.8672);
qMap_Ag_C0_V0.SetContourLevel(11,257.2539);
qMap_Ag_C0_V0.SetContourLevel(12,280.6406);
qMap_Ag_C0_V0.SetContourLevel(13,304.0274);
qMap_Ag_C0_V0.SetContourLevel(14,327.4141);
qMap_Ag_C0_V0.SetContourLevel(15,350.8008);
qMap_Ag_C0_V0.SetContourLevel(16,374.1875);
qMap_Ag_C0_V0.SetContourLevel(17,397.5742);
qMap_Ag_C0_V0.SetContourLevel(18,420.961);
qMap_Ag_C0_V0.SetContourLevel(19,444.3477);
# Line colour and axis cosmetics.
ci = root.TColor.GetColor("#000099");
qMap_Ag_C0_V0.SetLineColor(ci);
qMap_Ag_C0_V0.GetXaxis().SetTitle("col");
qMap_Ag_C0_V0.GetXaxis().SetRange(15,30);
qMap_Ag_C0_V0.GetXaxis().SetNdivisions(508);
qMap_Ag_C0_V0.GetXaxis().SetLabelFont(42);
qMap_Ag_C0_V0.GetXaxis().SetLabelSize(0.05);
qMap_Ag_C0_V0.GetXaxis().SetTitleSize(0.05);
qMap_Ag_C0_V0.GetXaxis().SetTitleOffset(1.1);
qMap_Ag_C0_V0.GetXaxis().SetTitleFont(42);
qMap_Ag_C0_V0.GetYaxis().SetTitle("row");
qMap_Ag_C0_V0.GetYaxis().SetRange(30,60);
qMap_Ag_C0_V0.GetYaxis().SetLabelFont(42);
qMap_Ag_C0_V0.GetYaxis().SetLabelSize(0.05);
qMap_Ag_C0_V0.GetYaxis().SetTitleSize(0.05);
qMap_Ag_C0_V0.GetYaxis().SetTitleOffset(1.1);
qMap_Ag_C0_V0.GetYaxis().SetTitleFont(42);
qMap_Ag_C0_V0.GetZaxis().SetLabelFont(42);
qMap_Ag_C0_V0.GetZaxis().SetLabelSize(0.035);
qMap_Ag_C0_V0.GetZaxis().SetTitleSize(0.035);
|
[
"ROOT.TColor.GetColor",
"ROOT.TProfile2D"
] |
[((145, 224), 'ROOT.TProfile2D', 'root.TProfile2D', (['"""qMap_Ag_C0_V0"""', '"""qMap_Ag_C0 (V0)"""', '(52)', '(0)', '(52)', '(80)', '(0)', '(80)', '(0)', '(0)'], {}), "('qMap_Ag_C0_V0', 'qMap_Ag_C0 (V0)', 52, 0, 52, 80, 0, 80, 0, 0)\n", (160, 224), True, 'import ROOT as root\n'), ((3335, 3366), 'ROOT.TColor.GetColor', 'root.TColor.GetColor', (['"""#000099"""'], {}), "('#000099')\n", (3355, 3366), True, 'import ROOT as root\n')]
|
import pytest
import torch
import torch.nn as nn
import torch.nn.functional as F
from einops import repeat, rearrange
from src.models.modules.masking import FullMask, LengthMask
from src.models.attention.linformer_attention import LinformerAttention
def seed_cpu_cuda(seed):
    """Seed both the CPU and the CUDA random number generators."""
    for seed_fn in (torch.manual_seed, torch.cuda.manual_seed):
        seed_fn(seed)
class TestLinformerAttention:
    """Shape and normalization checks for ``LinformerAttention``."""

    @pytest.mark.parametrize('device', ['cpu', 'cuda'])
    @pytest.mark.parametrize('softmax_temp', [None, 1.0, 0.235])
    @pytest.mark.parametrize('share_kv', [False, True])
    @pytest.mark.parametrize('proj_dim_k', [13, 47, 88])
    @pytest.mark.parametrize('seq_len', [127, 28, 468])
    def test_output(self, seq_len, proj_dim_k, share_kv, softmax_temp, device):
        """Check output/attention shapes and that attention rows sum to 0 or 1."""
        seed = 2357
        embed_dim = 21
        v_dim = 17
        num_heads = 7
        batch_size = 18
        q_seqlen = 47
        # [2021-08-08] local_dot_product_cuda has a bug when q_seqlen != k_seqlen
        # https://github.com/idiap/fast-transformers/issues/98
        k_seqlen = seq_len
        seed_cpu_cuda(seed)
        # Random valid lengths per batch element; positions past the length
        # are treated as padding.
        key_padding_mask = LengthMask(torch.randint(low=0, high=k_seqlen, size=(batch_size,),
                                                  device=device), max_len=k_seqlen)
        lin_attn = LinformerAttention(seq_len, k=proj_dim_k, share_kv=share_kv,
                                      softmax_temp=softmax_temp, attention_dropout=0.0).to(device)
        q = torch.randn(batch_size, q_seqlen, num_heads, embed_dim, device=device)
        k = torch.randn(batch_size, k_seqlen, num_heads, embed_dim, device=device)
        v = torch.randn(batch_size, k_seqlen, num_heads, v_dim, device=device)
        out_lin, A_lin = lin_attn(q, k, v, key_padding_mask=key_padding_mask, need_weights=True)
        assert out_lin.shape == (batch_size, q_seqlen, num_heads, v_dim)
        # Attention is over the projected (proj_dim_k) key dimension.
        assert A_lin.shape == (batch_size, num_heads, q_seqlen, proj_dim_k)
        assert torch.all(A_lin >= 0)
        # Sum of each row should be either 0.0 or 1.0
        A_local_sum = A_lin.sum(dim=-1)
        assert torch.all(torch.isclose(A_local_sum, torch.ones_like(A_local_sum))
                         | torch.isclose(A_local_sum, torch.zeros_like(A_local_sum)))
|
[
"torch.ones_like",
"torch.randint",
"torch.zeros_like",
"torch.manual_seed",
"torch.cuda.manual_seed",
"torch.randn",
"src.models.attention.linformer_attention.LinformerAttention",
"pytest.mark.parametrize",
"torch.all"
] |
[((285, 308), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (302, 308), False, 'import torch\n'), ((313, 341), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (335, 341), False, 'import torch\n'), ((380, 430), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""device"""', "['cpu', 'cuda']"], {}), "('device', ['cpu', 'cuda'])\n", (403, 430), False, 'import pytest\n'), ((436, 495), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""softmax_temp"""', '[None, 1.0, 0.235]'], {}), "('softmax_temp', [None, 1.0, 0.235])\n", (459, 495), False, 'import pytest\n'), ((501, 551), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""share_kv"""', '[False, True]'], {}), "('share_kv', [False, True])\n", (524, 551), False, 'import pytest\n'), ((557, 608), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""proj_dim_k"""', '[13, 47, 88]'], {}), "('proj_dim_k', [13, 47, 88])\n", (580, 608), False, 'import pytest\n'), ((614, 664), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seq_len"""', '[127, 28, 468]'], {}), "('seq_len', [127, 28, 468])\n", (637, 664), False, 'import pytest\n'), ((1447, 1517), 'torch.randn', 'torch.randn', (['batch_size', 'q_seqlen', 'num_heads', 'embed_dim'], {'device': 'device'}), '(batch_size, q_seqlen, num_heads, embed_dim, device=device)\n', (1458, 1517), False, 'import torch\n'), ((1530, 1600), 'torch.randn', 'torch.randn', (['batch_size', 'k_seqlen', 'num_heads', 'embed_dim'], {'device': 'device'}), '(batch_size, k_seqlen, num_heads, embed_dim, device=device)\n', (1541, 1600), False, 'import torch\n'), ((1613, 1679), 'torch.randn', 'torch.randn', (['batch_size', 'k_seqlen', 'num_heads', 'v_dim'], {'device': 'device'}), '(batch_size, k_seqlen, num_heads, v_dim, device=device)\n', (1624, 1679), False, 'import torch\n'), ((1942, 1963), 'torch.all', 'torch.all', (['(A_lin >= 0)'], {}), '(A_lin >= 0)\n', (1951, 1963), False, 'import torch\n'), ((1114, 1184), 
'torch.randint', 'torch.randint', ([], {'low': '(0)', 'high': 'k_seqlen', 'size': '(batch_size,)', 'device': 'device'}), '(low=0, high=k_seqlen, size=(batch_size,), device=device)\n', (1127, 1184), False, 'import torch\n'), ((1275, 1390), 'src.models.attention.linformer_attention.LinformerAttention', 'LinformerAttention', (['seq_len'], {'k': 'proj_dim_k', 'share_kv': 'share_kv', 'softmax_temp': 'softmax_temp', 'attention_dropout': '(0.0)'}), '(seq_len, k=proj_dim_k, share_kv=share_kv, softmax_temp=\n softmax_temp, attention_dropout=0.0)\n', (1293, 1390), False, 'from src.models.attention.linformer_attention import LinformerAttention\n'), ((2110, 2138), 'torch.ones_like', 'torch.ones_like', (['A_local_sum'], {}), '(A_local_sum)\n', (2125, 2138), False, 'import torch\n'), ((2194, 2223), 'torch.zeros_like', 'torch.zeros_like', (['A_local_sum'], {}), '(A_local_sum)\n', (2210, 2223), False, 'import torch\n')]
|
import importlib
import json
import os
import sys
from threading import Thread
from typing import *
from kivy.app import App as KivyApp
from kivy.config import ConfigParser
from kivy.lang.builder import Builder
from kivy.logger import Logger
from kivy.uix.screenmanager import ScreenManager
from aiventure.common.ai import AI
from aiventure.common.adventure import Adventure
from aiventure.client.uix.menu import MenuScreen
from aiventure.client.uix.play import PlayScreen
from aiventure.common.utils import get_save_name, is_model_valid
class App(KivyApp):
def __init__(self, **kwargs):
super().__init__(**kwargs)
# UI
self.title: str = 'Aiventure'
self.sm: Optional[ScreenManager] = None
self.screens: Dict[str, ClassVar] = {}
# AI
self.ai: Optional[AI] = None
self.adventure: Optional[Adventure] = None
# Threading
self.threads: Dict[str, Thread] = {}
# Modules
self.loaded_modules: Dict[str, str] = {}
self.input_filters: List[Callable[[str], str]] = []
self.output_filters: List[Callable[[str], str]] = []
self.display_filter: Optional[Callable[[List[str]], str]] = None
def build(self) -> ScreenManager:
"""
"""
self.init_mods()
self.init_ui()
return self.sm
def build_config(self, _) -> None:
"""
"""
self.config = ConfigParser()
self.config.read('config.ini')
self.config.setdefaults('general', {
'userdir': 'user',
'autosave': True
})
self.config.setdefaults('ai', {
'timeout': 20.0,
'memory': 20,
'max_length': 60,
'beam_searches': 1,
'temperature': 0.8,
'top_k': 40,
'top_p': 0.9,
'repetition_penalty': 1.1
})
self.config.setdefaults('modules', {
'input_filters': 'aiventure:filters',
'output_filters': 'aiventure:filters',
'display_filter': 'aiventure:filters'
})
self.config.write()
def init_mods(self) -> None:
"""
Initializes the game's module system and loads mods based on the current configuration.
"""
sys.path.append(self.config.get('general', 'userdir'))
for f in self.config.get('modules', 'input_filters').split(','):
domain, module = f.split(':')
Logger.info(f'Modules: Loading {f}.filter_input')
self.input_filters += [self.load_submodule(domain, module, 'filter_input')]
for f in self.config.get('modules', 'output_filters').split(','):
domain, module = f.split(':')
Logger.info(f'Modules: Loading {f}.filter_output')
self.output_filters += [self.load_submodule(domain, module, 'filter_output')]
domain, module = self.config.get('modules', 'display_filter').split(':')
Logger.info(f'Modules: Loading {f}.filter_display')
self.display_filter = self.load_submodule(domain, module, 'filter_display')
def init_ui(self) -> None:
"""
Initializes the screen manager, loads all screen kivy files and their associated python modules.
"""
self.sm = ScreenManager()
self.screens = {'menu': MenuScreen, 'play': PlayScreen }
for n, s in self.screens.items():
Builder.load_file(f'aiventure/client/uix/{n}.kv')
self.sm.add_widget(s(name=n))
self.sm.current = 'menu'
def get_user_path(self, *args: str) -> str:
"""
Retrieves a path relative to the current user directory.
:param args: The subdirectories / filenames in the user directory.
:return: A path in the current user directory.
"""
return os.path.join(self.config.get('general', 'userdir'), *args)
def get_model_path(self, model: str) -> str:
"""
Gets the path to the currently selected (but not necessarily loaded) AI model.
:param model: The model within the models subdirectory.
:return: The current selected model path.
"""
return self.get_user_path('models', model)
def get_valid_models(self) -> List[str]:
"""
:return: A list of valid model names, inside {userdir}/models
"""
return [m.name for m in os.scandir(self.get_user_path('models')) if is_model_valid(m.path)]
def get_module_path(self, domain: str, module: str) -> str:
return self.get_user_path('modules', domain, f'{module}.py')
def load_module(self, domain: str, module: str) -> Any:
"""
Loads a module and returns it (if it hasn't been loaded already).
:param domain: The module domain.
:param module: The module to load from the given domain.
:return: The loaded module.
"""
k = f'{domain}:{module}'
v = self.loaded_modules.get(k)
if v is None:
v = importlib.import_module(f'.{module}', f'modules.{domain}')
self.loaded_modules[k] = v
return v
def load_submodule(self, domain: str, module: str, submodule: str) -> str:
"""
Loads a submodule (a method, class, or variable from a given module).
:param domain: The module domain.
:param module: The module to load from the given domain.
:param submodule: The submodule to load from the given module.
:return: The loaded submodule.
"""
m = self.load_module(domain, module)
return getattr(m, submodule)
# SAVING AND LOADING
def save_adventure(self) -> None:
"""
Saves the current adventure.
"""
savefile = get_save_name(self.adventure.name)
with open(self.get_user_path('adventures', f'{savefile}.json'), 'w') as json_file:
json.dump(self.adventure.to_dict(), json_file, indent=4)
def load_adventure(self) -> None:
"""
Loads the current adventure.
"""
savefile = get_save_name(self.adventure.name)
with open(self.get_user_path('adventures', f'{savefile}.json'), 'r') as json_file:
self.adventure.from_dict(json.load(json_file))
|
[
"kivy.lang.builder.Builder.load_file",
"json.load",
"kivy.uix.screenmanager.ScreenManager",
"importlib.import_module",
"aiventure.common.utils.is_model_valid",
"aiventure.common.utils.get_save_name",
"kivy.logger.Logger.info",
"kivy.config.ConfigParser"
] |
[((1425, 1439), 'kivy.config.ConfigParser', 'ConfigParser', ([], {}), '()\n', (1437, 1439), False, 'from kivy.config import ConfigParser\n'), ((2962, 3013), 'kivy.logger.Logger.info', 'Logger.info', (['f"""Modules: Loading {f}.filter_display"""'], {}), "(f'Modules: Loading {f}.filter_display')\n", (2973, 3013), False, 'from kivy.logger import Logger\n'), ((3277, 3292), 'kivy.uix.screenmanager.ScreenManager', 'ScreenManager', ([], {}), '()\n', (3290, 3292), False, 'from kivy.uix.screenmanager import ScreenManager\n'), ((5736, 5770), 'aiventure.common.utils.get_save_name', 'get_save_name', (['self.adventure.name'], {}), '(self.adventure.name)\n', (5749, 5770), False, 'from aiventure.common.utils import get_save_name, is_model_valid\n'), ((6050, 6084), 'aiventure.common.utils.get_save_name', 'get_save_name', (['self.adventure.name'], {}), '(self.adventure.name)\n', (6063, 6084), False, 'from aiventure.common.utils import get_save_name, is_model_valid\n'), ((2464, 2513), 'kivy.logger.Logger.info', 'Logger.info', (['f"""Modules: Loading {f}.filter_input"""'], {}), "(f'Modules: Loading {f}.filter_input')\n", (2475, 2513), False, 'from kivy.logger import Logger\n'), ((2731, 2781), 'kivy.logger.Logger.info', 'Logger.info', (['f"""Modules: Loading {f}.filter_output"""'], {}), "(f'Modules: Loading {f}.filter_output')\n", (2742, 2781), False, 'from kivy.logger import Logger\n'), ((3412, 3461), 'kivy.lang.builder.Builder.load_file', 'Builder.load_file', (['f"""aiventure/client/uix/{n}.kv"""'], {}), "(f'aiventure/client/uix/{n}.kv')\n", (3429, 3461), False, 'from kivy.lang.builder import Builder\n'), ((4994, 5052), 'importlib.import_module', 'importlib.import_module', (['f""".{module}"""', 'f"""modules.{domain}"""'], {}), "(f'.{module}', f'modules.{domain}')\n", (5017, 5052), False, 'import importlib\n'), ((4423, 4445), 'aiventure.common.utils.is_model_valid', 'is_model_valid', (['m.path'], {}), '(m.path)\n', (4437, 4445), False, 'from aiventure.common.utils import 
get_save_name, is_model_valid\n'), ((6213, 6233), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (6222, 6233), False, 'import json\n')]
|
#! /usr/bin/env python3
import Deck as D
import Player as P
import Game as G
import matplotlib.pyplot as plt
import cProfile
score_dict = {1:'High Card',
2:'One Pair',
3:'Two Pair',
4:'Three of a Kind',
5:'Straight',
6:'Flush',
7:'Full House',
8:'Four of a Kind',
9:'Straight Flush',
10:'Royal Flush'}
tot_hands = float(133784560.) # 7 cards, choose best 5 (52 choose 7)
odds_dict = {1:23294460/tot_hands,
2:58627800/tot_hands,
3:31433400/tot_hands,
4:6461620/tot_hands,
5:6180020/tot_hands,
6:4047644/tot_hands,
7:3473184/tot_hands,
8:224848/tot_hands,
9:37260/tot_hands,
10:4324/tot_hands}
#_________________
def print_result(deck, pool, players):
'''Output results'''
best_hand = None
best_player = None
tie_list = []
# Print Deck
print('Deck state:\n%s'%deck)
# Print Pool
print('\nPool of common cards:\n%s'%pool)
# Print hands
print('\nPlayers\' hands:')
for p in players:
i_poker_hand = G.PokerHand(p.hand,pool.hand)
i_poker_hand.get_score()
if not best_hand:
best_player = p
best_hand = i_poker_hand
elif i_poker_hand.is_tie(best_hand.score, best_hand.rank_cards, best_hand.kicker_cards):
tie_list.append(p)
# if current is better than best, update
elif not i_poker_hand.is_better(best_hand.score, best_hand.rank_cards, best_hand.kicker_cards):
best_player = p
best_hand = i_poker_hand
tie_list = []
print('\n{0} Score:({1}) {2}'.format(p, i_poker_hand.score, score_dict[i_poker_hand.score]))
print('\t\t Final Hand: {0}'.format(i_poker_hand.final_hand))
print('\t\t Rank cards: {0}'.format(i_poker_hand.rank_cards))
print('\t\tKicker Cards: {0}'.format(i_poker_hand.kicker_cards))
# Print the winner
print('\nWinner:\n{} ({})\n\t\t Final Hand:{}'.format(best_player, score_dict[best_hand.score] ,best_hand.final_hand))
if len(tie_list) > 0:
print('Tied:')
for t in tie_list:
print('{}'.format(t))
#______________
def sim_game(names):
'''Create deck, deal to players'''
# Create deck
my_deck = D.Deck()
my_deck.shuffle()
# Create 3 Players
players = []
for p in names:
if len(players) < P.Player.max_players:
players.append(P.Player(p))
# Deal hands
for _ in range(P.Player.max_cards):
for p in players:
my_deck.deal_card(p)
# Deal Pool
my_pool = G.PokerPool('common_pool')
for _ in range(G.PokerPool.max_cards):
my_deck.deal_card(my_pool)
# Score hands
top_score = 0
for p in players:
i_poker_hand = G.PokerHand(p.hand,my_pool.hand)
i_poker_hand.get_score()
if i_poker_hand.score > top_score: top_score = i_poker_hand.score
return my_deck, my_pool, players, top_score
#_______________________
def set_up(n_runs=1000, num_players=3, print_state=False, print_stats=False):
names_list = ['Alice','Bob','Cthulhu','Dude','Einstein','Feynman','Gandolf','Humpledink','Io','Jenkins']
names = []
if num_players > 10: num_players = 10
for i in range(num_players):
names.append(names_list[i])
scores = [0,0,0,0,0,0,0,0,0,0]
l_scores = []
print('Simulating {} hands for {} players'.format(n_runs, num_players))
for x in range(n_runs):
if not x%1000: print('Simulation: {0}'.format(x))
deck, pool, players, score = sim_game(names)
if score > 10 or score < 1:
print('Invalid score! {}'.format(score))
scores[score-1] += 1
l_scores.append(score)
if print_state:
print_result(deck, pool, players)
if print_stats:
for i in range(10,0,-1):
print('({1:>2}) {0:>20}: {2:6.4f} % ({3:6.4f} %)'.format(score_dict[i], i,
100.*scores[i-1]/float(n_runs), 100*odds_dict[i]))
odds_list = [odds_dict[x] for x in range(1,11)]
bins = [0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5,8.5,9.5,10.5]
# Plot histograms and ratio
fig, (ax1, ax2) = plt.subplots(nrows=2)
ns, bs, ps = ax1.hist(l_scores, bins=bins, normed=True, color="g", alpha=0.5, label="simulation")
n2, b2, p2 = ax1.hist([x for x in range(1,11)], bins=bins, weights=odds_list, color="crimson",alpha=0.45, label="theory")
ax1.legend(loc='upper right')
ax1.set_xlim(0.5, 10.5)
ax1.set_ylabel('Frequency')
ax1.set_xlabel('Score')
ratio = [ns[i]/n2[i]-1 if n2[i] != 0 else 0 for i in range(10)]
bins = [i for i in range(1,11) ]
ax2.bar(bins, ratio,align="center",bottom=1.0, width=1.0, alpha=0.75, color="midnightblue")
ax2.set_ylabel('Ratio sim/theory')
ax2.set_xlabel('Score')
ax2.axhline(1, color='black')
ax2.set_ylim(0.5,1.5)
ax2.set_xlim(0.5,10.5)
plt.show()
#__________
def main():
# Verify long-term odds match expected for each outcome
set_up(100000,num_players=1,print_state=False, print_stats=True)
# Analyze output of example round
#set_up(1,num_players=5,print_state=True, print_stats=False)
#________________________
if __name__ == "__main__":
main()
|
[
"Player.Player",
"matplotlib.pyplot.show",
"Deck.Deck",
"Game.PokerPool",
"Game.PokerHand",
"matplotlib.pyplot.subplots"
] |
[((2273, 2281), 'Deck.Deck', 'D.Deck', ([], {}), '()\n', (2279, 2281), True, 'import Deck as D\n'), ((2562, 2588), 'Game.PokerPool', 'G.PokerPool', (['"""common_pool"""'], {}), "('common_pool')\n", (2573, 2588), True, 'import Game as G\n'), ((1163, 1193), 'Game.PokerHand', 'G.PokerHand', (['p.hand', 'pool.hand'], {}), '(p.hand, pool.hand)\n', (1174, 1193), True, 'import Game as G\n'), ((2732, 2765), 'Game.PokerHand', 'G.PokerHand', (['p.hand', 'my_pool.hand'], {}), '(p.hand, my_pool.hand)\n', (2743, 2765), True, 'import Game as G\n'), ((4056, 4077), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)'}), '(nrows=2)\n', (4068, 4077), True, 'import matplotlib.pyplot as plt\n'), ((4787, 4797), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4795, 4797), True, 'import matplotlib.pyplot as plt\n'), ((2421, 2432), 'Player.Player', 'P.Player', (['p'], {}), '(p)\n', (2429, 2432), True, 'import Player as P\n')]
|
# -*-coding:utf-8-*-
# 作者: 29511
# 文件名: list_shuffle.py
# 日期时间:2021/4/19,15:47
def list_shuffle(li):
"""
打乱列表元素
:param li: 原列表
"""
import random
random.shuffle(li)
li = [1, 2, 3, 4, 5]
list_shuffle(li) # 调用之后li反生了改变
print(li)
|
[
"random.shuffle"
] |
[((174, 192), 'random.shuffle', 'random.shuffle', (['li'], {}), '(li)\n', (188, 192), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed May 6 09:28:40 2020
@author: yo
Función auxiliar para calcular el RSE
"""
import numpy as np
def calc_rse(valores,prediccion):
return(sum(valores-prediccion)**2/sum((valores-np.mean(valores))**2))
|
[
"numpy.mean"
] |
[((226, 242), 'numpy.mean', 'np.mean', (['valores'], {}), '(valores)\n', (233, 242), True, 'import numpy as np\n')]
|
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Modifiers classes related to holding sparsity level constant for finetuning or
transfer learning
"""
from typing import List, Union
import torch
from torch import Tensor
from torch.nn import Module, Parameter
from sparseml.pytorch.optim.modifier import (
ModifierProp,
PyTorchModifierYAML,
ScheduledModifier,
)
from sparseml.pytorch.sparsification.pruning.mask_creator import PruningMaskCreator
from sparseml.pytorch.sparsification.pruning.modifier_pruning_base import (
BasePruningModifier,
)
from sparseml.pytorch.utils import get_prunable_layers, tensor_sparsity
from sparseml.sparsification import (
ConstantPruningModifier as BaseConstantPruningModifier,
)
from sparseml.utils import ALL_TOKEN
__all__ = [
"ConstantMaskCreator",
"ConstantPruningModifier",
]
class ConstantMaskCreator(PruningMaskCreator):
"""
Class for creating sparsity masks that only mask already pruned parameters.
i.e. if the value of a paraemeter is 0 it will be masked, otherwise it will
remain unmasked
"""
def create_sparsity_masks(
self,
tensors: List[Tensor],
target: Union[float, List[float]],
global_sparsity: bool = False,
) -> List[Tensor]:
"""
:param tensors: tensors to generate constant masks for
:param target: not used for constant pruning
:param global_sparsity: not used for constant pruning
:return: list of masks derived from pruned values of each of the given tensors
"""
return [torch.ne(tensor, 0.0).type(tensor.type()) for tensor in tensors]
@PyTorchModifierYAML()
class ConstantPruningModifier(BasePruningModifier, BaseConstantPruningModifier):
"""
Holds the sparsity level and shape for a given parameter(s) constant while training.
Useful for transfer learning use cases.
| Sample yaml:
| !ConstantPruningModifier
| start_epoch: 0.0
| end_epoch: 10.0
| params: ['re:.*weight']
| log_types: __ALL__
:param start_epoch: The epoch to start the modifier at
:param end_epoch: The epoch to end the modifier at
:param update_frequency: Ignored for this modifier
:param params: A list of full parameter names or regex patterns of names to apply
pruning to. Regex patterns must be specified with the prefix 're:'. __ALL__
will match to all parameters. __ALL_PRUNABLE__ will match to all ConvNd
and Linear layers' weights
:param log_types: The loggers to allow the learning rate to be logged to,
default is __ALL__
"""
@staticmethod
def from_sparse_model(model: Module) -> List[ScheduledModifier]:
"""
Create constant ks modifiers for all prunable params in the given model
(conv, linear) that have been artificially sparsified (sparsity > 40%).
Useful for transfer learning from a pruned model.
:param model: the model to create constant ks modifiers for
:return: the list of created constant ks modifiers
"""
prunable = get_prunable_layers(model)
modifiers = []
for name, layer in prunable:
weight = getattr(layer, "weight")
sparsity = tensor_sparsity(weight)
if sparsity > 0.1: # set at 10% sparsity to be threshold for intentional
modifiers.append(
ConstantPruningModifier(params=["{}.{}".format(name, "weight")])
)
return modifiers
def __init__(
self,
params: Union[str, List[str]],
start_epoch: float = -1.0,
end_epoch: float = -1.0,
update_frequency: float = -1.0,
log_types: Union[str, List[str]] = ALL_TOKEN,
):
super(ConstantPruningModifier, self).__init__(
params=params,
start_epoch=start_epoch,
end_epoch=end_epoch,
end_comparator=-1,
update_frequency=-1,
log_types=log_types,
allow_reintroduction=False,
leave_enabled=False,
parent_class_kwarg_names=["params"],
)
def _get_mask_creator(
self, param_names: List[str], params: List[Parameter]
) -> PruningMaskCreator:
"""
:param names: full names of parameters to be pruned
:param params: list of Parameters to be masked
:return: mask creator object to be used by this pruning algorithm
"""
return ConstantMaskCreator()
def _get_scorer(self, *args, **kwargs):
"""
:return: None, no scorer is used, defaults to using raw parameter values
"""
return None
def get_applied_sparsity_for_epoch(self, *args, **kwargs):
"""
:return: None, sparsity is set by the existing levels
"""
return None
@ModifierProp(serializable=False)
def leave_enabled(self) -> bool:
"""
:return: True to continue masking the weights after end_epoch,
False to stop masking. Should be set to False if exporting the result
immediately after or doing some other prune.
"""
return self._leave_enabled
|
[
"torch.ne",
"sparseml.pytorch.utils.tensor_sparsity",
"sparseml.pytorch.utils.get_prunable_layers",
"sparseml.pytorch.optim.modifier.ModifierProp",
"sparseml.pytorch.optim.modifier.PyTorchModifierYAML"
] |
[((2222, 2243), 'sparseml.pytorch.optim.modifier.PyTorchModifierYAML', 'PyTorchModifierYAML', ([], {}), '()\n', (2241, 2243), False, 'from sparseml.pytorch.optim.modifier import ModifierProp, PyTorchModifierYAML, ScheduledModifier\n'), ((5457, 5489), 'sparseml.pytorch.optim.modifier.ModifierProp', 'ModifierProp', ([], {'serializable': '(False)'}), '(serializable=False)\n', (5469, 5489), False, 'from sparseml.pytorch.optim.modifier import ModifierProp, PyTorchModifierYAML, ScheduledModifier\n'), ((3689, 3715), 'sparseml.pytorch.utils.get_prunable_layers', 'get_prunable_layers', (['model'], {}), '(model)\n', (3708, 3715), False, 'from sparseml.pytorch.utils import get_prunable_layers, tensor_sparsity\n'), ((3846, 3869), 'sparseml.pytorch.utils.tensor_sparsity', 'tensor_sparsity', (['weight'], {}), '(weight)\n', (3861, 3869), False, 'from sparseml.pytorch.utils import get_prunable_layers, tensor_sparsity\n'), ((2154, 2175), 'torch.ne', 'torch.ne', (['tensor', '(0.0)'], {}), '(tensor, 0.0)\n', (2162, 2175), False, 'import torch\n')]
|
#!/usr/bin/env python3
#
#
#
#
# <NAME> (03 Apr 2019), contact: <EMAIL>
import argparse
import datetime
import os
import re
import sys
import requests
import kbr.config_utils as config_utils
import kbr.db_utils as db_utils
import kbr.timedate_utils as timedate_utils
points = []
url = None
db = None
dbuser = None
dbpass = None
def write_points(data):
global url, db, dbuser, dbpass
wurl = f"{url}/write?db={db}"
if url is None:
print( data )
return
try:
res = requests.post(wurl, data=data, auth=(dbuser, dbpass))
res.raise_for_status()
except requests.exceptions.HTTPError as e:
print (e.response.text)
class Timerange:
"""Iterator that counts upward forever."""
def __init__(self, start:str, end:str, interval:str):
self._start = timedate_utils.datestr_to_ts(start)
self._end = timedate_utils.datestr_to_ts(end)
self._timeframe = timedate_utils.timedelta_to_sec( interval )
def __iter__(self):
return self
def __next__(self):
ts = self._start
self._start = self._start + datetime.timedelta(seconds = self._timeframe)
if ts >= self._end:
raise StopIteration # signals "the end"
return ts
def unix_time_nano(dt):
epoch = datetime.datetime.utcfromtimestamp(0)
return int((dt - epoch).total_seconds() * 1000000000)
DB = None
def date_range(start:str, end:str, timeframe:str) -> []:
start = timedate_utils.datestr_to_ts(start)
end = timedate_utils.datestr_to_ts(end)
timeframe = timedate_utils.timedelta_to_sec( timeframe )
res = [start]
while True:
start = start + datetime.timedelta(seconds = timeframe)
# print( start )
if start >= end:
break
res.append( start )
return res
def interval_type(interval:str) -> (int, str):
''' 1m, 3h, 2d, 1w --> now - delta as epoc secs '''
try:
g = re.match(r'(\d+)([hdM])', interval)
num, range = g.groups(0)
if range == 'h':
return num, "hour"
elif range == 'd':
return num, "day"
elif range == 'M':
return num, "month"
except Exception as e:
print(f"timerange {interval} is invalid valid examples: 1d 2h 1w 1M")
sys.exit(1)
def make_timeframe(start:str, end:str, interval:str):
size, sort = interval_type(interval)
timeframe = f"timeframe={sort},size={size}"
delta_time = f"{size} {sort}"
return timeframe, delta_time
def workflow_stats(start:str, end:str, interval:str, resolution:str="30s"):
timeframe, delta_time = make_timeframe(start, end, interval)
for i in Timerange(start, end, resolution):
sql = f"select count(*) AS count from workflow_invocation WHERE create_time AT TIME ZONE 'UTC' < '{i}' and create_time AT TIME ZONE 'UTC' > timestamp '{i}' - INTERVAL '{delta_time}'"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
if entry["count"] == None or entry["count"] == 0:
continue
l = f"workflows,{timeframe} count={entry['count']} {ts}"
print(l)
write_points(l)
def data_stats(start:str, end:str, interval, resolution:str="30s"):
timeframe, delta_time = make_timeframe(start, end, interval)
for i in Timerange(start, end, resolution):
sql = f"SELECT sum(coalesce(dataset.total_size, dataset.file_size, 0)) AS size FROM dataset WHERE create_time AT TIME ZONE 'UTC' < '{i}' and create_time AT TIME ZONE 'UTC' > timestamp '{i}' - INTERVAL '{delta_time}'"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
if entry["size"] == None or entry["size"] == 0:
continue
l = f"data_growth,{timeframe} size={entry['size']} {ts}"
#print(l)
write_points(l)
def job_stats(start:str, end:str, interval, resolution:str="30s"):
timeframe, delta_time = make_timeframe(start, end, interval)
for i in Timerange(start, end, resolution):
sql = f"SELECT state, count(*) AS count from job WHERE create_time AT TIME ZONE 'UTC' < '{i}' and create_time AT TIME ZONE 'UTC' > timestamp '{i}' - INTERVAL '{delta_time}' group by state"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
l = f"jobs,{timeframe},state={entry['state']} count={entry['count']} {ts}"
#print(l)
write_points(l)
def user_stats(start:str, end:str, interval:str, resolution:str="30s"):
timeframe, delta_time = make_timeframe(start, end, interval)
for i in Timerange(start, end, resolution):
sql = f"select count(distinct(user_id)) as count from job WHERE create_time AT TIME ZONE 'UTC' < '{i}' and create_time AT TIME ZONE 'UTC' > timestamp '{i}' - INTERVAL '{delta_time}'"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
if entry["count"] == None or entry["count"] == 0:
continue
l = f"galaxy-users,{timeframe} count={entry['count']} {ts}"
#print(l)
write_points(l)
def jobs_total(start:str, end:str, interval:str, resolution:str="30s"):
timeframe = "timeframe=epoch"
for i in Timerange(start, end, resolution):
sql = f"SELECT state, count(*) AS count from job WHERE create_time AT TIME ZONE 'UTC' < '{i}' group by state"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
if entry["count"] == None or entry["count"] == 0:
continue
l = f"jobs,{timeframe},state={entry['state']} count={entry['count']} {ts}"
write_points(l)
def datagrowth_total(start:str, end:str, interval:str, resolution:str="30s"):
timeframe = "timeframe=epoch"
for i in Timerange(start, end, resolution):
sql = f"SELECT sum(coalesce(dataset.total_size, dataset.file_size, 0)) AS size FROM dataset WHERE create_time AT TIME ZONE 'UTC' < '{i}'"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
if entry["size"] == None or entry["size"] == 0:
continue
l = f"data_growth,{timeframe} size={entry['size']} {ts}"
write_points(l)
def workflow_total(start:str, end:str, interval:str, resolution:str="30s"):
timeframe = "timeframe=epoch"
for i in Timerange(start, end, resolution):
sql = f"select count(*) AS count from workflow_invocation WHERE create_time AT TIME ZONE 'UTC' < '{i}'"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
if entry["count"] == None or entry["count"] == 0:
continue
l = f"workflows,{timeframe} count={entry['count']} {ts}"
write_points(l)
def nels_export_total(start:str, end:str, interval:str, resolution:str="30s"):
for i in Timerange(start, end, resolution):
sql = f"SELECT count(*), instance FROM nels_export_tracking WHERE create_time AT TIME ZONE 'UTC' < '{i}' GROUP BY instance"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
l = f"nels-exports,instance={entry['instance']} count={entry['count']} {ts}"
write_points(l)
def nels_import_total(start:str, end:str, interval:str, resolution:str="30s"):
for i in Timerange(start, end, resolution):
sql = f"SELECT count(*) FROM nels_import_tracking WHERE create_time AT TIME ZONE 'UTC' < '{i}'"
ts = unix_time_nano(i)
for entry in DB.get_as_dict(sql):
l = f"nels-imports count={entry['count']} {ts}"
write_points(l)
def main():
parser = argparse.ArgumentParser(description='cbu galaxy admin tool')
parser.add_argument('-c', '--config', default="galaxy.json", help="config file", required=True)
parser.add_argument('-s', '--start', help="start time", required=True)
parser.add_argument('-e', '--end', help="end time", required=True)
parser.add_argument('-i', '--interval', help="time interval", required=True)
parser.add_argument('-r', '--resolution', default='5m', help="time resolution to pull data for")
parser.add_argument('-U', '--url', help="time interval")
parser.add_argument('-d', '--database', help="time interval")
parser.add_argument('-u', '--user', help="time interval")
parser.add_argument('-p', '--password', help="time interval")
args = parser.parse_args()
# workflow_stats(args.start, args.end, args.interval)
# sys.exit()
global url, db, dbuser, dbpass
url = args.url
db = args.database
dbuser = args.user
dbpass = args.password
config = config_utils.readin_config_file(args.config)
global DB
if "db_url" in config:
DB = db_utils.DB(config.db_url)
elif "galaxy" in config and "database_connection" in config['galaxy']:
DB = db_utils.DB(config['galaxy']['database_connection'])
# workflow_stats(args.start, args.end, args.interval, args.resolution)
# user_stats(args.start, args.end, args.interval, args.resolution)
# data_stats(args.start, args.end, args.interval, args.resolution)
# job_stats(args.start, args.end, args.interval, args.resolution)
jobs_total(args.start, args.end, args.interval, args.resolution)
datagrowth_total(args.start, args.end, args.interval, args.resolution)
workflow_total(args.start, args.end, args.interval, args.resolution)
nels_export_total(args.start, args.end, args.interval, args.resolution)
nels_import_total(args.start, args.end, args.interval, args.resolution)
if __name__ == "__main__":
main()
|
[
"kbr.timedate_utils.timedelta_to_sec",
"kbr.db_utils.DB",
"argparse.ArgumentParser",
"kbr.config_utils.readin_config_file",
"re.match",
"kbr.timedate_utils.datestr_to_ts",
"datetime.datetime.utcfromtimestamp",
"datetime.timedelta",
"requests.post",
"sys.exit"
] |
[((1308, 1345), 'datetime.datetime.utcfromtimestamp', 'datetime.datetime.utcfromtimestamp', (['(0)'], {}), '(0)\n', (1342, 1345), False, 'import datetime\n'), ((1486, 1521), 'kbr.timedate_utils.datestr_to_ts', 'timedate_utils.datestr_to_ts', (['start'], {}), '(start)\n', (1514, 1521), True, 'import kbr.timedate_utils as timedate_utils\n'), ((1534, 1567), 'kbr.timedate_utils.datestr_to_ts', 'timedate_utils.datestr_to_ts', (['end'], {}), '(end)\n', (1562, 1567), True, 'import kbr.timedate_utils as timedate_utils\n'), ((1584, 1626), 'kbr.timedate_utils.timedelta_to_sec', 'timedate_utils.timedelta_to_sec', (['timeframe'], {}), '(timeframe)\n', (1615, 1626), True, 'import kbr.timedate_utils as timedate_utils\n'), ((7681, 7741), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""cbu galaxy admin tool"""'}), "(description='cbu galaxy admin tool')\n", (7704, 7741), False, 'import argparse\n'), ((8684, 8728), 'kbr.config_utils.readin_config_file', 'config_utils.readin_config_file', (['args.config'], {}), '(args.config)\n', (8715, 8728), True, 'import kbr.config_utils as config_utils\n'), ((518, 571), 'requests.post', 'requests.post', (['wurl'], {'data': 'data', 'auth': '(dbuser, dbpass)'}), '(wurl, data=data, auth=(dbuser, dbpass))\n', (531, 571), False, 'import requests\n'), ((831, 866), 'kbr.timedate_utils.datestr_to_ts', 'timedate_utils.datestr_to_ts', (['start'], {}), '(start)\n', (859, 866), True, 'import kbr.timedate_utils as timedate_utils\n'), ((889, 922), 'kbr.timedate_utils.datestr_to_ts', 'timedate_utils.datestr_to_ts', (['end'], {}), '(end)\n', (917, 922), True, 'import kbr.timedate_utils as timedate_utils\n'), ((949, 990), 'kbr.timedate_utils.timedelta_to_sec', 'timedate_utils.timedelta_to_sec', (['interval'], {}), '(interval)\n', (980, 990), True, 'import kbr.timedate_utils as timedate_utils\n'), ((1967, 2002), 're.match', 're.match', (['"""(\\\\d+)([hdM])"""', 'interval'], {}), "('(\\\\d+)([hdM])', interval)\n", (1975, 2002), 
False, 'import re\n'), ((8783, 8809), 'kbr.db_utils.DB', 'db_utils.DB', (['config.db_url'], {}), '(config.db_url)\n', (8794, 8809), True, 'import kbr.db_utils as db_utils\n'), ((1124, 1167), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'self._timeframe'}), '(seconds=self._timeframe)\n', (1142, 1167), False, 'import datetime\n'), ((1688, 1725), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'timeframe'}), '(seconds=timeframe)\n', (1706, 1725), False, 'import datetime\n'), ((2321, 2332), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2329, 2332), False, 'import sys\n'), ((8898, 8950), 'kbr.db_utils.DB', 'db_utils.DB', (["config['galaxy']['database_connection']"], {}), "(config['galaxy']['database_connection'])\n", (8909, 8950), True, 'import kbr.db_utils as db_utils\n')]
|
from fastapi import APIRouter
router = APIRouter()
@router.get('/')
def index():
return 'hello ergo'
|
[
"fastapi.APIRouter"
] |
[((40, 51), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (49, 51), False, 'from fastapi import APIRouter\n')]
|
import cv2
cap = cv2.VideoCapture("peo.mp4") # input video file
ret, primary = cap.read()
ret, secondary = cap.read()
while cap.isOpened():
diff = cv2.absdiff(primary, secondary)
gray_img = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray_img, (5, 5), 0)
_, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
dilate = cv2.dilate(thresh, None, iterations=7)
cont, _ = cv2.findContours(dilate, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
for contour in cont:
(x, y, w, h) = cv2.boundingRect(contour)
if cv2.contourArea(contour) < 2000:
continue
cv2.rectangle(primary, (x, y), (x + w, y + h), (255, 0, 0), 2)
frame_show = cv2.resize(primary, (854, 480))
cv2.imshow("Output Video", frame_show)
primary = secondary
ret, secondary = cap.read()
if cv2.waitKey(40) == 27:
break
cv2.destroyAllWindows()
cap.release()
|
[
"cv2.resize",
"cv2.GaussianBlur",
"cv2.boundingRect",
"cv2.contourArea",
"cv2.dilate",
"cv2.cvtColor",
"cv2.waitKey",
"cv2.threshold",
"cv2.imshow",
"cv2.VideoCapture",
"cv2.rectangle",
"cv2.absdiff",
"cv2.destroyAllWindows",
"cv2.findContours"
] |
[((18, 45), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""peo.mp4"""'], {}), "('peo.mp4')\n", (34, 45), False, 'import cv2\n'), ((893, 916), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (914, 916), False, 'import cv2\n'), ((156, 187), 'cv2.absdiff', 'cv2.absdiff', (['primary', 'secondary'], {}), '(primary, secondary)\n', (167, 187), False, 'import cv2\n'), ((203, 241), 'cv2.cvtColor', 'cv2.cvtColor', (['diff', 'cv2.COLOR_BGR2GRAY'], {}), '(diff, cv2.COLOR_BGR2GRAY)\n', (215, 241), False, 'import cv2\n'), ((253, 290), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['gray_img', '(5, 5)', '(0)'], {}), '(gray_img, (5, 5), 0)\n', (269, 290), False, 'import cv2\n'), ((307, 354), 'cv2.threshold', 'cv2.threshold', (['blur', '(20)', '(255)', 'cv2.THRESH_BINARY'], {}), '(blur, 20, 255, cv2.THRESH_BINARY)\n', (320, 354), False, 'import cv2\n'), ((368, 406), 'cv2.dilate', 'cv2.dilate', (['thresh', 'None'], {'iterations': '(7)'}), '(thresh, None, iterations=7)\n', (378, 406), False, 'import cv2\n'), ((421, 485), 'cv2.findContours', 'cv2.findContours', (['dilate', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(dilate, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (437, 485), False, 'import cv2\n'), ((716, 747), 'cv2.resize', 'cv2.resize', (['primary', '(854, 480)'], {}), '(primary, (854, 480))\n', (726, 747), False, 'import cv2\n'), ((752, 790), 'cv2.imshow', 'cv2.imshow', (['"""Output Video"""', 'frame_show'], {}), "('Output Video', frame_show)\n", (762, 790), False, 'import cv2\n'), ((535, 560), 'cv2.boundingRect', 'cv2.boundingRect', (['contour'], {}), '(contour)\n', (551, 560), False, 'import cv2\n'), ((635, 697), 'cv2.rectangle', 'cv2.rectangle', (['primary', '(x, y)', '(x + w, y + h)', '(255, 0, 0)', '(2)'], {}), '(primary, (x, y), (x + w, y + h), (255, 0, 0), 2)\n', (648, 697), False, 'import cv2\n'), ((855, 870), 'cv2.waitKey', 'cv2.waitKey', (['(40)'], {}), '(40)\n', (866, 870), False, 'import cv2\n'), ((573, 597), 'cv2.contourArea', 
'cv2.contourArea', (['contour'], {}), '(contour)\n', (588, 597), False, 'import cv2\n')]
|
from selenium import webdriver
from time import sleep
from selenium.webdriver.common.by import By
#initialize webdriver
executable_path='/Users/softwareengineer/Desktop/web_automation/chromedriver'
driver = webdriver.Chrome(executable_path)
# Expand the window
driver.maximize_window()
driver.implicitly_wait(5)
#Open the URL
driver.get("https://google.com")
search_bar = driver.find_element(By.NAME, 'q')
search_bar.clear()
search_bar.send_keys("<NAME>")
#Wait for 3 sec
sleep(3)
#click search
driver.find_element(By.NAME, 'btnK').click()
print("Congratulations! Test is PASSED!")
sleep(5)
driver.quit()
|
[
"selenium.webdriver.Chrome",
"time.sleep"
] |
[((209, 242), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['executable_path'], {}), '(executable_path)\n', (225, 242), False, 'from selenium import webdriver\n'), ((477, 485), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (482, 485), False, 'from time import sleep\n'), ((589, 597), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (594, 597), False, 'from time import sleep\n')]
|
"""This script crawls the CURIA database and saves all cases
and relevant document links for each case to the database.
"""
import argparse
import concurrent.futures
from lazylawyer.crawlers.crawlers import CURIACrawler
from lazylawyer.database import table_cases, table_docs, table_appeals
from lazylawyer import helpers
from tqdm import tqdm
def crawl_cases_docs_curia(crawl_docs_only=False, num_cases=-1):
"""Crawls cases and the corresponding documents.
Input params:
crawl_docs_only: If True, does not crawl cases and only crawls docs.
num_cases: If <= 0, crawls all available cases. Otherwise, crawls num_cases
cases.
"""
formats = ['html', 'pdf'] # formats are processed in the order they are given
crawler = CURIACrawler()
if crawl_docs_only:
cases = table_cases.get_all_cases()
max_case_id = table_docs.get_max_case_id_in_docs()
cases = [x for x in cases if x['id'] > max_case_id]
else:
cases, appeals = crawler.crawl_ecj_cases(num_cases)
table_cases.write_cases(cases)
cases = table_cases.get_all_cases() # obtain cases once more to get ids
# convert appeal case names to numbers
for appeal in appeals:
orig_case_id = table_cases.get_case_with_name(appeal['orig'])
appeal['orig_case_id'] = None if not orig_case_id else orig_case_id['id']
appeal_case_id = table_cases.get_case_with_name(appeal['appeal'])
appeal['appeal_case_id'] = None if not appeal_case_id else appeal_case_id['id']
del appeal['orig']
del appeal['appeal']
appeals = [appeal for appeal in appeals if appeal['orig_case_id'] and appeal['appeal_case_id']] # remove appeals with None
table_appeals.write_appeals(appeals)
cases_batches = helpers.create_batches_list(cases, 50)
for batch in tqdm(cases_batches):
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
futures_cases = {executor.submit(crawler.crawl_case_docs, case, formats):case for case in batch}
for future in concurrent.futures.as_completed(futures_cases):
case = futures_cases[future]
docs = future.result()
if docs is not None:
# insert parties to cases table
table_cases.update_parties(case, docs[0]['party1'], docs[0]['party2'])
table_cases.update_subject(case, docs[0]['subject'])
for doc in docs:
doc.pop('party1')
doc.pop('party2')
doc.pop('subject')
table_docs.write_docs_for_case(case, docs)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Crawl CURIA cases and docs.')
parser.add_argument('--docs_only', action='store_true', help='only crawl documents')
parser.add_argument('--num_cases', type=int, default=-1, help='only crawl a limited number of cases')
args = parser.parse_args()
crawl_cases_docs_curia(args.docs_only, args.num_cases)
|
[
"lazylawyer.helpers.create_batches_list",
"lazylawyer.crawlers.crawlers.CURIACrawler",
"tqdm.tqdm",
"argparse.ArgumentParser",
"lazylawyer.database.table_cases.get_all_cases",
"lazylawyer.database.table_cases.update_subject",
"lazylawyer.database.table_docs.get_max_case_id_in_docs",
"lazylawyer.database.table_cases.get_case_with_name",
"lazylawyer.database.table_docs.write_docs_for_case",
"lazylawyer.database.table_cases.update_parties",
"lazylawyer.database.table_appeals.write_appeals",
"lazylawyer.database.table_cases.write_cases"
] |
[((769, 783), 'lazylawyer.crawlers.crawlers.CURIACrawler', 'CURIACrawler', ([], {}), '()\n', (781, 783), False, 'from lazylawyer.crawlers.crawlers import CURIACrawler\n'), ((1857, 1895), 'lazylawyer.helpers.create_batches_list', 'helpers.create_batches_list', (['cases', '(50)'], {}), '(cases, 50)\n', (1884, 1895), False, 'from lazylawyer import helpers\n'), ((1914, 1933), 'tqdm.tqdm', 'tqdm', (['cases_batches'], {}), '(cases_batches)\n', (1918, 1933), False, 'from tqdm import tqdm\n'), ((2774, 2840), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Crawl CURIA cases and docs."""'}), "(description='Crawl CURIA cases and docs.')\n", (2797, 2840), False, 'import argparse\n'), ((829, 856), 'lazylawyer.database.table_cases.get_all_cases', 'table_cases.get_all_cases', ([], {}), '()\n', (854, 856), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((881, 917), 'lazylawyer.database.table_docs.get_max_case_id_in_docs', 'table_docs.get_max_case_id_in_docs', ([], {}), '()\n', (915, 917), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((1060, 1090), 'lazylawyer.database.table_cases.write_cases', 'table_cases.write_cases', (['cases'], {}), '(cases)\n', (1083, 1090), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((1108, 1135), 'lazylawyer.database.table_cases.get_all_cases', 'table_cases.get_all_cases', ([], {}), '()\n', (1133, 1135), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((1797, 1833), 'lazylawyer.database.table_appeals.write_appeals', 'table_appeals.write_appeals', (['appeals'], {}), '(appeals)\n', (1824, 1833), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((1282, 1328), 'lazylawyer.database.table_cases.get_case_with_name', 'table_cases.get_case_with_name', (["appeal['orig']"], {}), "(appeal['orig'])\n", (1312, 1328), False, 'from lazylawyer.database import 
table_cases, table_docs, table_appeals\n'), ((1446, 1494), 'lazylawyer.database.table_cases.get_case_with_name', 'table_cases.get_case_with_name', (["appeal['appeal']"], {}), "(appeal['appeal'])\n", (1476, 1494), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((2377, 2447), 'lazylawyer.database.table_cases.update_parties', 'table_cases.update_parties', (['case', "docs[0]['party1']", "docs[0]['party2']"], {}), "(case, docs[0]['party1'], docs[0]['party2'])\n", (2403, 2447), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((2465, 2517), 'lazylawyer.database.table_cases.update_subject', 'table_cases.update_subject', (['case', "docs[0]['subject']"], {}), "(case, docs[0]['subject'])\n", (2491, 2517), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n'), ((2687, 2729), 'lazylawyer.database.table_docs.write_docs_for_case', 'table_docs.write_docs_for_case', (['case', 'docs'], {}), '(case, docs)\n', (2717, 2729), False, 'from lazylawyer.database import table_cases, table_docs, table_appeals\n')]
|
# +
from .variable import Variable
from .spd import SPD
from collections import Counter
from math import pi
import torch
gauss_norm = torch.tensor(2*pi).sqrt()
def discrete(val, delta):
return tuple(val.div(delta).floor().int().view(-1).tolist())
def conformed(x):
if x.dim() > 1:
return x
else:
return x.view(1, -1)
def dist(x, y):
_x = conformed(x)
_y = conformed(y)
return _x[:, None]-_y[None]
class GaussianKernel:
def __init__(self, delta):
self.delta = torch.as_tensor(delta)
def __call__(self, x, y):
d = dist(x, y).div(self.delta).norm(dim=-1)
k = d.pow(2).neg().div(2).exp()
return k
def optimize(self, *args, **kwargs):
return None
class GaussianARD:
def __init__(self, dim):
self.trans = torch.distributions.transforms.LowerCholeskyTransform()
self._param = torch.zeros(dim, dim)
@property
def covariance(self):
chol = self.trans(self._param)
return chol @ chol.t()
@property
def precision(self):
return self.trans(self._param).cholesky_inverse()
def __call__(self, x, y):
r = dist(x, y)
d = ((r @ self.precision)*r).sum(dim=-1)
k = d.neg().div(2).exp()
return k
def optimize(self, x, y, steps=100, lr=None, noise=0.):
self._param.requires_grad_(True)
lr = lr or 1./y.var().sqrt()
optimizer = torch.optim.LBFGS([self._param], lr=lr)
for _ in range(steps):
def closure():
optimizer.zero_grad()
matrix = self(x, x) + noise*torch.eye(x.size(0))
cholesky = matrix.cholesky()
inverse = cholesky.cholesky_inverse()
mu = inverse @ y
loss = (y.T @ mu + 2*cholesky.diag().log().sum())
loss.backward()
return loss
optimizer.step(closure)
self._param.requires_grad_(False)
return self(x, x)
class History(Variable):
def __init__(self, var, file=None, stop=float('inf')):
super().__init__(var, file=file)
self.requires_update.add(self)
self.var = var
self.file = file
self.history = []
self.write(f'# {var}', 'w')
self.stop = stop
def write(self, msg, mode='a'):
if self.file:
with open(self.file, mode) as f:
f.write(f'{msg}\n')
def evaluate(self, contex):
if self.history == []:
self.update()
return torch.cat(self.history)
def update(self, x=None):
if len(self.history) < self.stop:
t = x or self.var().clone().detach()
self.history.append(t)
self.write(t.tolist())
class Histogram(Variable):
def __init__(self, var, delta):
super().__init__(var, delta)
self.requires_update.add(self)
self.var = var
self.delta = torch.as_tensor(delta)
self.hst = Counter()
self.fixed = False
def evaluate(self, context):
return self.hst[discrete(self.var(contex), self.delta)]
def update(self, x=None):
if not self.fixed:
self.hst[discrete(x or self.var(), self.delta)] += 1.
def full(self, density=True):
x = torch.tensor(list(self.hst.keys()))*self.delta
y = torch.tensor(list(self.hst.values()))
if density:
y /= y.sum()*self.delta.prod()
return x, y
def save(self, file):
with open(file, 'w') as f:
for key, val in self.hst.items():
f.write(f'{key} : {val}\n')
def load(self, file):
with open(file, 'r') as f:
for line in f:
key, val = line.split(':')
self.hst[eval(key)] += eval(val)
class KDE(Histogram):
def __init__(self, var, kern):
super().__init__(var, kern.delta)
self.kern = kern
def evaluate(self, context):
X, y = self.full(density=False)
if X is None:
return torch.zeros(1)
x = self.var(context)
k = self.kern(x, X+self.delta/2) # <- dist from center of grid
kde = k.mul(y).sum(dim=-1) / gauss_norm
return kde
class KDR(Variable):
def __init__(self, var, kern, epsilon=0.1):
super().__init__(var, kern)
self.requires_update.add(self)
self.var = var
self.kern = kern
self.epsilon = epsilon
self.fixed = False
self.inducing = []
@property
def X(self):
return torch.stack(self.inducing)
@property
def y(self):
return self.k.data@self.mu / gauss_norm
def update(self, x=None):
if not self.fixed:
x = x or self.var().clone().detach()
if len(self.inducing) == 0:
self.inducing.append(x)
self.k = SPD(epsilon=self.epsilon)
self.mu = torch.ones(1, 1)
else:
k = self.kern(x, self.X)
inv = self.k.inverse()
d_mu = inv@k.t()
self.mu += d_mu
if self.k.append_(k, 1.):
self.inducing.append(x)
#self.mu = torch.cat([self.mu, self().detach().view(1, 1)])
self.mu = torch.cat([self.mu, torch.zeros(1, 1)])
def optimize(self, **kwargs):
opt = self.kern.optimize(self.X, self.y, **kwargs)
if opt is not None:
self.k.data = opt
def evaluate(self, context):
if len(self.inducing) == 0:
return torch.zeros(1)
x = self.var(context)
k = self.kern(x, self.X)
kde = (k@self.mu).sum(dim=-1)
return kde / gauss_norm
|
[
"torch.ones",
"torch.stack",
"collections.Counter",
"torch.cat",
"torch.distributions.transforms.LowerCholeskyTransform",
"torch.zeros",
"torch.as_tensor",
"torch.optim.LBFGS",
"torch.tensor"
] |
[((136, 156), 'torch.tensor', 'torch.tensor', (['(2 * pi)'], {}), '(2 * pi)\n', (148, 156), False, 'import torch\n'), ((522, 544), 'torch.as_tensor', 'torch.as_tensor', (['delta'], {}), '(delta)\n', (537, 544), False, 'import torch\n'), ((819, 874), 'torch.distributions.transforms.LowerCholeskyTransform', 'torch.distributions.transforms.LowerCholeskyTransform', ([], {}), '()\n', (872, 874), False, 'import torch\n'), ((897, 918), 'torch.zeros', 'torch.zeros', (['dim', 'dim'], {}), '(dim, dim)\n', (908, 918), False, 'import torch\n'), ((1440, 1479), 'torch.optim.LBFGS', 'torch.optim.LBFGS', (['[self._param]'], {'lr': 'lr'}), '([self._param], lr=lr)\n', (1457, 1479), False, 'import torch\n'), ((2550, 2573), 'torch.cat', 'torch.cat', (['self.history'], {}), '(self.history)\n', (2559, 2573), False, 'import torch\n'), ((2952, 2974), 'torch.as_tensor', 'torch.as_tensor', (['delta'], {}), '(delta)\n', (2967, 2974), False, 'import torch\n'), ((2994, 3003), 'collections.Counter', 'Counter', ([], {}), '()\n', (3001, 3003), False, 'from collections import Counter\n'), ((4566, 4592), 'torch.stack', 'torch.stack', (['self.inducing'], {}), '(self.inducing)\n', (4577, 4592), False, 'import torch\n'), ((4055, 4069), 'torch.zeros', 'torch.zeros', (['(1)'], {}), '(1)\n', (4066, 4069), False, 'import torch\n'), ((5594, 5608), 'torch.zeros', 'torch.zeros', (['(1)'], {}), '(1)\n', (5605, 5608), False, 'import torch\n'), ((4937, 4953), 'torch.ones', 'torch.ones', (['(1)', '(1)'], {}), '(1, 1)\n', (4947, 4953), False, 'import torch\n'), ((5333, 5350), 'torch.zeros', 'torch.zeros', (['(1)', '(1)'], {}), '(1, 1)\n', (5344, 5350), False, 'import torch\n')]
|