code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
#<NAME>
#adaptation of shortest paths code from my phd for more general case. kind of.
#no objects, just a method, gpl since penn/phd
import priodict
def shortestPaths(nodes, edges, startDist, initialNodes):
    '''simple shortest paths algorithm, using advanced data structure. calculates
    all distances from the starting set of nodes to all other nodes. returns
    nodes to distance mapping. nodes is a list, edges is a dict from nodes to
    other nodes with distance as a tuple, startdist is a float, initialnodes
    is a list.'''
    # frontier: priority dictionary keyed by node, valued by tentative distance
    currentNodes = priodict.priorityDictionary()
    # settled/best-seen distance per node; this is the returned mapping
    nodeDist = {}
    for initialNode in initialNodes:
        currentNodes[initialNode] = startDist
    while len(currentNodes) > 0:
        currentNode = currentNodes.smallest()
        # reuse the node we just looked up instead of recomputing smallest()
        lastDist = currentNodes.pop(currentNode)
        if currentNode not in nodeDist or lastDist < nodeDist[currentNode]:
            # settle this node's distance, then relax all outgoing edges
            nodeDist[currentNode] = lastDist
            for neighborNode, nbDist in edges[currentNode]:
                newDist = lastDist + nbDist
                if neighborNode not in currentNodes or \
                        newDist <= currentNodes[neighborNode]:
                    currentNodes[neighborNode] = newDist # updates prio dict
    return nodeDist
|
[
"priodict.priorityDictionary"
] |
[((549, 578), 'priodict.priorityDictionary', 'priodict.priorityDictionary', ([], {}), '()\n', (576, 578), False, 'import priodict\n')]
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from pathlib import Path
import random
import numpy as np
import cv2
from cv2 import cv2 as cv
from keras.utils import Sequence
import os
def readTxt(txtpath):
    """Read a text file and return its lines with surrounding whitespace stripped.

    :param txtpath: path to a text file containing one entry per line
    :return: list of stripped lines (blank lines become empty strings)
    """
    # Iterating the file object streams lines lazily; no need for readlines().
    with open(txtpath, 'r') as f:
        return [line.strip() for line in f]
class NoisyImageGenerator(Sequence):
    """Keras Sequence yielding (source, target) image patch batches.

    Source and target image locations come from two text files (one path per
    line, see readTxt). For each randomly chosen source entry, the matching
    target path is found by grepping the source basename in the target list
    file through a shell pipeline (`grep ... | shuf -n 1`).
    NOTE(review): relies on `grep` and `shuf` being on PATH — POSIX only.
    """
    def __init__(self, source_image_txt, target_image_txt, batch_size, image_size):
        """
        :param source_image_txt: txt file listing source image directories;
            the actual file read is <entry>/<basename(entry)>.jpg
        :param target_image_txt: txt file listing target image paths
        :param batch_size: number of image pairs per batch
        :param image_size: side length of the square patches returned
        :raises ValueError: if either list file yields no entries
        """
        #image_suffixes = (".jpeg", ".jpg", ".png", ".bmp")
        # self.source_image_paths = [p for p in Path(source_image_dir).glob("**/*") if p.suffix.lower() in image_suffixes]
        # self.target_image_paths = [p for p in Path(target_image_dir).glob("**/*") if p.suffix.lower() in image_suffixes]
        self.source_image_paths = readTxt(source_image_txt)
        self.target_image_paths = readTxt(target_image_txt)
        # kept so __getitem__ can grep the target list file at batch time
        self.target_image_txt = target_image_txt
        #self.source_noise_model = source_noise_model
        #self.target_noise_model = target_noise_model
        self.source_image_num = len(self.source_image_paths)
        self.target_image_num = len(self.target_image_paths)
        self.batch_size = batch_size
        self.image_size = image_size
        #self.target_image_dir = target_image_dir
        if self.source_image_num == 0:
            raise ValueError("source image dir does not include any image")
        if self.target_image_num == 0:
            raise ValueError("target image dir does not include any image")
    def __len__(self):
        # number of batches per epoch (integer division; remainder dropped)
        return self.source_image_num // self.batch_size
    def __getitem__(self, idx):
        """Assemble one batch of (source, target) uint8 patches.

        `idx` is ignored: pairs are drawn at random until the batch fills.
        Returns (x, y), each of shape (batch_size, image_size, image_size, 3).
        """
        batch_size = self.batch_size
        image_size = self.image_size
        #target_image_dir = self.target_image_dir
        x = np.zeros((batch_size, image_size, image_size, 3), dtype=np.uint8)
        y = np.zeros((batch_size, image_size, image_size, 3), dtype=np.uint8)
        sample_id = 0
        ########
        while True:
            source_image_path = random.choice(self.source_image_paths)
            #print(source_image_path)
            name = os.path.basename(source_image_path)
            # entries are directories named after the image they contain
            source_image_path = source_image_path + "/" +name + ".jpg"
            #print("source_image_path: ",source_image_path)
            #label_gt = os.path.basename(os.path.dirname(source_image_path)) #basename: returns the file name; dirname: strips the file name, returns the directory
            #re_item = '.*/' + label_gt + '/.*'
            #target_image_list = os.popen("grep %s %s | shuf -n 1" %(name, self.target_image_txt)).readlines()
            # pick one random matching line from the target list file
            target_image_list = os.popen("grep --word-regexp %s %s | shuf -n 1" %(name, self.target_image_txt)).readlines()
            if len(target_image_list)== 0:
                continue
            target_image_path = target_image_list[0].strip()
            #print("target_image_path: ",target_image_path)
            #print(" ")
            # grep may prefix matches with "file:"; keep only the path part
            if ":" in target_image_path:
                target_image_path = target_image_path.split(":")[-1]
            # print('target_image_list',target_image_list)
            if not os.path.exists(target_image_path) or not os.path.exists(source_image_path):
                print(source_image_path, target_image_list)
                print("Image NOT exists!")
                continue
            source_image = cv2.imread(source_image_path)
            target_image = cv2.imread(target_image_path)
            # default patches: whole image resized to image_size
            source_patch = cv2.resize(source_image,(image_size,image_size))
            target_patch = cv2.resize(target_image,(image_size,image_size))
            h, w, _ = source_image.shape
            # if the source is large enough, crop a random aligned patch instead
            # NOTE(review): crop indices come from the SOURCE dims but are also
            # applied to the target — assumes both images share a size; verify.
            if h >= image_size and w >= image_size:
                #h, w, _ = source_image.shape
                i = np.random.randint(h - image_size + 1)
                j = np.random.randint(w - image_size + 1)
                source_patch = source_image[i:i + image_size, j:j + image_size]
                target_patch = target_image[i:i + image_size, j:j + image_size]
            h1, w1, _ = source_patch.shape
            h2, w2, _ = target_patch.shape
            #if(h1 != h2 | w1 != w2):
                #print(source_image_path)
                #print("h1,w1",h1," ",w1)
                #print(target_image_path)
                #print("h2,w2",h2," ",w2)
            #cv2.imshow("source_patch", source_patch)
            #cv2.imshow("target_patch", target_patch)
            #cv2.waitKey()
            x[sample_id] = source_patch
            y[sample_id] = target_patch
            sample_id += 1
            if sample_id == batch_size:
                return x, y
class ValGenerator(Sequence):
    """Keras Sequence for validation: all (source, target) pairs are located
    and cropped once in __init__ and served from memory afterwards.

    Pair matching mirrors NoisyImageGenerator: grep the source basename in
    the target list file via a shell pipeline.
    NOTE(review): relies on `grep` and `shuf` being on PATH — POSIX only.
    """
    def __init__(self, source_image_txt, target_image_txt, image_size):
        """
        :param source_image_txt: txt file listing source image directories;
            the actual file read is <entry>/<basename(entry)>.jpg
        :param target_image_txt: txt file listing target image paths
        :param image_size: side length of the square crop (applied only when
            the source image is at least that large in both dimensions)
        :raises ValueError: if either list file yields no entries
        """
        self.test_source_image_paths = readTxt(source_image_txt)
        self.test_target_image_paths = readTxt(target_image_txt)
        self.target_image_txt = target_image_txt
        self.test_source_image_num = len(self.test_source_image_paths)
        self.test_target_image_num = len(self.test_target_image_paths)
        self.image_size = image_size
        #self.test_target_dir = test_target_dir
        # list of [source_batch, target_batch] pairs, each with a leading
        # batch axis of 1 (see np.expand_dims below)
        self.data = []
        if self.test_source_image_num == 0:
            raise ValueError("test source image dir does not include any image")
        if self.test_target_image_num == 0:
            raise ValueError("test_target image dir does not include any image")
        ######
        for test_source_image_path in self.test_source_image_paths:
            name = os.path.basename(test_source_image_path)
            # entries are directories named after the image they contain
            test_source_image_path = test_source_image_path + "/" +name + ".jpg"
            #label_gt = os.path.basename(os.path.dirname(source_image_path)) #basename: returns the file name; dirname: strips the file name, returns the directory
            #re_item = '.*/' + label_gt + '/.*'
            # pick one random matching line from the target list file
            target_image_list = os.popen("grep --word-regexp %s %s | shuf -n 1" %(name, self.target_image_txt)).readlines()
            #filename = os.path.basename(test_source_image_path)
            #label_gt = os.path.basename(os.path.dirname(test_source_image_path))
            #re_item = '.*/' + label_gt + '/.*'
            #target_image_list = os.popen("grep %s %s | shuf -n 1" %(re_item, self.target_image_txt)).readlines()
            # print('1target_image_list',target_image_list)
            if len(target_image_list) ==0:
                continue
            test_target_image_path = target_image_list[0].strip()
            # grep may prefix matches with "file:"; keep only the path part
            if ":" in test_target_image_path:
                test_target_image_path = test_target_image_path.split(":")[-1]
            #test_target_image_path = self.test_target_dir+'/'+real_fname
            if not os.path.exists(test_target_image_path):
                continue
            x_source = cv2.imread(test_source_image_path)
            y_target = cv2.imread(test_target_image_path)
            h, w, _ = x_source.shape
            # crop a random aligned patch when the source is large enough;
            # otherwise keep the full-size images as-is (no resize here)
            # NOTE(review): crop indices come from the SOURCE dims but are also
            # applied to the target — assumes both images share a size; verify.
            if h >= image_size and w >= image_size:
                i = np.random.randint(h - image_size + 1)
                j = np.random.randint(w - image_size + 1)
                x_source = x_source[i:i + image_size, j:j + image_size]
                y_target = y_target[i:i + image_size, j:j + image_size]
            #cv2.imshow("source_patch", x_source)
            #cv2.imshow("target_patch", y_target)
            #cv2.waitKey()
            #x = cv2.resize(x_source,(self.image_size,self.image_size))
            #print('test_target_image_path',test_target_image_path)
            #print('y_target:',y_target.shape)
            #y = cv2.resize(y_target,(self.image_size,self.image_size))
            self.data.append([np.expand_dims(x_source, axis=0), np.expand_dims(y_target, axis=0)])
    def __len__(self):
        # one pre-built pair per index
        return len(self.data)
    def __getitem__(self, idx):
        return self.data[idx]
|
[
"os.path.basename",
"os.popen",
"numpy.zeros",
"random.choice",
"os.path.exists",
"numpy.expand_dims",
"cv2.imread",
"numpy.random.randint",
"cv2.resize"
] |
[((1788, 1853), 'numpy.zeros', 'np.zeros', (['(batch_size, image_size, image_size, 3)'], {'dtype': 'np.uint8'}), '((batch_size, image_size, image_size, 3), dtype=np.uint8)\n', (1796, 1853), True, 'import numpy as np\n'), ((1866, 1931), 'numpy.zeros', 'np.zeros', (['(batch_size, image_size, image_size, 3)'], {'dtype': 'np.uint8'}), '((batch_size, image_size, image_size, 3), dtype=np.uint8)\n', (1874, 1931), True, 'import numpy as np\n'), ((2024, 2062), 'random.choice', 'random.choice', (['self.source_image_paths'], {}), '(self.source_image_paths)\n', (2037, 2062), False, 'import random\n'), ((2133, 2168), 'os.path.basename', 'os.path.basename', (['source_image_path'], {}), '(source_image_path)\n', (2149, 2168), False, 'import os\n'), ((3383, 3412), 'cv2.imread', 'cv2.imread', (['source_image_path'], {}), '(source_image_path)\n', (3393, 3412), False, 'import cv2\n'), ((3440, 3469), 'cv2.imread', 'cv2.imread', (['target_image_path'], {}), '(target_image_path)\n', (3450, 3469), False, 'import cv2\n'), ((3498, 3548), 'cv2.resize', 'cv2.resize', (['source_image', '(image_size, image_size)'], {}), '(source_image, (image_size, image_size))\n', (3508, 3548), False, 'import cv2\n'), ((3574, 3624), 'cv2.resize', 'cv2.resize', (['target_image', '(image_size, image_size)'], {}), '(target_image, (image_size, image_size))\n', (3584, 3624), False, 'import cv2\n'), ((5644, 5684), 'os.path.basename', 'os.path.basename', (['test_source_image_path'], {}), '(test_source_image_path)\n', (5660, 5684), False, 'import os\n'), ((6877, 6911), 'cv2.imread', 'cv2.imread', (['test_source_image_path'], {}), '(test_source_image_path)\n', (6887, 6911), False, 'import cv2\n'), ((6948, 6982), 'cv2.imread', 'cv2.imread', (['test_target_image_path'], {}), '(test_target_image_path)\n', (6958, 6982), False, 'import cv2\n'), ((3795, 3832), 'numpy.random.randint', 'np.random.randint', (['(h - image_size + 1)'], {}), '(h - image_size + 1)\n', (3812, 3832), True, 'import numpy as np\n'), ((3853, 3890), 
'numpy.random.randint', 'np.random.randint', (['(w - image_size + 1)'], {}), '(w - image_size + 1)\n', (3870, 3890), True, 'import numpy as np\n'), ((6789, 6827), 'os.path.exists', 'os.path.exists', (['test_target_image_path'], {}), '(test_target_image_path)\n', (6803, 6827), False, 'import os\n'), ((7118, 7155), 'numpy.random.randint', 'np.random.randint', (['(h - image_size + 1)'], {}), '(h - image_size + 1)\n', (7135, 7155), True, 'import numpy as np\n'), ((7176, 7213), 'numpy.random.randint', 'np.random.randint', (['(w - image_size + 1)'], {}), '(w - image_size + 1)\n', (7193, 7213), True, 'import numpy as np\n'), ((2643, 2728), 'os.popen', 'os.popen', (["('grep --word-regexp %s %s | shuf -n 1' % (name, self.target_image_txt))"], {}), "('grep --word-regexp %s %s | shuf -n 1' % (name, self.target_image_txt)\n )\n", (2651, 2728), False, 'import os\n'), ((3151, 3184), 'os.path.exists', 'os.path.exists', (['target_image_path'], {}), '(target_image_path)\n', (3165, 3184), False, 'import os\n'), ((3192, 3225), 'os.path.exists', 'os.path.exists', (['source_image_path'], {}), '(source_image_path)\n', (3206, 3225), False, 'import os\n'), ((5974, 6059), 'os.popen', 'os.popen', (["('grep --word-regexp %s %s | shuf -n 1' % (name, self.target_image_txt))"], {}), "('grep --word-regexp %s %s | shuf -n 1' % (name, self.target_image_txt)\n )\n", (5982, 6059), False, 'import os\n'), ((7834, 7866), 'numpy.expand_dims', 'np.expand_dims', (['x_source'], {'axis': '(0)'}), '(x_source, axis=0)\n', (7848, 7866), True, 'import numpy as np\n'), ((7868, 7900), 'numpy.expand_dims', 'np.expand_dims', (['y_target'], {'axis': '(0)'}), '(y_target, axis=0)\n', (7882, 7900), True, 'import numpy as np\n')]
|
import sys
import os
import math
import cv2
import numpy as np
import pandas as pd
from skimage import io
from PIL import Image
from sklearn.model_selection import train_test_split
from skimage.color import gray2rgb
import torch
from torch.utils.data.sampler import SubsetRandomSampler
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms, utils
sys.path.append('../')
from config.cfg import cfg
class RafFaceDataset(Dataset):
    """
    RAF-Face dataset for Face Expression Recognition.

    Each sample combines the aligned face image with gender, race, age and
    emotion labels plus the five facial landmarks parsed from the manual
    annotation files.
    """

    def __init__(self, train=True, type='basic', transform=None):
        """
        :param train: load the 'train_*' files when True, else the 'test_*' files
        :param type: RAF-Face subset name (e.g. 'basic') used to build paths
        :param transform: optional transform applied to the PIL image
        """
        manual_annotation_dir = os.path.join(cfg['root'], 'RAF-Face', '%s/Annotation/manual' % type)
        emotion_label_txt_path = os.path.join(cfg['root'], 'RAF-Face', "%s/EmoLabel/list_patition_label.txt" % type)
        # np.str was deprecated in NumPy 1.20 and removed in 1.24; the builtin
        # str is the drop-in replacement with identical behavior here.
        emotion_dict = dict(np.loadtxt(emotion_label_txt_path, dtype=str))
        # The former train and test branches were identical except for the
        # filename prefix, so they are folded into a single loop.
        prefix = 'train_' if train else 'test_'
        face_files = []
        genders = []
        races = []
        ages = []
        emotions = []
        ldmks = []
        for fname in os.listdir(manual_annotation_dir):
            if not fname.startswith(prefix):
                continue
            face_fname = fname.replace('_manu_attri', '_aligned').replace('.txt', '.jpg')
            face_files.append(os.path.join(cfg['root'], 'RAF-Face', '%s/Image/aligned' % type, face_fname))
            with open(os.path.join(manual_annotation_dir, fname), mode='rt') as f:
                manu_info_list = f.readlines()
                # Annotation layout (as consumed here): lines 0-4 are
                # tab-separated landmark coordinates, line 5 gender,
                # line 6 race, line 7 age.
                genders.append(int(manu_info_list[5]))
                races.append(int(manu_info_list[6]))
                ages.append(int(manu_info_list[7]))
                # Emotion labels in the txt file are 1-based; shift to 0-based.
                emotions.append(int(emotion_dict[face_fname.replace('_aligned', '')].strip()) - 1)
                ldmks.append(np.array([[[float(v.replace('\n', ''))] for v in line.split('\t')] for line in
                                       manu_info_list[0:5]]).flatten().tolist())
        self.face_files = face_files
        self.genders = genders
        self.races = races
        self.ages = ages
        self.emotions = emotions
        self.ldmks = ldmks
        self.transform = transform

    def __len__(self):
        return len(self.face_files)

    def __getitem__(self, idx):
        """Return a dict with image, labels, landmarks and source filename."""
        image = io.imread(self.face_files[idx])
        gender = self.genders[idx]
        race = self.races[idx]
        age = self.ages[idx]
        emotion = self.emotions[idx]
        ldmk = self.ldmks[idx]
        sample = {'image': image, 'gender': gender, 'race': race, 'age': age, 'emotion': emotion,
                  'landmark': np.array(ldmk), 'filename': self.face_files[idx]}
        if self.transform:
            sample['image'] = self.transform(Image.fromarray(sample['image'].astype(np.uint8)))
        return sample
class RafPartDataset(Dataset):
    """
    RAF-Face dataset for Local Part crops.

    Serves pre-cropped local-part images (Mouth, LeftEye, RightEye, Nose)
    together with their 0-based emotion label.
    """

    def __init__(self, train=True, type='basic', part_name="Mouth", transform=None):
        """
        :param train: load the 'train_*' files when True, else the 'test_*' files
        :param type: RAF-Face subset name (e.g. 'basic') used to build paths
        :param part_name: Mouth, LeftEye, RightEye, Nose
        :param transform: optional transform applied to the PIL image
        """
        # manual_annotation_dir = os.path.join(cfg['root'], 'RAF-Face', '%s/Annotation/manual' % type)
        emotion_label_txt_path = os.path.join(cfg['root'], 'RAF-Face', "%s/EmoLabel/list_patition_label.txt" % type)
        local_part_img_dir = os.path.join(cfg['root'], 'RAF-Face', '{0}/LocalParts/{1}'.format(type, part_name))
        # np.str was deprecated in NumPy 1.20 and removed in 1.24; the builtin
        # str is the drop-in replacement with identical behavior here.
        emotion_dict = dict(np.loadtxt(emotion_label_txt_path, dtype=str))
        # The former train and test branches were identical except for the
        # filename prefix, so they are folded into a single loop.
        prefix = 'train_' if train else 'test_'
        local_part_imgs = []
        emotions = []
        for fname in os.listdir(local_part_img_dir):
            if fname.startswith(prefix):
                local_part_imgs.append(os.path.join(local_part_img_dir, fname))
                # Emotion labels in the txt file are 1-based; shift to 0-based.
                emotions.append(int(emotion_dict[fname.replace('_aligned', '')].strip()) - 1)
        self.local_part_imgs = local_part_imgs
        self.emotions = emotions
        self.transform = transform

    def __len__(self):
        return len(self.local_part_imgs)

    def __getitem__(self, idx):
        """Return a dict with the part image, emotion label and filename."""
        image = io.imread(self.local_part_imgs[idx])
        emotion = self.emotions[idx]
        sample = {'image': image, 'emotion': emotion, 'filename': self.local_part_imgs[idx]}
        if self.transform:
            trans_image = self.transform(Image.fromarray(sample['image'].astype(np.uint8)))
            sample['image'] = trans_image
        return sample
class CelebADataset(Dataset):
    """
    CelebA dataset.

    Loads the 40 binary face attributes from list_attr_celeba.txt and serves
    each aligned image with its attribute labels mapped to {0, 1}.
    """

    def __init__(self, transform=None):
        """
        :param transform: optional transform applied to the PIL image
        """
        list_attr_celeba_txt = os.path.join(cfg['root'], 'CelebA', 'Anno', 'list_attr_celeba.txt')
        df = pd.read_csv(list_attr_celeba_txt, delim_whitespace=True, header=None)
        df.columns = ["File", "5_o_Clock_Shadow", "Arched_Eyebrows", "Attractive", "Bags_Under_Eyes", "Bald", "Bangs",
                      "Big_Lips", "Big_Nose", "Black_Hair", "Blond_Hair", "Blurry", "Brown_Hair", "Bushy_Eyebrows",
                      "Chubby",
                      "Double_Chin", "Eyeglasses", "Goatee", "Gray_Hair", "Heavy_Makeup", "High_Cheekbones", "Male",
                      "Mouth_Slightly_Open", "Mustache", "Narrow_Eyes", "No_Beard", "Oval_Face", "Pale_Skin",
                      "Pointy_Nose",
                      "Receding_Hairline", "Rosy_Cheeks", "Sideburns", "Smiling", "Straight_Hair", "Wavy_Hair",
                      "Wearing_Earrings", "Wearing_Hat", "Wearing_Lipstick", "Wearing_Necklace", "Wearing_Necktie",
                      "Young"]
        self.file_list = df['File']
        self.o_clock_shadow_list = df['5_o_Clock_Shadow']
        self.arched_eyebrows_list = df['Arched_Eyebrows']
        self.attractive_list = df['Attractive']
        self.bags_under_eyes_list = df['Bags_Under_Eyes']
        self.bald_list = df['Bald']
        self.bangs_list = df['Bangs']
        self.big_lips_list = df['Big_Lips']
        self.big_nose_list = df['Big_Nose']
        self.black_hair_list = df['Black_Hair']
        self.blond_hair_list = df['Blond_Hair']
        self.blurry_list = df['Blurry']
        self.brown_hair_list = df['Brown_Hair']
        self.bushy_eyebrows_list = df['Bushy_Eyebrows']
        # BUGFIX: these two were assigned the literal lists ['Chubby'] and
        # ['Double_Chin'] instead of the DataFrame columns.
        self.chubby_list = df['Chubby']
        self.double_chin_list = df['Double_Chin']
        self.eyeglasses_list = df['Eyeglasses']
        self.goatee_list = df['Goatee']
        self.gray_hair_list = df['Gray_Hair']
        self.heavy_makeup_list = df['Heavy_Makeup']
        self.high_cheekbones_list = df['High_Cheekbones']
        self.male_list = df['Male']
        self.mouth_slightly_open_list = df['Mouth_Slightly_Open']
        self.mustache_list = df['Mustache']
        self.narrow_eyes_list = df['Narrow_Eyes']
        self.no_beard_list = df['No_Beard']
        self.oval_face_list = df['Oval_Face']
        self.pale_skin_list = df['Pale_Skin']
        self.pointy_nose_list = df['Pointy_Nose']
        self.receding_hairline_list = df['Receding_Hairline']
        self.rosy_cheeks_list = df['Rosy_Cheeks']
        self.sideburns_list = df['Sideburns']
        self.smiling_list = df['Smiling']
        self.straight_hair_list = df['Straight_Hair']
        self.wavy_hair_list = df['Wavy_Hair']
        self.wearing_earrings_list = df['Wearing_Earrings']
        self.wearing_hat_list = df['Wearing_Hat']
        self.wearing_lipstick_list = df['Wearing_Lipstick']
        self.wearing_necklace_list = df['Wearing_Necklace']
        self.wearing_necktie_list = df['Wearing_Necktie']
        self.young_list = df['Young']
        self.transform = transform

    def __len__(self):
        return len(self.file_list)

    def __getitem__(self, idx):
        """Return a dict with the image and all 40 attributes mapped to {0, 1}.

        max(value, 0) clamps negatives to 0 (annotation values are presumably
        the CelebA -1/1 convention — verify against the annotation file).
        """
        # BUGFIX: the path parts were all placed inside the cfg subscript
        # (cfg['coot', 'CelebA', ...]) instead of being os.path.join arguments,
        # and 'coot' was a typo for 'root'.
        image = io.imread(os.path.join(cfg['root'], 'CelebA', 'Img', 'img_align_celeba', self.file_list[idx]))
        sample = {'image': image, '5_o_Clock_Shadow': max(self.o_clock_shadow_list[idx], 0),
                  'Arched_Eyebrows': max(self.arched_eyebrows_list[idx], 0),
                  'Attractive': max(self.attractive_list[idx], 0),
                  'Bags_Under_Eyes': max(self.bags_under_eyes_list[idx], 0),
                  'Bald': max(self.bald_list[idx], 0),
                  'Bangs': max(self.bangs_list[idx], 0), 'Big_Lips': max(self.big_lips_list[idx], 0),
                  'Big_Nose': max(self.big_nose_list[idx], 0), 'Black_Hair': max(self.black_hair_list[idx], 0),
                  'Blond_Hair': max(self.blond_hair_list[idx], 0), 'Blurry': max(self.blurry_list[idx], 0),
                  'Brown_Hair': max(self.brown_hair_list[idx], 0),
                  'Bushy_Eyebrows': max(self.bushy_eyebrows_list[idx], 0),
                  'Chubby': max(self.chubby_list[idx], 0), 'Double_Chin': max(self.double_chin_list[idx], 0),
                  'Eyeglasses': max(self.eyeglasses_list[idx], 0), 'Goatee': max(self.goatee_list[idx], 0),
                  'Gray_Hair': max(self.gray_hair_list[idx], 0), 'Heavy_Makeup': max(self.heavy_makeup_list[idx], 0),
                  'High_Cheekbones': max(self.high_cheekbones_list[idx], 0),
                  'Male': max(self.male_list[idx], 0),
                  'Mouth_Slightly_Open': max(self.mouth_slightly_open_list[idx], 0),
                  'Mustache': max(self.mustache_list[idx], 0),
                  'Narrow_Eyes': max(self.narrow_eyes_list[idx], 0), 'No_Beard': max(self.no_beard_list[idx], 0),
                  'Oval_Face': max(self.oval_face_list[idx], 0),
                  'Pale_Skin': max(self.pale_skin_list[idx], 0), 'Pointy_Nose': max(self.pointy_nose_list[idx], 0),
                  'Receding_Hairline': max(self.receding_hairline_list[idx], 0),
                  'Rosy_Cheeks': max(self.rosy_cheeks_list[idx], 0), 'Sideburns': max(self.sideburns_list[idx], 0),
                  'Smiling': max(self.smiling_list[idx], 0), 'Straight_Hair': max(self.straight_hair_list[idx], 0),
                  'Wavy_Hair': max(self.wavy_hair_list[idx], 0),
                  'Wearing_Earrings': max(self.wearing_earrings_list[idx], 0),
                  'Wearing_Hat': max(self.wearing_hat_list[idx], 0),
                  'Wearing_Lipstick': max(self.wearing_lipstick_list[idx], 0),
                  'Wearing_Necklace': max(self.wearing_necklace_list[idx], 0),
                  'Wearing_Necktie': max(self.wearing_necktie_list[idx], 0), 'Young': max(self.young_list[idx], 0)}
        if self.transform:
            sample['image'] = self.transform(Image.fromarray(sample['image'].astype(np.uint8)))
        return sample
class UTKFaceDataset(Dataset):
    """
    UTKFace dataset.

    File names encode the labels as "<age>_<gender>_<race>_...". The file
    list is split 80/20 into train/test with a fixed random seed so both
    splits are reproducible across runs.
    """

    def __init__(self, train=True, transform=None):
        """
        :param train: serve the 80% train split when True, else the 20% test split
        :param transform: optional transform applied to the PIL image
        """
        files = os.listdir(os.path.join(cfg['root'], 'UTKFace'))
        ages = [int(fname.split("_")[0]) for fname in files]
        train_files, test_files, train_ages, test_ages = train_test_split(files, ages, test_size=0.2, random_state=42)
        # Pick the requested split once, then derive the remaining labels.
        selected_files, selected_ages = (train_files, train_ages) if train else (test_files, test_ages)
        self.filelist = selected_files
        self.agelist = selected_ages
        self.genderlist = [int(fname.split("_")[1]) for fname in selected_files]
        # A race field that is not a single character is treated as category 4.
        self.racelist = [int(fname.split("_")[2]) if len(fname.split("_")[2]) == 1 else 4
                         for fname in selected_files]
        self.transform = transform

    def __len__(self):
        return len(self.filelist)

    def __getitem__(self, idx):
        """Return a dict with the image and its age/gender/race labels."""
        img_path = os.path.join(cfg['root'], 'UTKFace', self.filelist[idx])
        sample = {
            'image': io.imread(img_path),
            'age': self.agelist[idx],
            "gender": self.genderlist[idx],
            "race": self.racelist[idx],
        }
        if self.transform:
            sample['image'] = self.transform(Image.fromarray(sample['image'].astype(np.uint8)))
        return sample
class FER2013Dataset(Dataset):
    """
    FER2013 dataset.

    Images are laid out as <root>/FER2013/<train|test>/<label>/..., where
    the per-category directory name is the integer emotion label.
    """

    def __init__(self, train=True, transform=None):
        """
        :param train: read the 'train' directory when True, else 'test'
        :param transform: optional transform applied to the PIL image
        """
        split = 'train' if train else 'test'
        split_dir = os.path.join(cfg['root'], 'FER2013', split)
        image_files = []
        emotion_labels = []
        for cat in os.listdir(split_dir):
            cat_dir = os.path.join(split_dir, cat)
            for img_f in os.listdir(cat_dir):
                image_files.append(os.path.join(cat_dir, img_f))
                emotion_labels.append(int(cat))
        self.imagefiles = image_files
        self.labels = emotion_labels
        self.transform = transform

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, idx):
        """Return a dict with filename, image and emotion label.

        gender/race/age are fixed to 0 — FER2013 carries no such labels.
        """
        sample = {'filename': self.imagefiles[idx], 'image': io.imread(self.imagefiles[idx]),
                  'emotion': self.labels[idx], "gender": 0, "race": 0, "age": 0}
        if self.transform:
            # Images are expanded to RGB via gray2rgb before the transform.
            sample['image'] = self.transform(Image.fromarray(gray2rgb(sample['image']).astype(np.uint8)))
        return sample
# class FER2013Dataset(Dataset):
# """
# FER2013 dataset
# """
#
# def __init__(self, train=True, fer2013_csv=os.path.join(cfg['root'], 'FER2013', 'fer2013.csv'), transform=None):
# df = pd.read_csv(fer2013_csv)
# train_imgs = []
# test_imgs = []
# train_labels = []
# test_labels = []
#
# for i in range(len(df['Usage'])):
# if df['Usage'][i] == 'Training':
# img_array = np.zeros((48, 48, 3))
# img_array[:, :, 0] = np.array(df['pixels'][i].split(" ")).reshape(48, 48).astype(np.float)
# img_array[:, :, 1] = np.array(df['pixels'][i].split(" ")).reshape(48, 48).astype(np.float)
# img_array[:, :, 2] = np.array(df['pixels'][i].split(" ")).reshape(48, 48).astype(np.float)
# test_imgs.append(img_array)
# train_imgs.append(img_array)
# train_labels.append(df['emotion'][i])
# elif df['Usage'][i] == 'PrivateTest':
# img_array = np.zeros((48, 48, 3))
# img_array[:, :, 0] = np.array(df['pixels'][i].split(" ")).reshape(48, 48).astype(np.float)
# img_array[:, :, 1] = np.array(df['pixels'][i].split(" ")).reshape(48, 48).astype(np.float)
# img_array[:, :, 2] = np.array(df['pixels'][i].split(" ")).reshape(48, 48).astype(np.float)
# test_imgs.append(img_array)
# test_labels.append(df['emotion'][i])
#
# if train:
# self.images = train_imgs
# self.labels = train_labels
# else:
# self.images = test_imgs
# self.labels = test_labels
#
# self.transform = transform
#
# def __len__(self):
# return len(self.labels)
#
# def __getitem__(self, idx):
# sample = {'image': self.images[idx], 'emotion': self.labels[idx], "gender": 0, "race": 0, "age": 0}
#
# if self.transform:
# sample['image'] = self.transform(Image.fromarray(sample['image'].astype(np.uint8)))
#
# return sample
|
[
"sys.path.append",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"numpy.array",
"numpy.loadtxt",
"skimage.color.gray2rgb",
"os.path.join",
"os.listdir",
"skimage.io.imread"
] |
[((379, 401), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (394, 401), False, 'import sys\n'), ((630, 698), 'os.path.join', 'os.path.join', (["cfg['root']", '"""RAF-Face"""', "('%s/Annotation/manual' % type)"], {}), "(cfg['root'], 'RAF-Face', '%s/Annotation/manual' % type)\n", (642, 698), False, 'import os\n'), ((732, 819), 'os.path.join', 'os.path.join', (["cfg['root']", '"""RAF-Face"""', "('%s/EmoLabel/list_patition_label.txt' % type)"], {}), "(cfg['root'], 'RAF-Face', '%s/EmoLabel/list_patition_label.txt' %\n type)\n", (744, 819), False, 'import os\n'), ((3393, 3424), 'skimage.io.imread', 'io.imread', (['self.face_files[idx]'], {}), '(self.face_files[idx])\n', (3402, 3424), False, 'from skimage import io\n'), ((4372, 4459), 'os.path.join', 'os.path.join', (["cfg['root']", '"""RAF-Face"""', "('%s/EmoLabel/list_patition_label.txt' % type)"], {}), "(cfg['root'], 'RAF-Face', '%s/EmoLabel/list_patition_label.txt' %\n type)\n", (4384, 4459), False, 'import os\n'), ((5570, 5606), 'skimage.io.imread', 'io.imread', (['self.local_part_imgs[idx]'], {}), '(self.local_part_imgs[idx])\n', (5579, 5606), False, 'from skimage import io\n'), ((6062, 6129), 'os.path.join', 'os.path.join', (["cfg['root']", '"""CelebA"""', '"""Anno"""', '"""list_attr_celeba.txt"""'], {}), "(cfg['root'], 'CelebA', 'Anno', 'list_attr_celeba.txt')\n", (6074, 6129), False, 'import os\n'), ((6143, 6212), 'pandas.read_csv', 'pd.read_csv', (['list_attr_celeba_txt'], {'delim_whitespace': '(True)', 'header': 'None'}), '(list_attr_celeba_txt, delim_whitespace=True, header=None)\n', (6154, 6212), True, 'import pandas as pd\n'), ((12238, 12299), 'sklearn.model_selection.train_test_split', 'train_test_split', (['files', 'ages'], {'test_size': '(0.2)', 'random_state': '(42)'}), '(files, ages, test_size=0.2, random_state=42)\n', (12254, 12299), False, 'from sklearn.model_selection import train_test_split\n'), ((13035, 13091), 'os.path.join', 'os.path.join', (["cfg['root']", 
'"""UTKFace"""', 'self.filelist[idx]'], {}), "(cfg['root'], 'UTKFace', self.filelist[idx])\n", (13047, 13091), False, 'import os\n'), ((13109, 13128), 'skimage.io.imread', 'io.imread', (['img_name'], {}), '(img_name)\n', (13118, 13128), False, 'from skimage import io\n'), ((845, 893), 'numpy.loadtxt', 'np.loadtxt', (['emotion_label_txt_path'], {'dtype': 'np.str'}), '(emotion_label_txt_path, dtype=np.str)\n', (855, 893), True, 'import numpy as np\n'), ((1082, 1115), 'os.listdir', 'os.listdir', (['manual_annotation_dir'], {}), '(manual_annotation_dir)\n', (1092, 1115), False, 'import os\n'), ((2166, 2199), 'os.listdir', 'os.listdir', (['manual_annotation_dir'], {}), '(manual_annotation_dir)\n', (2176, 2199), False, 'import os\n'), ((3717, 3731), 'numpy.array', 'np.array', (['ldmk'], {}), '(ldmk)\n', (3725, 3731), True, 'import numpy as np\n'), ((4598, 4646), 'numpy.loadtxt', 'np.loadtxt', (['emotion_label_txt_path'], {'dtype': 'np.str'}), '(emotion_label_txt_path, dtype=np.str)\n', (4608, 4646), True, 'import numpy as np\n'), ((4747, 4777), 'os.listdir', 'os.listdir', (['local_part_img_dir'], {}), '(local_part_img_dir)\n', (4757, 4777), False, 'import os\n'), ((5091, 5121), 'os.listdir', 'os.listdir', (['local_part_img_dir'], {}), '(local_part_img_dir)\n', (5101, 5121), False, 'import os\n'), ((9156, 9244), 'os.path.join', 'os.path.join', (["cfg['coot', 'CelebA', 'Img', 'img_align_celeba', self.file_list[idx]]"], {}), "(cfg['coot', 'CelebA', 'Img', 'img_align_celeba', self.\n file_list[idx]])\n", (9168, 9244), False, 'import os\n'), ((12081, 12117), 'os.path.join', 'os.path.join', (["cfg['root']", '"""UTKFace"""'], {}), "(cfg['root'], 'UTKFace')\n", (12093, 12117), False, 'import os\n'), ((13651, 13694), 'os.path.join', 'os.path.join', (["cfg['root']", '"""FER2013"""', 'type_'], {}), "(cfg['root'], 'FER2013', type_)\n", (13663, 13694), False, 'import os\n'), ((14156, 14187), 'skimage.io.imread', 'io.imread', (['self.imagefiles[idx]'], {}), '(self.imagefiles[idx])\n', 
(14165, 14187), False, 'from skimage import io\n'), ((13733, 13781), 'os.path.join', 'os.path.join', (["cfg['root']", '"""FER2013"""', 'type_', 'cat'], {}), "(cfg['root'], 'FER2013', type_, cat)\n", (13745, 13781), False, 'import os\n'), ((13812, 13867), 'os.path.join', 'os.path.join', (["cfg['root']", '"""FER2013"""', 'type_', 'cat', 'img_f'], {}), "(cfg['root'], 'FER2013', type_, cat, img_f)\n", (13824, 13867), False, 'import os\n'), ((1292, 1368), 'os.path.join', 'os.path.join', (["cfg['root']", '"""RAF-Face"""', "('%s/Image/aligned' % type)", 'face_fname'], {}), "(cfg['root'], 'RAF-Face', '%s/Image/aligned' % type, face_fname)\n", (1304, 1368), False, 'import os\n'), ((2375, 2451), 'os.path.join', 'os.path.join', (["cfg['root']", '"""RAF-Face"""', "('%s/Image/aligned' % type)", 'face_fname'], {}), "(cfg['root'], 'RAF-Face', '%s/Image/aligned' % type, face_fname)\n", (2387, 2451), False, 'import os\n'), ((4865, 4900), 'os.path.join', 'os.path.join', (['local_part_img_dir', '_'], {}), '(local_part_img_dir, _)\n', (4877, 4900), False, 'import os\n'), ((5208, 5243), 'os.path.join', 'os.path.join', (['local_part_img_dir', '_'], {}), '(local_part_img_dir, _)\n', (5220, 5243), False, 'import os\n'), ((1400, 1438), 'os.path.join', 'os.path.join', (['manual_annotation_dir', '_'], {}), '(manual_annotation_dir, _)\n', (1412, 1438), False, 'import os\n'), ((2483, 2521), 'os.path.join', 'os.path.join', (['manual_annotation_dir', '_'], {}), '(manual_annotation_dir, _)\n', (2495, 2521), False, 'import os\n'), ((14359, 14384), 'skimage.color.gray2rgb', 'gray2rgb', (["sample['image']"], {}), "(sample['image'])\n", (14367, 14384), False, 'from skimage.color import gray2rgb\n')]
|
import heapq
class MedianFinder:
    def __init__(self):
        """
        initialize your data structure here.
        """
        # max_heap stores the lower half NEGATED so heapq's min-heap acts as
        # a max-heap; min_heap stores the upper half as-is. Invariant:
        # len(max_heap) is either equal to or one more than len(min_heap).
        self.max_heap = []
        self.min_heap = []

    def addNum(self, num: int) -> None:
        """Insert a number while keeping the two halves balanced."""
        if len(self.max_heap) == len(self.min_heap):
            # Route through the upper half, then move its smallest element
            # down so the lower half ends up one element longer.
            heapq.heappush(self.min_heap, num)
            moved = heapq.heappop(self.min_heap)
            heapq.heappush(self.max_heap, -moved)
        else:
            # Route through the lower half, then move its largest element up.
            heapq.heappush(self.max_heap, -num)
            moved = -heapq.heappop(self.max_heap)
            heapq.heappush(self.min_heap, moved)

    def findMedian(self) -> float:
        """Return the median of all numbers inserted so far."""
        if len(self.max_heap) == len(self.min_heap):
            # Even count: average the two middle elements.
            return (self.min_heap[0] - self.max_heap[0]) / 2
        # Odd count: the extra element sits on top of the (negated) max-heap.
        return -self.max_heap[0]
|
[
"heapq.heappush",
"heapq.heappop"
] |
[((343, 377), 'heapq.heappush', 'heapq.heappush', (['self.min_heap', 'num'], {}), '(self.min_heap, num)\n', (357, 377), False, 'import heapq\n'), ((511, 546), 'heapq.heappush', 'heapq.heappush', (['self.max_heap', '(-num)'], {}), '(self.max_heap, -num)\n', (525, 546), False, 'import heapq\n'), ((422, 450), 'heapq.heappop', 'heapq.heappop', (['self.min_heap'], {}), '(self.min_heap)\n', (435, 450), False, 'import heapq\n'), ((592, 620), 'heapq.heappop', 'heapq.heappop', (['self.max_heap'], {}), '(self.max_heap)\n', (605, 620), False, 'import heapq\n')]
|
# Copyright 2020 Forschungszentrum Jülich GmbH and Aix-Marseille Université
# "Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements; and to You under the Apache License, Version 2.0. "
import time
import numpy as np
from mpi4py import MPI
from nest_elephant_tvb.translation.science_tvb_to_nest import generate_data
def init(path_config, nb_spike_generator, id_first_spike_detector, param,
comm, comm_receiver, comm_sender, loggers):
'''
Initialize the transformation with MPI. This is the TVB to NEST direction.
NOTE: more information will be added with more changes. This is still the very first version!
TODO: Use RichEndPoints for communication encapsulation
TODO: Seperate 1)Receive 2)Analysis/Science and 3)Send. See also the many Todos in the code
TODO: Make use of / enable MPI parallelism! Solve hardcoded communication protocol first
TODO: This side mirrors the NEST to TVB side
-> TVB communicates on rank 0
-> NEST communcates on rank 1
This is vice versa in the nest to tvb direction.
TODO: solve this together with the rest of the communication protocol.
'''
# destructure logger list to indivual variables
logger_master, logger_receive, logger_send = loggers
# science part, see import
# TODO: use os.path (or similar) for proper file handling.
# TODO: move this object creation to a proper place. They are passed through many functions.
generator = generate_data(path_config+'/../../log/',nb_spike_generator,param)
############ NEW Code:
# TODO: future work: mpi parallel, use rank 1-x for science and sending
# TODO: use this MPI intracommunicator, without receiving rank 0
# intracomm = comm.Create(comm.Get_group().Excl([0]))
# create the shared memory block / databuffer
databuffer = _shared_mem_buffer(comm)
############# NEW Code end
############ NEW Code: Receive/analyse/send
if comm.Get_rank() == 0: # Receiver from TVB
# All MPI communication is done with rank 0 from TVB side
# Make this (and the NEST side as well) scalable.
_receive(comm_receiver, databuffer, logger_receive)
else: # Science/generate and sender to NEST, rank 1-x
_send(comm_sender, databuffer, logger_send, generator, id_first_spike_detector)
############ NEW Code end
############ NEW Code: disconnect
# TODO: should this be done here?
logger_master.info('Disconnect communicators...')
comm_receiver.Disconnect()
comm_sender.Disconnect()
############ NEW Code end
def _shared_mem_buffer(comm):
'''
Create shared memory buffer. MPI One-sided-Communication.
:param comm: MPI intra communicator to create the buffer.
:return buffer: 1D shared memory buffer array
TODO: Buffersize/max. expected size of incoming data
-> free param, handle properly!
TODO: 2 doubles: [start_time,end_time] of simulation step
TODO: unknown number of doubles: array with rates
'''
datasize = MPI.DOUBLE.Get_size()
bufsize = 2 + 1000000 # NOTE: hardcoded (max.expected size of rate array)
if comm.Get_rank() == 0:
bufbytes = datasize * bufsize
else:
bufbytes= 0
# rank 0: create the shared block
# rank 1-x: get a handle to it
win = MPI.Win.Allocate_shared(bufbytes, datasize, comm=comm)
buf, datasize = win.Shared_query(0)
assert datasize == MPI.DOUBLE.Get_size()
# create a numpy array (buffer) whose data points to the shared mem
return np.ndarray(buffer=buf, dtype='d', shape=(bufsize,))
# See todo in the beginning, encapsulate I/O, transformer, science parts
def _receive(comm_receiver, databuffer, logger):
'''
Receive data on rank 0. Put it into the shared mem buffer.
Replaces the former 'receive' function.
NOTE: First refactored version -> not pretty, not final.
'''
status_ = MPI.Status()
num_sending = comm_receiver.Get_remote_size() # how many TVB ranks are sending?
# init placeholder for incoming data
time_step = np.empty(2, dtype='d') # two doubles with start and end time of the step
size = np.empty(1, dtype='i') # size of the rate-array
# TODO: the last two buffer entries are used for shared information
# --> they replace the status_data variable from previous version
# --> find more elegant solution?
databuffer[-1] = 1 # set buffer to 'ready to receive from tvb'
databuffer[-2] = 0 # marks the 'head' of the buffer
while True:
# TODO: NEST to TVB transformer: irecv
# TODO: TVB to NEST transformer (here): isend
# TODO: --> rework communication protocol between simulators and transformers!
requests=[]
logger.info(" TVB to Nest: wait receive ")
for rank in range(num_sending):
requests.append(comm_receiver.isend(True,dest=rank,tag=0))
MPI.Request.Waitall(requests)
logger.info(" TVB to Nest: receive all")
# TODO: works for now, needs rework if multiple ranks are used on TVB side
# TODO: we receive from "ANY_SOURCE", but only check the status_ of the last receive...
# get the starting and ending time of the simulation step
# NEW: receive directly into the buffer
comm_receiver.Recv([databuffer[0:], MPI.DOUBLE], source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status_)
logger.info(" TVB to Nest: get time_step "+str(time_step)+" status : " + str(status_.Get_tag()))
if status_.Get_tag() == 0:
# wait until ready to receive new data (i.e. the sender has cleared the buffer)
while databuffer[-1] != 1: # TODO: use MPI, remove the sleep
time.sleep(0.001)
pass
# Get the size of the data
comm_receiver.Recv([size, 1, MPI.INT], source=status_.Get_source(), tag=0, status=status_)
# NEW: receive directly into the buffer
# First two entries are the times, see above
comm_receiver.Recv([databuffer[2:], MPI.DOUBLE], source=status_.Get_source(), tag=0, status=status_)
# Mark as 'ready to do analysis'
databuffer[-1] = 0
databuffer[-2] = size # info about size of data array
logger.info(" TVB to Nest: update buffer")
elif status_.Get_tag() == 1:
logger.info('TVB: end simulation')
break
else:
raise Exception("bad mpi tag"+str(status_.Get_tag()))
logger.info('TVB_to_NEST: End of receive function')
# See todo in the beginning, encapsulate I/O, transformer, science parts
def _send(comm_sender, databuffer, logger, generator, id_first_spike_detector):
'''
Generator/Science on INTRAcommunicator (multiple MPI ranks possible).
TODO: not yet used.
Send data to NEST on INTERcommunicator comm_sender (multiple MPI ranks possible).
Replaces the former 'send' function.
NOTE: First refactored version -> not pretty, not final.
TODO: Discuss communication protocol of TVB<->transformer and transformer<->NEST
'''
status_ = MPI.Status()
num_sending = comm_sender.Get_remote_size() # how many TVB ranks are sending?
# init placeholder for incoming data
check = np.empty(1,dtype='b')
size_list = np.empty(1, dtype='i')
while(True):
# TODO: This is still not correct. We only check for the Tag of the last rank.
# TODO: IF all ranks send always the same tag in one iteration (simulation step)
# TODO: then this works. But it should be handled differently!!!!
for rank in range(num_sending):
comm_sender.Recv([check, 1, MPI.CXX_BOOL], source=rank, tag=MPI.ANY_TAG, status=status_)
logger.info("TVB to NEST : send data status : " +str(status_.Get_tag()))
# TODO: handle properly, all ranks send tag 0?
if status_.Get_tag() == 0:
# wait until the receiver has cleared the buffer, i.e. filled with new data
while databuffer[-1] != 0: # TODO: use MPI, remove the sleep
time.sleep(0.001)
pass
# TODO: All science/generate here. Move to a proper place.
# method: generate_spike(count,time_step,rate)
# NOTE: count is a hardcoded '0'. Why?
# NOTE: time_step are the first two doubles in the buffer
# NOTE: rate is a double array, which size is stored in the second to last index
spikes_times = generator.generate_spike(0,databuffer[:2],databuffer[2:int(databuffer[-2])])
logger.info(" TVB to Nest: spike time")
# Mark as 'ready to receive next simulation step'
databuffer[-1] = 1
###### OLD code, kept the communication and science as it is for now
### TODO: Receive from status_.Get_source() and rank
### TODO: Send to status_.Get_source() and rank
### TODO: why???
### TODO: a second status_ object is used, should not be named the same
for rank in range(num_sending):
# NOTE: in 'test_receive_tvb_to_nest.py': hardcoded 10
comm_sender.Recv([size_list, 1, MPI.INT], source=rank, tag=0, status=status_)
if size_list[0] != 0:
list_id = np.empty(size_list, dtype='i')
# NOTE: in 'test_receive_tvb_to_nest.py': hardcoded np.arange(0,10,1)
comm_sender.Recv([list_id, size_list, MPI.INT], source=status_.Get_source(), tag=0, status=status_)
# Select the good spike train and send it
# logger.info(" TVB to Nest:"+str(data))
logger.info("rank "+str(rank)+" list_id "+str(list_id))
# TODO: Creating empty lists and append to them in a loop, all inside a loop
# TODO: this is slow and will be a bottleneck when we scale up.
data = []
shape = []
for i in list_id:
shape += [spikes_times[i-id_first_spike_detector].shape[0]]
data += [spikes_times[i-id_first_spike_detector]]
send_shape = np.array(np.concatenate(([np.sum(shape)],shape)), dtype='i')
# firstly send the size of the spikes train
comm_sender.Send([send_shape, MPI.INT], dest=status_.Get_source(), tag=list_id[0])
# secondly send the spikes train
data = np.concatenate(data).astype('d')
comm_sender.Send([data, MPI.DOUBLE], dest=rank, tag=list_id[0])
logger.info(" end sending:")
###### OLD code end
elif status_.Get_tag() == 1:
logger.info(" TVB to Nest end sending") # NOTE: one sim step?
elif status_.Get_tag() == 2:
logger.info(" TVB to Nest end simulation ") # NOTE: end whole sim.
break
else:
raise Exception("bad mpi tag : "+str(status_.Get_tag()))
logger.info('TVB_to_NEST: End of send function')
|
[
"numpy.sum",
"nest_elephant_tvb.translation.science_tvb_to_nest.generate_data",
"mpi4py.MPI.Win.Allocate_shared",
"mpi4py.MPI.Status",
"numpy.empty",
"time.sleep",
"mpi4py.MPI.DOUBLE.Get_size",
"mpi4py.MPI.Request.Waitall",
"numpy.ndarray",
"numpy.concatenate"
] |
[((1502, 1571), 'nest_elephant_tvb.translation.science_tvb_to_nest.generate_data', 'generate_data', (["(path_config + '/../../log/')", 'nb_spike_generator', 'param'], {}), "(path_config + '/../../log/', nb_spike_generator, param)\n", (1515, 1571), False, 'from nest_elephant_tvb.translation.science_tvb_to_nest import generate_data\n'), ((3072, 3093), 'mpi4py.MPI.DOUBLE.Get_size', 'MPI.DOUBLE.Get_size', ([], {}), '()\n', (3091, 3093), False, 'from mpi4py import MPI\n'), ((3353, 3407), 'mpi4py.MPI.Win.Allocate_shared', 'MPI.Win.Allocate_shared', (['bufbytes', 'datasize'], {'comm': 'comm'}), '(bufbytes, datasize, comm=comm)\n', (3376, 3407), False, 'from mpi4py import MPI\n'), ((3576, 3627), 'numpy.ndarray', 'np.ndarray', ([], {'buffer': 'buf', 'dtype': '"""d"""', 'shape': '(bufsize,)'}), "(buffer=buf, dtype='d', shape=(bufsize,))\n", (3586, 3627), True, 'import numpy as np\n'), ((3951, 3963), 'mpi4py.MPI.Status', 'MPI.Status', ([], {}), '()\n', (3961, 3963), False, 'from mpi4py import MPI\n'), ((4105, 4127), 'numpy.empty', 'np.empty', (['(2)'], {'dtype': '"""d"""'}), "(2, dtype='d')\n", (4113, 4127), True, 'import numpy as np\n'), ((4189, 4211), 'numpy.empty', 'np.empty', (['(1)'], {'dtype': '"""i"""'}), "(1, dtype='i')\n", (4197, 4211), True, 'import numpy as np\n'), ((7159, 7171), 'mpi4py.MPI.Status', 'MPI.Status', ([], {}), '()\n', (7169, 7171), False, 'from mpi4py import MPI\n'), ((7307, 7329), 'numpy.empty', 'np.empty', (['(1)'], {'dtype': '"""b"""'}), "(1, dtype='b')\n", (7315, 7329), True, 'import numpy as np\n'), ((7345, 7367), 'numpy.empty', 'np.empty', (['(1)'], {'dtype': '"""i"""'}), "(1, dtype='i')\n", (7353, 7367), True, 'import numpy as np\n'), ((3471, 3492), 'mpi4py.MPI.DOUBLE.Get_size', 'MPI.DOUBLE.Get_size', ([], {}), '()\n', (3490, 3492), False, 'from mpi4py import MPI\n'), ((4939, 4968), 'mpi4py.MPI.Request.Waitall', 'MPI.Request.Waitall', (['requests'], {}), '(requests)\n', (4958, 4968), False, 'from mpi4py import MPI\n'), ((5754, 5771), 
'time.sleep', 'time.sleep', (['(0.001)'], {}), '(0.001)\n', (5764, 5771), False, 'import time\n'), ((8124, 8141), 'time.sleep', 'time.sleep', (['(0.001)'], {}), '(0.001)\n', (8134, 8141), False, 'import time\n'), ((9379, 9409), 'numpy.empty', 'np.empty', (['size_list'], {'dtype': '"""i"""'}), "(size_list, dtype='i')\n", (9387, 9409), True, 'import numpy as np\n'), ((10598, 10618), 'numpy.concatenate', 'np.concatenate', (['data'], {}), '(data)\n', (10612, 10618), True, 'import numpy as np\n'), ((10316, 10329), 'numpy.sum', 'np.sum', (['shape'], {}), '(shape)\n', (10322, 10329), True, 'import numpy as np\n')]
|
"""
MIT License
Copyright (c) 2021 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from logging import getLogger
from os import environ
from sys import exit
from base64 import b64encode
class Login:
def __init__(self, config):
self.config = config
self.logger = getLogger(__name__)
self._email = ""
self._app_pass = ""
@property
def email(self):
return self._email
@property
def app_pass(self):
return self._app_pass
def env_login(self):
try:
self._email = environ["EMAIL_ADDR"]
self._app_pass = environ["EMAIL_PASS"]
except KeyError as e:
self.logger.critical("EMAIL_ADDR or EMAIL_PASS environment variables not set")
exit(1)
if not self.config.has_section("login"):
self.logger.info("adding `login` section to config file")
self.config.add_section("login")
self.config.set("login", "email", b64encode(bytes(self.email, "utf8")).decode("utf8"))
self.config.set("login", "app_pass", b64encode(bytes(self.app_pass, "utf8")).decode("utf8"))
else:
if self.config["login"]["email"] != self.email:
self.logger.info("updating login email")
self.config["login"]["email"] = b64encode(bytes(self.email, "utf8")).decode("utf8")
if self.config["login"]["app_pass"] != self.app_pass:
self.logger.info("updating login app password")
self.config["login"]["app_pass"] = b64encode(bytes(self.app_pass, "utf8")).decode("utf8")
with open("config.ini", "w") as f:
self.config.write(f)
|
[
"sys.exit",
"logging.getLogger"
] |
[((1266, 1285), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (1275, 1285), False, 'from logging import getLogger\n'), ((1663, 1670), 'sys.exit', 'exit', (['(1)'], {}), '(1)\n', (1667, 1670), False, 'from sys import exit\n')]
|
from CalcFinanceira import CalcFinanceira
from CalcCientifica import CalcCientifica
cc1 = CalcCientifica("Hp", "H230", "Cinza")
cf1 = CalcFinanceira("Acer", "A115", "Azul")
cc1.exponenciar(1, 2)
cf1.modular(3,5)
|
[
"CalcFinanceira.CalcFinanceira",
"CalcCientifica.CalcCientifica"
] |
[((91, 128), 'CalcCientifica.CalcCientifica', 'CalcCientifica', (['"""Hp"""', '"""H230"""', '"""Cinza"""'], {}), "('Hp', 'H230', 'Cinza')\n", (105, 128), False, 'from CalcCientifica import CalcCientifica\n'), ((135, 173), 'CalcFinanceira.CalcFinanceira', 'CalcFinanceira', (['"""Acer"""', '"""A115"""', '"""Azul"""'], {}), "('Acer', 'A115', 'Azul')\n", (149, 173), False, 'from CalcFinanceira import CalcFinanceira\n')]
|
import sys
def parse(func):
method = list(sys._current_frames().values())[0].f_back.f_globals['__name__'] #https://stackoverflow.com/questions/1095543/get-name-of-calling-functions-module-in-python#1095621
method_bytes = {
'mbinobs.ipv4' : 4,
'mbinobs.ipv6' : 16,
'mbinobs.uuid' : 16,
'mbinobs.mac' : 6
}
def wrapper_parse(bfile: bytes) -> bytes:
try:
if type(bfile) != bytes:
raise AssertionError
except AssertionError:
print((f"{method} accepts <class 'bytes'>, not {type(bfile)}"))
sys.exit(0)
b = method_bytes[method]
bfile_len = len(bfile)
file_parsed = [bfile[i:i+b] for i in range(0,bfile_len,b)] # initial byte string sapareted into method lenght(bytes)
if (bfile_len % b): #padd ip with 0 if necessary
while (len(file_parsed[-1]) < b):
file_parsed[-1] += b'\x00'
return func(b,file_parsed)
return wrapper_parse
|
[
"sys._current_frames",
"sys.exit"
] |
[((521, 532), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (529, 532), False, 'import sys\n'), ((45, 66), 'sys._current_frames', 'sys._current_frames', ([], {}), '()\n', (64, 66), False, 'import sys\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import grpc
import zemberek_grpc.language_id_pb2 as z_langid
import zemberek_grpc.language_id_pb2_grpc as z_langid_g
import zemberek_grpc.normalization_pb2 as z_normalization
import zemberek_grpc.normalization_pb2_grpc as z_normalization_g
import zemberek_grpc.preprocess_pb2 as z_preprocess
import zemberek_grpc.preprocess_pb2_grpc as z_preprocess_g
import zemberek_grpc.morphology_pb2 as z_morphology
import zemberek_grpc.morphology_pb2_grpc as z_morphology_g
channel = grpc.insecure_channel('localhost:6789')
langid_stub = z_langid_g.LanguageIdServiceStub(channel)
normalization_stub = z_normalization_g.NormalizationServiceStub(channel)
preprocess_stub = z_preprocess_g.PreprocessingServiceStub(channel)
morphology_stub = z_morphology_g.MorphologyServiceStub(channel)
def find_lang_id(i):
response = langid_stub.Detect(z_langid.LanguageIdRequest(input=i))
return response.langId
def tokenize(i):
response = preprocess_stub.Tokenize(z_preprocess.TokenizationRequest(input=i))
return response.tokens
def normalize(i):
response = normalization_stub.Normalize(z_normalization.NormalizationRequest(input=i))
return response
def analyze(i):
response = morphology_stub.AnalyzeSentence(z_morphology.SentenceAnalysisRequest(input=i))
return response;
def run():
lang_detect_input = 'merhaba dünya'
lang_id = find_lang_id(lang_detect_input)
print("Language of [" + lang_detect_input + "] is: " + lang_id)
print("")
tokenization_input = '<NAME>!'
print('Tokens for input : ' + tokenization_input)
tokens = tokenize(tokenization_input)
for t in tokens:
print(t.token + ':' + t.type)
print("")
normalization_input = '<NAME>'
print('Normalization result for input : ' + normalization_input)
n_response = normalize(normalization_input)
if n_response.normalized_input:
print(n_response.normalized_input)
else:
print('Problem normalizing input : ' + n_response.error)
print("")
analysis_input = 'Kavanozun kapağını açamadım.'
print('Analysis result for input : ' + analysis_input)
analysis_result = analyze(analysis_input)
for a in analysis_result.results:
best = a.best
lemmas = ""
for l in best.lemmas:
lemmas = lemmas + " " + l
print("Word = " + a.token + ", Lemmas = " + lemmas + ", POS = [" + best.pos + "], Full Analysis = {" + best.analysis + "}")
if __name__ == '__main__':
run()
|
[
"zemberek_grpc.preprocess_pb2.TokenizationRequest",
"zemberek_grpc.morphology_pb2.SentenceAnalysisRequest",
"zemberek_grpc.morphology_pb2_grpc.MorphologyServiceStub",
"grpc.insecure_channel",
"zemberek_grpc.preprocess_pb2_grpc.PreprocessingServiceStub",
"zemberek_grpc.normalization_pb2.NormalizationRequest",
"zemberek_grpc.language_id_pb2_grpc.LanguageIdServiceStub",
"zemberek_grpc.language_id_pb2.LanguageIdRequest",
"zemberek_grpc.normalization_pb2_grpc.NormalizationServiceStub"
] |
[((521, 560), 'grpc.insecure_channel', 'grpc.insecure_channel', (['"""localhost:6789"""'], {}), "('localhost:6789')\n", (542, 560), False, 'import grpc\n'), ((576, 617), 'zemberek_grpc.language_id_pb2_grpc.LanguageIdServiceStub', 'z_langid_g.LanguageIdServiceStub', (['channel'], {}), '(channel)\n', (608, 617), True, 'import zemberek_grpc.language_id_pb2_grpc as z_langid_g\n'), ((639, 690), 'zemberek_grpc.normalization_pb2_grpc.NormalizationServiceStub', 'z_normalization_g.NormalizationServiceStub', (['channel'], {}), '(channel)\n', (681, 690), True, 'import zemberek_grpc.normalization_pb2_grpc as z_normalization_g\n'), ((709, 757), 'zemberek_grpc.preprocess_pb2_grpc.PreprocessingServiceStub', 'z_preprocess_g.PreprocessingServiceStub', (['channel'], {}), '(channel)\n', (748, 757), True, 'import zemberek_grpc.preprocess_pb2_grpc as z_preprocess_g\n'), ((776, 821), 'zemberek_grpc.morphology_pb2_grpc.MorphologyServiceStub', 'z_morphology_g.MorphologyServiceStub', (['channel'], {}), '(channel)\n', (812, 821), True, 'import zemberek_grpc.morphology_pb2_grpc as z_morphology_g\n'), ((878, 913), 'zemberek_grpc.language_id_pb2.LanguageIdRequest', 'z_langid.LanguageIdRequest', ([], {'input': 'i'}), '(input=i)\n', (904, 913), True, 'import zemberek_grpc.language_id_pb2 as z_langid\n'), ((1000, 1041), 'zemberek_grpc.preprocess_pb2.TokenizationRequest', 'z_preprocess.TokenizationRequest', ([], {'input': 'i'}), '(input=i)\n', (1032, 1041), True, 'import zemberek_grpc.preprocess_pb2 as z_preprocess\n'), ((1133, 1178), 'zemberek_grpc.normalization_pb2.NormalizationRequest', 'z_normalization.NormalizationRequest', ([], {'input': 'i'}), '(input=i)\n', (1169, 1178), True, 'import zemberek_grpc.normalization_pb2 as z_normalization\n'), ((1264, 1309), 'zemberek_grpc.morphology_pb2.SentenceAnalysisRequest', 'z_morphology.SentenceAnalysisRequest', ([], {'input': 'i'}), '(input=i)\n', (1300, 1309), True, 'import zemberek_grpc.morphology_pb2 as z_morphology\n')]
|
#
# Copyright (c) 2019 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from bridge.utils import file_get_or_create
from jobs.models import JobFile
def create_jobfile():
data = {'test': 'x', 'data': [1, 2, 3], 'new': None}
res = file_get_or_create(json.dumps(data), 'test.json', JobFile)
print("The db file:", res, res.pk)
print("Delete:", res.delete())
|
[
"json.dumps"
] |
[((882, 898), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (892, 898), False, 'import json\n')]
|
# Copyright (c) 2018-2019, NVIDIA CORPORATION.
import numpy as np
import pytest
from utils import assert_eq
import nvcategory
import nvstrings
def test_size():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
assert strs.size() == cat.size()
def test_keys():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
cat = nvcategory.from_strings(strs1)
got = cat.keys()
expected = ["a", "b", "c", "f"]
assert_eq(got, expected)
def test_keys_size():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
cat = nvcategory.from_strings(strs1)
got = cat.keys_size()
assert got == 4
def test_values():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
got = cat.values()
expected = [3, 0, 3, 2, 1, 1, 1, 3, 0]
assert_eq(got, expected)
def test_value_for_index():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
got = cat.value_for_index(7)
expected = 3
assert got == expected
def test_value():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
got = cat.value("ccc")
expected = 1
assert got == expected
def test_indexes_for_key():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
got = cat.indexes_for_key("ccc")
expected = [4, 5, 6]
assert_eq(got, expected)
def test_to_strings():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
got = cat.to_strings()
assert_eq(got, strs)
def test_add_strings():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
got = cat.add_strings(strs)
expected_keys = ["aaa", "ccc", "ddd", "eee"]
expected_values = [3, 0, 3, 2, 1, 1, 1, 3, 0, 3, 0, 3, 2, 1, 1, 1, 3, 0]
assert_eq(got.keys(), expected_keys)
assert_eq(got.values(), expected_values)
def test_gather_strings():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
got = cat.gather_strings([0, 2, 0])
expected = ["aaa", "ddd", "aaa"]
assert_eq(got, expected)
@pytest.mark.parametrize(
"func",
[
lambda cat, indexes: cat.gather_strings(indexes),
lambda cat, indexes: cat.gather(indexes),
lambda cat, indexes: cat.gather_and_remap(indexes),
],
)
def test_gather_index_exception(func):
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
indexes = [0, 2, 0, 4]
with pytest.raises(Exception):
func(cat, indexes)
def test_remove_strings():
strs = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
cat = nvcategory.from_strings(strs)
removal_strings = nvstrings.to_device(["ccc", "aaa", "bbb"])
got = cat.remove_strings(removal_strings)
expected_keys = ["ddd", "eee"]
expected_values = [1, 1, 0, 1]
assert_eq(got.keys(), expected_keys)
assert_eq(got.values(), expected_values)
def test_from_strings():
strs1 = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
strs2 = nvstrings.to_device(
["ggg", "fff", "hhh", "aaa", "fff", "fff", "ggg", "hhh", "bbb"]
)
cat = nvcategory.from_strings(strs1, strs2)
expected_keys = ["aaa", "bbb", "ccc", "ddd", "eee", "fff", "ggg", "hhh"]
expected_values = [4, 0, 4, 3, 2, 2, 2, 4, 0, 6, 5, 7, 0, 5, 5, 6, 7, 1]
assert_eq(cat.keys(), expected_keys)
assert_eq(cat.values(), expected_values)
def test_merge_category():
strs1 = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
strs2 = nvstrings.to_device(
["ggg", "fff", "hhh", "aaa", "fff", "fff", "ggg", "hhh", "bbb"]
)
cat1 = nvcategory.from_strings(strs1)
cat2 = nvcategory.from_strings(strs2)
ncat = cat1.merge_category(cat2)
expected_keys = ["<KEY>"]
expected_values = [3, 0, 3, 2, 1, 1, 1, 3, 0, 6, 5, 7, 0, 5, 5, 6, 7, 4]
assert_eq(ncat.keys(), expected_keys)
assert_eq(ncat.values(), expected_values)
def test_merge_and_remap():
strs1 = nvstrings.to_device(
["eee", "aaa", "eee", "ddd", "ccc", "ccc", "ccc", "eee", "aaa"]
)
strs2 = nvstrings.to_device(
["ggg", "fff", "hhh", "aaa", "fff", "fff", "ggg", "hhh", "bbb"]
)
cat1 = nvcategory.from_strings(strs1)
cat2 = nvcategory.from_strings(strs2)
ncat = cat1.merge_and_remap(cat2)
expected_keys = ["<KEY>"]
expected_values = [4, 0, 4, 3, 2, 2, 2, 4, 0, 6, 5, 7, 0, 5, 5, 6, 7, 1]
assert_eq(ncat.keys(), expected_keys)
assert_eq(ncat.values(), expected_values)
def test_add_keys():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
strs2 = nvstrings.to_device(["a", "b", "c", "d"])
cat = nvcategory.from_strings(strs1)
cat1 = cat.add_keys(strs2)
assert_eq(cat1.keys(), ["a", "b", "c", "d", "f"])
def test_remove_keys():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
strs2 = nvstrings.to_device(["b", "d"])
cat = nvcategory.from_strings(strs1)
cat1 = cat.remove_keys(strs2)
assert_eq(cat1.keys(), ["a", "c", "f"])
def test_set_keys():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
strs2 = nvstrings.to_device(["b", "c", "e", "d"])
cat = nvcategory.from_strings(strs1)
cat1 = cat.set_keys(strs2)
assert_eq(cat1.keys(), ["b", "c", "d", "e"])
def test_remove_unused_keys():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
strs2 = nvstrings.to_device(["b", "c", "e", "d"])
cat = nvcategory.from_strings(strs1)
cat1 = cat.set_keys(strs2)
cat1_unused_removed = cat1.remove_unused_keys()
assert_eq(cat1_unused_removed.keys(), ["b", "c"])
def test_gather():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
cat = nvcategory.from_strings(strs1)
cat1 = cat.gather([1, 3, 2, 3, 1, 2])
expected_keys = ["<KEY>"]
expected_values = [1, 3, 2, 3, 1, 2]
assert_eq(cat1.keys(), expected_keys)
assert_eq(cat1.values(), expected_values)
def test_gather_and_remap():
strs1 = nvstrings.to_device(["a", "b", "b", "f", "c", "f"])
cat = nvcategory.from_strings(strs1)
cat1 = cat.gather_and_remap([1, 3, 2, 3, 1, 2])
expected_keys = ["<KEY>"]
expected_values = [0, 2, 1, 2, 0, 1]
assert_eq(cat1.keys(), expected_keys)
assert_eq(cat1.values(), expected_values)
def test_from_offsets():
values = np.array([97, 112, 112, 108, 101], dtype=np.int8)
offsets = np.array([0, 1, 2, 3, 4, 5], dtype=np.int32)
cat = nvcategory.from_offsets(values, offsets, 5)
expected_keys = ["<KEY>"]
expected_values = [0, 3, 3, 2, 1]
assert_eq(cat.keys(), expected_keys)
assert_eq(cat.values(), expected_values)
def test_from_strings_list():
s1 = nvstrings.to_device(["apple", "pear", "banana"])
s2 = nvstrings.to_device(["orange", "pear"])
cat = nvcategory.from_strings_list([s1, s2])
expected_keys = ["apple", "banana", "orange", "pear"]
expected_values = [0, 3, 1, 2, 3]
assert_eq(cat.keys(), expected_keys)
assert_eq(cat.values(), expected_values)
def test_to_device():
cat = nvcategory.to_device(["apple", "pear", "banana", "orange", "pear"])
expected_keys = ["apple", "banana", "orange", "pear"]
expected_values = [0, 3, 1, 2, 3]
assert_eq(cat.keys(), expected_keys)
assert_eq(cat.values(), expected_values)
|
[
"utils.assert_eq",
"nvcategory.to_device",
"pytest.raises",
"numpy.array",
"nvcategory.from_strings",
"nvcategory.from_strings_list",
"nvstrings.to_device",
"nvcategory.from_offsets"
] |
[((175, 263), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (194, 263), False, 'import nvstrings\n'), ((284, 313), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (307, 313), False, 'import nvcategory\n'), ((382, 433), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (401, 433), False, 'import nvstrings\n'), ((444, 474), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (467, 474), False, 'import nvcategory\n'), ((536, 560), 'utils.assert_eq', 'assert_eq', (['got', 'expected'], {}), '(got, expected)\n', (545, 560), False, 'from utils import assert_eq\n'), ((597, 648), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (616, 648), False, 'import nvstrings\n'), ((659, 689), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (682, 689), False, 'import nvcategory\n'), ((768, 856), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (787, 856), False, 'import nvstrings\n'), ((877, 906), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (900, 906), False, 'import nvcategory\n'), ((977, 1001), 'utils.assert_eq', 'assert_eq', (['got', 'expected'], {}), '(got, expected)\n', (986, 1001), False, 'from utils import assert_eq\n'), ((1043, 1131), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (1062, 1131), False, 'import nvstrings\n'), ((1152, 1181), 
'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (1175, 1181), False, 'import nvcategory\n'), ((1290, 1378), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (1309, 1378), False, 'import nvstrings\n'), ((1399, 1428), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (1422, 1428), False, 'import nvcategory\n'), ((1541, 1629), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (1560, 1629), False, 'import nvstrings\n'), ((1650, 1679), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (1673, 1679), False, 'import nvcategory\n'), ((1746, 1770), 'utils.assert_eq', 'assert_eq', (['got', 'expected'], {}), '(got, expected)\n', (1755, 1770), False, 'from utils import assert_eq\n'), ((1807, 1895), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (1826, 1895), False, 'import nvstrings\n'), ((1916, 1945), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (1939, 1945), False, 'import nvcategory\n'), ((1977, 1997), 'utils.assert_eq', 'assert_eq', (['got', 'strs'], {}), '(got, strs)\n', (1986, 1997), False, 'from utils import assert_eq\n'), ((2035, 2123), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (2054, 2123), False, 'import nvstrings\n'), ((2144, 2173), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (2167, 2173), False, 'import nvcategory\n'), 
((2458, 2546), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (2477, 2546), False, 'import nvstrings\n'), ((2567, 2596), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (2590, 2596), False, 'import nvcategory\n'), ((2678, 2702), 'utils.assert_eq', 'assert_eq', (['got', 'expected'], {}), '(got, expected)\n', (2687, 2702), False, 'from utils import assert_eq\n'), ((2976, 3064), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (2995, 3064), False, 'import nvstrings\n'), ((3085, 3114), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (3108, 3114), False, 'import nvcategory\n'), ((3244, 3332), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (3263, 3332), False, 'import nvstrings\n'), ((3353, 3382), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs'], {}), '(strs)\n', (3376, 3382), False, 'import nvcategory\n'), ((3405, 3447), 'nvstrings.to_device', 'nvstrings.to_device', (["['ccc', 'aaa', 'bbb']"], {}), "(['ccc', 'aaa', 'bbb'])\n", (3424, 3447), False, 'import nvstrings\n'), ((3690, 3778), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (3709, 3778), False, 'import nvstrings\n'), ((3801, 3889), 'nvstrings.to_device', 'nvstrings.to_device', (["['ggg', 'fff', 'hhh', 'aaa', 'fff', 'fff', 'ggg', 'hhh', 'bbb']"], {}), "(['ggg', 'fff', 'hhh', 'aaa', 'fff', 'fff', 'ggg', 'hhh',\n 'bbb'])\n", (3820, 3889), False, 'import 
nvstrings\n'), ((3910, 3947), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1', 'strs2'], {}), '(strs1, strs2)\n', (3933, 3947), False, 'import nvcategory\n'), ((4230, 4318), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (4249, 4318), False, 'import nvstrings\n'), ((4341, 4429), 'nvstrings.to_device', 'nvstrings.to_device', (["['ggg', 'fff', 'hhh', 'aaa', 'fff', 'fff', 'ggg', 'hhh', 'bbb']"], {}), "(['ggg', 'fff', 'hhh', 'aaa', 'fff', 'fff', 'ggg', 'hhh',\n 'bbb'])\n", (4360, 4429), False, 'import nvstrings\n'), ((4451, 4481), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (4474, 4481), False, 'import nvcategory\n'), ((4493, 4523), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs2'], {}), '(strs2)\n', (4516, 4523), False, 'import nvcategory\n'), ((4799, 4887), 'nvstrings.to_device', 'nvstrings.to_device', (["['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee', 'aaa']"], {}), "(['eee', 'aaa', 'eee', 'ddd', 'ccc', 'ccc', 'ccc', 'eee',\n 'aaa'])\n", (4818, 4887), False, 'import nvstrings\n'), ((4910, 4998), 'nvstrings.to_device', 'nvstrings.to_device', (["['ggg', 'fff', 'hhh', 'aaa', 'fff', 'fff', 'ggg', 'hhh', 'bbb']"], {}), "(['ggg', 'fff', 'hhh', 'aaa', 'fff', 'fff', 'ggg', 'hhh',\n 'bbb'])\n", (4929, 4998), False, 'import nvstrings\n'), ((5020, 5050), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (5043, 5050), False, 'import nvcategory\n'), ((5062, 5092), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs2'], {}), '(strs2)\n', (5085, 5092), False, 'import nvcategory\n'), ((5362, 5413), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (5381, 5413), False, 'import nvstrings\n'), ((5426, 5467), 'nvstrings.to_device', 
'nvstrings.to_device', (["['a', 'b', 'c', 'd']"], {}), "(['a', 'b', 'c', 'd'])\n", (5445, 5467), False, 'import nvstrings\n'), ((5478, 5508), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (5501, 5508), False, 'import nvcategory\n'), ((5632, 5683), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (5651, 5683), False, 'import nvstrings\n'), ((5696, 5727), 'nvstrings.to_device', 'nvstrings.to_device', (["['b', 'd']"], {}), "(['b', 'd'])\n", (5715, 5727), False, 'import nvstrings\n'), ((5738, 5768), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (5761, 5768), False, 'import nvcategory\n'), ((5882, 5933), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (5901, 5933), False, 'import nvstrings\n'), ((5946, 5987), 'nvstrings.to_device', 'nvstrings.to_device', (["['b', 'c', 'e', 'd']"], {}), "(['b', 'c', 'e', 'd'])\n", (5965, 5987), False, 'import nvstrings\n'), ((5998, 6028), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (6021, 6028), False, 'import nvcategory\n'), ((6154, 6205), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (6173, 6205), False, 'import nvstrings\n'), ((6218, 6259), 'nvstrings.to_device', 'nvstrings.to_device', (["['b', 'c', 'e', 'd']"], {}), "(['b', 'c', 'e', 'd'])\n", (6237, 6259), False, 'import nvstrings\n'), ((6270, 6300), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (6293, 6300), False, 'import nvcategory\n'), ((6471, 6522), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (6490, 6522), False, 'import nvstrings\n'), ((6533, 6563), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], 
{}), '(strs1)\n', (6556, 6563), False, 'import nvcategory\n'), ((6809, 6860), 'nvstrings.to_device', 'nvstrings.to_device', (["['a', 'b', 'b', 'f', 'c', 'f']"], {}), "(['a', 'b', 'b', 'f', 'c', 'f'])\n", (6828, 6860), False, 'import nvstrings\n'), ((6871, 6901), 'nvcategory.from_strings', 'nvcategory.from_strings', (['strs1'], {}), '(strs1)\n', (6894, 6901), False, 'import nvcategory\n'), ((7154, 7203), 'numpy.array', 'np.array', (['[97, 112, 112, 108, 101]'], {'dtype': 'np.int8'}), '([97, 112, 112, 108, 101], dtype=np.int8)\n', (7162, 7203), True, 'import numpy as np\n'), ((7218, 7262), 'numpy.array', 'np.array', (['[0, 1, 2, 3, 4, 5]'], {'dtype': 'np.int32'}), '([0, 1, 2, 3, 4, 5], dtype=np.int32)\n', (7226, 7262), True, 'import numpy as np\n'), ((7273, 7316), 'nvcategory.from_offsets', 'nvcategory.from_offsets', (['values', 'offsets', '(5)'], {}), '(values, offsets, 5)\n', (7296, 7316), False, 'import nvcategory\n'), ((7512, 7560), 'nvstrings.to_device', 'nvstrings.to_device', (["['apple', 'pear', 'banana']"], {}), "(['apple', 'pear', 'banana'])\n", (7531, 7560), False, 'import nvstrings\n'), ((7570, 7609), 'nvstrings.to_device', 'nvstrings.to_device', (["['orange', 'pear']"], {}), "(['orange', 'pear'])\n", (7589, 7609), False, 'import nvstrings\n'), ((7620, 7658), 'nvcategory.from_strings_list', 'nvcategory.from_strings_list', (['[s1, s2]'], {}), '([s1, s2])\n', (7648, 7658), False, 'import nvcategory\n'), ((7876, 7943), 'nvcategory.to_device', 'nvcategory.to_device', (["['apple', 'pear', 'banana', 'orange', 'pear']"], {}), "(['apple', 'pear', 'banana', 'orange', 'pear'])\n", (7896, 7943), False, 'import nvcategory\n'), ((3151, 3175), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (3164, 3175), False, 'import pytest\n')]
|
# coding: utf-8
"""
flyteidl/service/admin.proto
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: version not set
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from flyteadmin.models.core_execution_error import CoreExecutionError # noqa: F401,E501
from flyteadmin.models.core_task_execution_phase import CoreTaskExecutionPhase # noqa: F401,E501
from flyteadmin.models.core_task_log import CoreTaskLog # noqa: F401,E501
from flyteadmin.models.event_task_execution_metadata import EventTaskExecutionMetadata # noqa: F401,E501
from flyteadmin.models.protobuf_struct import ProtobufStruct # noqa: F401,E501
class AdminTaskExecutionClosure(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.

    Attributes:
      swagger_types (dict): maps attribute name -> attribute type.
      attribute_map (dict): maps attribute name -> json key in the definition.
    """
    swagger_types = {
        'output_uri': 'str',
        'error': 'CoreExecutionError',
        'phase': 'CoreTaskExecutionPhase',
        'logs': 'list[CoreTaskLog]',
        'started_at': 'datetime',
        'duration': 'str',
        'created_at': 'datetime',
        'updated_at': 'datetime',
        'custom_info': 'ProtobufStruct',
        'reason': 'str',
        'task_type': 'str',
        'metadata': 'EventTaskExecutionMetadata'
    }

    attribute_map = {
        'output_uri': 'output_uri',
        'error': 'error',
        'phase': 'phase',
        'logs': 'logs',
        'started_at': 'started_at',
        'duration': 'duration',
        'created_at': 'created_at',
        'updated_at': 'updated_at',
        'custom_info': 'custom_info',
        'reason': 'reason',
        'task_type': 'task_type',
        'metadata': 'metadata'
    }

    def __init__(self, output_uri=None, error=None, phase=None, logs=None,
                 started_at=None, duration=None, created_at=None,
                 updated_at=None, custom_info=None, reason=None,
                 task_type=None, metadata=None):  # noqa: E501
        """AdminTaskExecutionClosure - a model defined in Swagger"""  # noqa: E501
        supplied = {
            'output_uri': output_uri,
            'error': error,
            'phase': phase,
            'logs': logs,
            'started_at': started_at,
            'duration': duration,
            'created_at': created_at,
            'updated_at': updated_at,
            'custom_info': custom_info,
            'reason': reason,
            'task_type': task_type,
            'metadata': metadata,
        }
        # Every backing field starts out as None ...
        for name in self.swagger_types:
            setattr(self, '_' + name, None)
        self.discriminator = None
        # ... and only explicitly supplied values are assigned, going through
        # the property setters exactly like the generated per-field code did.
        for name, value in supplied.items():
            if value is not None:
                setattr(self, name, value)

    @property
    def output_uri(self):
        """str: path to remote data store where the output blob is stored if
        the execution succeeded (and produced outputs)."""
        return self._output_uri

    @output_uri.setter
    def output_uri(self, value):
        self._output_uri = value

    @property
    def error(self):
        """CoreExecutionError: error information for the task execution.
        Populated if the execution failed."""
        return self._error

    @error.setter
    def error(self, value):
        self._error = value

    @property
    def phase(self):
        """CoreTaskExecutionPhase: the last recorded phase for this task
        execution."""
        return self._phase

    @phase.setter
    def phase(self, value):
        self._phase = value

    @property
    def logs(self):
        """list[CoreTaskLog]: detailed log information output by the task
        execution."""
        return self._logs

    @logs.setter
    def logs(self, value):
        self._logs = value

    @property
    def started_at(self):
        """datetime: time at which the task execution began running."""
        return self._started_at

    @started_at.setter
    def started_at(self, value):
        self._started_at = value

    @property
    def duration(self):
        """str: the amount of time the task execution spent running."""
        return self._duration

    @duration.setter
    def duration(self, value):
        self._duration = value

    @property
    def created_at(self):
        """datetime: time at which the task execution was created."""
        return self._created_at

    @created_at.setter
    def created_at(self, value):
        self._created_at = value

    @property
    def updated_at(self):
        """datetime: time at which the task execution was last updated."""
        return self._updated_at

    @updated_at.setter
    def updated_at(self, value):
        self._updated_at = value

    @property
    def custom_info(self):
        """ProtobufStruct: custom data specific to the task plugin."""
        return self._custom_info

    @custom_info.setter
    def custom_info(self, value):
        self._custom_info = value

    @property
    def reason(self):
        """str: if there is an explanation for the most recent phase
        transition, the reason will capture it."""
        return self._reason

    @reason.setter
    def reason(self, value):
        self._reason = value

    @property
    def task_type(self):
        """str: a predefined yet extensible Task type identifier."""
        return self._task_type

    @task_type.setter
    def task_type(self, value):
        self._task_type = value

    @property
    def metadata(self):
        """EventTaskExecutionMetadata: metadata around how a task was
        executed."""
        return self._metadata

    @metadata.setter
    def metadata(self, value):
        self._metadata = value

    def to_dict(self):
        """Returns the model properties as a dict"""

        def _convert(value):
            # Recursively serialise nested swagger models.
            if isinstance(value, list):
                return [v.to_dict() if hasattr(v, "to_dict") else v for v in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in value.items()}
            return value

        result = {attr: _convert(getattr(self, attr)) for attr in self.swagger_types}
        if issubclass(AdminTaskExecutionClosure, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, AdminTaskExecutionClosure)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self.__eq__(other)
|
[
"six.iteritems"
] |
[((12316, 12349), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (12329, 12349), False, 'import six\n')]
|
"""
Object to perform analysis and plotting on a given dataset
Methods for the measurement control software to anaylse and plot data
@author: krolljg
"""
import matplotlib.pyplot as plt
import numpy as np
import colorsys
import qcodes
#from qcodes import Instrument # consider making a qcodes instrument in the future - not sure what the advantage is
import qtt
from qtt.utilities.tools import addPPTslide
import scipy.optimize as optimisation
class MeasurementAnalysis():
    """
    Class that allows for analysis of measurement datasets. Can be initialised with a dataset for analysis.

    dataset: target dataset; when given it is loaded and, depending on its
        dimensionality (1 or 2 setpoint variables), plotted immediately
    add_ppts: automatically loads plots into a powerpoint
    prev_fig: can initialise with a figure in order to continue working on it
    """

    def __init__(
            self,
            dataset=None,
            add_ppts=True,
            prev_fig=None,
            verbose=True,
            **kwargs
    ):
        self.add_ppts = add_ppts
        # used to keep working on the same figure if necessary
        if prev_fig is None:
            self.init_fig()
        else:
            self.fig = prev_fig
        if dataset is not None:
            self.load_data(dataset)
            # auto-plot: dimensionality decides between line plot and colour map
            if len(self.setpoint_vars) == 1:
                self.plot_1D()
            if len(self.setpoint_vars) == 2:
                self.plot_2D()

    def load_data(self, dataset, xvar=None, yvar=None, zvar=None):
        """Load a dataset and resolve which arrays act as x, y (and z).

        dataset: qcodes-style dataset exposing an ``arrays`` mapping whose
            values carry an ``is_setpoint`` flag
        xvar, yvar, zvar: optional array names; when omitted the first
            matching setpoint/measured arrays are used
        """
        self.dataset = dataset
        arrays = self.dataset.arrays
        self.setpoint_vars = {key: value for (key, value) in arrays.items() if value.is_setpoint}
        self.measured_vars = {key: value for (key, value) in arrays.items() if not value.is_setpoint}
        # determine dimensionality of dataset, load x, y and z variables appropriately
        if len(self.setpoint_vars) == 1:
            # 1D: x is the single setpoint, y a measured array
            if xvar is None:
                self.xvar = self.setpoint_vars.get(list(self.setpoint_vars)[0])
            else:
                self.xvar = self.setpoint_vars.get(xvar)
            if yvar is None:
                self.yvar = self.measured_vars.get(list(self.measured_vars)[0])
            else:
                self.yvar = self.measured_vars.get(yvar)
        else:
            # 2D: x and y are setpoints, z a measured array
            if xvar is None:
                self.xvar = self.setpoint_vars.get(list(self.setpoint_vars)[0])
            else:
                self.xvar = self.setpoint_vars.get(xvar)
            if yvar is None:
                self.yvar = self.setpoint_vars.get(list(self.setpoint_vars)[1])
            else:
                self.yvar = self.setpoint_vars.get(yvar)
            if zvar is None:
                self.zvar = self.measured_vars.get(list(self.measured_vars)[0])
            else:
                self.zvar = self.measured_vars.get(zvar)

    def init_fig(self):
        ''' Initialises a new figure '''
        self.fig = plt.figure()

    def init_labels(self):
        ''' Used to generate a figure for 1D plots with axis labels and a title'''
        ax = self.fig.add_subplot(111)
        xvarlabel = self.xvar.label
        xvarunit = self.xvar.unit
        yvarlabel = self.yvar.label
        yvarunit = self.yvar.unit
        ax.set_xlabel(xvarlabel + ' (' + xvarunit + ')', fontsize=12)
        ax.set_ylabel(yvarlabel + ' (' + yvarunit + ')', fontsize=12)
        ax.set_title(str(self.dataset.location))
        ax.ticklabel_format(style='sci', scilimits=(0, 0))
        self.fig.tight_layout()

    def add_linetrace(self, dataset=None, xvar=None, yvar=None, sub_fig=0, **kwargs):
        ''' Add linetrace to an existing figure '''
        if dataset is not None:  # reloads data if new dataset
            self.load_data(dataset, xvar=xvar, yvar=yvar)
        ax = self.fig.axes[sub_fig]  # can address individual sub figures
        ax.plot(self.xvar, self.yvar, **kwargs)

    def extract_gates(self):
        ''' Extract the dac (gate) values from the metadata as {name: value}. '''
        instruments = self.dataset.metadata.get('station').get('instruments')
        instrument_list = list(instruments.keys())
        ivvis = [inst for inst in instrument_list if inst[:4] == 'ivvi']
        dacs = []
        dac_values = []
        # Bug fix: values must be read per-IVVI inside the loop. The old code
        # looked all collected dac names up in whichever IVVI the loop ended
        # on, which gave wrong values (or crashed) with more than one rack.
        for ivvi in ivvis:
            parameters = instruments.get(ivvi).get('parameters')
            ivvi_dacs = [dac for dac in parameters if dac[:3] == 'dac']
            dacs += ivvi_dacs
            dac_values += [parameters.get(dd).get('value') for dd in ivvi_dacs]
        return dict(zip(dacs, dac_values))  # zip lists together

    def add_ppt_slide(self, title=None, **kwargs):
        ''' Adds figure to a PPT, creates one if one is not open. '''
        gatelist = self.extract_gates()
        if title is None:
            # Bug fix: this fallback used to be wrapped in
            # ``if __name__ == '__main__':`` so imported code always passed
            # title=None; the dataset location is now used unconditionally.
            title = str(self.dataset.location)
        addPPTslide(fig=self.fig.number, title=title, notes=str(gatelist), **kwargs)

    def plot_1D(self, dataset=None, xvar=None, yvar=None, new_fig=True, **kwargs):
        ''' Generates a 1D plot from a dataset. x and y can be specified by name.'''
        if dataset is not None:
            if isinstance(dataset, list):  # load first dataset
                self.load_data(dataset[0], xvar, yvar)
            else:
                if isinstance(yvar, list):  # load first yvar
                    self.load_data(dataset, xvar, yvar[0])
                else:  # load yvar
                    self.load_data(dataset, xvar, yvar)
        if new_fig:
            self.init_fig()
        self.init_labels()
        if isinstance(dataset, list):  # plotting multiple datasets
            # generating my own colormap
            saturation = 0.8
            lightness = 0.8
            hue_range = np.linspace(0.0, 0.1, len(dataset))
            color_list = [colorsys.hsv_to_rgb(hv, saturation, lightness) for hv in hue_range]
            # the two branches of the old if/else here were byte-identical,
            # so a single call is used for every trace
            for custom_color, fd in zip(color_list, dataset):
                self.add_linetrace(dataset=fd, xvar=xvar, yvar=yvar, color=custom_color)
        elif isinstance(yvar, list):  # plotting multiple Yvars
            for yy in yvar:
                self.load_data(dataset, xvar, yy)
                self.add_linetrace(**kwargs)
        else:  # plotting single dataset
            self.add_linetrace(**kwargs)
        if self.add_ppts:
            self.add_ppt_slide()

    def plot_2D(self, dataset=None, xvar=None, yvar=None, zvar=None, **kwargs):
        ''' Generates a 2D plot from a dataset. x y and z variables can be specified by name.'''
        if dataset is not None:
            self.load_data(dataset, xvar, yvar, zvar)
        self.init_fig()
        self.init_labels()
        cb = self.fig.axes[0].pcolormesh(self.xvar, self.yvar, self.zvar)
        self.fig.colorbar(cb)
        if self.add_ppts:
            self.add_ppt_slide()

    def calculate_resistance(self, dataset):
        ''' Plot the dataset, fit a straight line and annotate the resistance. '''
        self.plot_1D(dataset)
        # in future, add routine to calculate rescaling due to axes units (mV->V etc)
        fit = np.polyfit(self.xvar, self.yvar, 1)
        x_fit = np.linspace(self.xvar[0], self.xvar[-1], 100)
        y_fit = fit[0] * x_fit + fit[1]
        G = fit[0]  # slope of the IV trace = conductance
        R = (1 / G)
        self.fig.axes[0].plot(x_fit, y_fit, 'k--', label='Resistance: %d Ohm' % R)
        self.fig.axes[0].legend()
        if self.add_ppts:
            self.add_ppt_slide()

    def determine_turn_on(self, threshold_factor=0.1, step=3):
        ''' Estimate the turn-on voltage of the currently loaded 1D trace.

        threshold_factor: fraction of the maximum signal used to discard the
            initial (pinched-off) part of the trace
        step: index separation used for the finite-difference slope
        '''
        self.plot_1D()
        x = self.xvar
        y = self.yvar
        # check sweep direction and fix
        if y[0] > y[-1]:
            y = np.flip(y, 0)
            x = np.flip(x, 0)
        y_threshold = max(y) * threshold_factor
        # first position in y vector above threshold value:
        ind_start = np.argmax(np.asarray(y) > y_threshold)
        y_clean = y[ind_start:]
        x_clean = x[ind_start:]
        diff_vector = y_clean[step:] - y_clean[:-step]
        ind_diff_max = np.argmax(diff_vector)
        diff_max_y = max(diff_vector)
        diff_x = x_clean[ind_diff_max + step] - x_clean[ind_diff_max]
        slope = diff_max_y / diff_x
        # tangent through the point of steepest slope ...
        pos_x = (x_clean[ind_diff_max + step] + x_clean[ind_diff_max]) / 2
        pos_y = (y_clean[ind_diff_max + step] + y_clean[ind_diff_max]) / 2
        offset_y = pos_y - pos_x * slope
        # ... crosses zero at the turn-on voltage
        turn_on_value = int(np.round(-offset_y / slope, 0))
        y_fit = slope * np.asarray(x) + offset_y
        self.fig.axes[0].plot(x, y_fit, 'k--', label='Turn on: %d mV' % turn_on_value)
        self.fig.axes[0].legend()
        self.fig.axes[0].set_ylim(bottom=min(y), top=max(y))
        if self.add_ppts:
            self.add_ppt_slide()

    def extract_mobility(self, dataset):
        ''' Extract carrier density and mobility from Hall measurement data.

        dataset: object exposing ``B`` (field), ``Rho_xx`` and ``Rho_xy``
            arrays; mobility is plotted against density.
        '''
        e = 1.60217662 * 10 ** -19  # elementary charge (C)

        def unzip(iterable):
            # split a sequence of pairs into two tuples
            return list(zip(*iterable))[0], list(zip(*iterable))[1]

        def linearmodel(x, m, c):
            return x * m + c

        def fit_gradient(x, y, intercept_error=100, plotting=False,
                         silent=True):  # return gradient, intercept; error if intercept not at 0
            popt = np.asarray([np.nan, np.nan])
            # strip nans/infs if necessary
            filtered = [(bb, rr) for (bb, rr) in zip(x, y) if (not np.isinf(rr)) and (not np.isnan(rr))]
            filtered_array = np.asarray(filtered)
            # if samples >= 2, fit:
            if len(filtered) > 1:
                x_filtered = filtered_array[:, 0]
                y_filtered = filtered_array[:, 1]
                popt, _ = optimisation.curve_fit(linearmodel, x_filtered, y_filtered, p0=[0, 0])
                if (np.abs(popt[1]) > intercept_error) and not silent:
                    print('Fit intercept not at zero - check fits!')
                if plotting:
                    plt.plot(x_filtered, y_filtered, '.')
                    plt.plot(x_filtered, linearmodel(x_filtered, popt[0], popt[1]), ':')
            return popt

        self.init_fig()
        ax = self.fig.add_subplot(111)
        ax.set_xlabel('n (cm$^{-2}$)', fontsize=12)
        # raw string: '\m' is an invalid (deprecated) escape sequence
        ax.set_ylabel(r'$\mu$ (cm$^{2}$/Vs)', fontsize=12)
        ax.set_title(str(dataset.location))
        ax.ticklabel_format(style='sci', scilimits=(0, 0))
        self.fig.tight_layout()
        Bs = dataset.B
        rho_xx = dataset.Rho_xx
        rho_xy = dataset.Rho_xy
        # gradient of rho_xy vs B for every column gives 1/(n e)
        rho_xy_dB_popts = np.vstack([fit_gradient(Bs, xys, plotting=True) for xys in np.transpose(rho_xy)])
        drho_xy_dB = rho_xy_dB_popts[:, 0]
        n_s = 1 / e / drho_xy_dB  # in m^-2
        mu = drho_xy_dB / rho_xx[0]
        nan_inf_removal = [(bb, rr) for (bb, rr) in zip(n_s, mu) if (not np.isinf(rr)) and (not np.isnan(rr))]
        negative_removal = [(bb, rr) for (bb, rr) in nan_inf_removal if (bb > 0) and (rr > 0)]
        n_s_filt, mu_filt = unzip(negative_removal)
        plt.plot(n_s_filt, mu_filt, '.')

    def plot_multiple_scans(self, datasets, xvar=None, yvar=None, hue=0, label=None, new_fig=True, **kwargs):
        ''' Plot several datasets on shared axes with a graded colour map.

        hue: starting hue of the colour gradient
        label: legend label attached to the first trace only
        '''
        self.load_data(datasets[0], xvar, yvar)
        if new_fig:
            self.init_fig()
        else:
            self.fig.clf()
        self.init_labels()
        # generating my own colormap
        saturation = 0.8
        lightness = 0.8
        hue_range = np.linspace(hue, 0.1, len(datasets))
        color_list = [colorsys.hsv_to_rgb(hv, saturation, lightness) for hv in hue_range]
        for custom_color, fd in zip(color_list, datasets):
            if custom_color == color_list[0]:
                # only the first trace carries the legend label
                self.add_linetrace(dataset=fd, xvar=xvar, yvar=yvar, color=custom_color, label=label, **kwargs)
            else:
                self.add_linetrace(dataset=fd, xvar=xvar, yvar=yvar, color=custom_color, **kwargs)
        if self.add_ppts:
            self.add_ppt_slide()

    def plot_drift_scans(self, forward_datasets, backward_datasets, xvar=None, yvar=None, new_fig=True):
        ''' Plot paired forward/backward gate sweeps with matched colours. '''
        self.load_data(forward_datasets[0], xvar, yvar)
        if new_fig:
            self.init_fig()
        else:
            self.fig.clf()
        self.init_labels()
        # generating my own colormap
        saturation = 0.8
        lightness = 0.8
        hue_range = np.linspace(0.0, 0.1, len(forward_datasets))
        color_list = [colorsys.hsv_to_rgb(hv, saturation, lightness) for hv in hue_range]
        for custom_color, fd, bd in zip(color_list, forward_datasets, backward_datasets):
            if custom_color == color_list[0]:
                # label only the first pair so the legend stays compact
                self.add_linetrace(dataset=fd, xvar=xvar, yvar=yvar, color=custom_color, label='Forward')
                self.add_linetrace(dataset=bd, xvar=xvar, yvar=yvar, color=custom_color, linestyle='--', label='Backward')
            else:
                self.add_linetrace(dataset=fd, xvar=xvar, yvar=yvar, color=custom_color)
                self.add_linetrace(dataset=bd, xvar=xvar, yvar=yvar, color=custom_color, linestyle='--')
        self.fig.axes[0].legend()
        if self.add_ppts:
            self.add_ppt_slide()

    def analyse_drift_scans(self, forward_datasets, backward_datasets, xvar=None, yvar=None, new_fig=True):
        ''' Plot the RMS difference between consecutive drift scans against the
        peak voltage reached in each scan.

        The results are stored on ``self.forward_diff_list`` and
        ``self.backward_diff_list`` for further analysis.
        '''
        # Written by Lucas (I think). Adapted with minimal changes.
        def scans_diff(x1, y1, x2, y2):  # ds1 should be shorter than ds2
            # RMS difference between two scans over their overlapping range
            if len(x1) > len(x2):
                print('Error: cannot process datasets in reversed order')
            # sort both vectors in ascending order
            if y1[0] > y1[-1]:
                y1 = np.flip(y1, 0)
                x1 = np.flip(x1, 0)
            if y2[0] > y2[-1]:
                y2 = np.flip(y2, 0)
                x2 = np.flip(x2, 0)
            # Only select comparable part
            x2_trim = x2[:len(x1)]
            y2_trim = y2[:len(x1)]
            # check that the gate axes of both scans actually coincide
            if max(abs(x1 - x2_trim)) > 0.001:
                print('Gate voltages are not comparable')
                print(x1)
                print(x2_trim)
            # calculate sum of difference squared between both vectors
            y1_np = np.array(y1)
            y2_trim_np = np.array(y2_trim)
            try:
                y_diff_sq = sum((y1_np - y2_trim_np) ** 2)
            except Exception:
                # previously a bare except swallowed the error and the code
                # then crashed with a NameError on the undefined result;
                # report and re-raise the original exception instead
                print('Error in calculating difference between two consecutive datasets')
                raise
            if (y_diff_sq / len(x1)) ** 0.5 < 0:
                print('ERROR: difference between datasets smaller than zero while it should be larger')
            return (y_diff_sq / len(x1)) ** 0.5

        ##############################################################################
        self.load_data(forward_datasets[0], xvar, yvar)
        forward_diff_list = []
        backward_diff_list = []
        peak_voltage_list = []
        for i in range(len(forward_datasets) - 1):
            # FORWARD
            ds1 = forward_datasets[i]
            ds2 = forward_datasets[i + 1]
            self.load_data(ds1)
            x1, y1 = self.xvar, self.yvar
            self.load_data(ds2)
            x2, y2 = self.xvar, self.yvar
            rms_diff_FW = scans_diff(x1, y1, x2, y2)
            forward_diff_list.append(rms_diff_FW)
            # BACKWARD
            ds1 = backward_datasets[i]
            ds2 = backward_datasets[i + 1]
            self.load_data(ds1)
            x1, y1 = self.xvar, self.yvar
            self.load_data(ds2)
            x2, y2 = self.xvar, self.yvar
            rms_diff_BW = scans_diff(x1, y1, x2, y2)
            backward_diff_list.append(rms_diff_BW)
            # PEAK VOLTAGE LIST
            peak_voltage = max(x2)
            peak_voltage_list.append(peak_voltage)
        if new_fig:
            self.init_fig()
        else:
            self.fig.clf()
        ax = self.fig.add_subplot(111)
        ax.plot(peak_voltage_list, forward_diff_list, '1r', label='Forward scans')
        ax.plot(peak_voltage_list, backward_diff_list, '2b', label='Backward scans')
        # plt.yscale("log") #log scale
        plt.ylim(bottom=0)
        x_title1 = self.xvar.label
        plt.xlabel('Peak voltage on %s (mV)' % x_title1)
        plt.ylabel('RMS difference (A)')
        plt.legend()
        plt.tight_layout()
        if self.add_ppts:
            self.add_ppt_slide(title='RMS difference of drift scan')
        # keep the diff lists on the instance for later inspection
        self.forward_diff_list = forward_diff_list
        self.backward_diff_list = backward_diff_list
|
[
"numpy.abs",
"numpy.argmax",
"numpy.polyfit",
"numpy.isnan",
"matplotlib.pyplot.figure",
"numpy.round",
"matplotlib.pyplot.tight_layout",
"numpy.transpose",
"numpy.linspace",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.legend",
"numpy.asarray",
"numpy.isinf",
"scipy.optimize.curve_fit",
"matplotlib.pyplot.ylabel",
"numpy.flip",
"matplotlib.pyplot.plot",
"colorsys.hsv_to_rgb",
"numpy.array",
"matplotlib.pyplot.xlabel"
] |
[((2870, 2882), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2880, 2882), True, 'import matplotlib.pyplot as plt\n'), ((7099, 7134), 'numpy.polyfit', 'np.polyfit', (['self.xvar', 'self.yvar', '(1)'], {}), '(self.xvar, self.yvar, 1)\n', (7109, 7134), True, 'import numpy as np\n'), ((7151, 7196), 'numpy.linspace', 'np.linspace', (['self.xvar[0]', 'self.xvar[-1]', '(100)'], {}), '(self.xvar[0], self.xvar[-1], 100)\n', (7162, 7196), True, 'import numpy as np\n'), ((8023, 8045), 'numpy.argmax', 'np.argmax', (['diff_vector'], {}), '(diff_vector)\n', (8032, 8045), True, 'import numpy as np\n'), ((10895, 10927), 'matplotlib.pyplot.plot', 'plt.plot', (['n_s_filt', 'mu_filt', '"""."""'], {}), "(n_s_filt, mu_filt, '.')\n", (10903, 10927), True, 'import matplotlib.pyplot as plt\n'), ((16318, 16336), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {'bottom': '(0)'}), '(bottom=0)\n', (16326, 16336), True, 'import matplotlib.pyplot as plt\n'), ((16380, 16428), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (["('Peak voltage on %s (mV)' % x_title1)"], {}), "('Peak voltage on %s (mV)' % x_title1)\n", (16390, 16428), True, 'import matplotlib.pyplot as plt\n'), ((16437, 16469), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""RMS difference (A)"""'], {}), "('RMS difference (A)')\n", (16447, 16469), True, 'import matplotlib.pyplot as plt\n'), ((16478, 16490), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (16488, 16490), True, 'import matplotlib.pyplot as plt\n'), ((16500, 16518), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (16516, 16518), True, 'import matplotlib.pyplot as plt\n'), ((7665, 7678), 'numpy.flip', 'np.flip', (['y', '(0)'], {}), '(y, 0)\n', (7672, 7678), True, 'import numpy as np\n'), ((7695, 7708), 'numpy.flip', 'np.flip', (['x', '(0)'], {}), '(x, 0)\n', (7702, 7708), True, 'import numpy as np\n'), ((8411, 8441), 'numpy.round', 'np.round', (['(-offset_y / slope)', '(0)'], {}), '(-offset_y / slope, 0)\n', (8419, 8441), True, 
'import numpy as np\n'), ((9158, 9186), 'numpy.asarray', 'np.asarray', (['[np.nan, np.nan]'], {}), '([np.nan, np.nan])\n', (9168, 9186), True, 'import numpy as np\n'), ((9365, 9385), 'numpy.asarray', 'np.asarray', (['filtered'], {}), '(filtered)\n', (9375, 9385), True, 'import numpy as np\n'), ((11372, 11418), 'colorsys.hsv_to_rgb', 'colorsys.hsv_to_rgb', (['hv', 'saturation', 'lightness'], {}), '(hv, saturation, lightness)\n', (11391, 11418), False, 'import colorsys\n'), ((12577, 12623), 'colorsys.hsv_to_rgb', 'colorsys.hsv_to_rgb', (['hv', 'saturation', 'lightness'], {}), '(hv, saturation, lightness)\n', (12596, 12623), False, 'import colorsys\n'), ((14399, 14411), 'numpy.array', 'np.array', (['y1'], {}), '(y1)\n', (14407, 14411), True, 'import numpy as np\n'), ((14437, 14454), 'numpy.array', 'np.array', (['y2_trim'], {}), '(y2_trim)\n', (14445, 14454), True, 'import numpy as np\n'), ((5726, 5772), 'colorsys.hsv_to_rgb', 'colorsys.hsv_to_rgb', (['hv', 'saturation', 'lightness'], {}), '(hv, saturation, lightness)\n', (5745, 5772), False, 'import colorsys\n'), ((7849, 7862), 'numpy.asarray', 'np.asarray', (['y'], {}), '(y)\n', (7859, 7862), True, 'import numpy as np\n'), ((8468, 8481), 'numpy.asarray', 'np.asarray', (['x'], {}), '(x)\n', (8478, 8481), True, 'import numpy as np\n'), ((9583, 9653), 'scipy.optimize.curve_fit', 'optimisation.curve_fit', (['linearmodel', 'x_filtered', 'y_filtered'], {'p0': '[0, 0]'}), '(linearmodel, x_filtered, y_filtered, p0=[0, 0])\n', (9605, 9653), True, 'import scipy.optimize as optimisation\n'), ((13801, 13815), 'numpy.flip', 'np.flip', (['y1', '(0)'], {}), '(y1, 0)\n', (13808, 13815), True, 'import numpy as np\n'), ((13837, 13851), 'numpy.flip', 'np.flip', (['x1', '(0)'], {}), '(x1, 0)\n', (13844, 13851), True, 'import numpy as np\n'), ((13904, 13918), 'numpy.flip', 'np.flip', (['y2', '(0)'], {}), '(y2, 0)\n', (13911, 13918), True, 'import numpy as np\n'), ((13940, 13954), 'numpy.flip', 'np.flip', (['x2', '(0)'], {}), '(x2, 0)\n', 
(13947, 13954), True, 'import numpy as np\n'), ((9844, 9881), 'matplotlib.pyplot.plot', 'plt.plot', (['x_filtered', 'y_filtered', '"""."""'], {}), "(x_filtered, y_filtered, '.')\n", (9852, 9881), True, 'import matplotlib.pyplot as plt\n'), ((10480, 10500), 'numpy.transpose', 'np.transpose', (['rho_xy'], {}), '(rho_xy)\n', (10492, 10500), True, 'import numpy as np\n'), ((9674, 9689), 'numpy.abs', 'np.abs', (['popt[1]'], {}), '(popt[1])\n', (9680, 9689), True, 'import numpy as np\n'), ((10701, 10713), 'numpy.isinf', 'np.isinf', (['rr'], {}), '(rr)\n', (10709, 10713), True, 'import numpy as np\n'), ((10724, 10736), 'numpy.isnan', 'np.isnan', (['rr'], {}), '(rr)\n', (10732, 10736), True, 'import numpy as np\n'), ((9297, 9309), 'numpy.isinf', 'np.isinf', (['rr'], {}), '(rr)\n', (9305, 9309), True, 'import numpy as np\n'), ((9320, 9332), 'numpy.isnan', 'np.isnan', (['rr'], {}), '(rr)\n', (9328, 9332), True, 'import numpy as np\n')]
|
import os
from typing import Optional
from pytest_embedded.log import PexpectProcess, cls_redirect_stdout, live_print_call
from pytest_embedded_idf.app import IdfApp
from . import DEFAULT_IMAGE_FN
class IdfFlashImageMaker:
    """
    Build one flash-able qemu image out of the `IdfApp`'s partition table
    and all of its individual flash binaries.
    """

    def __init__(self, app: IdfApp, image_path: str):
        """
        Args:
            app: `IdfApp` instance
            image_path: output image path
        """
        self.app = app
        self.image_path = image_path

    def make_bin(self) -> None:
        """
        Concatenate every flash file into a single image for qemu.
        """
        # flash_files is sorted by offset; when the first partition does not
        # sit at 0x0, zero-pad the gap so every binary lands at its offset.
        first_offset = self.app.flash_files[0][0]
        if first_offset != 0x0:
            self._write_empty_bin(count=first_offset)
        for offset, file_path, encrypted in self.app.flash_files:
            if encrypted:
                raise NotImplementedError('will implement later')
            self._write_bin(file_path, seek=offset)

    def _write_empty_bin(self, count: int, bs: int = 1024, seek: int = 0):
        # zero-fill `count` blocks of `bs` bytes starting at block `seek`
        cmd = f'dd if=/dev/zero bs={bs} count={count} seek={seek} of={self.image_path}'
        live_print_call(cmd, shell=True)

    def _write_bin(self, binary_filepath, bs: int = 1, seek: int = 0):
        # copy one binary into the image in place (conv=notrunc keeps the rest)
        cmd = f'dd if={binary_filepath} bs={bs} seek={seek} of={self.image_path} conv=notrunc'
        live_print_call(cmd, shell=True)

    def _write_encrypted_bin(self, binary_filepath, bs: int = 1, seek: int = 0):
        # generate a fake key bin
        live_print_call('dd if=/dev/zero bs=1 count=32 of=key.bin', shell=True)
        live_print_call(
            f'espsecure.py encrypt_flash_data --keyfile key.bin --output decrypted.bin --address {seek} '
            f'{binary_filepath}',
            shell=True,
        )
        self._write_bin('decrypted.bin', bs=bs, seek=seek)

    def _burn_efuse(self):
        pass
class QemuApp(IdfApp):
    """
    QEMU App class: an `IdfApp` that additionally owns a single flash-able
    QEMU image, created on construction when it does not exist yet.

    Attributes:
        pexpect_proc (PexpectProcess): pexpect process
        image_path (str): QEMU flash-able bin path
    """

    def __init__(
        self,
        pexpect_proc: PexpectProcess,
        app_path: Optional[str] = None,
        build_dir: Optional[str] = None,
        part_tool: Optional[str] = None,
        qemu_image_path: Optional[str] = None,
        **kwargs,
    ):
        """
        Args:
            pexpect_proc: pexpect process
            app_path: App path
            build_dir: Build directory
            part_tool: Partition tool path
            qemu_image_path: QEMU flashable bin path
        """
        super().__init__(app_path, build_dir=build_dir, part_tool=part_tool, **kwargs)
        self.pexpect_proc = pexpect_proc
        # default image location: <app_path>/DEFAULT_IMAGE_FN
        self.image_path = qemu_image_path or os.path.join(self.app_path, DEFAULT_IMAGE_FN)

        # self.target is provided by the IdfApp base class
        if self.target != 'esp32':
            raise ValueError('For now on QEMU we only support ESP32')

        self.create_image()

    @cls_redirect_stdout(source='create image')
    def create_image(self) -> None:
        """
        Create the image if not exists; an already-existing image is reused as-is.
        """
        if os.path.exists(self.image_path):
            print(f'Using image already exists: {self.image_path}')
        else:
            image_maker = IdfFlashImageMaker(self, self.image_path)
            image_maker.make_bin()
|
[
"os.path.exists",
"os.path.join",
"pytest_embedded.log.live_print_call",
"pytest_embedded.log.cls_redirect_stdout"
] |
[((3145, 3187), 'pytest_embedded.log.cls_redirect_stdout', 'cls_redirect_stdout', ([], {'source': '"""create image"""'}), "(source='create image')\n", (3164, 3187), False, 'from pytest_embedded.log import PexpectProcess, cls_redirect_stdout, live_print_call\n'), ((1204, 1315), 'pytest_embedded.log.live_print_call', 'live_print_call', (['f"""dd if=/dev/zero bs={bs} count={count} seek={seek} of={self.image_path}"""'], {'shell': '(True)'}), "(\n f'dd if=/dev/zero bs={bs} count={count} seek={seek} of={self.image_path}',\n shell=True)\n", (1219, 1315), False, 'from pytest_embedded.log import PexpectProcess, cls_redirect_stdout, live_print_call\n'), ((1422, 1541), 'pytest_embedded.log.live_print_call', 'live_print_call', (['f"""dd if={binary_filepath} bs={bs} seek={seek} of={self.image_path} conv=notrunc"""'], {'shell': '(True)'}), "(\n f'dd if={binary_filepath} bs={bs} seek={seek} of={self.image_path} conv=notrunc'\n , shell=True)\n", (1437, 1541), False, 'from pytest_embedded.log import PexpectProcess, cls_redirect_stdout, live_print_call\n'), ((1657, 1729), 'pytest_embedded.log.live_print_call', 'live_print_call', (['f"""dd if=/dev/zero bs=1 count=32 of=key.bin"""'], {'shell': '(True)'}), "(f'dd if=/dev/zero bs=1 count=32 of=key.bin', shell=True)\n", (1672, 1729), False, 'from pytest_embedded.log import PexpectProcess, cls_redirect_stdout, live_print_call\n'), ((1800, 1949), 'pytest_embedded.log.live_print_call', 'live_print_call', (['f"""espsecure.py encrypt_flash_data --keyfile key.bin --output decrypted.bin --address {seek} {binary_filepath}"""'], {'shell': '(True)'}), "(\n f'espsecure.py encrypt_flash_data --keyfile key.bin --output decrypted.bin --address {seek} {binary_filepath}'\n , shell=True)\n", (1815, 1949), False, 'from pytest_embedded.log import PexpectProcess, cls_redirect_stdout, live_print_call\n'), ((3298, 3329), 'os.path.exists', 'os.path.exists', (['self.image_path'], {}), '(self.image_path)\n', (3312, 3329), False, 'import os\n'), ((2958, 3003), 
'os.path.join', 'os.path.join', (['self.app_path', 'DEFAULT_IMAGE_FN'], {}), '(self.app_path, DEFAULT_IMAGE_FN)\n', (2970, 3003), False, 'import os\n')]
|
import sys
import gzip
import os
# Compare a clipped/trimmed fastq against its untrimmed original and record
# the reads that look like tRNA CCA ends: the trimmed read ends in "CC" and
# the untrimmed read continues with an "A" right after the trimmed part.
DEBUG = 0

if DEBUG:
    inputFile="Z:/Shared/Labs/Vickers Lab/Tiger/projects/20150930_TGIRT_tRNA_human/identical/result/KCVH01_clipped_identical.fastq.gz"
    originalFile="Z:/Shared/Labs/Vickers Lab/Tiger/data/20150515_tRNA/KCVH1_S6_R1_001.fastq.gz"
    outputFile="H:/temp/test_cca.tsv"
else:
    inputFile = sys.argv[1]
    originalFile = sys.argv[2]
    outputFile = sys.argv[3]

# Pass 1: collect read name -> sequence for every trimmed read ending in "CC".
ccs = {}
if inputFile.endswith(".gz"):
    f = gzip.open(inputFile, 'rt')
else:
    f = open(inputFile, 'r')
try:
    readCount = 0
    while True:
        header = f.readline()
        if '' == header:  # EOF
            break
        if not header.startswith("@"):
            continue
        seq = f.readline().strip()
        f.readline()  # '+' separator line (unused)
        f.readline()  # quality line (unused)
        readCount = readCount + 1
        if readCount % 10000 == 0:
            print("%d/%d reads end with CC found" % (len(ccs), readCount))
        if seq.endswith("CC"):
            name = header.split(' ')[0]
            ccs[name] = seq
finally:
    f.close()

# Pass 2: find those reads in the untrimmed file; keep the ones whose base
# immediately after the trimmed sequence is 'A'.
if originalFile.endswith(".gz"):
    f = gzip.open(originalFile, 'rt')
else:
    f = open(originalFile, 'r')
try:
    # write to a temp file first so a crash never leaves a partial output
    tempFile = outputFile + ".tmp"
    with open(tempFile, "w") as sw:
        readCount = 0
        ccCount = 0
        while True:
            header = f.readline()
            if '' == header:  # EOF
                break
            if not header.startswith("@"):
                continue
            seq = f.readline()  # newline deliberately kept
            f.readline()
            f.readline()
            readCount = readCount + 1
            if readCount % 100000 == 0:
                print("%d/%d reads end with CC processed" % (ccCount, readCount))
            name = header.split(' ')[0]
            sequence = ccs.pop(name, None)
            if sequence is None:  # fixed: was `== None`
                continue
            ccCount = ccCount + 1
            pos = seq.find(sequence)
            if pos == -1:
                raise ValueError("Cannot find trimmed sequence %s in untrimmed sequence %s of read %s" % (sequence, seq, name))
            # Base right after the trimmed match.  The original code indexed
            # seq[len(sequence)], which is only correct when the match starts
            # at position 0; use pos + len(sequence) and bounds-check so a
            # final line without a trailing newline cannot raise IndexError.
            after = pos + len(sequence)
            if after < len(seq) and seq[after] == 'A':
                sw.write(name + "\n")
    if os.path.isfile(outputFile):
        os.remove(outputFile)
    os.rename(tempFile, outputFile)
finally:
    f.close()

# Any read collected in pass 1 but never seen in pass 2 means the two files
# do not match; dump the leftovers and fail loudly.
if len(ccs) > 0:
    unfoundFile = outputFile + ".unfound"
    with open(unfoundFile, "w") as fw:
        for key in ccs:
            fw.write(key + "\n")
    raise ValueError("Couldn't find %d reads in untrimmed file, saved to %s" %(len(ccs), unfoundFile))
|
[
"os.rename",
"os.path.isfile",
"gzip.open",
"os.remove"
] |
[((468, 494), 'gzip.open', 'gzip.open', (['inputFile', '"""rt"""'], {}), "(inputFile, 'rt')\n", (477, 494), False, 'import gzip\n'), ((1075, 1104), 'gzip.open', 'gzip.open', (['originalFile', '"""rt"""'], {}), "(originalFile, 'rt')\n", (1084, 1104), False, 'import gzip\n'), ((2018, 2044), 'os.path.isfile', 'os.path.isfile', (['outputFile'], {}), '(outputFile)\n', (2032, 2044), False, 'import os\n'), ((2076, 2107), 'os.rename', 'os.rename', (['tempFile', 'outputFile'], {}), '(tempFile, outputFile)\n', (2085, 2107), False, 'import os\n'), ((2051, 2072), 'os.remove', 'os.remove', (['outputFile'], {}), '(outputFile)\n', (2060, 2072), False, 'import os\n')]
|
from flask import Flask, render_template
from flask_socketio import SocketIO
from models import TikTok
import os
# Module-level singletons: the Flask app, its websocket layer, and the
# TikTok scraper for the account named by the ACC_HANDLE env variable.
app = Flask(__name__)
socketio = SocketIO(app)
tiktok = TikTok(os.getenv('ACC_HANDLE'))
def emit_data():
    """Broadcast the current follower/like counts to connected clients."""
    socketio.emit('data', {
        'followers': tiktok.followers,
        'likes': tiktok.likes,
    })
@app.route('/')
def dashboard():
    """Serve the dashboard page."""
    return render_template('dashboard.html')
@socketio.on('connect')
def on_connect():
    """Push the current stats to a client as soon as it connects."""
    emit_data()
@socketio.on('refresh_data')
def on_refresh_data():
    """Re-fetch the TikTok stats, then broadcast the fresh numbers."""
    tiktok.refresh_data()
    emit_data()
if __name__ == '__main__':
    # socketio.run wraps app.run with websocket support
    socketio.run(app)
|
[
"flask.Flask",
"os.getenv",
"flask.render_template",
"flask_socketio.SocketIO"
] |
[((120, 135), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (125, 135), False, 'from flask import Flask, render_template\n'), ((147, 160), 'flask_socketio.SocketIO', 'SocketIO', (['app'], {}), '(app)\n', (155, 160), False, 'from flask_socketio import SocketIO\n'), ((177, 200), 'os.getenv', 'os.getenv', (['"""ACC_HANDLE"""'], {}), "('ACC_HANDLE')\n", (186, 200), False, 'import os\n'), ((386, 419), 'flask.render_template', 'render_template', (['"""dashboard.html"""'], {}), "('dashboard.html')\n", (401, 419), False, 'from flask import Flask, render_template\n')]
|
import numpy as np
import matplotlib.pyplot as pl
import h5py
import platform
import os
import pickle
import scipy.io as io
import seaborn as sns
from keras.models import model_from_json
import json
from ipdb import set_trace as stop
class plot_map(object):
    """Load Hinode sunspot Stokes maps, run them through a trained Keras
    model, and plot the inferred Milne-Eddington parameter maps."""

    def __init__(self, root):
        # root: prefix used to locate the model json/weights and the
        # normalization file on disk.
        self.root = root
        # NOTE(review): `noise` is a module-level global defined only inside
        # the __main__ block below — this class cannot be constructed without
        # it; consider making it a constructor parameter.
        self.noise = noise
        self.batch_size = 256
        self.dataFile = "/net/duna/scratch1/aasensio/deepLearning/milne/database/database_6301_hinode_1component.h5"
        f = h5py.File(self.dataFile, 'r')
        self.pars = f.get("parameters")
        # per-parameter value range, used later to build the class axis
        self.lower = np.min(self.pars, axis=0)
        self.upper = np.max(self.pars, axis=0)
        # NOTE(review): self.pars is an h5py dataset handle that becomes
        # invalid after f.close(); lower/upper are computed before closing.
        f.close()
        self.root_hinode = "/net/nas4/fis/aasensio/scratch/HINODE/SUNSPOT/"
        self.label_files = ["sunspot_stokesI_512x512.sav", "sunspot_stokesQ_512x512.sav", "sunspot_stokesU_512x512.sav", "sunspot_stokesV_512x512.sav"]
        self.std_values = np.load('{0}_normalization.npy'.format(self.root))
        labels_data = ['data_ii', 'data_qq', 'data_uu', 'data_vv']
        # 512x512 map, 50 wavelength points, 4 Stokes parameters
        self.stokes = np.zeros((512,512,50,4))
        for i in range(4):
            print("Reading file {0}".format(self.label_files[i]))
            stokes = io.readsav("/net/nas4/fis/aasensio/scratch/HINODE/SUNSPOT/{0}".format(self.label_files[i]))[labels_data[i]]
            if (i == 0):
                # quiet-Sun continuum mean from Stokes I, reused to
                # normalize all four Stokes parameters
                mean_stokesi = np.mean(stokes[400:500,0:100,0])
            stokes = stokes[:,:,0:50] / mean_stokesi
            self.stokes[:,:,:,i] = stokes / self.std_values[None,None,:,i]
        # flatten the spatial dimensions: one row per pixel
        self.stokes = self.stokes.reshape((512*512,50,4))

    def read_network(self):
        # Restore model architecture (json) and weights (hdf5) from disk.
        print("Reading previous network...")
        f = open('{0}_model.json'.format(self.root), 'r')
        json_string = f.read()
        f.close()
        self.model = model_from_json(json_string)
        self.model.load_weights("{0}_weights.hdf5".format(self.root))

    def forward_network(self):
        # One input tensor per Stokes parameter; predict per-pixel class
        # probabilities for every model output.
        inTest = []
        for i in range(4):
            inTest.append(np.atleast_3d(self.stokes[:,:,i]).astype('float32'))
        self.prob = self.model.predict(inTest, batch_size=self.batch_size, verbose=1)

    def plot(self):
        # 3x3 grid: one map per inferred Milne-Eddington parameter.
        pl.close('all')
        f, ax = pl.subplots(nrows=3, ncols=3, figsize=(12,10))
        ax = ax.flatten()
        labels = ['B [G]', r'$\theta_B$', r'$\phi_B$', r'$v_\mathrm{mac}$', 'a', 'B$_0$', 'B$_1$', r'$\Delta \lambda_D$ [m$\AA$]', r'$\eta$']
        for i in range(9):
            n_pixel, n_classes = self.prob[i].shape
            # class axis spans the training range of parameter i; the map
            # value is the probability-weighted mean over classes
            x = np.linspace(self.lower[i], self.upper[i], n_classes)
            mean = np.sum(self.prob[i] * x[None,:], axis=1).reshape((512,512))
            ax[i].imshow(mean, cmap=pl.cm.viridis)
            ax[i].set_title(labels[i])
        pl.tight_layout()
        pl.show()
        # pl.savefig("{0}_{1}_comparison.png".format(self.root, self.noise))
if (__name__ == '__main__'):
    root = 'cnns/6301_hinode_1component'
    # `noise` must be defined before constructing plot_map: its __init__
    # reads this module-level global.
    noise = 1e-4
    out = plot_map(root)
    out.read_network()
    out.forward_network()
    out.plot()
|
[
"matplotlib.pyplot.tight_layout",
"h5py.File",
"numpy.atleast_3d",
"matplotlib.pyplot.show",
"numpy.sum",
"matplotlib.pyplot.close",
"numpy.zeros",
"numpy.max",
"keras.models.model_from_json",
"numpy.min",
"numpy.mean",
"numpy.linspace",
"matplotlib.pyplot.subplots"
] |
[((504, 533), 'h5py.File', 'h5py.File', (['self.dataFile', '"""r"""'], {}), "(self.dataFile, 'r')\n", (513, 533), False, 'import h5py\n'), ((611, 636), 'numpy.min', 'np.min', (['self.pars'], {'axis': '(0)'}), '(self.pars, axis=0)\n', (617, 636), True, 'import numpy as np\n'), ((658, 683), 'numpy.max', 'np.max', (['self.pars'], {'axis': '(0)'}), '(self.pars, axis=0)\n', (664, 683), True, 'import numpy as np\n'), ((1110, 1137), 'numpy.zeros', 'np.zeros', (['(512, 512, 50, 4)'], {}), '((512, 512, 50, 4))\n', (1118, 1137), True, 'import numpy as np\n'), ((1855, 1883), 'keras.models.model_from_json', 'model_from_json', (['json_string'], {}), '(json_string)\n', (1870, 1883), False, 'from keras.models import model_from_json\n'), ((2237, 2252), 'matplotlib.pyplot.close', 'pl.close', (['"""all"""'], {}), "('all')\n", (2245, 2252), True, 'import matplotlib.pyplot as pl\n'), ((2269, 2316), 'matplotlib.pyplot.subplots', 'pl.subplots', ([], {'nrows': '(3)', 'ncols': '(3)', 'figsize': '(12, 10)'}), '(nrows=3, ncols=3, figsize=(12, 10))\n', (2280, 2316), True, 'import matplotlib.pyplot as pl\n'), ((2813, 2830), 'matplotlib.pyplot.tight_layout', 'pl.tight_layout', ([], {}), '()\n', (2828, 2830), True, 'import matplotlib.pyplot as pl\n'), ((2839, 2848), 'matplotlib.pyplot.show', 'pl.show', ([], {}), '()\n', (2846, 2848), True, 'import matplotlib.pyplot as pl\n'), ((2581, 2633), 'numpy.linspace', 'np.linspace', (['self.lower[i]', 'self.upper[i]', 'n_classes'], {}), '(self.lower[i], self.upper[i], n_classes)\n', (2592, 2633), True, 'import numpy as np\n'), ((1414, 1448), 'numpy.mean', 'np.mean', (['stokes[400:500, 0:100, 0]'], {}), '(stokes[400:500, 0:100, 0])\n', (1421, 1448), True, 'import numpy as np\n'), ((2653, 2694), 'numpy.sum', 'np.sum', (['(self.prob[i] * x[None, :])'], {'axis': '(1)'}), '(self.prob[i] * x[None, :], axis=1)\n', (2659, 2694), True, 'import numpy as np\n'), ((2059, 2094), 'numpy.atleast_3d', 'np.atleast_3d', (['self.stokes[:, :, i]'], {}), '(self.stokes[:, :, 
i])\n', (2072, 2094), True, 'import numpy as np\n')]
|
import random
import math
def MUTATE(X, RATE=.05):
    """Return a copy of X where each entry is independently replaced by a
    fresh uniform [0, 1) draw with probability RATE."""
    mutated = []
    for gene in X:
        if random.random() <= RATE:
            mutated.append(random.random())
        else:
            mutated.append(gene)
    return mutated
def CROSSOVER(A, B, RATE=.5):
    """Single-point crossover at the midpoints of A and B.

    With probability RATE return a tuple of the two recombined children;
    otherwise return the parents untouched, as the list [A, B].
    """
    if random.random() > RATE:
        return [A, B]
    cut_a = len(A) // 2
    cut_b = len(B) // 2
    child1 = [*A[:cut_a], *B[cut_b:]]
    child2 = [*B[:cut_b], *A[cut_a:]]
    return child1, child2
def FIT(X, Y, N):
    """Squared-error fitness of model N on input X against target Y.

    N is a callable mapping X to an output vector; the score is the sum of
    squared element-wise differences between N(X) and Y (0 = perfect fit).
    """
    residual = PWS(N(X), Y)
    return sum(DOT(residual, residual))

def LCheck(LC):
    """Length of LC, or 0 when LC has no length (e.g. a bare number)."""
    try:
        return len(LC)
    except TypeError:  # fixed: was a bare `except:`; len() raises TypeError
        return 0

def ACTIVATE(ATI):
    """Logistic sigmoid activation: 1 / (1 + e**-ATI)."""
    return 1/(1+math.e**-ATI)

def DOT(X, Y):
    """Element-wise product of X and Y, truncated to the shorter input."""
    return [X[F]*Y[F] for F in range(min(LCheck(X), LCheck(Y)))]

def PWS(X, Y):
    """Pair-wise difference Y - X, truncated to the shorter input."""
    return [Y[F]-X[F] for F in range(min(LCheck(X), LCheck(Y)))]
class Neuron:
    """A single artificial neuron: a weight vector plus a bias.

    Calling the neuron with an input vector returns the sigmoid of
    (weights . input) + bias.  The evolutionary operators (get_mutate,
    get_crossover, parent) build new Neuron objects rather than modifying
    in place.
    """
    def __init__(self,N=1):
        # random bias and N random weights, all uniform in [0, 1)
        self.set_bias(random.random())
        self.set_weights([random.random() for n in range(N)])
    def set_bias(self,BIAS):
        self.bias = BIAS
    def set_weights(self,WEIGHTS):
        self.weights = WEIGHTS
    def get_bias(self):
        return self.bias
    def get_weights(self):
        return self.weights
    def __repr__(self):
        # delegates to __str__ via format()
        return '{}'.format(self)
    def __str__(self):
        return '{}+{}'.format(self.get_weights(),self.get_bias())
    def get_mutate(self,MuRate=.05):
        # return a mutated copy (weights and bias each perturbed with
        # probability MuRate); `self` is left untouched
        new_neur = Neuron()
        new_neur.set_weights(MUTATE(self.get_weights(),MuRate))
        new_neur.set_bias(MUTATE([self.get_bias()],MuRate)[0])
        return new_neur
    def get_crossover(self,other):
        return CROSSOVER(self.get_weights(),other.get_weights())
    def parent(self,other):
        # produce two children by crossing the weight vectors; each child
        # keeps one parent's bias
        cross = self.get_crossover(other)
        left,right = Neuron(),Neuron()
        left.set_weights(cross[0]),right.set_weights(cross[1])
        left.set_bias(self.get_bias()),right.set_bias(other.get_bias())
        # NOTE(review): get_mutate() returns a NEW Neuron and does not modify
        # in place, so both results below are discarded — the children are
        # returned unmutated.  Confirm whether mutation was intended here.
        left.get_mutate()
        right.get_mutate()
        return [left,right]
    def generate(self,other,N=1):
        return [self.parent(other) for n in range(N)]
    def __call__(self,other):
        return ACTIVATE(sum(DOT(self.get_weights(),other))+self.get_bias())
    def get_fit(self,X,Y):
        # NOTE(review): FIT expects N(X) to be a vector, but a Neuron call
        # returns a scalar; LCheck(scalar) is 0, so PWS yields [] and this
        # always evaluates to 0.  Verify intent.
        return FIT(X,Y,self)
    def get_copy(self):
        # "copy" via a zero-rate mutation (no values actually change)
        new_sel = self.get_mutate(0)
        return new_sel
class Layer(Neuron):
    """A layer of N Neurons, each taking M inputs.

    Calling the layer with an input vector returns the list of every
    neuron's activation.
    """
    def __init__(self,M,N=3):
        self.set_layer([Neuron(M) for n in range(N)])
        self.set_m(M)
        self.set_n(N)
    def set_m(self,M):
        self.m = M
    def set_n(self,N):
        self.n = N
    def get_m(self):
        return self.m
    def get_n(self):
        return self.n
    def set_layer(self,LAYER):
        self.layer = LAYER
    def get_layer(self):
        return self.layer
    def __str__(self):
        return '{}'.format(self.get_layer())
    def __repr__(self):
        return '{}'.format(self)
    def __call__(self,other):
        return [F(other) for F in self.get_layer()]
    def get_mutate(self,MuRate=.05):
        # NOTE(review): Layer(2) is only a placeholder whose neuron list is
        # immediately replaced; its m/n attributes stay at (2, 3) and no
        # longer describe the copied layer.
        new_lay = Layer(2)
        new_lay.set_layer([F.get_mutate(MuRate) for F in self.get_layer()])
        return new_lay
    def get_crossover(self,other):
        return CROSSOVER(self.get_layer(),other.get_layer())
    def parent(self,other,MuRate=.05):
        # cross the two neuron lists to form two child layers
        cross = self.get_crossover(other)
        left,right = Layer(3,3),Layer(3,3)
        left.set_layer(cross[0]),right.set_layer(cross[1])
        # NOTE(review): get_mutate() returns a new object; these two results
        # are discarded, so the children are returned unmutated.
        left.get_mutate(MuRate)
        right.get_mutate(MuRate)
        return [left,right]
    def get_ranin(self):
        # evaluate the layer on a random input vector of length m
        return self([random.random() for m in range(self.get_m())])
    def get_tfit(self,other):
        # "target fitness": sum of (1 - activation) over the layer's outputs
        return sum([1-F for F in self(other)])
    def get_stfit(self,*other):
        # summed target fitness over several inputs
        return sum([self.get_tfit(F) for F in other])
class Network(Layer):
    """A feed-forward stack of D Layers: the first maps M inputs to N
    outputs, the remaining D-1 map N to N.  Calling the network feeds the
    input through every layer in order.
    """
    def __init__(self,M,N,D=3):
        self.set_network([Layer(M,N),*[Layer(N,N) for n in range(D-1)]])
    def set_network(self,NETWORK):
        # stored in self.layer so Layer's helpers iterate the layer list
        self.layer = NETWORK
    def get_network(self):
        return self.layer
    def __repr__(self):
        return '{}'.format(self)
    def __str__(self):
        return '{}'.format(self.get_network())
    def __call__(self,other):
        # forward pass: output of each layer feeds the next
        calout = self.get_network()[0](other)
        for L in self.get_network()[1:]:
            calout = L(calout)
        return calout
    def get_mutate(self,MuRate=.05):
        # NOTE(review): Network(2,3) is a placeholder immediately overwritten
        # by set_network; also note Layer.get_ranin would fail on a Network
        # because __init__ never sets self.m.
        new_net = Network(2,3)
        new_net.set_network([F.get_mutate(MuRate) for F in self.get_network()])
        return new_net
    def get_crossover(self,other):
        return CROSSOVER(self.get_network(),other.get_network())
    def parent(self,other,MuRate=.05):
        cross = self.get_crossover(other)
        left,right = Network(3,3),Network(3,3)
        left.set_network(cross[0]),right.set_network(cross[1])
        # NOTE(review): get_mutate() results discarded — children unmutated.
        left.get_mutate(MuRate)
        right.get_mutate(MuRate)
        return [left,right]
class Decriminator(Network):
    """Discriminator (name kept as-is for compatibility): a Network with a
    single output Neuron appended, so calling it yields one scalar score.
    """
    def __init__(self,M,N,D=3):
        super().__init__(M,N,D)
        # final single neuron collapses the last layer's N outputs to a scalar
        self.layer.append(Neuron(N))
    def get_mutate(self,MuRate=.05):
        # Decriminator(2,3) is a placeholder; its layer list is replaced.
        new_net = Decriminator(2,3)
        new_net.set_network([F.get_mutate(MuRate) for F in self.get_network()])
        return new_net
    def get_crossover(self,other):
        return CROSSOVER(self.get_network(),other.get_network())
    def parent(self,other,MuRate=.05):
        cross = self.get_crossover(other)
        left,right = Decriminator(3,3),Decriminator(3,3)
        left.set_network(cross[0]),right.set_network(cross[1])
        # NOTE(review): get_mutate() results discarded — children unmutated.
        left.get_mutate(MuRate)
        right.get_mutate(MuRate)
        return [left,right]
    def get_tfit(self,other):
        # scalar score: distance of the discriminator output from 1
        return 1-self(other)
    def get_stfit(self,*other):
        return sum([self.get_tfit(F) for F in other])
class GANN(Decriminator):
    """Generator + discriminator pair.

    The network list is [generator, discriminator], so calling a GANN runs
    the generator and feeds its output straight into the discriminator
    (via Network.__call__).
    """
    def __init__(self,M,N,D=3):
        self.set_dec(Decriminator(M,N,D))
        self.set_gen(Network(M,N,D))
        self.set_network([self.get_gen(),self.get_dec()])
    def set_dec(self,DEC):
        self.dec = DEC
    def set_gen(self,GEN):
        self.gen = GEN
    def get_dec(self):
        return self.dec
    def get_gen(self):
        return self.gen
class Flask(Layer):
    """A population ("flask") of GANN individuals with tournament selection.

    NOTE(review): the name shadows the common web framework class; renaming
    would break callers, so it is kept.
    """
    def __init__(self,Population,M,N):
        self.set_layer([GANN(M,N) for population in range(Population)])
    def get_tfit(self,other):
        return sum([1-F for F in self(other)])
    def get_stfit(self,*other):
        # unlike Layer.get_stfit, this returns the per-input list, not a sum
        return [self.get_tfit(F) for F in other]
    def get_sample(self,K=1):
        return random.sample(self.get_layer(),K)
    def get_comp(self,other):
        # binary tournament: the individual with the LOWER summed fitness wins
        l,r = self.get_sample(2)
        if l.get_stfit(*other)<r.get_stfit(*other):
            return l
        else:
            return r
    def get_tournament(self,other,N=8):
        # build a new flask of N tournament winners
        new_fla = Flask(N,3,2)
        new_fla.set_layer([self.get_comp(other) for n in range(N)])
        return new_fla
    def parent(self,MuRate=.05):
        # NOTE(review): MuRate is accepted but never used here.
        l,r = self.get_sample(2)
        a,b = self.get_sample(2)
        new_fla = Flask(3,2,3)
        new_fla.set_layer([r.get_mutate(),*l.parent(r),*a.parent(b),a.get_mutate()])
        return new_fla
    def get_mutate(self,MuRate=.04166):
        # NOTE(review): this returns a Network built from self.get_network()
        # (the population list), not a Flask — looks like a copy/paste
        # leftover from Network.get_mutate; verify before relying on it.
        new_net = Network(2,3)
        new_net.set_network([F.get_mutate(MuRate) for F in self.get_network()])
        return new_net
    def get_crossover(self,other):
        return CROSSOVER(self.get_network(),other.get_network())
# Demo run: evolve a population of 100000 GANNs against five (x, y) points,
# repeatedly tournament-selecting 100 survivors and printing the summed
# discriminator fitness of one sampled individual's generator each epoch.
a = Flask(100000,2,1)
f = [0,3],[1,1],[2,4],[3,1],[4,5]
b = a.get_tournament(f,100)
for epoch in range(1000):
    b = b.get_tournament(f,100)
    print(b.get_sample(3)[-1].get_gen().get_stfit(*f))
|
[
"random.random"
] |
[((205, 220), 'random.random', 'random.random', ([], {}), '()\n', (218, 220), False, 'import random\n'), ((72, 87), 'random.random', 'random.random', ([], {}), '()\n', (85, 87), False, 'import random\n'), ((105, 120), 'random.random', 'random.random', ([], {}), '()\n', (118, 120), False, 'import random\n'), ((699, 714), 'random.random', 'random.random', ([], {}), '()\n', (712, 714), False, 'import random\n'), ((736, 751), 'random.random', 'random.random', ([], {}), '()\n', (749, 751), False, 'import random\n'), ((2976, 2991), 'random.random', 'random.random', ([], {}), '()\n', (2989, 2991), False, 'import random\n')]
|
import socket
class PostMan(object):
    """Tiny UDP client: sends one text message and waits for any reply."""

    def __init__(self):
        # UDP (datagram) socket — connectionless, one sendto per letter
        self.mailBox = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def PutOneLetter(self, ipAddr, port, letter):
        """Send `letter` (str) to (ipAddr, port) over UDP and wait for a reply.

        Returns True on success, False when sending fails or the reply is
        empty.  NOTE(review): recvfrom blocks indefinitely when the peer
        never answers; consider self.mailBox.settimeout() — left unchanged
        to preserve behavior.
        """
        encoded_letter = letter.encode("utf-8")
        try:
            self.mailBox.sendto(encoded_letter, (ipAddr, port))
        except OSError:  # fixed: was a bare `except:`; socket errors are OSError
            print("sending Failed")
            return False
        response = self.mailBox.recvfrom(1024)
        if not response:
            print("sending Failed")
            return False
        return True
|
[
"socket.socket"
] |
[((85, 133), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (98, 133), False, 'import socket\n')]
|
# Copyright 2018 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS
# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
import serial
import numpy as np
class ActonSp300i(object):
    """Serial-port driver for a Princeton Instruments SpectraPro 300i
    monochromator, optionally paired with a sensor object providing a
    measure() method."""

    def __init__(self, port='/dev/tty', sensor=None, debug=False):
        """
        A class to interface to Princeton Instruments SpectraPro 300i
        Monochromator via serial interface, using the protocol specified
        in ftp://ftp.princetoninstruments.com/public/manuals/Acton/Sp-300i.pdf
        """
        self._port = port
        self._sensor = sensor
        self._debug = debug
        try:
            # fixed 9600 8N1, 5 s read timeout per the SP-300i manual
            self._connection = serial.Serial(self._port,
                                             baudrate=9600,
                                             bytesize=serial.EIGHTBITS,
                                             parity=serial.PARITY_NONE,
                                             stopbits=serial.STOPBITS_ONE,
                                             timeout=5)
        except serial.SerialException:
            print('Unable to find or configure a serial connection to device %s' %(self._port))
            # NOTE(review): returning from __init__ just ends construction;
            # the instance still exists but has no _connection attribute,
            # so later method calls will raise AttributeError.
            return None

    def _send_command(self, command):
        # TODO: replace this ad-hoc send/reply handshake with something
        # more robust (echo checking, retries)
        cmd_string = "%s\r" %command
        self._connection.write(cmd_string.encode())
        # read_until() waits for the default '\n' terminator
        ret_string = self._connection.read_until()
        return ret_string.decode().strip()

    def close(self):
        self._connection.close()

    def set_sensor(self, sensor):
        self._sensor = sensor

    def get_current_position(self):
        """
        This method returns the current position of the grating, i.e. the currently
        selected wavelength. The value is a float representing the wavelength in
        nanometers. On error, the method raises a serial.SerialException.
        """
        ret_str = self._send_command('?NM')
        if self._debug:
            print(ret_str)
            return 0.0
        # expected reply shape: "?NM <value> nm ok"
        ret_elements = ret_str.split()
        if (len(ret_elements) == 4) and (ret_elements[-1] == 'ok'):
            return float(ret_elements[1])
        else:
            raise serial.SerialException

    def init_scan(self, wavelength):
        # GOTO moves at maximum speed without scanning
        line = self._send_command('%f GOTO' %(float(wavelength)))
        print(line)

    def move_to(self, wavelength):
        """
        Move the grating to the given wavelength
        """
        line = self._send_command('%f NM' %(float(wavelength)))
        print(line)
        # if line.decode().strip() != 'ok':
        #     raise serial.SerialException

    def scan(self, wavelength_range=(400, 800), n_repetitions=30, n_integrations=1):
        """
        Performs a wavelength scan in 'wavelength_range' (defaults from 400 to
        800 nm with 1 nm step), repeating the measure n_repetitions times (defaults
        to 30) and summing up n_integrations times (defaults to 1, i.e. no sum) the
        values.
        The method returns a numpy array of length equal to the wavelength range
        and whose structure is [wavelenght, mean measure, standard deviation].
        """
        measures = []
        # park just below the start so the first NM move approaches from one side
        self.init_scan(wavelength_range[0]-0.1)
        for l in range(wavelength_range[0], wavelength_range[1]+1):
            current_measure = []
            self.move_to(l)
            for _ in range(n_integrations):
                tmp_measure = []
                for _ in range(n_repetitions):
                    tmp_measure.append(self._sensor.measure())
                if n_integrations == 1:
                    current_measure = tmp_measure
                else:
                    # integrate by summing the repetition block
                    current_measure.append(np.array(tmp_measure).sum())
            current_measure = np.array(current_measure)
            measures.append([l, current_measure.mean(), current_measure.std()])
        return np.array(measures)
|
[
"serial.Serial",
"numpy.array"
] |
[((4568, 4586), 'numpy.array', 'np.array', (['measures'], {}), '(measures)\n', (4576, 4586), True, 'import numpy as np\n'), ((1144, 1284), 'serial.Serial', 'serial.Serial', (['self._port'], {'baudrate': '(9600)', 'bytesize': 'serial.EIGHTBITS', 'parity': 'serial.PARITY_NONE', 'stopbits': 'serial.STOPBITS_ONE', 'timeout': '(5)'}), '(self._port, baudrate=9600, bytesize=serial.EIGHTBITS, parity=\n serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, timeout=5)\n', (1157, 1284), False, 'import serial\n'), ((4443, 4468), 'numpy.array', 'np.array', (['current_measure'], {}), '(current_measure)\n', (4451, 4468), True, 'import numpy as np\n'), ((4381, 4402), 'numpy.array', 'np.array', (['tmp_measure'], {}), '(tmp_measure)\n', (4389, 4402), True, 'import numpy as np\n')]
|
import keras_mnist as km
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D
from keras.optimizers import RMSprop
from keras.callbacks import Callback, CSVLogger
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
import argparse
from PIL import Image
import numpy as np
from keras.preprocessing import image
import math
from keras import backend as K
import pickle as pkl
def data_augmentation(x, y):
    """Visual sanity-check of Keras image augmentation.

    Shows 9 copies of sample x[1] before and after one augmented batch.
    `y` supplies the 9 labels the generator requires; nothing is returned.
    """
    datagen = image.ImageDataGenerator(
        rotation_range=60,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=math.pi/4, # 45 degree
        zoom_range=0.4,
        fill_mode="constant",
        cval=0, # constant value for fill_mode
        )
    # nine identical copies of the second training sample
    imgs = np.array([x.tolist()[1]]*9)
    show_img(imgs, "img1")
    for X_batch, y_batch in datagen.flow(imgs, y[:9], batch_size=9):
        show_img(X_batch, "img2")
        break  # a single augmented batch is enough for the preview
def get_mnist():
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train1, x_valid, y_train1, y_valid = train_test_split(x_train, y_train, test_size=0.1)
x_train = x_train1[:9000]
y_train = y_train1[:9000]
x_valid = x_valid[:1000]
y_valid = y_valid[:1000]
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1).astype('float32')/255
x_valid = x_valid.reshape(x_valid.shape[0], 28, 28, 1).astype('float32')/255
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1).astype('float32')/255
# convert one-hot vector
y_train = keras.utils.to_categorical(y_train, 10)
y_valid = keras.utils.to_categorical(y_valid, 10)
y_test = keras.utils.to_categorical(y_test, 10)
data_augmentation(x_train, y_train)
def show_noisedata():
with open("./mnist_noise.pkl", "rb") as f:
x_train, y_train, x_valid, y_valid = pkl.load(f)
for i in range(0, 9):
plt.subplot(330+1+i)
plt.imshow(x_valid[i].reshape(28, 28), cmap=plt.get_cmap('gray'))
show_y(y_valid[:9])
plt.show()
def show_img(imgs, name):
for i in range(0, 9):
plt.subplot(330+1+i)
plt.imshow(imgs[i].reshape(28, 28), cmap=plt.get_cmap('gray'))
plt.show()
plt.savefig("{}.png".format(name))
def show_y(y_valid9):
for n, y_row in enumerate(y_valid9):
for i, y in enumerate(y_row):
if y == 1:
print(i, " ", end="")
break
if n != 0 and (n+1) % 3 == 0:
print("")
if __name__=="__main__":
#km.main()
#get_mnist()
show_noisedata()
|
[
"keras.preprocessing.image.ImageDataGenerator",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"matplotlib.pyplot.get_cmap",
"sklearn.model_selection.train_test_split",
"keras.datasets.mnist.load_data",
"pickle.load",
"keras.utils.to_categorical"
] |
[((556, 725), 'keras.preprocessing.image.ImageDataGenerator', 'image.ImageDataGenerator', ([], {'rotation_range': '(60)', 'width_shift_range': '(0.2)', 'height_shift_range': '(0.2)', 'shear_range': '(math.pi / 4)', 'zoom_range': '(0.4)', 'fill_mode': '"""constant"""', 'cval': '(0)'}), "(rotation_range=60, width_shift_range=0.2,\n height_shift_range=0.2, shear_range=math.pi / 4, zoom_range=0.4,\n fill_mode='constant', cval=0)\n", (580, 725), False, 'from keras.preprocessing import image\n'), ((1074, 1091), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (1089, 1091), False, 'from keras.datasets import mnist\n'), ((1135, 1184), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x_train', 'y_train'], {'test_size': '(0.1)'}), '(x_train, y_train, test_size=0.1)\n', (1151, 1184), False, 'from sklearn.model_selection import train_test_split\n'), ((1588, 1627), 'keras.utils.to_categorical', 'keras.utils.to_categorical', (['y_train', '(10)'], {}), '(y_train, 10)\n', (1614, 1627), False, 'import keras\n'), ((1642, 1681), 'keras.utils.to_categorical', 'keras.utils.to_categorical', (['y_valid', '(10)'], {}), '(y_valid, 10)\n', (1668, 1681), False, 'import keras\n'), ((1695, 1733), 'keras.utils.to_categorical', 'keras.utils.to_categorical', (['y_test', '(10)'], {}), '(y_test, 10)\n', (1721, 1733), False, 'import keras\n'), ((2064, 2074), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2072, 2074), True, 'from matplotlib import pyplot as plt\n'), ((2232, 2242), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2240, 2242), True, 'from matplotlib import pyplot as plt\n'), ((1890, 1901), 'pickle.load', 'pkl.load', (['f'], {}), '(f)\n', (1898, 1901), True, 'import pickle as pkl\n'), ((1941, 1965), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(330 + 1 + i)'], {}), '(330 + 1 + i)\n', (1952, 1965), True, 'from matplotlib import pyplot as plt\n'), ((2136, 2160), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(330 + 1 + i)'], {}), 
'(330 + 1 + i)\n', (2147, 2160), True, 'from matplotlib import pyplot as plt\n'), ((2014, 2034), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (2026, 2034), True, 'from matplotlib import pyplot as plt\n'), ((2206, 2226), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""gray"""'], {}), "('gray')\n", (2218, 2226), True, 'from matplotlib import pyplot as plt\n')]
|
import os
from subprocess import getstatusoutput
def compiler(dirPath, string):
filePath = os.path.join(dirPath, "Demo.js")
with open(filePath, "w+") as fp:
fp.write(string)
cmd = f"node {filePath}"
print(cmd, string)
exitcode, data = getstatusoutput(cmd)
return exitcode == 0, data
|
[
"subprocess.getstatusoutput",
"os.path.join"
] |
[((98, 130), 'os.path.join', 'os.path.join', (['dirPath', '"""Demo.js"""'], {}), "(dirPath, 'Demo.js')\n", (110, 130), False, 'import os\n'), ((271, 291), 'subprocess.getstatusoutput', 'getstatusoutput', (['cmd'], {}), '(cmd)\n', (286, 291), False, 'from subprocess import getstatusoutput\n')]
|
from __future__ import print_function
import logging
log = logging.getLogger('SKQ.SnobFit')
if not log.hasHandlers():
def _setupLogger(log):
import sys
hdlr = logging.StreamHandler(sys.stdout)
frmt = logging.Formatter('%(name)-12s: %(levelname)8s %(message)s')
hdlr.setFormatter(frmt)
log.addHandler(hdlr)
log.propagate = False
_setupLogger(log)
del _setupLogger
log.info("""
------------------------------------------------------------------------
<NAME> and <NAME>, "Snobfit - Stable Noisy Optimization by Branch and Fit",
ACM Trans. Math. Software 35 (2008), Article 9.
Software available at www.mat.univie.ac.at/~neum/software/snobfit
------------------------------------------------------------------------""")
__all__ = ['minimize', 'optset', 'log', 'snobfit']
from ._snobfit import snobfit
from ._optset import optset
|
[
"logging.Formatter",
"logging.StreamHandler",
"logging.getLogger"
] |
[((60, 92), 'logging.getLogger', 'logging.getLogger', (['"""SKQ.SnobFit"""'], {}), "('SKQ.SnobFit')\n", (77, 92), False, 'import logging\n'), ((180, 213), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (201, 213), False, 'import logging\n'), ((229, 289), 'logging.Formatter', 'logging.Formatter', (['"""%(name)-12s: %(levelname)8s %(message)s"""'], {}), "('%(name)-12s: %(levelname)8s %(message)s')\n", (246, 289), False, 'import logging\n')]
|
# BUG: Change in index of empty dataframes in mode operation #43336
import pandas as pd
print(pd.__version__)
df = pd.DataFrame({"a": ["a", "b", "a"]}, index=["a", "b", "c"])
result = df.mode(numeric_only=True)
print(result)
expected = pd.DataFrame(index=["a", "b", "c"])
pd.testing.assert_frame_equal(result, expected)
|
[
"pandas.DataFrame",
"pandas.testing.assert_frame_equal"
] |
[((118, 177), 'pandas.DataFrame', 'pd.DataFrame', (["{'a': ['a', 'b', 'a']}"], {'index': "['a', 'b', 'c']"}), "({'a': ['a', 'b', 'a']}, index=['a', 'b', 'c'])\n", (130, 177), True, 'import pandas as pd\n'), ((240, 275), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': "['a', 'b', 'c']"}), "(index=['a', 'b', 'c'])\n", (252, 275), True, 'import pandas as pd\n'), ((276, 323), 'pandas.testing.assert_frame_equal', 'pd.testing.assert_frame_equal', (['result', 'expected'], {}), '(result, expected)\n', (305, 323), True, 'import pandas as pd\n')]
|
import os
import unittest
import logging
import shutil
import numpy as np
from smac.configspace import Configuration
from smac.scenario.scenario import Scenario
from smac.stats.stats import Stats
from smac.tae.execute_ta_run import StatusType
from smac.tae.execute_ta_run_old import ExecuteTARunOld
from smac.runhistory.runhistory import RunHistory
from smac.utils.io.traj_logging import TrajLogger
from smac.utils.validate import Validator, _Run
from unittest import mock
class ValidationTest(unittest.TestCase):
def setUp(self):
base_directory = os.path.split(__file__)[0]
base_directory = os.path.abspath(
os.path.join(base_directory, '..', '..'))
self.current_dir = os.getcwd()
os.chdir(base_directory)
logging.basicConfig()
self.logger = logging.getLogger(self.__module__ + "." + self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
self.rng = np.random.RandomState(seed=42)
self.scen_fn = 'test/test_files/validation/scenario.txt'
self.train_insts = ['0', '1', '2']
self.test_insts = ['3', '4', '5']
self.inst_specs = {'0': 'null', '1': 'one', '2': 'two',
'3': 'three', '4': 'four', '5': 'five'}
self.feature_dict = {'0': np.array((1, 2, 3)),
'1': np.array((1, 2, 3)),
'2': np.array((1, 2, 3)),
'3': np.array((1, 2, 3)),
'4': np.array((1, 2, 3)),
'5': np.array((1, 2, 3))}
self.output_rh = 'test/test_files/validation/'
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
self.stats = Stats(scen)
self.trajectory = TrajLogger.read_traj_aclib_format(
fn='test/test_files/validation/test_validation_traj.json', cs=scen.cs)
self.output_dirs = [self.output_rh + 'test']
self.output_files = [self.output_rh + 'validated_runhistory_EPM.json',
self.output_rh + 'validated_runhistory.json']
self.maxDiff = None
def tearDown(self):
for output_dir in self.output_dirs:
if output_dir:
shutil.rmtree(output_dir, ignore_errors=True)
for output_file in self.output_files:
if output_file:
try:
os.remove(output_file)
except FileNotFoundError:
pass
os.chdir(self.current_dir)
def test_rng(self):
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
validator = Validator(scen, self.trajectory, 42)
self.assertTrue(isinstance(validator.rng, np.random.RandomState))
validator = Validator(scen, self.trajectory)
self.assertTrue(isinstance(validator.rng, np.random.RandomState))
validator = Validator(scen, self.trajectory, np.random.RandomState())
self.assertTrue(isinstance(validator.rng, np.random.RandomState))
def test_nonexisting_output(self):
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
validator = Validator(scen, self.trajectory)
path = "test/test_files/validation/test/nonexisting/output"
validator.validate(output_fn=path)
self.assertTrue(os.path.exists(path))
def test_pass_tae(self):
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
tae = ExecuteTARunOld(ta=scen.ta, stats=self.stats)
validator = Validator(scen, self.trajectory)
rh_mock = mock.Mock()
with mock.patch.object(
Validator,
"_validate_parallel",
return_value=[mock.MagicMock(), mock.MagicMock(), mock.MagicMock(), mock.MagicMock()],
) as validate_parallel_mock:
with mock.patch.object(
Validator,
"_get_runs",
return_value=[[mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock()], rh_mock],
):
validator.validate(tae=tae)
self.assertIs(validate_parallel_mock.call_args[0][0], tae)
self.assertEqual(rh_mock.add.call_count, 4)
def test_no_rh_epm(self):
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
scen.feature_array = None
validator = Validator(scen, self.trajectory)
self.assertRaises(ValueError, validator.validate_epm)
def test_epm_reuse_rf(self):
""" if no runhistory is passed to epm, but there was a model trained
before, that model should be reused! (if reuse_epm flag is set) """
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
scen.feature_array = None
validator = Validator(scen, self.trajectory)
old_rh = RunHistory()
for config in [e["incumbent"] for e in self.trajectory]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0',
seed=127)
self.assertTrue(isinstance(validator.validate_epm(runhistory=old_rh),
RunHistory))
self.assertTrue(isinstance(validator.validate_epm(
output_fn="test/test_files/validation/"),
RunHistory))
self.assertRaises(ValueError, validator.validate_epm, reuse_epm=False)
def test_no_feature_dict(self):
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
scen.feature_array = None
validator = Validator(scen, self.trajectory)
old_rh = RunHistory()
for config in [e["incumbent"] for e in self.trajectory]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0',
seed=127)
validator.validate_epm(runhistory=old_rh)
def test_get_configs(self):
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'quality'})
validator = Validator(scen, self.trajectory, self.rng)
self.assertEqual(1, len(validator._get_configs("def")))
self.assertEqual(1, len(validator._get_configs("inc")))
self.assertEqual(2, len(validator._get_configs("def+inc")))
self.assertEqual(7, len(validator._get_configs("wallclock_time")))
self.assertEqual(8, len(validator._get_configs("cpu_time")))
self.assertEqual(10, len(validator._get_configs("all")))
# Using maxtime
validator.scen.wallclock_limit = 65
validator.scen.algo_runs_timelimit = 33
self.assertEqual(8, len(validator._get_configs("wallclock_time")))
self.assertEqual(9, len(validator._get_configs("cpu_time")))
# Exceptions
self.assertRaises(ValueError, validator._get_configs, "notanoption")
self.assertRaises(ValueError, validator._get_instances, "notanoption")
def test_get_runs_capped(self):
''' test if capped, crashed and aborted runs are ignored
during rh-recovery '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'instances': ['0']})
validator = Validator(scen, self.trajectory, self.rng)
# Get runhistory
old_configs = [Configuration(scen.cs, values={'x1': i, 'x2': i}) for i in range(1, 7)]
old_rh = RunHistory()
old_rh.add(old_configs[0], 1, 1, StatusType.SUCCESS, instance_id='0', seed=0)
old_rh.add(old_configs[1], 1, 1, StatusType.TIMEOUT, instance_id='0', seed=0)
old_rh.add(old_configs[2], 1, 1, StatusType.CRASHED, instance_id='0', seed=0)
old_rh.add(old_configs[3], 1, 1, StatusType.ABORT, instance_id='0', seed=0)
old_rh.add(old_configs[4], 1, 1, StatusType.MEMOUT, instance_id='0', seed=0)
old_rh.add(old_configs[5], 1, 1, StatusType.CAPPED, instance_id='0', seed=0)
# Get multiple configs
expected = [_Run(inst_specs='0', seed=0, inst='0', config=old_configs[2]),
_Run(inst_specs='0', seed=0, inst='0', config=old_configs[3]),
_Run(inst_specs='0', seed=0, inst='0', config=old_configs[5])]
runs = validator._get_runs(old_configs, ['0'], repetitions=1, runhistory=old_rh)
self.assertEqual(runs[0], expected)
def test_get_runs(self):
''' test if the runs are generated as expected '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'train_insts': self.train_insts,
'test_insts': self.test_insts})
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
# Get multiple configs
self.maxDiff = None
expected = [_Run(config='config1', inst='3', seed=1608637542, inst_specs='three'),
_Run(config='config2', inst='3', seed=1608637542, inst_specs='three'),
_Run(config='config1', inst='3', seed=1273642419, inst_specs='three'),
_Run(config='config2', inst='3', seed=1273642419, inst_specs='three'),
_Run(config='config1', inst='4', seed=1935803228, inst_specs='four'),
_Run(config='config2', inst='4', seed=1935803228, inst_specs='four'),
_Run(config='config1', inst='4', seed=787846414, inst_specs='four'),
_Run(config='config2', inst='4', seed=787846414, inst_specs='four'),
_Run(config='config1', inst='5', seed=996406378, inst_specs='five'),
_Run(config='config2', inst='5', seed=996406378, inst_specs='five'),
_Run(config='config1', inst='5', seed=1201263687, inst_specs='five'),
_Run(config='config2', inst='5', seed=1201263687, inst_specs='five')]
runs = validator._get_runs(['config1', 'config2'], scen.test_insts, repetitions=2)
self.assertEqual(runs[0], expected)
# Only train
expected = [_Run(config='config1', inst='0', seed=423734972, inst_specs='null'),
_Run(config='config1', inst='0', seed=415968276, inst_specs='null'),
_Run(config='config1', inst='1', seed=670094950, inst_specs='one'),
_Run(config='config1', inst='1', seed=1914837113, inst_specs='one'),
_Run(config='config1', inst='2', seed=669991378, inst_specs='two'),
_Run(config='config1', inst='2', seed=429389014, inst_specs='two')]
runs = validator._get_runs(['config1'], scen.train_insts, repetitions=2)
self.assertEqual(runs[0], expected)
# Test and train
expected = [_Run(config='config1', inst='0', seed=249467210, inst_specs='null'),
_Run(config='config1', inst='1', seed=1972458954, inst_specs='one'),
_Run(config='config1', inst='2', seed=1572714583, inst_specs='two'),
_Run(config='config1', inst='3', seed=1433267572, inst_specs='three'),
_Run(config='config1', inst='4', seed=434285667, inst_specs='four'),
_Run(config='config1', inst='5', seed=613608295, inst_specs='five')]
insts = self.train_insts
insts.extend(self.test_insts)
runs = validator._get_runs(['config1'], insts, repetitions=1)
self.assertEqual(runs[0], expected)
def test_validate(self):
''' test validation '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'train_insts': self.train_insts,
'test_insts': self.test_insts})
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
# Test basic usage
rh = validator.validate(config_mode='def', instance_mode='test',
repetitions=3)
self.assertEqual(len(rh.get_all_configs()), 1)
self.assertEqual(len(rh.get_runs_for_config(rh.get_all_configs()[0], only_max_observed_budget=True)), 9)
rh = validator.validate(config_mode='inc', instance_mode='train+test')
self.assertEqual(len(rh.get_all_configs()), 1)
self.assertEqual(len(rh.get_runs_for_config(rh.get_all_configs()[0], only_max_observed_budget=True)), 6)
rh = validator.validate(config_mode='wallclock_time', instance_mode='train')
self.assertEqual(len(rh.get_all_configs()), 7)
self.assertEqual(sum([len(rh.get_runs_for_config(c, only_max_observed_budget=True)) for c in
rh.get_all_configs()]), 21)
# Test with backend multiprocessing
rh = validator.validate(config_mode='def', instance_mode='test',
repetitions=3, backend='multiprocessing')
self.assertEqual(len(rh.get_all_configs()), 1)
self.assertEqual(len(rh.get_runs_for_config(rh.get_all_configs()[0], only_max_observed_budget=True)), 9)
def test_validate_no_insts(self):
''' no instances '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality'})
validator = Validator(scen, self.trajectory, self.rng)
rh = validator.validate(config_mode='def+inc', instance_mode='train',
repetitions=3, output_fn=self.output_rh)
self.assertEqual(len(rh.get_all_configs()), 2)
self.assertEqual(sum([len(rh.get_runs_for_config(c, only_max_observed_budget=True)) for c in
rh.get_all_configs()]), 6)
def test_validate_deterministic(self):
''' deterministic ta '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'train_insts': self.train_insts,
'deterministic': True})
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
rh = validator.validate(config_mode='def+inc',
instance_mode='train', repetitions=3)
self.assertEqual(len(rh.get_all_configs()), 2)
self.assertEqual(sum([len(rh.get_runs_for_config(c, only_max_observed_budget=True)) for c in
rh.get_all_configs()]), 6)
def test_parallel(self):
''' test parallel '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality'})
validator = Validator(scen, self.trajectory, self.rng)
validator.validate(config_mode='all', instance_mode='train+test', n_jobs=-1)
def test_passed_runhistory(self):
''' test if passed runhistory is in resulting runhistory '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'train_insts': self.train_insts,
'test_insts': self.test_insts})
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
# Add a few runs and check, if they are correctly processed
old_configs = [entry["incumbent"] for entry in self.trajectory]
old_rh = RunHistory()
seeds = [127 for i in range(int(len(old_configs) / 2))]
seeds[-1] = 126 # Test instance_seed-structure in validation
for config in old_configs[:int(len(old_configs) / 2)]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0',
seed=seeds[old_configs.index(config)])
configs = validator._get_configs('all')
insts = validator._get_instances('train')
runs_w_rh = validator._get_runs(configs, insts, repetitions=2,
runhistory=old_rh)
runs_wo_rh = validator._get_runs(configs, insts, repetitions=2)
self.assertEqual(len(runs_w_rh[0]), len(runs_wo_rh[0]) - 4)
self.assertEqual(len(runs_w_rh[1].data), 4)
self.assertEqual(len(runs_wo_rh[1].data), 0)
def test_passed_runhistory_deterministic(self):
''' test if passed runhistory is in resulting runhistory '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'train_insts': self.train_insts,
'deterministic': True})
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
# Add a few runs and check, if they are correctly processed
old_configs = [entry["incumbent"] for entry in self.trajectory]
old_rh = RunHistory()
for config in old_configs[:int(len(old_configs) / 2)]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0')
configs = validator._get_configs('all')
insts = validator._get_instances('train')
runs_w_rh = validator._get_runs(configs, insts, repetitions=2,
runhistory=old_rh)
runs_wo_rh = validator._get_runs(configs, insts, repetitions=2)
self.assertEqual(len(runs_w_rh[0]), len(runs_wo_rh[0]) - 4)
self.assertEqual(len(runs_w_rh[1].data), 4)
self.assertEqual(len(runs_wo_rh[1].data), 0)
def test_passed_runhistory_no_insts(self):
''' test passed runhistory, without instances '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality'})
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
# Add a few runs and check, if they are correctly processed
old_configs = [entry["incumbent"] for entry in self.trajectory]
old_rh = RunHistory()
for config in old_configs[:int(len(old_configs) / 2)]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, seed=127)
configs = validator._get_configs('all')
insts = validator._get_instances('train')
runs_w_rh = validator._get_runs(configs, insts, repetitions=2,
runhistory=old_rh)
runs_wo_rh = validator._get_runs(configs, insts, repetitions=2)
self.assertEqual(len(runs_w_rh[0]), len(runs_wo_rh[0]) - 4)
self.assertEqual(len(runs_w_rh[1].data), 4)
self.assertEqual(len(runs_wo_rh[1].data), 0)
def test_validate_epm(self):
''' test using epm to validate '''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'train_insts': self.train_insts,
'test_insts': self.test_insts,
'features': self.feature_dict})
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
# Add a few runs and check, if they are correctly processed
old_configs = [entry["incumbent"] for entry in self.trajectory]
old_rh = RunHistory()
for config in old_configs[:int(len(old_configs) / 2)]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0',
seed=127)
validator.validate_epm('all', 'train', 1, old_rh)
def test_objective_runtime(self):
''' test if everything is ok with objective runtime (imputing!) '''
scen = Scenario(self.scen_fn, cmd_options={'run_obj': 'runtime',
'cutoff_time': 5})
validator = Validator(scen, self.trajectory, self.rng)
old_configs = [entry["incumbent"] for entry in self.trajectory]
old_rh = RunHistory()
for config in old_configs[:int(len(old_configs) / 2)]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0')
validator.validate_epm('all', 'train', 1, old_rh)
def test_inst_no_feat(self):
''' test if scenarios are treated correctly if no features are
specified.'''
scen = Scenario(self.scen_fn,
cmd_options={'run_obj': 'quality',
'train_insts': self.train_insts,
'test_insts': self.test_insts})
self.assertTrue(scen.feature_array is None)
self.assertEqual(len(scen.feature_dict), 0)
scen.instance_specific = self.inst_specs
validator = Validator(scen, self.trajectory, self.rng)
# Add a few runs and check, if they are correctly processed
old_configs = [entry["incumbent"] for entry in self.trajectory]
old_rh = RunHistory()
for config in old_configs[:int(len(old_configs) / 2)]:
old_rh.add(config, 1, 1, StatusType.SUCCESS, instance_id='0',
seed=127)
rh = validator.validate_epm('all', 'train+test', 1, old_rh)
self.assertEqual(len(old_rh.get_all_configs()), 4)
self.assertEqual(len(rh.get_all_configs()), 10)
|
[
"os.remove",
"smac.utils.validate._Run",
"smac.runhistory.runhistory.RunHistory",
"smac.utils.io.traj_logging.TrajLogger.read_traj_aclib_format",
"shutil.rmtree",
"smac.tae.execute_ta_run_old.ExecuteTARunOld",
"os.path.join",
"os.chdir",
"unittest.mock.MagicMock",
"smac.utils.validate.Validator",
"os.path.exists",
"numpy.random.RandomState",
"smac.stats.stats.Stats",
"logging.basicConfig",
"os.getcwd",
"unittest.mock.Mock",
"smac.scenario.scenario.Scenario",
"numpy.array",
"smac.configspace.Configuration",
"os.path.split",
"logging.getLogger"
] |
[((716, 727), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (725, 727), False, 'import os\n'), ((736, 760), 'os.chdir', 'os.chdir', (['base_directory'], {}), '(base_directory)\n', (744, 760), False, 'import os\n'), ((770, 791), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (789, 791), False, 'import logging\n'), ((814, 880), 'logging.getLogger', 'logging.getLogger', (["(self.__module__ + '.' + self.__class__.__name__)"], {}), "(self.__module__ + '.' + self.__class__.__name__)\n", (831, 880), False, 'import logging\n'), ((944, 974), 'numpy.random.RandomState', 'np.random.RandomState', ([], {'seed': '(42)'}), '(seed=42)\n', (965, 974), True, 'import numpy as np\n'), ((1656, 1714), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (1664, 1714), False, 'from smac.scenario.scenario import Scenario\n'), ((1736, 1747), 'smac.stats.stats.Stats', 'Stats', (['scen'], {}), '(scen)\n', (1741, 1747), False, 'from smac.stats.stats import Stats\n'), ((1774, 1883), 'smac.utils.io.traj_logging.TrajLogger.read_traj_aclib_format', 'TrajLogger.read_traj_aclib_format', ([], {'fn': '"""test/test_files/validation/test_validation_traj.json"""', 'cs': 'scen.cs'}), "(fn=\n 'test/test_files/validation/test_validation_traj.json', cs=scen.cs)\n", (1807, 1883), False, 'from smac.utils.io.traj_logging import TrajLogger\n'), ((2499, 2525), 'os.chdir', 'os.chdir', (['self.current_dir'], {}), '(self.current_dir)\n', (2507, 2525), False, 'import os\n'), ((2566, 2624), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (2574, 2624), False, 'from smac.scenario.scenario import Scenario\n'), ((2645, 2681), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', '(42)'], {}), '(scen, self.trajectory, 42)\n', (2654, 2681), False, 'from 
smac.utils.validate import Validator, _Run\n'), ((2776, 2808), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory'], {}), '(scen, self.trajectory)\n', (2785, 2808), False, 'from smac.utils.validate import Validator, _Run\n'), ((3090, 3148), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (3098, 3148), False, 'from smac.scenario.scenario import Scenario\n'), ((3169, 3201), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory'], {}), '(scen, self.trajectory)\n', (3178, 3201), False, 'from smac.utils.validate import Validator, _Run\n'), ((3404, 3462), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (3412, 3462), False, 'from smac.scenario.scenario import Scenario\n'), ((3477, 3522), 'smac.tae.execute_ta_run_old.ExecuteTARunOld', 'ExecuteTARunOld', ([], {'ta': 'scen.ta', 'stats': 'self.stats'}), '(ta=scen.ta, stats=self.stats)\n', (3492, 3522), False, 'from smac.tae.execute_ta_run_old import ExecuteTARunOld\n'), ((3543, 3575), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory'], {}), '(scen, self.trajectory)\n', (3552, 3575), False, 'from smac.utils.validate import Validator, _Run\n'), ((3594, 3605), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3603, 3605), False, 'from unittest import mock\n'), ((4257, 4315), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (4265, 4315), False, 'from smac.scenario.scenario import Scenario\n'), ((4370, 4402), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory'], {}), '(scen, self.trajectory)\n', (4379, 4402), False, 'from smac.utils.validate import Validator, _Run\n'), ((4667, 4725), 
'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (4675, 4725), False, 'from smac.scenario.scenario import Scenario\n'), ((4780, 4812), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory'], {}), '(scen, self.trajectory)\n', (4789, 4812), False, 'from smac.utils.validate import Validator, _Run\n'), ((4830, 4842), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (4840, 4842), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((5410, 5468), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (5418, 5468), False, 'from smac.scenario.scenario import Scenario\n'), ((5523, 5555), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory'], {}), '(scen, self.trajectory)\n', (5532, 5555), False, 'from smac.utils.validate import Validator, _Run\n'), ((5573, 5585), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (5583, 5585), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((5856, 5914), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (5864, 5914), False, 'from smac.scenario.scenario import Scenario\n'), ((5935, 5977), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (5944, 5977), False, 'from smac.utils.validate import Validator, _Run\n'), ((6972, 7050), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'instances': ['0']}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'instances': ['0']})\n", (6980, 7050), False, 'from smac.scenario.scenario import Scenario\n'), ((7133, 7175), 
'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (7142, 7175), False, 'from smac.utils.validate import Validator, _Run\n'), ((7314, 7326), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (7324, 7326), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((8358, 8484), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'train_insts': self.train_insts, 'test_insts': self.\n test_insts}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'train_insts':\n self.train_insts, 'test_insts': self.test_insts})\n", (8366, 8484), False, 'from smac.scenario.scenario import Scenario\n'), ((8649, 8691), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (8658, 8691), False, 'from smac.utils.validate import Validator, _Run\n'), ((11471, 11597), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'train_insts': self.train_insts, 'test_insts': self.\n test_insts}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'train_insts':\n self.train_insts, 'test_insts': self.test_insts})\n", (11479, 11597), False, 'from smac.scenario.scenario import Scenario\n'), ((11761, 11803), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (11770, 11803), False, 'from smac.utils.validate import Validator, _Run\n'), ((13110, 13168), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (13118, 13168), False, 'from smac.scenario.scenario import Scenario\n'), ((13213, 13255), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', 
(13222, 13255), False, 'from smac.utils.validate import Validator, _Run\n'), ((13712, 13830), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'train_insts': self.train_insts, 'deterministic': True}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'train_insts':\n self.train_insts, 'deterministic': True})\n", (13720, 13830), False, 'from smac.scenario.scenario import Scenario\n'), ((13994, 14036), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (14003, 14036), False, 'from smac.utils.validate import Validator, _Run\n'), ((14450, 14508), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (14458, 14508), False, 'from smac.scenario.scenario import Scenario\n'), ((14553, 14595), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (14562, 14595), False, 'from smac.utils.validate import Validator, _Run\n'), ((14804, 14930), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'train_insts': self.train_insts, 'test_insts': self.\n test_insts}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'train_insts':\n self.train_insts, 'test_insts': self.test_insts})\n", (14812, 14930), False, 'from smac.scenario.scenario import Scenario\n'), ((15094, 15136), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (15103, 15136), False, 'from smac.utils.validate import Validator, _Run\n'), ((15294, 15306), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (15304, 15306), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((16251, 16369), 'smac.scenario.scenario.Scenario', 
'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'train_insts': self.train_insts, 'deterministic': True}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'train_insts':\n self.train_insts, 'deterministic': True})\n", (16259, 16369), False, 'from smac.scenario.scenario import Scenario\n'), ((16533, 16575), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (16542, 16575), False, 'from smac.utils.validate import Validator, _Run\n'), ((16733, 16745), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (16743, 16745), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((17478, 17536), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality'}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality'})\n", (17486, 17536), False, 'from smac.scenario.scenario import Scenario\n'), ((17630, 17672), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (17639, 17672), False, 'from smac.utils.validate import Validator, _Run\n'), ((17830, 17842), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (17840, 17842), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((18539, 18701), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'train_insts': self.train_insts, 'test_insts': self.\n test_insts, 'features': self.feature_dict}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'train_insts':\n self.train_insts, 'test_insts': self.test_insts, 'features': self.\n feature_dict})\n", (18547, 18701), False, 'from smac.scenario.scenario import Scenario\n'), ((18897, 18939), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (18906, 18939), False, 'from 
smac.utils.validate import Validator, _Run\n'), ((19097, 19109), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (19107, 19109), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((19468, 19544), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'runtime', 'cutoff_time': 5}"}), "(self.scen_fn, cmd_options={'run_obj': 'runtime', 'cutoff_time': 5})\n", (19476, 19544), False, 'from smac.scenario.scenario import Scenario\n'), ((19616, 19658), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (19625, 19658), False, 'from smac.utils.validate import Validator, _Run\n'), ((19748, 19760), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (19758, 19760), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((20098, 20224), 'smac.scenario.scenario.Scenario', 'Scenario', (['self.scen_fn'], {'cmd_options': "{'run_obj': 'quality', 'train_insts': self.train_insts, 'test_insts': self.\n test_insts}"}), "(self.scen_fn, cmd_options={'run_obj': 'quality', 'train_insts':\n self.train_insts, 'test_insts': self.test_insts})\n", (20106, 20224), False, 'from smac.scenario.scenario import Scenario\n'), ((20493, 20535), 'smac.utils.validate.Validator', 'Validator', (['scen', 'self.trajectory', 'self.rng'], {}), '(scen, self.trajectory, self.rng)\n', (20502, 20535), False, 'from smac.utils.validate import Validator, _Run\n'), ((20693, 20705), 'smac.runhistory.runhistory.RunHistory', 'RunHistory', ([], {}), '()\n', (20703, 20705), False, 'from smac.runhistory.runhistory import RunHistory\n'), ((566, 589), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (579, 589), False, 'import os\n'), ((647, 687), 'os.path.join', 'os.path.join', (['base_directory', '""".."""', '""".."""'], {}), "(base_directory, '..', '..')\n", (659, 687), False, 'import os\n'), ((1290, 1309), 'numpy.array', 
'np.array', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1298, 1309), True, 'import numpy as np\n'), ((1345, 1364), 'numpy.array', 'np.array', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1353, 1364), True, 'import numpy as np\n'), ((1400, 1419), 'numpy.array', 'np.array', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1408, 1419), True, 'import numpy as np\n'), ((1455, 1474), 'numpy.array', 'np.array', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1463, 1474), True, 'import numpy as np\n'), ((1510, 1529), 'numpy.array', 'np.array', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1518, 1529), True, 'import numpy as np\n'), ((1565, 1584), 'numpy.array', 'np.array', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1573, 1584), True, 'import numpy as np\n'), ((2936, 2959), 'numpy.random.RandomState', 'np.random.RandomState', ([], {}), '()\n', (2957, 2959), True, 'import numpy as np\n'), ((3337, 3357), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3351, 3357), False, 'import os\n'), ((7225, 7274), 'smac.configspace.Configuration', 'Configuration', (['scen.cs'], {'values': "{'x1': i, 'x2': i}"}), "(scen.cs, values={'x1': i, 'x2': i})\n", (7238, 7274), False, 'from smac.configspace import Configuration\n'), ((7891, 7952), 'smac.utils.validate._Run', '_Run', ([], {'inst_specs': '"""0"""', 'seed': '(0)', 'inst': '"""0"""', 'config': 'old_configs[2]'}), "(inst_specs='0', seed=0, inst='0', config=old_configs[2])\n", (7895, 7952), False, 'from smac.utils.validate import Validator, _Run\n'), ((7974, 8035), 'smac.utils.validate._Run', '_Run', ([], {'inst_specs': '"""0"""', 'seed': '(0)', 'inst': '"""0"""', 'config': 'old_configs[3]'}), "(inst_specs='0', seed=0, inst='0', config=old_configs[3])\n", (7978, 8035), False, 'from smac.utils.validate import Validator, _Run\n'), ((8057, 8118), 'smac.utils.validate._Run', '_Run', ([], {'inst_specs': '"""0"""', 'seed': '(0)', 'inst': '"""0"""', 'config': 'old_configs[5]'}), "(inst_specs='0', seed=0, inst='0', config=old_configs[5])\n", (8061, 8118), False, 'from 
smac.utils.validate import Validator, _Run\n'), ((8771, 8840), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""3"""', 'seed': '(1608637542)', 'inst_specs': '"""three"""'}), "(config='config1', inst='3', seed=1608637542, inst_specs='three')\n", (8775, 8840), False, 'from smac.utils.validate import Validator, _Run\n'), ((8862, 8931), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config2"""', 'inst': '"""3"""', 'seed': '(1608637542)', 'inst_specs': '"""three"""'}), "(config='config2', inst='3', seed=1608637542, inst_specs='three')\n", (8866, 8931), False, 'from smac.utils.validate import Validator, _Run\n'), ((8953, 9022), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""3"""', 'seed': '(1273642419)', 'inst_specs': '"""three"""'}), "(config='config1', inst='3', seed=1273642419, inst_specs='three')\n", (8957, 9022), False, 'from smac.utils.validate import Validator, _Run\n'), ((9044, 9113), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config2"""', 'inst': '"""3"""', 'seed': '(1273642419)', 'inst_specs': '"""three"""'}), "(config='config2', inst='3', seed=1273642419, inst_specs='three')\n", (9048, 9113), False, 'from smac.utils.validate import Validator, _Run\n'), ((9135, 9203), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""4"""', 'seed': '(1935803228)', 'inst_specs': '"""four"""'}), "(config='config1', inst='4', seed=1935803228, inst_specs='four')\n", (9139, 9203), False, 'from smac.utils.validate import Validator, _Run\n'), ((9225, 9293), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config2"""', 'inst': '"""4"""', 'seed': '(1935803228)', 'inst_specs': '"""four"""'}), "(config='config2', inst='4', seed=1935803228, inst_specs='four')\n", (9229, 9293), False, 'from smac.utils.validate import Validator, _Run\n'), ((9315, 9382), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""4"""', 'seed': '(787846414)', 
'inst_specs': '"""four"""'}), "(config='config1', inst='4', seed=787846414, inst_specs='four')\n", (9319, 9382), False, 'from smac.utils.validate import Validator, _Run\n'), ((9404, 9471), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config2"""', 'inst': '"""4"""', 'seed': '(787846414)', 'inst_specs': '"""four"""'}), "(config='config2', inst='4', seed=787846414, inst_specs='four')\n", (9408, 9471), False, 'from smac.utils.validate import Validator, _Run\n'), ((9493, 9560), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""5"""', 'seed': '(996406378)', 'inst_specs': '"""five"""'}), "(config='config1', inst='5', seed=996406378, inst_specs='five')\n", (9497, 9560), False, 'from smac.utils.validate import Validator, _Run\n'), ((9582, 9649), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config2"""', 'inst': '"""5"""', 'seed': '(996406378)', 'inst_specs': '"""five"""'}), "(config='config2', inst='5', seed=996406378, inst_specs='five')\n", (9586, 9649), False, 'from smac.utils.validate import Validator, _Run\n'), ((9671, 9739), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""5"""', 'seed': '(1201263687)', 'inst_specs': '"""five"""'}), "(config='config1', inst='5', seed=1201263687, inst_specs='five')\n", (9675, 9739), False, 'from smac.utils.validate import Validator, _Run\n'), ((9761, 9829), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config2"""', 'inst': '"""5"""', 'seed': '(1201263687)', 'inst_specs': '"""five"""'}), "(config='config2', inst='5', seed=1201263687, inst_specs='five')\n", (9765, 9829), False, 'from smac.utils.validate import Validator, _Run\n'), ((10009, 10076), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""0"""', 'seed': '(423734972)', 'inst_specs': '"""null"""'}), "(config='config1', inst='0', seed=423734972, inst_specs='null')\n", (10013, 10076), False, 'from smac.utils.validate import Validator, _Run\n'), ((10098, 10165), 
'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""0"""', 'seed': '(415968276)', 'inst_specs': '"""null"""'}), "(config='config1', inst='0', seed=415968276, inst_specs='null')\n", (10102, 10165), False, 'from smac.utils.validate import Validator, _Run\n'), ((10187, 10253), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""1"""', 'seed': '(670094950)', 'inst_specs': '"""one"""'}), "(config='config1', inst='1', seed=670094950, inst_specs='one')\n", (10191, 10253), False, 'from smac.utils.validate import Validator, _Run\n'), ((10275, 10342), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""1"""', 'seed': '(1914837113)', 'inst_specs': '"""one"""'}), "(config='config1', inst='1', seed=1914837113, inst_specs='one')\n", (10279, 10342), False, 'from smac.utils.validate import Validator, _Run\n'), ((10364, 10430), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""2"""', 'seed': '(669991378)', 'inst_specs': '"""two"""'}), "(config='config1', inst='2', seed=669991378, inst_specs='two')\n", (10368, 10430), False, 'from smac.utils.validate import Validator, _Run\n'), ((10452, 10518), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""2"""', 'seed': '(429389014)', 'inst_specs': '"""two"""'}), "(config='config1', inst='2', seed=429389014, inst_specs='two')\n", (10456, 10518), False, 'from smac.utils.validate import Validator, _Run\n'), ((10692, 10759), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""0"""', 'seed': '(249467210)', 'inst_specs': '"""null"""'}), "(config='config1', inst='0', seed=249467210, inst_specs='null')\n", (10696, 10759), False, 'from smac.utils.validate import Validator, _Run\n'), ((10781, 10848), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""1"""', 'seed': '(1972458954)', 'inst_specs': '"""one"""'}), "(config='config1', inst='1', 
seed=1972458954, inst_specs='one')\n", (10785, 10848), False, 'from smac.utils.validate import Validator, _Run\n'), ((10870, 10937), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""2"""', 'seed': '(1572714583)', 'inst_specs': '"""two"""'}), "(config='config1', inst='2', seed=1572714583, inst_specs='two')\n", (10874, 10937), False, 'from smac.utils.validate import Validator, _Run\n'), ((10959, 11028), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""3"""', 'seed': '(1433267572)', 'inst_specs': '"""three"""'}), "(config='config1', inst='3', seed=1433267572, inst_specs='three')\n", (10963, 11028), False, 'from smac.utils.validate import Validator, _Run\n'), ((11050, 11117), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""4"""', 'seed': '(434285667)', 'inst_specs': '"""four"""'}), "(config='config1', inst='4', seed=434285667, inst_specs='four')\n", (11054, 11117), False, 'from smac.utils.validate import Validator, _Run\n'), ((11139, 11206), 'smac.utils.validate._Run', '_Run', ([], {'config': '"""config1"""', 'inst': '"""5"""', 'seed': '(613608295)', 'inst_specs': '"""five"""'}), "(config='config1', inst='5', seed=613608295, inst_specs='five')\n", (11143, 11206), False, 'from smac.utils.validate import Validator, _Run\n'), ((2240, 2285), 'shutil.rmtree', 'shutil.rmtree', (['output_dir'], {'ignore_errors': '(True)'}), '(output_dir, ignore_errors=True)\n', (2253, 2285), False, 'import shutil\n'), ((2401, 2423), 'os.remove', 'os.remove', (['output_file'], {}), '(output_file)\n', (2410, 2423), False, 'import os\n'), ((3721, 3737), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3735, 3737), False, 'from unittest import mock\n'), ((3739, 3755), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3753, 3755), False, 'from unittest import mock\n'), ((3757, 3773), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3771, 3773), False, 'from 
unittest import mock\n'), ((3775, 3791), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3789, 3791), False, 'from unittest import mock\n'), ((3954, 3965), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3963, 3965), False, 'from unittest import mock\n'), ((3967, 3978), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3976, 3978), False, 'from unittest import mock\n'), ((3980, 3991), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3989, 3991), False, 'from unittest import mock\n'), ((3993, 4004), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (4002, 4004), False, 'from unittest import mock\n')]
|
from io import BytesIO
import pytest
from thumbor.engines import BaseEngine
from PIL import Image
@pytest.fixture
def config(config):
    """Extend the base thumbor config fixture with the filters and quality used by these tests."""
    enabled_filters = [
        'thumbor_video_engine.filters.format',
        'thumbor_video_engine.filters.still',
        'thumbor.filters.watermark',
    ]
    config.FILTERS = enabled_filters
    config.QUALITY = 95
    return config
@pytest.mark.gen_test
@pytest.mark.parametrize('pos', ['', '00:00:00'])
def test_still_filter(http_client, base_url, pos):
    """A still() filter applied to an mp4 yields a JPEG response."""
    url = "%s/unsafe/filters:still(%s)/hotdog.mp4" % (base_url, pos)
    response = yield http_client.fetch(url)
    assert response.code == 200
    expected_mime = 'image/jpeg'
    assert response.headers.get('content-type') == expected_mime
    assert BaseEngine.get_mimetype(response.body) == expected_mime
@pytest.mark.gen_test
@pytest.mark.parametrize('format,mime_type', [
    ('webp', 'image/webp'),
    ('jpg', 'image/jpeg'),
    ('zpg', 'image/jpeg'),
])
def test_still_filter_with_format(http_client, base_url, format, mime_type):
    """still() combined with format() honours the requested (known) output format."""
    url = "%s/unsafe/filters:still():format(%s)/hotdog.mp4" % (base_url, format)
    response = yield http_client.fetch(url)
    assert response.code == 200
    assert response.headers.get('content-type') == mime_type
    assert BaseEngine.get_mimetype(response.body) == mime_type
@pytest.mark.gen_test
def test_still_filter_with_watermark(http_client, base_url):
    """still() composes with watermark() and format(png); the watermark pixel is white."""
    url = ("%s/unsafe/filters:still():format(png):"
           "watermark(watermark.png,0,0,0)/hotdog.mp4" % (base_url))
    response = yield http_client.fetch(url)
    assert response.code == 200
    image = Image.open(BytesIO(response.body))
    assert image.getpixel((85, 55))[:3] == (255, 255, 255)
    assert response.headers.get('content-type') == 'image/png'
|
[
"pytest.mark.parametrize",
"thumbor.engines.BaseEngine.get_mimetype",
"io.BytesIO"
] |
[((364, 412), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""pos"""', "['', '00:00:00']"], {}), "('pos', ['', '00:00:00'])\n", (387, 412), False, 'import pytest\n'), ((761, 880), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""format,mime_type"""', "[('webp', 'image/webp'), ('jpg', 'image/jpeg'), ('zpg', 'image/jpeg')]"], {}), "('format,mime_type', [('webp', 'image/webp'), ('jpg',\n 'image/jpeg'), ('zpg', 'image/jpeg')])\n", (784, 880), False, 'import pytest\n'), ((681, 719), 'thumbor.engines.BaseEngine.get_mimetype', 'BaseEngine.get_mimetype', (['response.body'], {}), '(response.body)\n', (704, 719), False, 'from thumbor.engines import BaseEngine\n'), ((1195, 1233), 'thumbor.engines.BaseEngine.get_mimetype', 'BaseEngine.get_mimetype', (['response.body'], {}), '(response.body)\n', (1218, 1233), False, 'from thumbor.engines import BaseEngine\n'), ((1539, 1561), 'io.BytesIO', 'BytesIO', (['response.body'], {}), '(response.body)\n', (1546, 1561), False, 'from io import BytesIO\n')]
|
from django.http import HttpResponse
from django.shortcuts import render
from .one_variable_stats import OneVariableStatsQuestion, OneVariableStatsQuestionType
from .probabilities import ProbabilitiesQuestion, ProbabilitiesQuestionType
# Create your views here.
def index(request):
    """Render the landing page."""
    return render(request, 'index.html')
# Module-level question bank for the probabilities quiz: six freshly
# randomized questions, keyed 'question1'..'question6'.
probabilities_context = {
    f'question{i}': ProbabilitiesQuestion(ProbabilitiesQuestionType.random_type())
    for i in range(1, 7)
}
def probabilities(request):
    """Show six probability questions, or grade a submitted answer sheet.

    GET regenerates the module-level question bank and renders the quiz;
    POST compares each submitted answer (rounded to 6 decimals) against the
    stored question's solution and renders the results page.
    """
    global probabilities_context
    if request.method == 'POST':
        answers = {}
        for i in range(1, 7):
            raw = request.POST[f'answer{i}']
            # Blank fields count as 0; everything is compared at 6-decimal precision.
            answers[f'answer{i}'] = round(float(raw), 6) if raw else 0
            answers[f'solution{i}'] = round(float(probabilities_context[f'question{i}'].answer[0]), 6)
        for i in range(1, 7):
            if answers[f'answer{i}'] == answers[f'solution{i}']:
                answers[f'results{i}'] = 'Good job!'
            else:
                answers[f'results{i}'] = 'Better luck next time!'
        context = dict(answers)
        context.update(probabilities_context)
        return render(request, 'probabilities_answers.html', context)
    probabilities_context = {
        f'question{i}': ProbabilitiesQuestion(ProbabilitiesQuestionType.random_type())
        for i in range(1, 7)
    }
    return render(request, 'probabilities.html', probabilities_context)
# Module-level question bank for the one-variable statistics quiz: six
# freshly randomized questions, keyed 'question1'..'question6'.
one_variable_stats_context = {
    f'question{i}': OneVariableStatsQuestion(OneVariableStatsQuestionType.random_type())
    for i in range(1, 7)
}
def one_variable_stats(request):
    """Show six one-variable-statistics questions, or grade a submitted sheet.

    GET regenerates the module-level question bank and renders the quiz;
    POST compares each submitted answer (rounded to 1 decimal) against the
    stored question's solution and renders the results page.

    Rewritten with loops instead of the original hand-unrolled
    answer1..answer6 / results1..results6 blocks, mirroring the structure of
    ``probabilities``; behavior is unchanged.
    """
    global one_variable_stats_context
    if request.method == 'POST':
        answers = {}
        for i in range(1, 7):
            raw = request.POST[f'answer{i}']
            # Blank fields count as 0; everything is compared at 1-decimal precision.
            answers[f'answer{i}'] = 0 if not raw else round(float(raw), 1)
        for i in range(1, 7):
            answers[f'solution{i}'] = round(float(one_variable_stats_context[f'question{i}'].answer[0]), 1)
        context = {}
        for i in range(1, 7):
            answers[f'results{i}'] = ('Good job!'
                                      if answers[f'answer{i}'] == answers[f'solution{i}']
                                      else 'Better luck next time!')
        context.update(answers)
        context.update(one_variable_stats_context)
        return render(request, 'one_variable_stats_answers.html', context)
    one_variable_stats_context = {}
    for i in range(1, 7):
        one_variable_stats_context[f'question{i}'] = OneVariableStatsQuestion(OneVariableStatsQuestionType.random_type())
    return render(request, 'one_variable_stats.html', one_variable_stats_context)
|
[
"django.shortcuts.render"
] |
[((294, 323), 'django.shortcuts.render', 'render', (['request', '"""index.html"""'], {}), "(request, 'index.html')\n", (300, 323), False, 'from django.shortcuts import render\n'), ((1401, 1461), 'django.shortcuts.render', 'render', (['request', '"""probabilities.html"""', 'probabilities_context'], {}), "(request, 'probabilities.html', probabilities_context)\n", (1407, 1461), False, 'from django.shortcuts import render\n'), ((3622, 3692), 'django.shortcuts.render', 'render', (['request', '"""one_variable_stats.html"""', 'one_variable_stats_context'], {}), "(request, 'one_variable_stats.html', one_variable_stats_context)\n", (3628, 3692), False, 'from django.shortcuts import render\n'), ((1158, 1212), 'django.shortcuts.render', 'render', (['request', '"""probabilities_answers.html"""', 'context'], {}), "(request, 'probabilities_answers.html', context)\n", (1164, 1212), False, 'from django.shortcuts import render\n'), ((3367, 3426), 'django.shortcuts.render', 'render', (['request', '"""one_variable_stats_answers.html"""', 'context'], {}), "(request, 'one_variable_stats_answers.html', context)\n", (3373, 3426), False, 'from django.shortcuts import render\n')]
|
from django.contrib import admin
from . import models
class DoNotLog:
    """Mixin that suppresses Django admin LogEntry creation.

    Each override swallows its arguments and records nothing, so admins
    mixing this in never write addition/change/deletion log rows.
    """

    def log_addition(self, *args, **kwargs):
        return None

    def log_change(self, *args, **kwargs):
        return None

    def log_deletion(self, *args, **kwargs):
        return None
class YamlAdmin(DoNotLog, admin.ModelAdmin):
    """Admin for YamlFile: lists the file path and how many tasks it contains."""
    list_display = ('path', 'task_count')
    def task_count(self, obj):
        """Number of tasks linked to this YAML file."""
        return obj.tasks.count()
    task_count.short_description = 'タスク数'
class CategoryAdmin(DoNotLog, admin.ModelAdmin):
    """Admin for Category: lists id/name plus how many answers reference it."""
    list_display = ('id', 'name', 'task_count')
    def task_count(self, obj):
        """Number of related answers.

        Uses ``.count()`` (a SQL COUNT) instead of the original
        ``len(obj.answers.all())``, which fetched and materialized every row
        just to count them; also consistent with the other admins here.
        """
        try:
            return obj.answers.count()
        except Exception:
            # Deliberate best-effort: rows without a usable relation show 0.
            return 0
    task_count.short_description = "タスク数"
class AnswerAdmin(DoNotLog, admin.ModelAdmin):
    """Admin for Answer: shows truncated script/message previews per row."""
    list_display = ('id', 'task_script', 'mode', 'replaceable', 'clearly', 'short_message')
    list_filter = ('mode', 'replaceable', 'clearly')

    def task_script(self, obj):
        """First 20 characters of the related task's script (ellipsized when longer)."""
        script = obj.task.script
        if len(script) <= 20:
            return script
        return f"{script[:20]}..."
    task_script.short_description = "スクリプト"

    def short_message(self, obj):
        """First 20 characters of the answer's note (ellipsized when longer)."""
        message = obj.message
        if len(message) <= 20:
            return message
        return f"{message[:20]}..."
    short_message.short_description = '備考'
class TaskAdmin(DoNotLog, admin.ModelAdmin):
    """Admin for Task: shows the module/script plus the owning role and user."""
    list_display = ('module', 'script', 'role_name', 'role_user')
    list_filter = ('module',)
    def role_name(self, obj):
        """Name of the role that owns this task (via its role version)."""
        return obj.role_version.role.name
    role_name.short_description = 'ロール名'
    # NOTE(review): the display methods traverse obj.role_version.role, but the
    # order fields are 'role__name' / 'role__owner'. If Task only has a
    # role_version FK, sorting on these columns will fail; presumably they
    # should be 'role_version__role__name' / 'role_version__role__owner' --
    # verify against the Task model before changing.
    role_name.admin_order_field = 'role__name'
    def role_user(self, obj):
        """Owner (user) of the role that owns this task."""
        return obj.role_version.role.owner
    role_user.short_description = 'ユーザ名'
    role_user.admin_order_field = 'role__owner'
class RoleAdmin(DoNotLog, admin.ModelAdmin):
    """Admin for Role: plain column listing (no LogEntry rows via DoNotLog)."""
    list_display = ('name', 'owner', 'repository')
class RoleVersionAdmin(DoNotLog, admin.ModelAdmin):
    """Admin for RoleVersion: fully-qualified role name, version, date, task count."""
    list_display = ('role_name', 'name', 'published_at', 'task_count')

    def role_name(self, obj):
        """Fully-qualified role name, formatted as '<owner>.<name>'."""
        role = obj.role
        return f"{role.owner}.{role.name}"
    role_name.short_description = 'ロール名'

    def task_count(self, obj):
        """Number of tasks recorded for this role version."""
        return obj.tasks.count()
    task_count.short_description = 'タスク数'
# Register every model with its admin class (same order as before).
_registrations = (
    (models.YamlFile, YamlAdmin),
    (models.Category, CategoryAdmin),
    (models.Answer, AnswerAdmin),
    (models.Task, TaskAdmin),
    (models.Role, RoleAdmin),
    (models.RoleVersion, RoleVersionAdmin),
)
for _model, _admin_class in _registrations:
    admin.site.register(_model, _admin_class)
|
[
"django.contrib.admin.site.register"
] |
[((2217, 2264), 'django.contrib.admin.site.register', 'admin.site.register', (['models.YamlFile', 'YamlAdmin'], {}), '(models.YamlFile, YamlAdmin)\n', (2236, 2264), False, 'from django.contrib import admin\n'), ((2265, 2316), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Category', 'CategoryAdmin'], {}), '(models.Category, CategoryAdmin)\n', (2284, 2316), False, 'from django.contrib import admin\n'), ((2317, 2364), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Answer', 'AnswerAdmin'], {}), '(models.Answer, AnswerAdmin)\n', (2336, 2364), False, 'from django.contrib import admin\n'), ((2365, 2408), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Task', 'TaskAdmin'], {}), '(models.Task, TaskAdmin)\n', (2384, 2408), False, 'from django.contrib import admin\n'), ((2409, 2452), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Role', 'RoleAdmin'], {}), '(models.Role, RoleAdmin)\n', (2428, 2452), False, 'from django.contrib import admin\n'), ((2453, 2510), 'django.contrib.admin.site.register', 'admin.site.register', (['models.RoleVersion', 'RoleVersionAdmin'], {}), '(models.RoleVersion, RoleVersionAdmin)\n', (2472, 2510), False, 'from django.contrib import admin\n')]
|
import torch
import torch.nn.functional as F
import torch.nn as nn
from torch.distributions.independent import Independent
from torch.distributions.normal import Normal
import numpy as np
from ..utils import export, Named, Expression
from ..conv_parts import ResBlock,conv2d
from ..invertible import SqueezeLayer,padChannels,keepChannels,NNdownsample,iAvgPool2d#iSequential2
from ..invertible import iLogits, iBN, MeanOnlyBN, iSequential, passThrough, addZslot, Join, pad_circular_nd
from ..invertible import iConv2d, iSLReLU,iConv1x1,Flatten,RandomPadChannels,iLeakyReLU,iCoordInjection,iSimpleCoords
import scipy as sp
import scipy.sparse
def iConvSelu(channels):
    """Invertible block: channel-preserving iConv2d followed by a smooth leaky ReLU (iSLReLU)."""
    return iSequential(iConv2d(channels,channels),iSLReLU())
def iCoordSelu(channels):
    """Invertible block: iConv2d + iSLReLU, followed by coordinate injection (iCoordInjection)."""
    return iSequential(iConv2d(channels,channels),iSLReLU(),iCoordInjection(channels))
def iConvBNselu(channels):
    """Invertible block: iConv2d, invertible batch norm (iBN), then iSLReLU."""
    return iSequential(iConv2d(channels,channels),iBN(channels),iSLReLU())
def StandardNormal(d, device=torch.device('cuda:0')):
    """Return an isotropic unit Gaussian over R^d as a torch Distribution.

    Args:
        d: dimensionality of the event (latent) space.
        device: device for the loc/scale tensors. NOTE: the default is
            evaluated once at import time and hard-codes ``cuda:0``; pass an
            explicit device on CPU-only machines (kept for backward compat).

    Returns:
        ``Independent(Normal(...), 1)`` with event shape ``(d,)``.
    """
    # Allocate directly on the target device rather than CPU-allocating and
    # copying via .to(device), avoiding a redundant transfer.
    loc = torch.zeros(d, device=device)
    scale = torch.ones(d, device=device)
    return Independent(Normal(loc, scale), 1)
class FlowNetwork(nn.Module,metaclass=Named):
    """Base class for classifying normalizing-flow networks.

    Subclasses are expected to set:
      body            -- invertible feature extractor
      classifier_head -- maps body features to class logits
      flow            -- invertible map to latent z, exposing .inverse/.logdet
      prior           -- latent prior: either a Distribution instance, or a
                         factory ``device -> Distribution``
    """
    def forward(self,x):
        """Class logits for a batch x."""
        return self.classifier_head(self.body(x))
    def _resolve_prior(self, device):
        """Resolve ``self.prior`` to a Distribution.

        The original code always called ``self.prior(device)`` as a factory,
        but every subclass in this module assigns a Distribution *instance*
        (``StandardNormal(...)`` returns Independent), which is not callable
        and would raise TypeError. Accept both forms.
        """
        return self.prior(device) if callable(self.prior) else self.prior
    def sample(self,bs=1):
        """Draw bs model samples by inverting the flow on prior draws."""
        return self.flow.inverse(self._resolve_prior(self.device).sample([bs]))
    @property
    def device(self):
        # Cached on first access; assumes the module has at least one parameter.
        try: return self._device
        except AttributeError:
            self._device = next(self.parameters()).device
        return self._device
    def nll(self,x):
        """Negative log-likelihood of x via the change-of-variables formula."""
        z = self.flow(x)
        logdet = self.flow.logdet()
        return -1*(self._resolve_prior(x.device).log_prob(z) + logdet)
@export
class iCNN(FlowNetwork):
    """Small invertible CNN classifier / flow.

    The 3 input channels are randomly padded up to ``k`` channels
    (RandomPadChannels), then coord-injected invertible conv blocks run with
    two NNdownsample stages; the layer widths below show the channel count
    growing k -> 4k -> 16k. ``flow`` flattens the body output and ``prior``
    is a unit Gaussian over k*32*32 latent dims (assumes 32x32 inputs --
    TODO confirm against callers).
    """
    def __init__(self, num_classes=10,k=16):
        super().__init__()
        self.num_classes = num_classes
        self.k = k  # base channel width after padding the 3 input channels
        self.body = iSequential(
            #iLogits(),
            RandomPadChannels(k-3),
            *iCoordSelu(k),
            *iCoordSelu(k),
            *iCoordSelu(k),
            NNdownsample(),
            *iCoordSelu(4*k),
            *iCoordSelu(4*k),
            *iCoordSelu(4*k),
            NNdownsample(),
            *iCoordSelu(16*k),
            *iCoordSelu(16*k),
            iConv2d(16*k,16*k),
        )
        # Non-invertible head: batch norm, global average pool, linear logits.
        self.classifier_head = nn.Sequential(
            nn.BatchNorm2d(16*k),
            Expression(lambda u:u.mean(-1).mean(-1)),
            nn.Linear(16*k,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(k*32*32)
@export
class MultiScaleiCNN(iCNN):
    """Multi-scale invertible CNN.

    Uses addZslot/keepChannels to split off latent channels at each scale
    and Join() to collect them; the classifier head reads only the final
    activations (``z[-1]``). Channel widths: k -> (downsample) 4k, keep 2k
    -> (downsample) 8k, keep 4k.
    """
    def __init__(self, num_classes=10,k=64):
        super().__init__(num_classes,k)
        self.num_classes = num_classes
        self.k = k
        self.body = iSequential(
            iLogits(),
            RandomPadChannels(k-3),
            addZslot(),
            passThrough(*iConvBNselu(k)),
            passThrough(*iConvBNselu(k)),
            passThrough(*iConvBNselu(k)),
            passThrough(NNdownsample()),
            passThrough(iConv1x1(4*k)),
            keepChannels(2*k),
            passThrough(*iConvBNselu(2*k)),
            passThrough(*iConvBNselu(2*k)),
            passThrough(*iConvBNselu(2*k)),
            passThrough(NNdownsample()),
            passThrough(iConv1x1(8*k)),
            keepChannels(4*k),
            passThrough(*iConvBNselu(4*k)),
            passThrough(*iConvBNselu(4*k)),
            passThrough(*iConvBNselu(4*k)),
            passThrough(iConv2d(4*k,4*k)),
            Join(),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda z:z[-1]),  # classify only the final-scale activations
            nn.BatchNorm2d(4*k),
            Expression(lambda u:u.mean(-1).mean(-1)),
            nn.Linear(4*k,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(k*32*32)
@export
class MultiScaleiCNNv2(MultiScaleiCNN):
    """Variant of MultiScaleiCNN: iConvSelu blocks (no invertible batch norm),
    no iLogits preprocessing, and only 2k channels kept at both scales."""
    def __init__(self, num_classes=10,k=96):
        super().__init__(num_classes,k)
        self.num_classes = num_classes
        self.k = k
        self.body = iSequential(
            #iLogits(),
            RandomPadChannels(k-3),
            addZslot(),
            passThrough(*iConvSelu(k)),
            passThrough(*iConvSelu(k)),
            passThrough(*iConvSelu(k)),
            passThrough(NNdownsample()),
            passThrough(iConv1x1(4*k)),
            keepChannels(2*k),
            passThrough(*iConvSelu(2*k)),
            passThrough(*iConvSelu(2*k)),
            #passThrough(*iConvSelu(2*k)),
            passThrough(NNdownsample()),
            passThrough(iConv1x1(8*k)),
            keepChannels(2*k),
            passThrough(*iConvSelu(2*k)),
            passThrough(*iConvSelu(2*k)),
            #passThrough(*iConvSelu(2*k)),
            passThrough(iConv2d(2*k,2*k)),
            Join(),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda z:z[-1]),  # classify only the final-scale activations
            nn.BatchNorm2d(2*k),
            Expression(lambda u:u.mean(-1).mean(-1)),
            nn.Linear(2*k,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(k*32*32)
class iCNNsup(MultiScaleiCNN):
    """MultiScaleiCNNv2-style model using invertible average pooling
    (iAvgPool2d) in place of NNdownsample for both downsampling stages."""
    def __init__(self, num_classes=10,k=96):
        super().__init__(num_classes,k)
        self.num_classes = num_classes
        self.k = k
        self.body = iSequential(
            #iLogits(),
            RandomPadChannels(k-3),
            addZslot(),
            passThrough(*iConvSelu(k)),
            passThrough(*iConvSelu(k)),
            passThrough(iAvgPool2d()),
            passThrough(iConv1x1(4*k)),
            keepChannels(2*k),
            passThrough(*iConvSelu(2*k)),
            passThrough(*iConvSelu(2*k)),
            #passThrough(*iConvSelu(2*k)),
            passThrough(iAvgPool2d()),
            passThrough(iConv1x1(8*k)),
            keepChannels(2*k),
            passThrough(*iConvSelu(2*k)),
            passThrough(*iConvSelu(2*k)),
            Join(),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda z:z[-1]),  # classify only the final-scale activations
            nn.BatchNorm2d(2*k),
            Expression(lambda u:u.mean(-1).mean(-1)),
            nn.Linear(2*k,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(k*32*32)
class iSimpleSup(MultiScaleiCNN):
    """Simplified supervised invertible CNN: like :class:`iCNNsup` but
    without the invertible 1x1 channel-mixing convolutions."""
    def __init__(self, num_classes=10,k=96):
        """:param num_classes: classifier output classes
        :param k: base channel width (input padded from 3 to k channels)
        """
        # Parent __init__ builds its own body/head; overwritten below.
        super().__init__(num_classes,k)
        self.num_classes = num_classes
        self.k = k
        self.body = iSequential(
            #iLogits(),
            RandomPadChannels(k-3),  # pad 3 input channels up to k
            addZslot(),
            passThrough(*iConvSelu(k)),
            passThrough(*iConvSelu(k)),
            passThrough(iAvgPool2d()),   # invertible 2x downsample
            keepChannels(2*k),           # keep 2k channels, rest to z-slot
            passThrough(*iConvSelu(2*k)),
            passThrough(*iConvSelu(2*k)),
            #passThrough(*iConvSelu(2*k)),
            passThrough(iAvgPool2d()),
            keepChannels(2*k),
            passThrough(*iConvSelu(2*k)),
            passThrough(*iConvSelu(2*k)),
            Join(),                      # merge kept channels with the z-slot
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda z:z[-1]),
            nn.BatchNorm2d(2*k),
            Expression(lambda u:u.mean(-1).mean(-1)),  # global average pool
            nn.Linear(2*k,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(k*32*32)
@export
class iCNN3d(FlowNetwork):
    """Single-scale invertible CNN: three conv-SELU groups per resolution,
    three invertible average-pooling downsamples, no channel factoring."""
    def __init__(self, in_channels=3, num_classes=10,res=32):
        """:param in_channels: input image channels
        :param num_classes: classifier output classes
        :param res: input spatial resolution (assumed square)
        """
        super().__init__()
        self.num_classes = num_classes
        self.body = iSequential(
            iLogits(),   # map pixel values to an unconstrained space
            *iConvSelu(in_channels),
            *iConvSelu(in_channels),
            *iConvSelu(in_channels),
            iAvgPool2d(),   # 2x downsample -> 4x channels
            *iConvSelu(4*in_channels),
            *iConvSelu(4*in_channels),
            *iConvSelu(4*in_channels),
            iAvgPool2d(),
            *iConvSelu(16*in_channels),
            *iConvSelu(16*in_channels),
            *iConvSelu(16*in_channels),
            iAvgPool2d(),
            *iConvSelu(64*in_channels),
            *iConvSelu(64*in_channels),
            *iConvSelu(64*in_channels),
            iConv2d(64*in_channels,64*in_channels),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda u:u.mean(-1).mean(-1)),  # global average pool
            nn.Linear(64*in_channels,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        # Dimension is preserved by the invertible body: C*res*res.
        self.prior = StandardNormal(in_channels*res*res)
@export
class iCNN3d2(FlowNetwork):
    """Non-invertible counterpart of :class:`iCNN3d`: plain conv+ReLU stacks
    with NN downsampling (presumably a baseline for comparison)."""
    def __init__(self, in_channels=3, num_classes=10,res=32):
        """:param in_channels: input image channels
        :param num_classes: classifier output classes
        :param res: input spatial resolution (assumed square)
        """
        super().__init__()
        self.num_classes = num_classes
        # NOTE(review): this body is an ordinary nn.Sequential (conv + ReLU),
        # not an invertible stack, yet it is wrapped into self.flow below --
        # confirm the flow path is unused for this baseline.
        self.body = nn.Sequential(
            conv2d(in_channels,in_channels),
            nn.ReLU(),
            conv2d(in_channels,in_channels),
            nn.ReLU(),
            conv2d(in_channels,in_channels),
            nn.ReLU(),
            NNdownsample(),   # 2x downsample -> 4x channels
            conv2d(4*in_channels,4*in_channels),
            nn.ReLU(),
            conv2d(4*in_channels,4*in_channels),
            nn.ReLU(),
            conv2d(4*in_channels,4*in_channels),
            nn.ReLU(),
            NNdownsample(),
            conv2d(16*in_channels,16*in_channels),
            nn.ReLU(),
            conv2d(16*in_channels,16*in_channels),
            nn.ReLU(),
            conv2d(16*in_channels,16*in_channels),
            nn.ReLU(),
            NNdownsample(),
            conv2d(64*in_channels,64*in_channels),
            nn.ReLU(),
            conv2d(64*in_channels,64*in_channels),
            nn.ReLU(),
            conv2d(64*in_channels,64*in_channels),
            nn.ReLU(),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda u:u.mean(-1).mean(-1)),  # global average pool
            nn.Linear(64*in_channels,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(in_channels*res*res)
@export
class iCNN3dCoords(FlowNetwork):
    """Variant of :class:`iCNN3d` built from coordinate-conditioned
    invertible conv-SELU blocks (``iCoordSelu``)."""
    def __init__(self, in_channels=3, num_classes=10,res=32):
        """:param in_channels: input image channels
        :param num_classes: classifier output classes
        :param res: input spatial resolution (assumed square)
        """
        super().__init__()
        self.num_classes = num_classes
        self.body = iSequential(
            iLogits(),   # map pixel values to an unconstrained space
            *[iCoordSelu(in_channels) for i in range(3)],
            iAvgPool2d(),   # 2x downsample -> 4x channels
            *[iCoordSelu(4*in_channels) for i in range(3)],
            iAvgPool2d(),
            *[iCoordSelu(16*in_channels) for i in range(3)],
            iAvgPool2d(),
            *[iCoordSelu(64*in_channels) for i in range(3)],
            iConv2d(64*in_channels,64*in_channels),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda u:u.mean(-1).mean(-1)),  # global average pool
            nn.Linear(64*in_channels,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(in_channels*res*res)
@export
class iLinear3d(iCNN3d):
    """Purely linear invertible network (no nonlinearities between convs),
    with coordinate-channel injection before each resolution stage."""
    def __init__(self, num_classes=10,res=32):
        """:param num_classes: classifier output classes
        :param res: input spatial resolution (assumed square)
        """
        # Parent __init__ (with its defaults) builds a body/head that are
        # immediately overwritten below.
        super().__init__()
        self.num_classes = num_classes
        self.body = iSequential(
            iLogits(),
            iCoordInjection(3),   # inject coordinate information at 3 channels
            iConv2d(3,3),
            iConv2d(3,3),
            iConv2d(3,3),
            iAvgPool2d(),         # 2x downsample: 3 -> 12 channels
            iCoordInjection(12),
            iConv2d(12,12),
            iConv2d(12,12),
            iConv2d(12,12),
            iAvgPool2d(),         # 12 -> 48 channels
            iCoordInjection(48),
            iConv2d(48,48),
            iConv2d(48,48),
            iConv2d(48,48),
            iAvgPool2d(),         # 48 -> 192 channels
            iCoordInjection(192),
            iConv2d(192,192),
            iConv2d(192,192),
            iConv2d(192,192),
        )
        self.classifier_head = nn.Sequential(
            Expression(lambda u:u.mean(-1).mean(-1)),  # global average pool
            nn.Linear(192,num_classes)
        )
        self.flow = iSequential(self.body,Flatten())
        self.prior = StandardNormal(3*res*res)
|
[
"torch.ones",
"torch.nn.ReLU",
"torch.nn.BatchNorm2d",
"torch.device",
"torch.nn.Linear",
"torch.zeros"
] |
[((987, 1009), 'torch.device', 'torch.device', (['"""cuda:0"""'], {}), "('cuda:0')\n", (999, 1009), False, 'import torch\n'), ((2335, 2357), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(16 * k)'], {}), '(16 * k)\n', (2349, 2357), True, 'import torch.nn as nn\n'), ((2423, 2453), 'torch.nn.Linear', 'nn.Linear', (['(16 * k)', 'num_classes'], {}), '(16 * k, num_classes)\n', (2432, 2453), True, 'import torch.nn as nn\n'), ((3667, 3688), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(4 * k)'], {}), '(4 * k)\n', (3681, 3688), True, 'import torch.nn as nn\n'), ((3754, 3783), 'torch.nn.Linear', 'nn.Linear', (['(4 * k)', 'num_classes'], {}), '(4 * k, num_classes)\n', (3763, 3783), True, 'import torch.nn as nn\n'), ((4994, 5015), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(2 * k)'], {}), '(2 * k)\n', (5008, 5015), True, 'import torch.nn as nn\n'), ((5081, 5110), 'torch.nn.Linear', 'nn.Linear', (['(2 * k)', 'num_classes'], {}), '(2 * k, num_classes)\n', (5090, 5110), True, 'import torch.nn as nn\n'), ((6174, 6195), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(2 * k)'], {}), '(2 * k)\n', (6188, 6195), True, 'import torch.nn as nn\n'), ((6261, 6290), 'torch.nn.Linear', 'nn.Linear', (['(2 * k)', 'num_classes'], {}), '(2 * k, num_classes)\n', (6270, 6290), True, 'import torch.nn as nn\n'), ((7277, 7298), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(2 * k)'], {}), '(2 * k)\n', (7291, 7298), True, 'import torch.nn as nn\n'), ((7364, 7393), 'torch.nn.Linear', 'nn.Linear', (['(2 * k)', 'num_classes'], {}), '(2 * k, num_classes)\n', (7373, 7393), True, 'import torch.nn as nn\n'), ((8439, 8479), 'torch.nn.Linear', 'nn.Linear', (['(64 * in_channels)', 'num_classes'], {}), '(64 * in_channels, num_classes)\n', (8448, 8479), True, 'import torch.nn as nn\n'), ((8855, 8864), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (8862, 8864), True, 'import torch.nn as nn\n'), ((8923, 8932), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (8930, 8932), True, 'import torch.nn as nn\n'), ((8991, 9000), 
'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (8998, 9000), True, 'import torch.nn as nn\n'), ((9091, 9100), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9098, 9100), True, 'import torch.nn as nn\n'), ((9163, 9172), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9170, 9172), True, 'import torch.nn as nn\n'), ((9235, 9244), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9242, 9244), True, 'import torch.nn as nn\n'), ((9337, 9346), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9344, 9346), True, 'import torch.nn as nn\n'), ((9411, 9420), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9418, 9420), True, 'import torch.nn as nn\n'), ((9485, 9494), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9492, 9494), True, 'import torch.nn as nn\n'), ((9587, 9596), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9594, 9596), True, 'import torch.nn as nn\n'), ((9661, 9670), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9668, 9670), True, 'import torch.nn as nn\n'), ((9735, 9744), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9742, 9744), True, 'import torch.nn as nn\n'), ((9868, 9908), 'torch.nn.Linear', 'nn.Linear', (['(64 * in_channels)', 'num_classes'], {}), '(64 * in_channels, num_classes)\n', (9877, 9908), True, 'import torch.nn as nn\n'), ((10744, 10784), 'torch.nn.Linear', 'nn.Linear', (['(64 * in_channels)', 'num_classes'], {}), '(64 * in_channels, num_classes)\n', (10753, 10784), True, 'import torch.nn as nn\n'), ((11789, 11816), 'torch.nn.Linear', 'nn.Linear', (['(192)', 'num_classes'], {}), '(192, num_classes)\n', (11798, 11816), True, 'import torch.nn as nn\n'), ((1042, 1056), 'torch.zeros', 'torch.zeros', (['d'], {}), '(d)\n', (1053, 1056), False, 'import torch\n'), ((1068, 1081), 'torch.ones', 'torch.ones', (['d'], {}), '(d)\n', (1078, 1081), False, 'import torch\n')]
|
#!/bin/python
import json
# Base URL for all Yahoo! Fantasy Sports v2 REST API requests.
YAHOO_ENDPOINT = 'https://fantasysports.yahooapis.com/fantasy/v2'
class YHandler:
    """Thin wrapper over the Yahoo! Fantasy Sports v2 REST API.

    All HTTP traffic goes through the OAuth session object supplied at
    construction time; the ``*_raw`` helpers only build URIs and delegate
    to :meth:`get`, :meth:`put` or :meth:`post`.
    """
    def __init__(self, sc):
        self.sc = sc

    def get(self, uri):
        """GET the given API ``uri`` and return the parsed JSON response.

        :param uri: URI of the API to call
        :type uri: str
        :return: JSON document of the response
        :raises: RuntimeError if the response carries an error
        """
        response = self.sc.session.get(f"{YAHOO_ENDPOINT}/{uri}",
                                       params={'format': 'json'})
        jresp = response.json()
        if "error" in jresp:
            raise RuntimeError(json.dumps(jresp))
        return jresp

    def put(self, uri, data):
        """PUT ``data`` (an XML payload) to the given API ``uri``.

        :param uri: URI of the API to call
        :type uri: str
        :param data: payload to send
        :type data: str
        :return: XML document of the response
        :raises: RuntimeError on any non-200 response
        """
        response = self.sc.session.put(f"{YAHOO_ENDPOINT}/{uri}", data=data,
                                       headers={'Content-Type': 'application/xml'})
        if response.status_code != 200:
            raise RuntimeError(response.content)
        return response

    def post(self, uri, data):
        """POST ``data`` (an XML payload) to the given API ``uri``.

        :param uri: URI of the API to call
        :type uri: str
        :param data: payload to send
        :type data: str
        :return: XML document of the response
        :raises: RuntimeError on any non-201 response
        """
        response = self.sc.session.post(f"{YAHOO_ENDPOINT}/{uri}", data=data,
                                        headers={'Content-Type': 'application/xml'})
        if response.status_code != 201:
            raise RuntimeError(response.content)
        return response

    def get_teams_raw(self):
        """Return raw JSON listing the logged-in user's teams."""
        return self.get("users;use_login=1/games/teams")

    def get_standings_raw(self, league_id):
        """Return raw JSON of the standings for ``league_id``."""
        return self.get(f"league/{league_id}/standings")

    def get_settings_raw(self, league_id):
        """Return raw JSON of the settings for ``league_id``."""
        return self.get(f"league/{league_id}/settings")

    def get_matchup_raw(self, team_key, week):
        """Return raw JSON of a team's matchup for the given ``week``."""
        return self.get(f"team/{team_key}/matchups;weeks={week}")

    def get_roster_raw(self, team_key, week=None, day=None):
        """Return raw JSON of a team's roster.

        A roster may be requested for a week or for a day; with neither
        given, the current day's roster is returned.

        :param team_key: team key identifier
        :param week: week number (takes precedence over ``day``)
        :param day: date (datetime.date) to request the roster for
        """
        if week is not None:
            selector = f";week={week}"
        elif day is not None:
            selector = f';date={day.strftime("%Y-%m-%d")}'
        else:
            selector = ""
        return self.get(f"team/{team_key}/roster{selector}")

    def get_scoreboard_raw(self, league_id, week=None):
        """Return raw JSON of the scoreboard (current week if ``week`` is None)."""
        week_uri = f";week={week}" if week is not None else ""
        return self.get(f"league/{league_id}/scoreboard{week_uri}")

    def get_players_raw(self, league_id, start, status, position=None):
        """Return raw JSON for one 25-player page of the league's players.

        :param league_id: league to query
        :param start: pagination offset (0, 25, 50, ...)
        :param status: player status filter ('A', 'FA', 'W', 'T', 'K')
        :param position: optional position filter; None disables it
        """
        pos_parm = "" if position is None else f";position={position}"
        return self.get(
            f"league/{league_id}/players;start={start};count=25"
            f";status={status}{pos_parm}/percent_owned")

    def get_player_raw(self, league_id, player_name):
        """Return raw JSON of stats for the player matching ``player_name``."""
        player_stat_uri = ""
        if player_name is not None:
            player_stat_uri = f"players;search={player_name}/stats"
        return self.get(f"league/{league_id}/{player_stat_uri}")

    def get_percent_owned_raw(self, league_id, player_ids):
        """Return raw JSON of ownership percentages for ``player_ids``.

        :param league_id: league the players belong to
        :param player_ids: Yahoo! player IDs (list of int/str)
        """
        lg_pref = league_id[0:league_id.find(".")]
        joined_ids = ",".join(f"{lg_pref}.p.{pid}" for pid in player_ids)
        return self.get(
            f"league/{league_id}/players;player_keys={joined_ids}/percent_owned")

    def put_roster(self, team_key, xml):
        """PUT an XML roster-change document for ``team_key``."""
        return self.put(f"team/{team_key}/roster", xml)

    def post_transactions(self, league_id, xml):
        """POST an XML transaction document for ``league_id``."""
        return self.post(f"league/{league_id}/transactions", xml)
|
[
"json.dumps"
] |
[((750, 767), 'json.dumps', 'json.dumps', (['jresp'], {}), '(jresp)\n', (760, 767), False, 'import json\n')]
|
from django.db import models
class Pizza(models.Model):
    """Menu pizza with medium- and large-size prices and an image URL."""
    name = models.CharField(max_length=120)
    priceM = models.DecimalField(max_digits=4, decimal_places=2)  # medium-size price
    priceL = models.DecimalField(max_digits=4, decimal_places=2)  # large-size price
    pImage = models.URLField()  # image shown on the menu

    def __str__(self):
        """Readable representation for the admin and debug output."""
        return self.name
class Burger(models.Model):
    """Menu burger with medium- and large-size prices and an image URL."""
    name = models.CharField(max_length=120)
    priceM = models.DecimalField(max_digits=4, decimal_places=2)  # medium-size price
    priceL = models.DecimalField(max_digits=4, decimal_places=2)  # large-size price
    bImage = models.URLField()  # image shown on the menu

    def __str__(self):
        """Readable representation for the admin and debug output."""
        return self.name
|
[
"django.db.models.CharField",
"django.db.models.DecimalField",
"django.db.models.URLField"
] |
[((70, 102), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (86, 102), False, 'from django.db import models\n'), ((116, 167), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(4)', 'decimal_places': '(2)'}), '(max_digits=4, decimal_places=2)\n', (135, 167), False, 'from django.db import models\n'), ((181, 232), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(4)', 'decimal_places': '(2)'}), '(max_digits=4, decimal_places=2)\n', (200, 232), False, 'from django.db import models\n'), ((246, 263), 'django.db.models.URLField', 'models.URLField', ([], {}), '()\n', (261, 263), False, 'from django.db import models\n'), ((309, 341), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (325, 341), False, 'from django.db import models\n'), ((355, 406), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(4)', 'decimal_places': '(2)'}), '(max_digits=4, decimal_places=2)\n', (374, 406), False, 'from django.db import models\n'), ((420, 471), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(4)', 'decimal_places': '(2)'}), '(max_digits=4, decimal_places=2)\n', (439, 471), False, 'from django.db import models\n'), ((485, 502), 'django.db.models.URLField', 'models.URLField', ([], {}), '()\n', (500, 502), False, 'from django.db import models\n')]
|
'''
This script, functions of which are in foo_vb_lib.py, is based on
https://github.com/chenzeno/FOO-VB/blob/ebc14a930ba9d1c1dadc8e835f746c567c253946/main.py
For more information, please see the original paper https://arxiv.org/abs/2010.00373 .
Author: <NAME>(@karalleyna)
'''
import numpy as np
from time import time
from jax import random, value_and_grad, tree_map, vmap, lax
import jax.numpy as jnp
from functools import partial
import foo_vb_lib
def scan(f, init, xs, length=None):
    """Eager, pure-Python stand-in for ``jax.lax.scan`` (easier to debug).

    :param f: step function ``(carry, x) -> (carry, y)``
    :param init: initial carry value
    :param xs: sequence of inputs, or None to step ``length`` times with None
    :param length: number of steps when ``xs`` is None
    :return: ``(final_carry, stacked_ys)``
    """
    if xs is None:
        xs = [None] * length
    state = init
    outputs = []
    for item in xs:
        state, out = f(state, item)
        outputs.append(out)
    return state, jnp.stack(outputs)
def init_step(key, model, image_size, config):
    """Initialise the FOO-VB parameter pytrees for ``model``.

    :param key: PRNG key; split into model-init and parameter-init keys
    :param model: flax-style module with an ``init(key, x)`` method
    :param image_size: flattened input dimension
    :param config: run configuration (uses batch_size, s_init, alpha)
    :return: tuple of pytrees produced by ``foo_vb_lib.init_param``
    """
    model_key, param_key = random.split(key)
    dummy_batch = jnp.zeros((config.batch_size, image_size))
    # foo_vb_lib expects transposed weight matrices.
    transposed = tree_map(jnp.transpose, model.init(model_key, dummy_batch))
    return foo_vb_lib.init_param(param_key, transposed, config.s_init, True, config.alpha)
def train_step(key, pytrees, data, target, value_and_grad_fn, train_mc_iters, eta, diagonal):
    """One FOO-VB update: Monte-Carlo-average gradients over sampled weights,
    then update the mean M and the factors A, B.

    :param key: PRNG key for the Monte Carlo weight samples
    :param pytrees: (weights, m, a, b, avg_psi, e_a, e_b) state tuple
    :param value_and_grad_fn: loss-and-gradient function of (params, data, target)
    :param train_mc_iters: number of Monte Carlo samples to average over
    :param eta: learning-rate-like scale for the mean update
    :param diagonal: whether to use the diagonal approximation in update_m
    :return: (updated pytrees, per-sample losses)
    """
    weights, m, a, b, avg_psi, e_a, e_b = pytrees
    def monte_carlo_step(aggregated_params, key):
        # NOTE: `key` here shadows the outer key; it is one of the split keys.
        # Phi ~ MN(0,I,I)
        avg_psi, e_a, e_b = aggregated_params
        phi_key, key = random.split(key)
        phi = foo_vb_lib.gen_phi(phi_key, weights)
        # W = M +B*Phi*A^t
        params = foo_vb_lib.randomize_weights(m, a, b, phi)
        loss, grads = value_and_grad_fn(tree_map(jnp.transpose, params), data, target)
        grads = foo_vb_lib.weight_grad(grads)
        # Running averages over the train_mc_iters Monte Carlo samples.
        avg_psi = foo_vb_lib.aggregate_grads(avg_psi, grads, train_mc_iters)
        e_a = foo_vb_lib.aggregate_e_a(e_a, grads, b,
                                 phi, train_mc_iters)
        e_b = foo_vb_lib.aggregate_e_b(e_b, grads, a,
                                 phi, train_mc_iters)
        return (avg_psi, e_a, e_b), loss
    # M = M - B*B^t*avg_Phi*A*A^t
    keys = random.split(key, train_mc_iters)
    (avg_psi, e_a, e_b), losses = scan(monte_carlo_step,
                                      (avg_psi, e_a, e_b), keys)
    print("Loss :", losses.mean())
    m = foo_vb_lib.update_m(m, a, b, avg_psi, eta, diagonal=diagonal)
    a, b = foo_vb_lib.update_a_b(a, b, e_a, e_b)
    # Reset accumulators for the next call.
    avg_psi, e_a, e_b = foo_vb_lib.zero_matrix(avg_psi, e_a, e_b)
    pytrees = weights, m, a, b, avg_psi, e_a, e_b
    return pytrees, losses
def eval_step(model, pytrees, data, target, train_mc_iters):
    """Count correct predictions on one batch, summed over Monte Carlo
    weight samples (so the caller must divide by train_mc_iters).

    :param model: module with an ``apply(params, x)`` method
    :param pytrees: (weights, m, a, b, avg_psi, e_a, e_b) state tuple
    :param train_mc_iters: number of Monte Carlo weight samples
    :return: total number of correct predictions across all samples
    """
    weights, m, a, b, avg_psi, e_a, e_b = pytrees
    def monte_carlo_step(weights, phi_key):
        phi = foo_vb_lib.gen_phi(phi_key, weights)
        params = foo_vb_lib.randomize_weights(m, a, b, phi)
        output = model.apply(tree_map(jnp.transpose, params), data)
        # get the index of the max log-probability
        pred = jnp.argmax(output, axis=1)
        return weights, jnp.sum(pred == target)
    # NOTE(review): evaluation uses a fixed seed (PRNGKey(0)), so the same
    # weight samples are drawn for every batch -- presumably intentional
    # for deterministic evaluation; confirm.
    keys = random.split(random.PRNGKey(0), train_mc_iters)
    _, correct_per_iter = scan(monte_carlo_step, weights, keys)
    n_correct = jnp.sum(correct_per_iter)
    return n_correct
def train_continuous_mnist(key, model, train_loader,
                           test_loader, image_size, num_classes, config):
    """Train FOO-VB on the continuous-MNIST benchmark: for each task, run
    config.epochs epochs of training and report per-task test accuracy.

    :param train_loader: list of torch-style DataLoaders (one per task)
    :param test_loader: list of torch-style DataLoaders (one per task)
    :param image_size: flattened input dimension
    :return: list of average test accuracies after each task
    """
    # NOTE(review): init_key is never used; init_step consumes `key` instead.
    init_key, key = random.split(key)
    pytrees = init_step(key, model, image_size, config)
    criterion = partial(foo_vb_lib.cross_entropy_loss,
                        num_classes=num_classes,
                        predict_fn=model.apply)
    grad_fn = value_and_grad(criterion)
    ava_test = []
    for task in range(len(test_loader)):
        for epoch in range(1, config.epochs + 1):
            start_time = time()
            # NOTE(review): training always draws from train_loader[0], not
            # train_loader[task] -- verify this matches the benchmark's
            # intent (the original FOO-VB repo does the same for this setup).
            for batch_idx, (data, target) in enumerate(train_loader[0]):
                data, target = jnp.array(data.view(-1, image_size).numpy()), jnp.array(target.numpy())
                train_key, key = random.split(key)
                pytrees, losses = train_step(train_key, pytrees, data, target, grad_fn,
                                             config.train_mc_iters, config.eta, config.diagonal)
            print("Time : ", time() - start_time)
            # Evaluate on the current task after each epoch.
            total = 0
            for data, target in test_loader[task]:
                data, target = jnp.array(data.numpy().reshape((-1, image_size))), jnp.array(target.numpy())
                n_correct = eval_step(model, pytrees, data, target, config.train_mc_iters)
                total += n_correct
            test_acc = 100. * total / (len(test_loader[task].dataset) * config.train_mc_iters)
            print('\nTask num {}, Epoch num {} Test Accuracy: {:.2f}%\n'.format(
                task, epoch, test_acc))
        # After finishing a task, evaluate on all tasks seen so far.
        test_accuracies = []
        for i in range(task + 1):
            total = 0
            for data, target in test_loader[i]:
                data, target = jnp.array(data.numpy().reshape((-1, image_size))), jnp.array(target.numpy())
                n_correct = eval_step(model, pytrees, data, target, config.train_mc_iters)
                total += n_correct
            # NOTE(review): denominator uses test_loader[task] while the data
            # comes from test_loader[i]; OK only if all splits are equal-sized.
            test_acc = 100. * total / (len(test_loader[task].dataset) * config.train_mc_iters)
            test_accuracies.append(test_acc)
            print('\nTraning task Num: {} Test Accuracy of task {}: {:.2f}%\n'.format(
                task, i, test_acc))
        ava_test.append(jnp.mean(np.array(test_accuracies)))
    return ava_test
def train_multiple_tasks(key, model, train_loader,
                         test_loader, num_classes,
                         permutations, image_size, config):
    """Train FOO-VB sequentially on pixel-permuted tasks (e.g. permuted MNIST).

    :param train_loader: torch-style DataLoader shared across tasks
    :param test_loader: torch-style DataLoader shared across tasks
    :param permutations: list of pixel-index permutations, one per task
    :param image_size: flattened input dimension
    :return: list of average test accuracies after each task
    """
    init_key, key = random.split(key)
    # BUG FIX: init_step requires image_size; it was previously omitted,
    # which raised a TypeError on the first call.
    pytrees = init_step(key, model, image_size, config)
    criterion = partial(foo_vb_lib.cross_entropy_loss,
                        num_classes=num_classes, predict_fn=model.apply)
    grad_fn = value_and_grad(criterion)
    ava_test = []
    for task in range(len(permutations)):
        for epoch in range(1, config.epochs + 1):
            for batch_idx, (data, target) in enumerate(train_loader):
                data, target = jnp.array(data.detach().numpy().reshape((-1, image_size))), jnp.array(
                    target.detach().numpy())
                # Apply this task's fixed pixel permutation.
                data = data[:, permutations[task]]
                train_key, key = random.split(key)
                # BUG FIX: `time` is the function from `from time import time`,
                # so `time.time()` raised AttributeError; also print elapsed
                # time (time() - start_time), matching train_continuous_mnist.
                start_time = time()
                pytrees, losses = train_step(train_key, pytrees, data, target, grad_fn,
                                             config.train_mc_iters, config.eta, config.diagonal)
                print("Time : ", time() - start_time)
            # Per-epoch accuracy on the current task's permutation.
            total = 0
            for data, target in train_loader:
                data, target = jnp.array(data.numpy().reshape((-1, image_size))), jnp.array(target.numpy())
                data = data[:, permutations[task]]
                n_correct = eval_step(model, pytrees, data, target, config.train_mc_iters)
                total += n_correct
            train_acc = 100. * total / (len(train_loader.dataset) * config.train_mc_iters)
            total = 0
            for data, target in test_loader:
                data, target = jnp.array(data.numpy().reshape((-1, image_size))), jnp.array(target.numpy())
                data = data[:, permutations[task]]
                n_correct = eval_step(model, pytrees, data, target, config.train_mc_iters)
                total += n_correct
            test_acc = 100. * total / (len(test_loader.dataset) * config.train_mc_iters)
            print('\nTask num {}, Epoch num {}, Train Accuracy: {:.2f}% Test Accuracy: {:.2f}%\n'.format(
                task, epoch, train_acc, test_acc))
        # After each task, evaluate on every permutation seen so far.
        test_accuracies = []
        for i in range(task + 1):
            total = 0
            for data, target in test_loader:
                data, target = jnp.array(data.numpy().reshape((-1, image_size))), jnp.array(target.numpy())
                data = data[:, permutations[i]]
                n_correct = eval_step(model, pytrees, data, target, config.train_mc_iters)
                total += n_correct
            test_acc = 100. * total / (len(test_loader.dataset) * config.train_mc_iters)
            test_accuracies.append(test_acc)
            print('\nTraning task Num: {} Test Accuracy of task {}: {:.2f}%\n'.format(
                task, i, test_acc))
        print(test_accuracies)
        ava_test.append(jnp.mean(np.array(test_accuracies)))
    return ava_test
|
[
"jax.random.PRNGKey",
"foo_vb_lib.aggregate_e_b",
"foo_vb_lib.update_m",
"foo_vb_lib.weight_grad",
"jax.numpy.argmax",
"foo_vb_lib.aggregate_e_a",
"foo_vb_lib.init_param",
"functools.partial",
"foo_vb_lib.aggregate_grads",
"jax.numpy.sum",
"foo_vb_lib.gen_phi",
"foo_vb_lib.zero_matrix",
"jax.numpy.zeros",
"jax.random.split",
"time.time",
"numpy.array",
"jax.value_and_grad",
"foo_vb_lib.update_a_b",
"foo_vb_lib.randomize_weights",
"time.time.time",
"jax.numpy.stack",
"jax.tree_map"
] |
[((752, 769), 'jax.random.split', 'random.split', (['key'], {}), '(key)\n', (764, 769), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((865, 899), 'jax.tree_map', 'tree_map', (['jnp.transpose', 'variables'], {}), '(jnp.transpose, variables)\n', (873, 899), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((914, 989), 'foo_vb_lib.init_param', 'foo_vb_lib.init_param', (['param_key', 'params', 'config.s_init', '(True)', 'config.alpha'], {}), '(param_key, params, config.s_init, True, config.alpha)\n', (935, 989), False, 'import foo_vb_lib\n'), ((1985, 2018), 'jax.random.split', 'random.split', (['key', 'train_mc_iters'], {}), '(key, train_mc_iters)\n', (1997, 2018), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((2187, 2248), 'foo_vb_lib.update_m', 'foo_vb_lib.update_m', (['m', 'a', 'b', 'avg_psi', 'eta'], {'diagonal': 'diagonal'}), '(m, a, b, avg_psi, eta, diagonal=diagonal)\n', (2206, 2248), False, 'import foo_vb_lib\n'), ((2260, 2297), 'foo_vb_lib.update_a_b', 'foo_vb_lib.update_a_b', (['a', 'b', 'e_a', 'e_b'], {}), '(a, b, e_a, e_b)\n', (2281, 2297), False, 'import foo_vb_lib\n'), ((2322, 2363), 'foo_vb_lib.zero_matrix', 'foo_vb_lib.zero_matrix', (['avg_psi', 'e_a', 'e_b'], {}), '(avg_psi, e_a, e_b)\n', (2344, 2363), False, 'import foo_vb_lib\n'), ((3061, 3086), 'jax.numpy.sum', 'jnp.sum', (['correct_per_iter'], {}), '(correct_per_iter)\n', (3068, 3086), True, 'import jax.numpy as jnp\n'), ((3258, 3275), 'jax.random.split', 'random.split', (['key'], {}), '(key)\n', (3270, 3275), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((3348, 3440), 'functools.partial', 'partial', (['foo_vb_lib.cross_entropy_loss'], {'num_classes': 'num_classes', 'predict_fn': 'model.apply'}), '(foo_vb_lib.cross_entropy_loss, num_classes=num_classes, predict_fn=\n model.apply)\n', (3355, 3440), False, 'from functools import partial\n'), ((3499, 3524), 'jax.value_and_grad', 
'value_and_grad', (['criterion'], {}), '(criterion)\n', (3513, 3524), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((5557, 5574), 'jax.random.split', 'random.split', (['key'], {}), '(key)\n', (5569, 5574), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((5635, 5727), 'functools.partial', 'partial', (['foo_vb_lib.cross_entropy_loss'], {'num_classes': 'num_classes', 'predict_fn': 'model.apply'}), '(foo_vb_lib.cross_entropy_loss, num_classes=num_classes, predict_fn=\n model.apply)\n', (5642, 5727), False, 'from functools import partial\n'), ((5762, 5787), 'jax.value_and_grad', 'value_and_grad', (['criterion'], {}), '(criterion)\n', (5776, 5787), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((662, 675), 'jax.numpy.stack', 'jnp.stack', (['ys'], {}), '(ys)\n', (671, 675), True, 'import jax.numpy as jnp\n'), ((808, 850), 'jax.numpy.zeros', 'jnp.zeros', (['(config.batch_size, image_size)'], {}), '((config.batch_size, image_size))\n', (817, 850), True, 'import jax.numpy as jnp\n'), ((1301, 1318), 'jax.random.split', 'random.split', (['key'], {}), '(key)\n', (1313, 1318), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((1333, 1369), 'foo_vb_lib.gen_phi', 'foo_vb_lib.gen_phi', (['phi_key', 'weights'], {}), '(phi_key, weights)\n', (1351, 1369), False, 'import foo_vb_lib\n'), ((1415, 1457), 'foo_vb_lib.randomize_weights', 'foo_vb_lib.randomize_weights', (['m', 'a', 'b', 'phi'], {}), '(m, a, b, phi)\n', (1443, 1457), False, 'import foo_vb_lib\n'), ((1561, 1590), 'foo_vb_lib.weight_grad', 'foo_vb_lib.weight_grad', (['grads'], {}), '(grads)\n', (1583, 1590), False, 'import foo_vb_lib\n'), ((1609, 1667), 'foo_vb_lib.aggregate_grads', 'foo_vb_lib.aggregate_grads', (['avg_psi', 'grads', 'train_mc_iters'], {}), '(avg_psi, grads, train_mc_iters)\n', (1635, 1667), False, 'import foo_vb_lib\n'), ((1682, 1742), 'foo_vb_lib.aggregate_e_a', 'foo_vb_lib.aggregate_e_a', (['e_a', 
'grads', 'b', 'phi', 'train_mc_iters'], {}), '(e_a, grads, b, phi, train_mc_iters)\n', (1706, 1742), False, 'import foo_vb_lib\n'), ((1797, 1857), 'foo_vb_lib.aggregate_e_b', 'foo_vb_lib.aggregate_e_b', (['e_b', 'grads', 'a', 'phi', 'train_mc_iters'], {}), '(e_b, grads, a, phi, train_mc_iters)\n', (1821, 1857), False, 'import foo_vb_lib\n'), ((2615, 2651), 'foo_vb_lib.gen_phi', 'foo_vb_lib.gen_phi', (['phi_key', 'weights'], {}), '(phi_key, weights)\n', (2633, 2651), False, 'import foo_vb_lib\n'), ((2669, 2711), 'foo_vb_lib.randomize_weights', 'foo_vb_lib.randomize_weights', (['m', 'a', 'b', 'phi'], {}), '(m, a, b, phi)\n', (2697, 2711), False, 'import foo_vb_lib\n'), ((2846, 2872), 'jax.numpy.argmax', 'jnp.argmax', (['output'], {'axis': '(1)'}), '(output, axis=1)\n', (2856, 2872), True, 'import jax.numpy as jnp\n'), ((2946, 2963), 'jax.random.PRNGKey', 'random.PRNGKey', (['(0)'], {}), '(0)\n', (2960, 2963), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((1498, 1529), 'jax.tree_map', 'tree_map', (['jnp.transpose', 'params'], {}), '(jnp.transpose, params)\n', (1506, 1529), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((2741, 2772), 'jax.tree_map', 'tree_map', (['jnp.transpose', 'params'], {}), '(jnp.transpose, params)\n', (2749, 2772), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((2897, 2920), 'jax.numpy.sum', 'jnp.sum', (['(pred == target)'], {}), '(pred == target)\n', (2904, 2920), True, 'import jax.numpy as jnp\n'), ((3661, 3667), 'time.time', 'time', ([], {}), '()\n', (3665, 3667), False, 'from time import time\n'), ((3878, 3895), 'jax.random.split', 'random.split', (['key'], {}), '(key)\n', (3890, 3895), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((5324, 5349), 'numpy.array', 'np.array', (['test_accuracies'], {}), '(test_accuracies)\n', (5332, 5349), True, 'import numpy as np\n'), ((6202, 6219), 'jax.random.split', 'random.split', (['key'], {}), 
'(key)\n', (6214, 6219), False, 'from jax import random, value_and_grad, tree_map, vmap, lax\n'), ((6249, 6260), 'time.time.time', 'time.time', ([], {}), '()\n', (6258, 6260), False, 'from time import time\n'), ((8292, 8317), 'numpy.array', 'np.array', (['test_accuracies'], {}), '(test_accuracies)\n', (8300, 8317), True, 'import numpy as np\n'), ((4111, 4117), 'time.time', 'time', ([], {}), '()\n', (4115, 4117), False, 'from time import time\n'), ((6492, 6503), 'time.time.time', 'time.time', ([], {}), '()\n', (6501, 6503), False, 'from time import time\n')]
|
"""Template plugin for Home Assistant CLI (hass-cli)."""
import logging
import os
from typing import Any, Dict # noqa, flake8 issue
import click
from jinja2 import Environment, FileSystemLoader
from homeassistant_cli.cli import pass_context
from homeassistant_cli.config import Configuration
import homeassistant_cli.remote as api
_LOGGING = logging.getLogger(__name__)
def render(template_path, data, strict=False) -> str:
    """Render the Jinja2 template at ``template_path`` with ``data``.

    The environment also exposes ``environ`` (os.environ.get) as a global.

    :param template_path: path to the template file
    :param data: mapping of template variables
    :param strict: raise on undefined variables instead of rendering blanks
    :return: the rendered text
    """
    directory = os.path.dirname(template_path)
    filename = os.path.basename(template_path)
    env = Environment(loader=FileSystemLoader(directory),
                      keep_trailing_newline=True)
    if strict:
        from jinja2 import StrictUndefined
        env.undefined = StrictUndefined
    env.globals["environ"] = os.environ.get
    return env.get_template(filename).render(data)
@click.command('template')
@click.argument('template', required=True, type=click.File())
@click.argument('datafile', type=click.File(), required=False)
@click.option(
    '--local',
    default=False,
    is_flag=True,
    help="If should render template locally.",
)
@pass_context
def cli(ctx: Configuration, template, datafile, local: bool) -> None:
    """Render templates on server or locally.
    TEMPLATE - jinja2 template file
    DATAFILE - YAML file with variables to pass to rendering
    """
    # Template context; empty unless a YAML data file was supplied.
    variables = {}  # type: Dict[str, Any]
    if datafile:
        variables = ctx.yamlload(datafile)
    templatestr = template.read()
    _LOGGING.debug("Rendering: %s Variables: %s", templatestr, variables)
    if local:
        # Local rendering is always strict (undefined variables raise).
        output = render(template.name, variables, True)
    else:
        # Otherwise delegate rendering to the Home Assistant server API.
        output = api.render_template(ctx, templatestr, variables)
    ctx.echo(output)
|
[
"os.path.basename",
"os.path.dirname",
"click.option",
"click.File",
"click.command",
"homeassistant_cli.remote.render_template",
"logging.getLogger"
] |
[((346, 373), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (363, 373), False, 'import logging\n'), ((854, 879), 'click.command', 'click.command', (['"""template"""'], {}), "('template')\n", (867, 879), False, 'import click\n'), ((1006, 1106), 'click.option', 'click.option', (['"""--local"""'], {'default': '(False)', 'is_flag': '(True)', 'help': '"""If should render template locally."""'}), "('--local', default=False, is_flag=True, help=\n 'If should render template locally.')\n", (1018, 1106), False, 'import click\n'), ((1668, 1716), 'homeassistant_cli.remote.render_template', 'api.render_template', (['ctx', 'templatestr', 'variables'], {}), '(ctx, templatestr, variables)\n', (1687, 1716), True, 'import homeassistant_cli.remote as api\n'), ((928, 940), 'click.File', 'click.File', ([], {}), '()\n', (938, 940), False, 'import click\n'), ((975, 987), 'click.File', 'click.File', ([], {}), '()\n', (985, 987), False, 'import click\n'), ((512, 542), 'os.path.dirname', 'os.path.dirname', (['template_path'], {}), '(template_path)\n', (527, 542), False, 'import os\n'), ((787, 818), 'os.path.basename', 'os.path.basename', (['template_path'], {}), '(template_path)\n', (803, 818), False, 'import os\n')]
|
from typing import Optional, Dict
import css_inline
import jinja2
from cc_email_templates import txt_processing
env = jinja2.Environment(
loader = jinja2.PackageLoader("cc_email_templates", "templates"),
autoescape = jinja2.select_autoescape()
)
inliner = css_inline.CSSInliner()
def call_to_action_email(
title: str,
content_above: str,
action_button_text: str,
action_link: str,
content_below: Optional[str] = None,
unsub_link: Optional[str] = None,
address: Optional[str] = None,
sender: Optional[str] = None,
links: Optional[Dict[str, str]] = None,
) -> str:
"""
call_to_action_email
====================
parameters:
title (str)
content_above (str)
content_below (str)
action_button_text (str)
action_link (str)
unsub_link (Optional[str])
address (Optional[str])
returns:
str: Compiled template
"""
html = env.get_template("simple-call-to-action.html.j2").render(
title = title,
content_above = content_above,
content_below = content_below,
action_link = action_link,
action_button_text = action_button_text,
unsub_link = unsub_link,
address = address,
sender = sender,
links = links)
txt = env.get_template("simple-call-to-action.txt.j2").render(
title = title,
content_above = content_above,
content_below = content_below,
action_link = action_link,
action_button_text = action_button_text,
unsub_link = unsub_link,
address = address,
sender = sender,
links = links)
return txt_processing.process(txt), inliner.inline(html)
|
[
"jinja2.PackageLoader",
"cc_email_templates.txt_processing.process",
"jinja2.select_autoescape",
"css_inline.CSSInliner"
] |
[((278, 301), 'css_inline.CSSInliner', 'css_inline.CSSInliner', ([], {}), '()\n', (299, 301), False, 'import css_inline\n'), ((156, 211), 'jinja2.PackageLoader', 'jinja2.PackageLoader', (['"""cc_email_templates"""', '"""templates"""'], {}), "('cc_email_templates', 'templates')\n", (176, 211), False, 'import jinja2\n'), ((234, 260), 'jinja2.select_autoescape', 'jinja2.select_autoescape', ([], {}), '()\n', (258, 260), False, 'import jinja2\n'), ((2063, 2090), 'cc_email_templates.txt_processing.process', 'txt_processing.process', (['txt'], {}), '(txt)\n', (2085, 2090), False, 'from cc_email_templates import txt_processing\n')]
|
import FWCore.ParameterSet.Config as cms
from DQM.TrackingMonitor.packedCandidateTrackValidator_cfi import *
packedCandidateTrackValidatorLostTracks = packedCandidateTrackValidator.clone(
trackToPackedCandidateAssociation = "lostTracks",
rootFolder = "Tracking/PackedCandidate/lostTracks"
)
tracksDQMMiniAOD = cms.Sequence(
packedCandidateTrackValidator +
packedCandidateTrackValidatorLostTracks
)
|
[
"FWCore.ParameterSet.Config.Sequence"
] |
[((321, 410), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['(packedCandidateTrackValidator + packedCandidateTrackValidatorLostTracks)'], {}), '(packedCandidateTrackValidator +\n packedCandidateTrackValidatorLostTracks)\n', (333, 410), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
import pygame
class Clock():
def __init__(self, FPS):
self.clock = pygame.time.Clock()
self.FPS = FPS
def waitForTick(self):
self.clock.tick(self.FPS)
def changeFPS(self, FPS):
self.FPS = FPS
clock = Clock(60)
|
[
"pygame.time.Clock"
] |
[((81, 100), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (98, 100), False, 'import pygame\n')]
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
preprocess.
"""
import os
import numpy as np
from src.dataset import create_ocr_val_dataset
from src.model_utils.config import config
def get_bin():
'''generate bin files.'''
prefix = "fsns.mindrecord"
if config.enable_modelarts:
mindrecord_file = os.path.join(config.data_path, prefix + "0")
else:
mindrecord_file = os.path.join(config.test_data_dir, prefix + "0")
print("mindrecord_file", mindrecord_file)
dataset = create_ocr_val_dataset(mindrecord_file, config.eval_batch_size)
data_loader = dataset.create_dict_iterator(num_epochs=1, output_numpy=True)
print("Dataset creation Done!")
sos_id = config.characters_dictionary.go_id
images_path = os.path.join(config.pre_result_path, "00_images")
decoder_input_path = os.path.join(config.pre_result_path, "01_decoder_input")
decoder_hidden_path = os.path.join(config.pre_result_path, "02_decoder_hidden")
annotation_path = os.path.join(config.pre_result_path, "annotation")
os.makedirs(images_path)
os.makedirs(decoder_input_path)
os.makedirs(decoder_hidden_path)
os.makedirs(annotation_path)
for i, data in enumerate(data_loader):
annotation = data["annotation"]
images = data["image"].astype(np.float32)
decoder_hidden = np.zeros((1, config.eval_batch_size, config.decoder_hidden_size),
dtype=np.float16)
decoder_input = (np.ones((config.eval_batch_size, 1)) * sos_id).astype(np.int32)
file_name = "ocr_bs" + str(config.eval_batch_size) + "_" + str(i) + ".bin"
images.tofile(os.path.join(images_path, file_name))
decoder_input.tofile(os.path.join(decoder_input_path, file_name))
decoder_hidden.tofile(os.path.join(decoder_hidden_path, file_name))
file_name = "ocr_bs" + str(config.eval_batch_size) + "_" + str(i) + ".npy"
np.save(os.path.join(annotation_path, file_name), annotation)
print("=" * 10, "export bin files finished.", "=" * 10)
if __name__ == '__main__':
get_bin()
|
[
"os.makedirs",
"src.dataset.create_ocr_val_dataset",
"numpy.zeros",
"numpy.ones",
"os.path.join"
] |
[((1133, 1196), 'src.dataset.create_ocr_val_dataset', 'create_ocr_val_dataset', (['mindrecord_file', 'config.eval_batch_size'], {}), '(mindrecord_file, config.eval_batch_size)\n', (1155, 1196), False, 'from src.dataset import create_ocr_val_dataset\n'), ((1381, 1430), 'os.path.join', 'os.path.join', (['config.pre_result_path', '"""00_images"""'], {}), "(config.pre_result_path, '00_images')\n", (1393, 1430), False, 'import os\n'), ((1456, 1512), 'os.path.join', 'os.path.join', (['config.pre_result_path', '"""01_decoder_input"""'], {}), "(config.pre_result_path, '01_decoder_input')\n", (1468, 1512), False, 'import os\n'), ((1539, 1596), 'os.path.join', 'os.path.join', (['config.pre_result_path', '"""02_decoder_hidden"""'], {}), "(config.pre_result_path, '02_decoder_hidden')\n", (1551, 1596), False, 'import os\n'), ((1619, 1669), 'os.path.join', 'os.path.join', (['config.pre_result_path', '"""annotation"""'], {}), "(config.pre_result_path, 'annotation')\n", (1631, 1669), False, 'import os\n'), ((1674, 1698), 'os.makedirs', 'os.makedirs', (['images_path'], {}), '(images_path)\n', (1685, 1698), False, 'import os\n'), ((1703, 1734), 'os.makedirs', 'os.makedirs', (['decoder_input_path'], {}), '(decoder_input_path)\n', (1714, 1734), False, 'import os\n'), ((1739, 1771), 'os.makedirs', 'os.makedirs', (['decoder_hidden_path'], {}), '(decoder_hidden_path)\n', (1750, 1771), False, 'import os\n'), ((1776, 1804), 'os.makedirs', 'os.makedirs', (['annotation_path'], {}), '(annotation_path)\n', (1787, 1804), False, 'import os\n'), ((943, 987), 'os.path.join', 'os.path.join', (['config.data_path', "(prefix + '0')"], {}), "(config.data_path, prefix + '0')\n", (955, 987), False, 'import os\n'), ((1024, 1072), 'os.path.join', 'os.path.join', (['config.test_data_dir', "(prefix + '0')"], {}), "(config.test_data_dir, prefix + '0')\n", (1036, 1072), False, 'import os\n'), ((1964, 2052), 'numpy.zeros', 'np.zeros', (['(1, config.eval_batch_size, config.decoder_hidden_size)'], {'dtype': 
'np.float16'}), '((1, config.eval_batch_size, config.decoder_hidden_size), dtype=np.\n float16)\n', (1972, 2052), True, 'import numpy as np\n'), ((2277, 2313), 'os.path.join', 'os.path.join', (['images_path', 'file_name'], {}), '(images_path, file_name)\n', (2289, 2313), False, 'import os\n'), ((2344, 2387), 'os.path.join', 'os.path.join', (['decoder_input_path', 'file_name'], {}), '(decoder_input_path, file_name)\n', (2356, 2387), False, 'import os\n'), ((2419, 2463), 'os.path.join', 'os.path.join', (['decoder_hidden_path', 'file_name'], {}), '(decoder_hidden_path, file_name)\n', (2431, 2463), False, 'import os\n'), ((2565, 2605), 'os.path.join', 'os.path.join', (['annotation_path', 'file_name'], {}), '(annotation_path, file_name)\n', (2577, 2605), False, 'import os\n'), ((2107, 2143), 'numpy.ones', 'np.ones', (['(config.eval_batch_size, 1)'], {}), '((config.eval_batch_size, 1))\n', (2114, 2143), True, 'import numpy as np\n')]
|
from typing import *
from nansi.utils.collections import iter_flat
TFileDataValue = Union[bool, str, int, float]
TFileDataSection = Mapping[str, Union[TFileDataValue, Iterable[TFileDataValue]]]
TFileData = Mapping[str, TFileDataSection]
def file_content_for(data: TFileData) -> str:
lines = []
for section_name, section_opts in data.items():
lines.append(f"[{section_name}]")
for name, value in section_opts.items():
# pylint: disable=isinstance-second-argument-not-valid-type
if isinstance(value, Iterable) and (
not isinstance(value, (str, bytes))
):
for item in iter_flat(value):
lines.append(f"{name}={item}")
else:
lines.append(f"{name}={value}")
lines.append("")
return "\n".join(lines)
|
[
"nansi.utils.collections.iter_flat"
] |
[((661, 677), 'nansi.utils.collections.iter_flat', 'iter_flat', (['value'], {}), '(value)\n', (670, 677), False, 'from nansi.utils.collections import iter_flat\n')]
|
import sys, os
import datetime
pippath = __file__
pippath_folder, filename = os.path.split(pippath)
try:
import setuptools
except ImportError:
print("Installing setuptools...")
# install setuptools
setuptools_path = '"%s"' %os.path.join(pippath_folder, "ez_setup.py")
python_folder = os.path.split(sys.executable)[0]
python_exe = os.path.join(python_folder, "python")
os.system(" ".join([python_exe, setuptools_path]) )
# update systempath to look inside setuptools.egg, so don't have to restart python
sitepackfolder = os.path.join(os.path.split(sys.executable)[0], "Lib", "site-packages")
for filename in os.listdir(sitepackfolder):
if filename.startswith("setuptools") and filename.endswith(".egg"):
sys.path.append(os.path.join(sitepackfolder, filename))
break
# add current precompiled pip folder to path for later import
sys.path.insert(0, pippath_folder)
###################################
def _commandline_call(action, package, *options):
# (works on Windows, but needs to be tested on other OS)
import pip
from pip import main
# find the main python executable
python_folder = os.path.split(sys.executable)[0]
python_exe = os.path.join(python_folder, "python") # use the executable named "python" instead of "pythonw"
args = [python_exe]
# detect installation method (local setup.py VS online pip)
if package.endswith("setup.py"):
# local "python setup.py install"
package = os.path.abspath(package) # absolute path
os.chdir(os.path.split(package)[0]) # changes working directory to setup.py folder
args.append("setup.py")
args.append(action)
else:
# online "pip install packageorfile"
if action == "build":
raise Exception("Build can only be done on a local 'setup.py' filepath, not on '%s'" % package)
# if github url, auto direct to github master zipfile
# ...(bleeding edge, not stable release)
if package.startswith("https://github.com") and not package.endswith((".zip",".gz")):
if not package.endswith("/"): package += "/"
package += "archive/master.zip"
pip_path = os.path.abspath(os.path.split(pip.__file__)[0]) # get entire pip folder path, not the __init__.py file
args.append('"%s"'%pip_path)
args.append(action)
args.append(package)
# options
args.extend(options)
# pause after
args.append("& pause")
# send to commandline
os.system(" ".join(args) )
def add_github(githubfolder):
"""
Create a script that auto appends to the path all repos in your github folder on startup.
Only needs to be called once.
"""
import site
sitepackdir = site.getsitepackages()[-1]
# make a .py script for adding all github paths
path = os.path.join(sitepackdir, "pipy_addgithubpaths.py")
with open(path, "w") as writer:
writer.write( """
import sys
import os
folder = "%s"
for dirname in os.listdir(folder):
path = os.path.join(folder, dirname)
sys.path.append(path)
""" % githubfolder
)
# add the .pth file that runs the .py script on every startup
path = os.path.join(sitepackdir, "pipy_addgithubpaths.pth")
with open(path, "w") as writer:
writer.write("import pipy_addgithubpaths")
def install(package, *options, **kwargs):
"""
Install a package from within the IDLE, same way as using the commandline
and typing "pip install ..." Any number of additional string arguments
specify the install options that typically come after, such as "-U"
for update. See pip-documentation for valid option strings.
- gohlke: When True, downloads a precompiled version of the package from
<NAME>'s website "Unofficial Windows Binaries
for Python Extension Packages". Warning: Not fully tested.
"""
if kwargs.get("gohlke",False):
# temporarily download a precompiled Windows version from Gohlke's online collection,
# and tell pip to install from the downloaded file.
import urllib2
import tempfile
import HTMLParser
if not sys.platform == "win32":
raise Exception("You must be on a Windows computer to download binary wheels from Goehlke's website")
if package.startswith("http://") or package.endswith((".py",".zip",".tar.gz",".egg",".whl")):
raise Exception("To download from Gohlke's website you must specify the name of the package, not a filepath or a url")
indexlines = urllib2.urlopen("http://www.lfd.uci.edu/~gohlke/pythonlibs/").readlines()
parser = HTMLParser.HTMLParser()
parser.packfound = False
parser.url = ""
parser.datalist = []
def handle_data(data):
curtag = parser.get_starttag_text()
if curtag and curtag.startswith("<a href='javascript:;' onclick="):
data = data.strip()
if data and data != "\n":
parser.datalist.append(data)
parser.handle_data = handle_data
def handle_endtag(tag):
curtag = parser.get_starttag_text()
if tag == curtag[1] and curtag.startswith("<a href='javascript:;' onclick="):
parser.datalist.insert(-1, "none")
parser.filename = "-".join(parser.datalist) + ".whl"
if parser.filename.lower().startswith(package.lower()):
parser.packfound = True
major,minor = sys.version[0],sys.version[2]
correct_pyversion = "cp%s%s"%(major,minor) in parser.filename
correct_32bit = sys.maxsize == 2147483647 and "win32" in parser.filename
correct_64bit = sys.maxsize == 9223372036854775807 and "win_amd32" in parser.filename
correct_bitversion = correct_32bit or correct_64bit
if correct_pyversion and correct_bitversion:
parser.url = "http://www.lfd.uci.edu/~gohlke/pythonlibs/zutu4j59/" + parser.filename
parser.datalist = []
parser.handle_endtag = handle_endtag
for line in indexlines:
parser.feed(line)
if parser.packfound:
break
packfound = parser.packfound
url = parser.url
filename = parser.filename
## elem = next(elems)
##
## packfound = None
## url = None
##
## while elem != None:
## # find correct package
## if elem.startswith("a href='javascript:;' onclick="): #elem.endswith('.whl'):
## print "pre",repr(elem)
## elem = next(elems)
## print "post",repr(elem)
## validchar = lambda c: c if c.isalnum() or c in "._-" else "-"
## elem = "".join((char or " " for char in elem if char))
## elem = "-".join(elem.split())
## elem += ".whl"
## print "clean",repr(elem)
## if elem.lower().startswith(package.lower()):
## print elem
## packfound = True
##
## major,minor = sys.version[0],sys.version[2]
## correct_pyversion = "cp%s%s"%(major,minor) in elem
## correct_32bit = sys.maxsize == 2147483647 and "win32" in elem
## correct_64bit = sys.maxsize == 9223372036854775807 and "win_amd32" in elem
## correct_bitversion = correct_32bit or correct_64bit
##
## if correct_pyversion and correct_bitversion:
## url = "http://www.lfd.uci.edu/~gohlke/pythonlibs/zutu4j59/" + elem
## print url
## break
##
## elem = next(elems,None)
if not url:
if packfound:
raise Exception("Found the package among the Goehlke website wheels, but not a version that fits your system")
else:
raise Exception("Could not find the specified package among the Goehlke website wheels")
request = urllib2.Request(url,
headers={"Referer":"http://www.lfd.uci.edu/~gohlke/pythonlibs/",
"User-Agent": "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.41 Safari/537.36"
}
)
raw = urllib2.urlopen(request)
with open(os.path.join(tempfile.gettempdir(), filename), "wb") as writer:
writer.write(raw.read())
# finally redirect the package name to the path of the temporary wheel file
filepath = writer.name
package = filepath
_commandline_call("install", package, *options)
def upgrade(package, *options):
"""
Shortcut for upgrading a package to a newer version,
without having to specify the "--upgrade" option.
"""
options = list(options)
options.insert(0, "--upgrade")
install(package, *options)
def build(package, *options):
"""
Test if a local C/C++ package will build (aka compile)
successfully without actually installing it (ie placing it in
site-packages), from within the IDLE. Same way as using the commandline
and typing "pip build ..." Any number of additional string arguments
specify the build options that typically come after.
See pip-documentation for valid option strings.
"""
_commandline_call("build", package, *options)
def uninstall(package, *options):
"""
Uninstall a package from within the IDLE, same way as using the commandline
and typing "pip uninstall ..." Any number of additional string arguments
specify the uninstall options that typically come after.
See pip-documentation for valid option strings.
"""
_commandline_call("uninstall", package, *options)
def login(username, password):
"""
Creates the .pypirc file with login info (required in order to upload).
The login (i.e. the file) persists until you call the logout function.
Note: Assumes same login info for both the testsite
and the live site, so if different must login for each switch.
"""
# if still not recognize login bug
# change to http://stackoverflow.com/questions/1569315/setup-py-upload-is-failing-with-upload-failed-401-you-must-be-identified-t/1569331#1569331
pypircstring = ""
pypircstring += "[distutils]" + "\n"
pypircstring += "index-servers = " + "\n"
pypircstring += "\t" + "pypi" + "\n"
pypircstring += "\t" + "testpypi" + "\n"
pypircstring += "\n"
pypircstring += "[pypi]" + "\n"
pypircstring += "repository: https://upload.pypi.org/legacy/" + "\n"
pypircstring += "username: " + username + "\n"
pypircstring += "password: " + password + "\n"
pypircstring += "\n"
pypircstring += "[testpypi]" + "\n"
pypircstring += "repository: https://test.pypi.org/legacy/" + "\n"
pypircstring += "username: " + username + "\n"
pypircstring += "password: " + password + "\n"
# create the file
home = os.path.expanduser("~")
path = os.path.join(home, ".pypirc")
writer = open(path, "w")
writer.write(pypircstring)
writer.close()
print("logged in")
def logout():
"""
Deletes the .pypirc file with your login info.
Requires you to login again before uploading
another package.
"""
# delete the .pypirc file for better security
home = os.path.expanduser("~")
path = os.path.join(home, ".pypirc")
os.remove(path)
print("logged out (.pypirc file removed)")
def define_upload(package, description, license, changes, **more_info):
"""
Define and prep a package for upload by creating the necessary files
(in the parent folder containing the package's meta-data).
- package: the path location of the package you wish to upload (i.e. the folder containing the actual code, not the meta-folder) or the module file (with the .py extension)
- description: a short sentence describing the package
- license: the name of the license for your package ('MIT' will automatically create an MIT license.txt file in your package)
- changes: list of change descriptions in the current upload version, used to create a changes.txt file and automatically include as a changelog in the README.
- **more_info: optional keyword arguments for defining the upload (see distutils.core.setup for valid arguments)
"""
more_info.update(description=description, license=license)
# absolute path
package = os.path.abspath(package)
# disect path
folder , name = os.path.split(package)
name, ext = os.path.splitext(name)
# autofill "packages" in case user didnt specify it
# ...toplevel and all subpackages
if os.path.isdir(package) and not "packages" in more_info:
subpacks = []
for dirr,_,files in os.walk(package):
reldirr = os.path.relpath(dirr, folder)
if "__init__.py" in files:
subpacks.append(reldirr)
more_info["packages"] = subpacks
# autofill "py_modules" in case user didnt specify it
elif os.path.isfile(package) and not "py_modules" in more_info:
more_info["py_modules"] = [name]
# autofill "package_data" arg in case user didnt specify
if os.path.isdir(package) and not "package_data" in more_info:
data = []
for dirr,_,files in os.walk(package):
reldirr = os.path.relpath(dirr, package)
# look for data
data += [os.path.join(reldirr,filename)
for filename in files
if not filename.endswith((".py",".pyc"))]
if data:
package_data = dict([(name, data)])
more_info["package_data"] = package_data
# autofill "name" in case user didnt specify it
# ...this is taken from the repository folder name,
# ...not the package/import name.
if not "name" in more_info: more_info["name"] = folder
# autofill "version" in case user didnt specify it
if not "version" in more_info:
# determine file to read
if os.path.isdir(package):
topfile = os.path.join(package, "__init__.py")
else:
topfile = package
# find line that starts with __version__
# ...NOTE: inspired by <NAME>' Fiona setup.py
with open(topfile) as fileobj:
for line in fileobj:
if line.startswith("__version__"):
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
more_info["version"] = version
break
# raise error if none found
if not "version" in more_info:
raise Exception("""Version argument can only be omitted if your
package's __init__.py file or module file contains
a __version__ variable.""")
# make prep files
_make_changelog(package, version, changes)
#_make_readme(package)
_make_gitpack()
_make_setup(package, **more_info)
_make_cfg(package)
_make_license(package, license, more_info.get("author") )
print("package metadata prepped for upload")
def generate_docs(package, **kwargs):
"""
Generates full API html docs of all submodules to "build/doc" folder.
You do not have to use this function on your own since it
will be run automatically when uploading your package (assuming
that autodoc is set to True). However, this function can be used
for making sure the docs look good before uploading.
"""
# absolute path
package = os.path.abspath(package)
###
_make_docs(package, **kwargs)
print("documentation successfully generated")
# Upload docs to pythonhosted no longer possible
##def upload_docs(package, **kwargs):
## """
## Upload documentation html docs located in "build/doc" folder.
## """
## # absolute path
## package = os.path.abspath(package)
## ###
## _upload_docs(package, **kwargs)
## print("documentation successfully uploaded to pythonhosted.org")
def upload_test(package):
"""
Upload and distribute your package
to the online PyPi Testing website in a single
command, to test if your real upload will
work nicely or not.
"""
# absolute path
package = os.path.abspath(package)
###
folder,name = os.path.split(package)
# first remember to update the readme, in case docstring changed
#_make_readme(package)
# then try registering
# (NO LONGER REQUIRED)
# instead of typing "python setup.py register -r testpypi" in commandline
#print("registering package (test)")
#setup_path = os.path.join(folder, "setup.py")
#options = ["-r", "testpypi"]
#_commandline_call("register", setup_path, *options)
# then try uploading
# instead of typing "python setup.py sdist upload -r testpypi" in commandline
print("uploading package (test)")
setup_path = os.path.join(folder, "setup.py")
options = ["upload", "-r", "testpypi"]
_commandline_call("sdist", setup_path, *options)
def upload(package, autodoc=True):
"""
Upload and distribute your package
to the online PyPi website in a single
command, so others can find it more
easily and install it using pip.
"""
# absolute path
package = os.path.abspath(package)
###
folder,name = os.path.split(package)
# first remember to update the readme, in case docstring changed
#_make_readme(package)
# then try registering
# (NO LONGER REQUIRED)
# instead of typing "python setup.py register -r pypi" in commandline
#print("registering package")
#setup_path = os.path.join(folder, "setup.py")
#options = ["-r", "pypi"]
#_commandline_call("register", setup_path, *options)
# then try uploading
# instead of typing "python setup.py sdist upload -r pypi" in commandline
print("uploading package")
setup_path = os.path.join(folder, "setup.py")
options = ["upload", "-r", "pypi"]
_commandline_call("sdist", setup_path, *options)
# finally try generating and uploading documentation
if autodoc:
pass #generate_docs(package)
#upload_docs(package) # no longer possible
# Internal use only
def _make_readme(package):
# assumes readme should be based on toplevel package docstring
import ipandoc # comes packaged in the pypi folder
folder,name = os.path.split(package)
name,ext = os.path.splitext(name)
# get toplevel package docstring
import imp
modinfo = imp.find_module(name,[folder])
mod = imp.load_module(name, *modinfo)
mdstring = mod.__doc__
if mdstring:
# add changes text to mdstring (so it will be included in readme)
changespath = os.path.join(folder, "CHANGES.txt")
with open(changespath) as changesfile:
for line in changesfile.readlines():
mdstring += line
# use ipandoc to convert assumed markdown string to rst for display on pypi
rststring = ipandoc.convert(mdstring, "markdown", "rst")
readmepath = os.path.join(folder, "README.rst")
with open(readmepath, "w") as readme:
readme.write(rststring)
def _make_docs(package, **kwargs):
# uses pdoc to generate html folder
# ...TODO: Clean up this section, very messy
# set some defaults
if not kwargs.get("docfilter"):
kwargs["docfilter"] = ["Module", "Class", "Function"]
if kwargs.get("html_no_source") == None:
kwargs["html_no_source"] = True
# if pdoc not available, install it
try:
import pdoc
except ImportError:
install("pdoc")
# non commandline approach
# ...allowing for docfilter option
folder,name = os.path.split(package)
name,ext = os.path.splitext(name)
docfolder = kwargs.get("html_dir")
if not docfolder:
docfolder = os.path.join(folder, "docs")
# get toplevel package docstring
import imp
modinfo = imp.find_module(name,[folder])
mod = imp.load_module(name, *modinfo)
# prep pdoc paramters
mod_kwargs = dict([item for item in kwargs.items() if item[0] in ("docfilter","allsubmodules")])
# docfilter, either list of type strings or function
if isinstance(mod_kwargs.get("docfilter"), (list,tuple)):
def docfilter(obj, filtertypes=mod_kwargs["docfilter"]):
return any(isinstance(obj, getattr(pdoc, filtertype))
for filtertype in filtertypes)
mod_kwargs["docfilter"] = docfilter
# remove module prose docstring, only show API classes, funcs, and meths
mod.__doc__ = """
# **API Documentation**
"""
# html params
mod = pdoc.Module(mod, **mod_kwargs)
html_kwargs = dict([item for item in kwargs.items() if item[0] in ("external_links","link_prefix","html_no_source")])
if html_kwargs.get("html_no_source"):
html_kwargs["source"] = not html_kwargs.pop("html_no_source")
# get and write to files
if mod.is_package():
def html_out_package(mod):
# create output folders
modtree = mod.name.split('.')
# remove top package name, to avoid yet another nested folder
modtree.pop(0)
mbase = os.path.join(docfolder, *modtree)
if mod.is_package():
outpath = os.path.join(mbase, pdoc.html_package_name)
else:
outpath = '%s%s' % (mbase, pdoc.html_module_suffix)
dirpath = os.path.dirname(outpath)
if not os.path.lexists(dirpath):
os.makedirs(dirpath)
# write html
with open(outpath, 'w') as writer:
out = mod.html(**html_kwargs)
writer.write(out)
# do same for all submodules
for submodule in mod.submodules():
html_out_package(submodule)
html_out_package(mod)
else:
# create output folders
outpath = os.path.join(docfolder, "index.html")
dirpath = os.path.dirname(outpath)
if not os.path.lexists(dirpath):
os.makedirs(dirpath)
# write html
with open(outpath, 'w') as writer:
out = mod.html(**html_kwargs)
writer.write(out)
# Upload docs to pythonhosted no longer possible
##def _upload_docs(package, **kwargs):
## # instead of typing "python setup.py upload_docs" in commandline
## # by default uploads "build/doc" folder
##
## # NOTE: REQUIRES SETUP.PY TO USE SETUPTOOLS INSTEAD OF DISTUTILS
## # ...from setuptools import setup
## folder,name = os.path.split(package)
## os.chdir(folder)
## setup_path = os.path.join(folder, "setup.py")
## upload_dir = kwargs.get("upload_dir", "build/doc")
## options = ["--upload-dir=%s" % upload_dir]
## _commandline_call("upload_docs", setup_path, *options)
def _execute_setup(setup_path):
setupfile = open(setup_path)
if sys.version.startswith("3"):
exec(compile(setupfile.read(), 'setup.py', 'exec'))
else:
exec(setupfile)
def _make_gitpack():
# maybe in the future but not really necessary, for prepping and
# allowing packages to be hosted directly from github
pass
def _make_changelog(package, version, changes):
folder,name = os.path.split(package)
changespath = os.path.join(folder, "CHANGES.txt")
# if changes file already exists
if os.path.exists(changespath):
# read in existing changes file
rawlines = open(changespath).readlines()
rawlines = (line for line in rawlines)
# how to detect version start
def detectversion(_line):
# ignore strings "version" and "v"
_line = _line.lower().replace("version","").replace("v","")
_line = _line.strip()
if _line:
# ignore anything after parentheses (such as version date)
_split = _line.split("(")
if len(_split) >= 2:
_line,_date = _split[:2]
_date = _date.rstrip(")")
else:
_line,_date = _split[0],None
# is version line if all chars on line are nrs or symbols (eg dots, spaces, or hashtags), so can build on existing changes files, though it will reformat it.
if all(char.isdigit() or not char.isalpha() for char in _line):
# clean version string by stripping away anything that is not number or dot
_line = "".join([char for char in _line if char.isdigit() or char == "."])
return _line,_date
# parse into version-changes dict
versiondict = dict()
line = next(rawlines, None)
while line:
# detect version start
_versionresult = detectversion(line)
if _versionresult:
# collect change lines until next version start
_version,_date = _versionresult
_changes = []
line = next(rawlines, None)
while line != None and not detectversion(line):
line = line.strip()
if line:
# clean change string by stripping away from the left any non letter characters
firstcharindex = next( (line.index(char) for char in line if char.isalpha()) )
line = line[firstcharindex:]
_changes.append(line)
line = next(rawlines, None)
# add to versiondict
versiondict[_version] = dict(date=_date, changes=_changes)
else:
line = next(rawlines, None)
# add current version to versiondict, overwriting/updating if already exists
versiondict[version] = dict(date=datetime.date.today(), changes=changes)
# write to new updated changes file
writer = open(changespath, "w")
writer.write("\n"+"## Changes"+"\n")
for version in sorted(versiondict.keys(),
key=lambda x: map(int, x.split(".")), # sort on each version nr as int not str
reverse=True):
versionstring = version
date = versiondict[version]["date"]
if date: versionstring += " (%s)"%date
changes = versiondict[version]["changes"]
writer.write("\n"+"### "+versionstring+"\n\n")
for change in changes:
writer.write("- "+change+"\n")
writer.close()
else:
# no changes file exists, write current changes to a new document
writer = open(changespath, "w")
writer.write("\n"+"## Changes"+"\n")
versionstring = version
date = datetime.date.today()
if date: versionstring += " (%s)"%date
writer.write("\n"+"### "+versionstring+"\n\n")
for change in changes:
writer.write("- "+change+"\n")
writer.close()
def _make_setup(package, **kwargs):
folder,name = os.path.split(package)
setupstring = ""
setupstring += "try: from setuptools import setup" + "\n"
setupstring += "except: from distutils.core import setup" + "\n"
setupstring += "\n"
setupstring += "setup("
# description/readme info
long_description = kwargs.pop("long_description", None)
if long_description:
setupstring += "\t" + 'long_description="""%s""",'%long_description + "\n"
else:
# make the setup.py script dynamically autofill "long_description"
# ...from README in case user didnt specify it
for filename in os.listdir(folder):
if filename.startswith("README"):
setupstring += "\t" + 'long_description=open("%s").read(), '%filename + "\n"
if filename.endswith('.md'):
desctype = "text/markdown"
elif filename.endswith('.rst'):
desctype = "text/x-rst"
else:
desctype = "text/plain"
setupstring += "\t" + 'long_description_content_type="%s",'%desctype + "\n"
break
# general options
for param,value in kwargs.items():
if param in ["packages", "classifiers", "platforms",
"py_modules", "requires", "data_files",
"package_data"]:
valuelist = value
# paths should be crossplatform
if param in ["packages", "py_modules"]:
valuelist = [path.replace("\\","/") for path in valuelist]
elif param in ["package_data", "data_files"]:
valuelist = dict([ (
subdir.replace("\\","/"),
[path.replace("\\","/") for path in pathlist]
)
for subdir,pathlist in valuelist.items()
])
# write valuelist as list
setupstring += "\t" + '%s=%s,'%(param,valuelist) + "\n"
else:
# write single values enclosed in quote marks
setupstring += "\t" + '%s="""%s""",'%(param,value) + "\n"
setupstring += "\t" + ")" + "\n"
writer = open(os.path.join(folder, "setup.py"), "w")
writer.write(setupstring)
writer.close()
def _make_cfg(package):
folder,name = os.path.split(package)
if "README.rst" in os.listdir(folder):
setupstring = """
[metadata]
description-file = README.rst
"""
writer = open(os.path.join(folder, "setup.cfg"), "w")
writer.write(setupstring)
writer.close()
def _make_license(package, type="MIT", author=None):
if not author: author = ""
folder,name = os.path.split(package)
if type == "MIT":
licensestring = """
The MIT License (MIT)
Copyright (c) %i %s
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
""" % (datetime.datetime.today().year, author)
writer = open(os.path.join(folder, "LICENSE.txt"), "w")
writer.write(licensestring)
writer.close()
|
[
"os.remove",
"os.walk",
"os.path.isfile",
"os.path.join",
"urllib2.urlopen",
"os.path.lexists",
"os.path.abspath",
"os.path.dirname",
"urllib2.Request",
"os.path.exists",
"sys.version.startswith",
"site.getsitepackages",
"imp.load_module",
"datetime.datetime.today",
"pdoc.Module",
"imp.find_module",
"datetime.date.today",
"os.listdir",
"os.makedirs",
"os.path.isdir",
"tempfile.gettempdir",
"sys.path.insert",
"os.path.splitext",
"ipandoc.convert",
"os.path.relpath",
"HTMLParser.HTMLParser",
"os.path.split",
"os.path.expanduser"
] |
[((81, 103), 'os.path.split', 'os.path.split', (['pippath'], {}), '(pippath)\n', (94, 103), False, 'import sys, os\n'), ((906, 940), 'sys.path.insert', 'sys.path.insert', (['(0)', 'pippath_folder'], {}), '(0, pippath_folder)\n', (921, 940), False, 'import sys, os\n'), ((1238, 1275), 'os.path.join', 'os.path.join', (['python_folder', '"""python"""'], {}), "(python_folder, 'python')\n", (1250, 1275), False, 'import sys, os\n'), ((2872, 2923), 'os.path.join', 'os.path.join', (['sitepackdir', '"""pipy_addgithubpaths.py"""'], {}), "(sitepackdir, 'pipy_addgithubpaths.py')\n", (2884, 2923), False, 'import sys, os\n'), ((3253, 3305), 'os.path.join', 'os.path.join', (['sitepackdir', '"""pipy_addgithubpaths.pth"""'], {}), "(sitepackdir, 'pipy_addgithubpaths.pth')\n", (3265, 3305), False, 'import sys, os\n'), ((11413, 11436), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (11431, 11436), False, 'import sys, os\n'), ((11448, 11477), 'os.path.join', 'os.path.join', (['home', '""".pypirc"""'], {}), "(home, '.pypirc')\n", (11460, 11477), False, 'import sys, os\n'), ((11794, 11817), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (11812, 11817), False, 'import sys, os\n'), ((11829, 11858), 'os.path.join', 'os.path.join', (['home', '""".pypirc"""'], {}), "(home, '.pypirc')\n", (11841, 11858), False, 'import sys, os\n'), ((11863, 11878), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (11872, 11878), False, 'import sys, os\n'), ((12907, 12931), 'os.path.abspath', 'os.path.abspath', (['package'], {}), '(package)\n', (12922, 12931), False, 'import sys, os\n'), ((12975, 12997), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (12988, 12997), False, 'import sys, os\n'), ((13014, 13036), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (13030, 13036), False, 'import sys, os\n'), ((16079, 16103), 'os.path.abspath', 'os.path.abspath', (['package'], {}), '(package)\n', (16094, 16103), False, 
'import sys, os\n'), ((16791, 16815), 'os.path.abspath', 'os.path.abspath', (['package'], {}), '(package)\n', (16806, 16815), False, 'import sys, os\n'), ((16842, 16864), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (16855, 16864), False, 'import sys, os\n'), ((17438, 17470), 'os.path.join', 'os.path.join', (['folder', '"""setup.py"""'], {}), "(folder, 'setup.py')\n", (17450, 17470), False, 'import sys, os\n'), ((17815, 17839), 'os.path.abspath', 'os.path.abspath', (['package'], {}), '(package)\n', (17830, 17839), False, 'import sys, os\n'), ((17866, 17888), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (17879, 17888), False, 'import sys, os\n'), ((18436, 18468), 'os.path.join', 'os.path.join', (['folder', '"""setup.py"""'], {}), "(folder, 'setup.py')\n", (18448, 18468), False, 'import sys, os\n'), ((18922, 18944), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (18935, 18944), False, 'import sys, os\n'), ((18960, 18982), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (18976, 18982), False, 'import sys, os\n'), ((19049, 19080), 'imp.find_module', 'imp.find_module', (['name', '[folder]'], {}), '(name, [folder])\n', (19064, 19080), False, 'import imp\n'), ((19090, 19121), 'imp.load_module', 'imp.load_module', (['name', '*modinfo'], {}), '(name, *modinfo)\n', (19105, 19121), False, 'import imp\n'), ((20258, 20280), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (20271, 20280), False, 'import sys, os\n'), ((20296, 20318), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (20312, 20318), False, 'import sys, os\n'), ((20495, 20526), 'imp.find_module', 'imp.find_module', (['name', '[folder]'], {}), '(name, [folder])\n', (20510, 20526), False, 'import imp\n'), ((20536, 20567), 'imp.load_module', 'imp.load_module', (['name', '*modinfo'], {}), '(name, *modinfo)\n', (20551, 20567), False, 'import imp\n'), ((21234, 21264), 'pdoc.Module', 'pdoc.Module', 
(['mod'], {}), '(mod, **mod_kwargs)\n', (21245, 21264), False, 'import pdoc\n'), ((23487, 23514), 'sys.version.startswith', 'sys.version.startswith', (['"""3"""'], {}), "('3')\n", (23509, 23514), False, 'import sys, os\n'), ((23835, 23857), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (23848, 23857), False, 'import sys, os\n'), ((23876, 23911), 'os.path.join', 'os.path.join', (['folder', '"""CHANGES.txt"""'], {}), "(folder, 'CHANGES.txt')\n", (23888, 23911), False, 'import sys, os\n'), ((23957, 23984), 'os.path.exists', 'os.path.exists', (['changespath'], {}), '(changespath)\n', (23971, 23984), False, 'import sys, os\n'), ((27617, 27639), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (27630, 27639), False, 'import sys, os\n'), ((30000, 30022), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (30013, 30022), False, 'import sys, os\n'), ((30359, 30381), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (30372, 30381), False, 'import sys, os\n'), ((360, 397), 'os.path.join', 'os.path.join', (['python_folder', '"""python"""'], {}), "(python_folder, 'python')\n", (372, 397), False, 'import sys, os\n'), ((653, 679), 'os.listdir', 'os.listdir', (['sitepackfolder'], {}), '(sitepackfolder)\n', (663, 679), False, 'import sys, os\n'), ((1188, 1217), 'os.path.split', 'os.path.split', (['sys.executable'], {}), '(sys.executable)\n', (1201, 1217), False, 'import sys, os\n'), ((1518, 1542), 'os.path.abspath', 'os.path.abspath', (['package'], {}), '(package)\n', (1533, 1542), False, 'import sys, os\n'), ((2777, 2799), 'site.getsitepackages', 'site.getsitepackages', ([], {}), '()\n', (2797, 2799), False, 'import site\n'), ((4739, 4762), 'HTMLParser.HTMLParser', 'HTMLParser.HTMLParser', ([], {}), '()\n', (4760, 4762), False, 'import HTMLParser\n'), ((8335, 8566), 'urllib2.Request', 'urllib2.Request', (['url'], {'headers': "{'Referer': 'http://www.lfd.uci.edu/~gohlke/pythonlibs/', 'User-Agent':\n 
'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.41 Safari/537.36'\n }"}), "(url, headers={'Referer':\n 'http://www.lfd.uci.edu/~gohlke/pythonlibs/', 'User-Agent':\n 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.41 Safari/537.36'\n })\n", (8350, 8566), False, 'import urllib2\n'), ((8723, 8747), 'urllib2.urlopen', 'urllib2.urlopen', (['request'], {}), '(request)\n', (8738, 8747), False, 'import urllib2\n'), ((13139, 13161), 'os.path.isdir', 'os.path.isdir', (['package'], {}), '(package)\n', (13152, 13161), False, 'import sys, os\n'), ((13245, 13261), 'os.walk', 'os.walk', (['package'], {}), '(package)\n', (13252, 13261), False, 'import sys, os\n'), ((13673, 13695), 'os.path.isdir', 'os.path.isdir', (['package'], {}), '(package)\n', (13686, 13695), False, 'import sys, os\n'), ((13779, 13795), 'os.walk', 'os.walk', (['package'], {}), '(package)\n', (13786, 13795), False, 'import sys, os\n'), ((14495, 14517), 'os.path.isdir', 'os.path.isdir', (['package'], {}), '(package)\n', (14508, 14517), False, 'import sys, os\n'), ((19262, 19297), 'os.path.join', 'os.path.join', (['folder', '"""CHANGES.txt"""'], {}), "(folder, 'CHANGES.txt')\n", (19274, 19297), False, 'import sys, os\n'), ((19531, 19575), 'ipandoc.convert', 'ipandoc.convert', (['mdstring', '"""markdown"""', '"""rst"""'], {}), "(mdstring, 'markdown', 'rst')\n", (19546, 19575), False, 'import ipandoc\n'), ((19597, 19631), 'os.path.join', 'os.path.join', (['folder', '"""README.rst"""'], {}), "(folder, 'README.rst')\n", (19609, 19631), False, 'import sys, os\n'), ((20400, 20428), 'os.path.join', 'os.path.join', (['folder', '"""docs"""'], {}), "(folder, 'docs')\n", (20412, 20428), False, 'import sys, os\n'), ((22514, 22551), 'os.path.join', 'os.path.join', (['docfolder', '"""index.html"""'], {}), "(docfolder, 'index.html')\n", (22526, 22551), False, 'import sys, os\n'), ((22570, 22594), 'os.path.dirname', 
'os.path.dirname', (['outpath'], {}), '(outpath)\n', (22585, 22594), False, 'import sys, os\n'), ((27341, 27362), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (27360, 27362), False, 'import datetime\n'), ((28211, 28229), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (28221, 28229), False, 'import sys, os\n'), ((29869, 29901), 'os.path.join', 'os.path.join', (['folder', '"""setup.py"""'], {}), "(folder, 'setup.py')\n", (29881, 29901), False, 'import sys, os\n'), ((30046, 30064), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (30056, 30064), False, 'import sys, os\n'), ((246, 289), 'os.path.join', 'os.path.join', (['pippath_folder', '"""ez_setup.py"""'], {}), "(pippath_folder, 'ez_setup.py')\n", (258, 289), False, 'import sys, os\n'), ((310, 339), 'os.path.split', 'os.path.split', (['sys.executable'], {}), '(sys.executable)\n', (323, 339), False, 'import sys, os\n'), ((13285, 13314), 'os.path.relpath', 'os.path.relpath', (['dirr', 'folder'], {}), '(dirr, folder)\n', (13300, 13314), False, 'import sys, os\n'), ((13504, 13527), 'os.path.isfile', 'os.path.isfile', (['package'], {}), '(package)\n', (13518, 13527), False, 'import sys, os\n'), ((13819, 13849), 'os.path.relpath', 'os.path.relpath', (['dirr', 'package'], {}), '(dirr, package)\n', (13834, 13849), False, 'import sys, os\n'), ((14541, 14577), 'os.path.join', 'os.path.join', (['package', '"""__init__.py"""'], {}), "(package, '__init__.py')\n", (14553, 14577), False, 'import sys, os\n'), ((21788, 21821), 'os.path.join', 'os.path.join', (['docfolder', '*modtree'], {}), '(docfolder, *modtree)\n', (21800, 21821), False, 'import sys, os\n'), ((22033, 22057), 'os.path.dirname', 'os.path.dirname', (['outpath'], {}), '(outpath)\n', (22048, 22057), False, 'import sys, os\n'), ((22610, 22634), 'os.path.lexists', 'os.path.lexists', (['dirpath'], {}), '(dirpath)\n', (22625, 22634), False, 'import sys, os\n'), ((22648, 22668), 'os.makedirs', 'os.makedirs', (['dirpath'], {}), 
'(dirpath)\n', (22659, 22668), False, 'import sys, os\n'), ((30159, 30192), 'os.path.join', 'os.path.join', (['folder', '"""setup.cfg"""'], {}), "(folder, 'setup.cfg')\n", (30171, 30192), False, 'import sys, os\n'), ((31577, 31612), 'os.path.join', 'os.path.join', (['folder', '"""LICENSE.txt"""'], {}), "(folder, 'LICENSE.txt')\n", (31589, 31612), False, 'import sys, os\n'), ((575, 604), 'os.path.split', 'os.path.split', (['sys.executable'], {}), '(sys.executable)\n', (588, 604), False, 'import sys, os\n'), ((1576, 1598), 'os.path.split', 'os.path.split', (['package'], {}), '(package)\n', (1589, 1598), False, 'import sys, os\n'), ((2244, 2271), 'os.path.split', 'os.path.split', (['pip.__file__'], {}), '(pip.__file__)\n', (2257, 2271), False, 'import sys, os\n'), ((4639, 4700), 'urllib2.urlopen', 'urllib2.urlopen', (['"""http://www.lfd.uci.edu/~gohlke/pythonlibs/"""'], {}), "('http://www.lfd.uci.edu/~gohlke/pythonlibs/')\n", (4654, 4700), False, 'import urllib2\n'), ((13899, 13930), 'os.path.join', 'os.path.join', (['reldirr', 'filename'], {}), '(reldirr, filename)\n', (13911, 13930), False, 'import sys, os\n'), ((21881, 21924), 'os.path.join', 'os.path.join', (['mbase', 'pdoc.html_package_name'], {}), '(mbase, pdoc.html_package_name)\n', (21893, 21924), False, 'import sys, os\n'), ((22077, 22101), 'os.path.lexists', 'os.path.lexists', (['dirpath'], {}), '(dirpath)\n', (22092, 22101), False, 'import sys, os\n'), ((22119, 22139), 'os.makedirs', 'os.makedirs', (['dirpath'], {}), '(dirpath)\n', (22130, 22139), False, 'import sys, os\n'), ((26389, 26410), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (26408, 26410), False, 'import datetime\n'), ((785, 823), 'os.path.join', 'os.path.join', (['sitepackfolder', 'filename'], {}), '(sitepackfolder, filename)\n', (797, 823), False, 'import sys, os\n'), ((8779, 8800), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (8798, 8800), False, 'import tempfile\n'), ((31506, 31531), 
'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (31529, 31531), False, 'import datetime\n')]
|
from juriscraper.lib.html_utils import get_html5_parsed_text
from juriscraper.opinions.united_states.federal_appellate import ca11_p
class Site(ca11_p.Site):
    """Scraper for unpublished Eleventh Circuit opinions.

    Reuses the published-opinions scraper (ca11_p) but points at the
    unpublished log endpoint and supports paginated back-scraping.
    """

    def __init__(self, *args, **kwargs):
        super(Site, self).__init__(*args, **kwargs)
        self.url = "http://media.ca11.uscourts.gov/opinions/unpub/logname.php"
        self.court_id = self.__module__
        # page offsets for back-scraping, 20 records per page
        self.back_scrape_iterable = range(20, 22000, 20)

    def _make_html_tree(self, text):
        # the site's markup needs the more tolerant html5 parser
        return get_html5_parsed_text(text)

    def _download_backwards(self, n):
        # use floor division: under Python 3, true division would embed a
        # float (e.g. "num=1.0") into the query string
        self.url = "http://media.ca11.uscourts.gov/opinions/unpub/logname.php?begin={}&num={}&numBegin=1".format(
            n, n // 20 - 1
        )
        self.html = self._download()
        if self.html is not None:
            # Setting status is important because it prevents the download
            # function from being run a second time by the parse method.
            self.status = 200
|
[
"juriscraper.lib.html_utils.get_html5_parsed_text"
] |
[((482, 509), 'juriscraper.lib.html_utils.get_html5_parsed_text', 'get_html5_parsed_text', (['text'], {}), '(text)\n', (503, 509), False, 'from juriscraper.lib.html_utils import get_html5_parsed_text\n')]
|
#!/usr/bin/env python3
#
# Copyright (c) 2019, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
import config
import ipv6
import thread_cert
# Node ids used as keys into the TOPOLOGY dict and the simulator's
# node table below.
LEADER = 1
ROUTER = 2
class TestIPv6SourceSelection(thread_cert.TestCase):
    """Verify the IPv6 source address chosen for each destination type."""

    SUPPORT_NCP = False

    TOPOLOGY = {
        LEADER: {
            'mode': 'rdn',
            'panid': 0xcafe,
            'allowlist': [ROUTER]
        },
        ROUTER: {
            'mode': 'rdn',
            'panid': 0xcafe,
            'allowlist': [LEADER]
        },
    }

    def _verify_ping_source(self, dst_addr, expected_src, expect_success=True):
        """Ping dst_addr from the router and assert the echo request was
        sent from expected_src.

        When expect_success is False the ping outcome itself is ignored
        (e.g. an unreachable external address) — only the chosen source
        address of the emitted echo request is checked.
        """
        success = self.nodes[ROUTER].ping(dst_addr)
        if expect_success:
            self.assertTrue(success)
        router_msgs = self.simulator.get_messages_sent_by(ROUTER)
        msg = router_msgs.get_icmp_message(ipv6.ICMP_ECHO_REQUEST)
        msg.assertSentFromSourceAddress(expected_src)

    def test(self):
        # bring up the two-node topology: leader first, then router
        self.nodes[LEADER].start()
        self.simulator.go(5)
        self.assertEqual(self.nodes[LEADER].get_state(), 'leader')

        self.nodes[ROUTER].start()
        self.simulator.go(config.ROUTER_STARTUP_DELAY)
        self.assertEqual(self.nodes[ROUTER].get_state(), 'router')

        leader_aloc = self.nodes[LEADER].get_addr_leader_aloc()
        leader_mleid = self.nodes[LEADER].get_mleid()
        leader_rloc = self.nodes[LEADER].get_rloc()
        leader_linklocal = self.nodes[LEADER].get_linklocal()
        multicast_addr = 'fdf8:f53e:61e4::18'
        external_addr = 'fc00:db20:35b:7399::5'

        router_rloc = self.nodes[ROUTER].get_rloc()
        router_linklocal = self.nodes[ROUTER].get_linklocal()
        router_mleid = self.nodes[ROUTER].get_mleid()

        # Source check - RLOC source for RLOC destination
        self._verify_ping_source(leader_rloc, router_rloc)

        # Source check - ML-EID source for ALOC destination
        self._verify_ping_source(leader_aloc, router_mleid)

        # Source check - ML-EID source for ML-EID destination
        self._verify_ping_source(leader_mleid, router_mleid)

        # Source check - link local source for link local destination
        self._verify_ping_source(leader_linklocal, router_linklocal)

        # Source check - ML-EID source for realmlocal multicast destination
        self._verify_ping_source(multicast_addr, router_mleid)

        # GUA and default gateway
        self.nodes[LEADER].add_prefix('2001::/64', 'paros')
        self.nodes[LEADER].register_netdata()
        self.simulator.go(5)

        # Set lowpan context of sniffer
        self.simulator.set_lowpan_context(1, '2001::/64')

        # Flushes message queue before next ping
        self.simulator.get_messages_sent_by(ROUTER)

        # Source check - GUA source for GUA destination
        leader_gua = self.nodes[LEADER].get_addr("2001::/64")
        router_gua = self.nodes[ROUTER].get_addr("2001::/64")
        self.assertTrue(leader_gua is not None)
        self.assertTrue(router_gua is not None)
        self._verify_ping_source(leader_gua, router_gua)

        # Source check - GUA source for external address (default route);
        # the ping result is intentionally ignored here, matching the
        # original behavior for an unreachable destination
        self._verify_ping_source(external_addr, router_gua, expect_success=False)
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
[
"unittest.main"
] |
[((5675, 5690), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5688, 5690), False, 'import unittest\n')]
|
from ignite.engine import Events
from ignite.contrib.handlers.tqdm_logger import ProgressBar
from ignite.handlers import Checkpoint, DiskSaver, global_step_from_engine
from logger.base.base_logger import BaseLogger
from logger.base.utils import *
from logger.neptune.neptune_utils import *
from ignite.contrib.handlers.neptune_logger import *
import numpy as np
import os
class MyNeptuneLogger(BaseLogger):
    """Ignite training/evaluation logger backed by neptune.ai.

    The API token is read from the NEPTUNE_API_TOKEN environment
    variable; "project_name", "name", "params" and "tags" must be
    supplied via **kwargs.
    """

    def __init__(self, log_every=5, **kwargs):
        self.writer = NeptuneLogger(api_token=os.getenv('NEPTUNE_API_TOKEN'),
                                    project_name=kwargs["project_name"],
                                    name=kwargs["name"],
                                    params=kwargs["params"],
                                    tags=kwargs["tags"])
        super().__init__(log_every=log_every)

    def _add_train_events(self, model=None, optimizer=None, scheduler=None, metrics=None):
        """Attach per-iteration and per-epoch training handlers.

        ``metrics`` defaults to None (not ``{}``) to avoid the shared
        mutable-default pitfall; it is currently unused here.
        """
        iteration_events = [
            training_iteration(self.writer),
            lr_iteration(optimizer, self.writer)
        ]
        completion_events = [
            train_metrics_completion(self.writer)
        ]
        self._add_train_handlers(
            iteration_events=iteration_events,
            completion_events=completion_events,
        )

    def _add_eval_events(self, model=None, optimizer=None, scheduler=None, metrics=None):
        """Attach evaluation handlers; metrics are logged on completion."""
        iteration_events = []
        completion_events = [
            validation_metrics_completion(self.trainer, self.writer),
        ]
        self._add_evaluation_handlers(
            iteration_events=iteration_events,
            completion_events=completion_events,
        )

    def _end_of_training(self):
        """Stop the neptune experiment at the end of training."""
        self.writer.experiment.stop()
|
[
"os.getenv"
] |
[((507, 537), 'os.getenv', 'os.getenv', (['"""NEPTUNE_API_TOKEN"""'], {}), "('NEPTUNE_API_TOKEN')\n", (516, 537), False, 'import os\n')]
|
from fanstatic import Library, Resource, Group
# Fanstatic resource definitions for the pace.js progress-bar library:
# one shared script, one stylesheet per theme, and one Group per theme
# bundling the stylesheet with the script.
library = Library('pace', 'resources')

pace_js = Resource(library, 'js/pace.js', minified='js/pace.min.js')


def _theme_css(slug):
    # every theme stylesheet follows the same css/pace-<slug>.css scheme
    return Resource(library, 'css/pace-%s.css' % slug)


def _theme(css):
    # a usable theme is its stylesheet plus the pace.js script
    return Group([css, pace_js])


pace_barber_shop_css = _theme_css('barber-shop')
pace_center_atom_css = _theme_css('center-atom')
pace_center_simple_css = _theme_css('center-simple')
pace_flash_css = _theme_css('flash')
pace_macosx_css = _theme_css('macosx')
pace_big_counter_css = _theme_css('big-counter')
pace_center_circle_css = _theme_css('center-circle')
pace_corner_indicator_css = _theme_css('corner-indicator')
pace_flat_top_css = _theme_css('flat-top')
pace_minimal_css = _theme_css('minimal')
pace_bounce_css = _theme_css('bounce')
pace_center_radar_css = _theme_css('center-radar')
pace_fill_left_css = _theme_css('fill-left')
pace_loading_bar_css = _theme_css('loading-bar')

pace_barber_shop = _theme(pace_barber_shop_css)
pace_center_atom = _theme(pace_center_atom_css)
pace_center_simple = _theme(pace_center_simple_css)
pace_flash = _theme(pace_flash_css)
pace_macosx = _theme(pace_macosx_css)
pace_big_counter = _theme(pace_big_counter_css)
pace_center_circle = _theme(pace_center_circle_css)
pace_corner_indicator = _theme(pace_corner_indicator_css)
pace_flat_top = _theme(pace_flat_top_css)
pace_minimal = _theme(pace_minimal_css)
pace_bounce = _theme(pace_bounce_css)
pace_center_radar = _theme(pace_center_radar_css)
pace_fill_left = _theme(pace_fill_left_css)
pace_loading_bar = _theme(pace_loading_bar_css)

# default theme
pace = pace_minimal
|
[
"fanstatic.Group",
"fanstatic.Library",
"fanstatic.Resource"
] |
[((58, 86), 'fanstatic.Library', 'Library', (['"""pace"""', '"""resources"""'], {}), "('pace', 'resources')\n", (65, 86), False, 'from fanstatic import Library, Resource, Group\n'), ((98, 156), 'fanstatic.Resource', 'Resource', (['library', '"""js/pace.js"""'], {'minified': '"""js/pace.min.js"""'}), "(library, 'js/pace.js', minified='js/pace.min.js')\n", (106, 156), False, 'from fanstatic import Library, Resource, Group\n'), ((181, 226), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-barber-shop.css"""'], {}), "(library, 'css/pace-barber-shop.css')\n", (189, 226), False, 'from fanstatic import Library, Resource, Group\n'), ((250, 295), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-center-atom.css"""'], {}), "(library, 'css/pace-center-atom.css')\n", (258, 295), False, 'from fanstatic import Library, Resource, Group\n'), ((321, 368), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-center-simple.css"""'], {}), "(library, 'css/pace-center-simple.css')\n", (329, 368), False, 'from fanstatic import Library, Resource, Group\n'), ((386, 425), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-flash.css"""'], {}), "(library, 'css/pace-flash.css')\n", (394, 425), False, 'from fanstatic import Library, Resource, Group\n'), ((444, 484), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-macosx.css"""'], {}), "(library, 'css/pace-macosx.css')\n", (452, 484), False, 'from fanstatic import Library, Resource, Group\n'), ((508, 553), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-big-counter.css"""'], {}), "(library, 'css/pace-big-counter.css')\n", (516, 553), False, 'from fanstatic import Library, Resource, Group\n'), ((579, 626), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-center-circle.css"""'], {}), "(library, 'css/pace-center-circle.css')\n", (587, 626), False, 'from fanstatic import Library, Resource, Group\n'), ((655, 705), 'fanstatic.Resource', 'Resource', (['library', 
'"""css/pace-corner-indicator.css"""'], {}), "(library, 'css/pace-corner-indicator.css')\n", (663, 705), False, 'from fanstatic import Library, Resource, Group\n'), ((726, 768), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-flat-top.css"""'], {}), "(library, 'css/pace-flat-top.css')\n", (734, 768), False, 'from fanstatic import Library, Resource, Group\n'), ((788, 829), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-minimal.css"""'], {}), "(library, 'css/pace-minimal.css')\n", (796, 829), False, 'from fanstatic import Library, Resource, Group\n'), ((848, 888), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-bounce.css"""'], {}), "(library, 'css/pace-bounce.css')\n", (856, 888), False, 'from fanstatic import Library, Resource, Group\n'), ((913, 959), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-center-radar.css"""'], {}), "(library, 'css/pace-center-radar.css')\n", (921, 959), False, 'from fanstatic import Library, Resource, Group\n'), ((981, 1024), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-fill-left.css"""'], {}), "(library, 'css/pace-fill-left.css')\n", (989, 1024), False, 'from fanstatic import Library, Resource, Group\n'), ((1048, 1093), 'fanstatic.Resource', 'Resource', (['library', '"""css/pace-loading-bar.css"""'], {}), "(library, 'css/pace-loading-bar.css')\n", (1056, 1093), False, 'from fanstatic import Library, Resource, Group\n'), ((1114, 1152), 'fanstatic.Group', 'Group', (['[pace_barber_shop_css, pace_js]'], {}), '([pace_barber_shop_css, pace_js])\n', (1119, 1152), False, 'from fanstatic import Library, Resource, Group\n'), ((1172, 1210), 'fanstatic.Group', 'Group', (['[pace_center_atom_css, pace_js]'], {}), '([pace_center_atom_css, pace_js])\n', (1177, 1210), False, 'from fanstatic import Library, Resource, Group\n'), ((1232, 1272), 'fanstatic.Group', 'Group', (['[pace_center_simple_css, pace_js]'], {}), '([pace_center_simple_css, pace_js])\n', (1237, 1272), False, 'from fanstatic 
import Library, Resource, Group\n'), ((1286, 1318), 'fanstatic.Group', 'Group', (['[pace_flash_css, pace_js]'], {}), '([pace_flash_css, pace_js])\n', (1291, 1318), False, 'from fanstatic import Library, Resource, Group\n'), ((1333, 1366), 'fanstatic.Group', 'Group', (['[pace_macosx_css, pace_js]'], {}), '([pace_macosx_css, pace_js])\n', (1338, 1366), False, 'from fanstatic import Library, Resource, Group\n'), ((1386, 1424), 'fanstatic.Group', 'Group', (['[pace_big_counter_css, pace_js]'], {}), '([pace_big_counter_css, pace_js])\n', (1391, 1424), False, 'from fanstatic import Library, Resource, Group\n'), ((1446, 1486), 'fanstatic.Group', 'Group', (['[pace_center_circle_css, pace_js]'], {}), '([pace_center_circle_css, pace_js])\n', (1451, 1486), False, 'from fanstatic import Library, Resource, Group\n'), ((1511, 1554), 'fanstatic.Group', 'Group', (['[pace_corner_indicator_css, pace_js]'], {}), '([pace_corner_indicator_css, pace_js])\n', (1516, 1554), False, 'from fanstatic import Library, Resource, Group\n'), ((1571, 1606), 'fanstatic.Group', 'Group', (['[pace_flat_top_css, pace_js]'], {}), '([pace_flat_top_css, pace_js])\n', (1576, 1606), False, 'from fanstatic import Library, Resource, Group\n'), ((1622, 1656), 'fanstatic.Group', 'Group', (['[pace_minimal_css, pace_js]'], {}), '([pace_minimal_css, pace_js])\n', (1627, 1656), False, 'from fanstatic import Library, Resource, Group\n'), ((1671, 1704), 'fanstatic.Group', 'Group', (['[pace_bounce_css, pace_js]'], {}), '([pace_bounce_css, pace_js])\n', (1676, 1704), False, 'from fanstatic import Library, Resource, Group\n'), ((1725, 1764), 'fanstatic.Group', 'Group', (['[pace_center_radar_css, pace_js]'], {}), '([pace_center_radar_css, pace_js])\n', (1730, 1764), False, 'from fanstatic import Library, Resource, Group\n'), ((1782, 1818), 'fanstatic.Group', 'Group', (['[pace_fill_left_css, pace_js]'], {}), '([pace_fill_left_css, pace_js])\n', (1787, 1818), False, 'from fanstatic import Library, Resource, Group\n'), 
((1838, 1876), 'fanstatic.Group', 'Group', (['[pace_loading_bar_css, pace_js]'], {}), '([pace_loading_bar_css, pace_js])\n', (1843, 1876), False, 'from fanstatic import Library, Resource, Group\n')]
|
"""Test compilation database flags generation."""
import imp
from unittest import TestCase
from os import path
from EasyClangComplete.plugin.utils import include_parser
imp.reload(include_parser)
class TestIncludeParser(TestCase):
"""Test unique list."""
def test_get_all_includes(self):
"""Test getting all includes."""
base_folder = path.dirname(__file__)
_, res = include_parser.get_all_headers(
folders=[base_folder],
prefix='',
force_unix_includes=False,
completion_request=None)
self.assertEqual(len(res), 5)
local_file_path = path.normpath('cmake_tests/lib/a.h')
expected_completion = [
'{}\t{}'.format(local_file_path, base_folder), local_file_path]
self.assertIn(expected_completion, res)
local_file_path = path.normpath('makefile_files/inc/bar.h')
expected_completion = [
'{}\t{}'.format(local_file_path, base_folder), local_file_path]
self.assertIn(expected_completion, res)
def test_get_specific_includes(self):
"""Test getting only specific includes."""
base_folder = path.dirname(__file__)
_, res = include_parser.get_all_headers(
folders=[base_folder],
prefix='cmake_',
force_unix_includes=False,
completion_request=None)
self.assertEqual(len(res), 1)
local_file_path = path.normpath('cmake_tests/lib/a.h')
expected_completion = [
'{}\t{}'.format(local_file_path, base_folder), local_file_path]
self.assertIn(expected_completion, res)
def test_get_specific_includes_force_unix(self):
"""Test getting only specific includes."""
base_folder = path.dirname(__file__)
_, res = include_parser.get_all_headers(
folders=[base_folder],
prefix='cmake_',
force_unix_includes=True,
completion_request=None)
self.assertEqual(len(res), 1)
local_file_path = 'cmake_tests/lib/a.h'
expected_completion = [
'{}\t{}'.format(local_file_path, base_folder), local_file_path]
self.assertIn(expected_completion, res)
|
[
"imp.reload",
"os.path.dirname",
"EasyClangComplete.plugin.utils.include_parser.get_all_headers",
"os.path.normpath"
] |
[((171, 197), 'imp.reload', 'imp.reload', (['include_parser'], {}), '(include_parser)\n', (181, 197), False, 'import imp\n'), ((364, 386), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (376, 386), False, 'from os import path\n'), ((404, 524), 'EasyClangComplete.plugin.utils.include_parser.get_all_headers', 'include_parser.get_all_headers', ([], {'folders': '[base_folder]', 'prefix': '""""""', 'force_unix_includes': '(False)', 'completion_request': 'None'}), "(folders=[base_folder], prefix='',\n force_unix_includes=False, completion_request=None)\n", (434, 524), False, 'from EasyClangComplete.plugin.utils import include_parser\n'), ((634, 670), 'os.path.normpath', 'path.normpath', (['"""cmake_tests/lib/a.h"""'], {}), "('cmake_tests/lib/a.h')\n", (647, 670), False, 'from os import path\n'), ((854, 895), 'os.path.normpath', 'path.normpath', (['"""makefile_files/inc/bar.h"""'], {}), "('makefile_files/inc/bar.h')\n", (867, 895), False, 'from os import path\n'), ((1168, 1190), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (1180, 1190), False, 'from os import path\n'), ((1208, 1334), 'EasyClangComplete.plugin.utils.include_parser.get_all_headers', 'include_parser.get_all_headers', ([], {'folders': '[base_folder]', 'prefix': '"""cmake_"""', 'force_unix_includes': '(False)', 'completion_request': 'None'}), "(folders=[base_folder], prefix='cmake_',\n force_unix_includes=False, completion_request=None)\n", (1238, 1334), False, 'from EasyClangComplete.plugin.utils import include_parser\n'), ((1444, 1480), 'os.path.normpath', 'path.normpath', (['"""cmake_tests/lib/a.h"""'], {}), "('cmake_tests/lib/a.h')\n", (1457, 1480), False, 'from os import path\n'), ((1764, 1786), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (1776, 1786), False, 'from os import path\n'), ((1804, 1929), 'EasyClangComplete.plugin.utils.include_parser.get_all_headers', 'include_parser.get_all_headers', ([], {'folders': '[base_folder]', 
'prefix': '"""cmake_"""', 'force_unix_includes': '(True)', 'completion_request': 'None'}), "(folders=[base_folder], prefix='cmake_',\n force_unix_includes=True, completion_request=None)\n", (1834, 1929), False, 'from EasyClangComplete.plugin.utils import include_parser\n')]
|
import numpy as np
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense
from tcn import TCN
# if you increase the sequence length make sure the receptive field of the TCN is big enough.
MAX_TIME_STEP = 30
"""
Input: sequence of length 7
Input: sequence of length 25
Input: sequence of length 29
Input: sequence of length 21
Input: sequence of length 20
Input: sequence of length 13
Input: sequence of length 9
Input: sequence of length 7
Input: sequence of length 4
Input: sequence of length 14
Input: sequence of length 10
Input: sequence of length 11
...
"""
def get_x_y(max_time_steps):
for k in range(int(1e9)):
time_steps = np.random.choice(range(1, max_time_steps), size=1)[0]
if k % 2 == 0:
x_train = np.expand_dims([np.insert(np.zeros(shape=(time_steps, 1)), 0, 1)], axis=-1)
y_train = [1]
else:
x_train = np.array([np.zeros(shape=(time_steps, 1))])
y_train = [0]
if k % 100 == 0:
print(f'({k}) Input: sequence of length {time_steps}.')
yield x_train, np.expand_dims(y_train, axis=-1)
m = Sequential([
TCN(input_shape=(None, 1)),
Dense(1, activation='sigmoid')
])
m.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
gen = get_x_y(max_time_steps=MAX_TIME_STEP)
m.fit(gen, epochs=1, steps_per_epoch=1000, max_queue_size=1, verbose=2)
|
[
"tensorflow.keras.layers.Dense",
"numpy.zeros",
"numpy.expand_dims",
"tcn.TCN"
] |
[((1154, 1180), 'tcn.TCN', 'TCN', ([], {'input_shape': '(None, 1)'}), '(input_shape=(None, 1))\n', (1157, 1180), False, 'from tcn import TCN\n'), ((1186, 1216), 'tensorflow.keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (1191, 1216), False, 'from tensorflow.keras.layers import Dense\n'), ((1098, 1130), 'numpy.expand_dims', 'np.expand_dims', (['y_train'], {'axis': '(-1)'}), '(y_train, axis=-1)\n', (1112, 1130), True, 'import numpy as np\n'), ((922, 953), 'numpy.zeros', 'np.zeros', ([], {'shape': '(time_steps, 1)'}), '(shape=(time_steps, 1))\n', (930, 953), True, 'import numpy as np\n'), ((800, 831), 'numpy.zeros', 'np.zeros', ([], {'shape': '(time_steps, 1)'}), '(shape=(time_steps, 1))\n', (808, 831), True, 'import numpy as np\n')]
|
from singlecellmultiomics.universalBamTagger.digest import DigestFlagger
from singlecellmultiomics.tagtools import tagtools
class NlaIIIFlagger(DigestFlagger):
def __init__(self, **kwargs):
DigestFlagger.__init__(self, **kwargs)
def addSite(self, reads, strand, restrictionChrom, restrictionPos):
if not reads[0].has_tag(
self.sampleTag) or not reads[0].has_tag(
self.umiTag):
return
sample = reads[0].get_tag(self.sampleTag)
umi = reads[0].get_tag(self.umiTag)
allele = None if not reads[0].has_tag(
self.alleleTag) else reads[0].get_tag(
self.alleleTag)
siteInfo = tuple([x for x in [strand, allele, umi] if x is not None])
moleculeId = self.increaseAndRecordOversequencing(
sample, restrictionChrom, restrictionPos, siteInfo=siteInfo)
for read in reads:
if read is None:
continue
self.setSiteOversequencing(read, moleculeId)
self.setSiteCoordinate(read, restrictionPos)
self.setSource(read, 'NLA'), {}
if allele is not None:
self.setAllele(read, allele)
self.setStrand(read, '+' if strand ==
1 else ('-' if strand == 0 else '?'))
def digest(self, reads):
if len(reads) != 2:
if len(reads) == 1:
self.setRejectionReason(reads[0], 'unmapped mate')
else:
self.setRejectionReason(reads[0], 'nopair')
return None # Only made for mate pair
R1, R2 = reads
self.addAlleleInfo([read for read in reads if read is not None])
""" Valid configs:
CATG######## R1 ########## ^ ########## R2 ##########
############ R2 ########## ^ ########### R1 #####CATG reverse case
!BWA inverts the query sequence if it maps to the negative strand!
or R2.is_unmapped:
if R1.is_unmapped and R2.is_unmapped:
self.setRejectionReason(R1, 'unmapped R1;R2')
elif R1.is_unmapped:
self.setRejectionReason(R1, 'unmapped R1')
self.setRejectionReason(R2, 'unmapped R1')
else:
self.setRejectionReason(R1, 'unmapped R2')
self.setRejectionReason(R2, 'unmapped R2')
return(None)
"""
# Obtain RT hexamer:
if R2 is not None:
hstart, hseq = tagtools.getRandomPrimerHash(
R2, onStart=True, primerLength=6)
self.setRandomPrimer(R1, R2, hstart, hseq)
if R1 is None or R1.is_unmapped:
self.setRejectionReason(R1, 'unmapped R1')
self.setRejectionReason(R2, 'unmapped R1')
return None
if R1.seq[:4] == 'CATG' and not R1.is_reverse:
rpos = (R1.reference_name, R1.reference_start)
self.addSite([R1, R2], strand=0,
restrictionChrom=rpos[0], restrictionPos=rpos[1])
self.setRecognizedSequence(R1, 'CATG')
self.setRecognizedSequence(R2, 'CATG')
return(rpos)
elif R1.seq[-4:] == 'CATG' and R1.is_reverse:
rpos = (R1.reference_name, R1.reference_end - 4)
self.addSite([R1, R2], strand=1,
restrictionChrom=rpos[0], restrictionPos=rpos[1])
self.setRecognizedSequence(R1, 'CATG')
self.setRecognizedSequence(R2, 'CATG')
return(rpos)
# Sometimes the cycle is off
elif R1.seq[:3] == 'ATG' and not R1.is_reverse:
rpos = (R1.reference_name, R1.reference_start - 1)
self.addSite([R1, R2], strand=0,
restrictionChrom=rpos[0], restrictionPos=rpos[1])
self.setRecognizedSequence(R1, 'ATG')
self.setRecognizedSequence(R2, 'ATG')
return(rpos)
elif R1.seq[-3:] == 'CAT' and R1.is_reverse: # First base was trimmed or lost
rpos = (R1.reference_name, R1.reference_end - 3)
self.addSite([R1, R2], strand=1,
restrictionChrom=rpos[0], restrictionPos=rpos[1])
self.setRecognizedSequence(R1, 'CAT')
self.setRecognizedSequence(R2, 'CAT')
return(rpos)
else:
if R1.seq[:4] == 'CATG' and R1.is_reverse:
self.setRejectionReason(R1, 'found CATG R1 REV exp FWD')
self.setRejectionReason(R2, 'found CATG R1 REV exp FWD')
elif R1.seq[-4:] == 'CATG' and not R1.is_reverse:
self.setRejectionReason(R1, 'found CATG R1 FWD exp REV')
self.setRejectionReason(R2, 'found CATG R1 FWD exp REV')
else:
self.setRejectionReason(R1, 'no CATG')
self.setRejectionReason(R2, 'no CATG')
return None
try:
start, end = tagtools.getPairGenomicLocations(
R1, R2, R1PrimerLength=4, R2PrimerLength=6)
self.setFragmentSize(R1, end - start)
self.setFragmentSize(R2, end - start)
self.setFragmentTrust(R1, start, end)
self.setFragmentTrust(R2, start, end)
except Exception as e:
self.setFragmentSize(R1, 'unknown')
self.setFragmentSize(R2, 'unknown')
"""
if R1.seq[:4]=='CATG' and R1.reference_start<=R2.reference_start: # Site on the start of R1, R2 should map behind
self.addSite( [R1,R2], strand=0, restrictionChrom=R1.reference_name, restrictionPos=R1.reference_start )
return(( R1.reference_name, R1.reference_start))
if R1.seq[-4:]=='CATG' and R1.reference_start>=R2.reference_start: # Site on the end of R1, R2 should map before
self.addSite( [R1,R2], strand=1, restrictionChrom=R1.reference_name, restrictionPos=R1.reference_end-4 )
return( (R1.reference_name, R1.reference_end-4))
"""
|
[
"singlecellmultiomics.tagtools.tagtools.getRandomPrimerHash",
"singlecellmultiomics.universalBamTagger.digest.DigestFlagger.__init__",
"singlecellmultiomics.tagtools.tagtools.getPairGenomicLocations"
] |
[((212, 250), 'singlecellmultiomics.universalBamTagger.digest.DigestFlagger.__init__', 'DigestFlagger.__init__', (['self'], {}), '(self, **kwargs)\n', (234, 250), False, 'from singlecellmultiomics.universalBamTagger.digest import DigestFlagger\n'), ((2563, 2625), 'singlecellmultiomics.tagtools.tagtools.getRandomPrimerHash', 'tagtools.getRandomPrimerHash', (['R2'], {'onStart': '(True)', 'primerLength': '(6)'}), '(R2, onStart=True, primerLength=6)\n', (2591, 2625), False, 'from singlecellmultiomics.tagtools import tagtools\n'), ((5063, 5139), 'singlecellmultiomics.tagtools.tagtools.getPairGenomicLocations', 'tagtools.getPairGenomicLocations', (['R1', 'R2'], {'R1PrimerLength': '(4)', 'R2PrimerLength': '(6)'}), '(R1, R2, R1PrimerLength=4, R2PrimerLength=6)\n', (5095, 5139), False, 'from singlecellmultiomics.tagtools import tagtools\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2008-2011, <NAME> (inamidst.com) and <NAME>
# (yanovich.net)
# Copyright © 2012, <NAME> <<EMAIL>>
# Copyright 2012, <NAME> (embolalia.net)
# Licensed under the Eiffel Forum License 2.
from __future__ import unicode_literals
import re
import time
import base64
import lpbot
from lpbot.tools import Identifier, iteritems
from lpbot.logger import get_logger
from lpbot.module import event, rule, thread, unblockable, event, priority
LOGGER = get_logger(__name__)
@event('001', '251')
@rule('.*')
@thread(False)
@unblockable
def startup(bot, trigger):
"""Do tasks related to connecting to the network.
001 RPL_WELCOME is from RFC2812 and is the first message that is sent after
the connection has been registered on the network.
251 RPL_LUSERCLIENT is a mandatory message that is sent after client
connects to the server in rfc1459. RFC2812 does not require it and all
networks might not send it. We support both.
"""
if bot.connection_registered:
return
bot.connection_registered = True
if bot.config.core.nickserv_password is not None:
bot.msg(
'NickServ',
'IDENTIFY %s' % bot.config.core.nickserv_password
)
if (bot.config.core.oper_name is not None
and bot.config.core.oper_password is not None):
bot.write((
'OPER',
bot.config.core.oper_name + ' ' + bot.config.oper_password
))
# Use Authserv if authserv_password and authserv_account is set in config.
if (bot.config.core.authserv_password is not None
and bot.config.core.authserv_account is not None):
bot.write((
'AUTHSERV auth',
bot.config.core.authserv_account + ' ' + bot.config.authserv_password
))
#Set bot modes per config, +B if no config option is defined
if bot.config.has_option('core', 'modes'):
modes = bot.config.core.modes
else:
modes = 'B'
bot.write(('MODE ', '%s +%s' % (bot.nick, modes)))
bot.memory['retry_join'] = dict()
if bot.config.has_option('core', 'throttle_join'):
throttle_rate = int(bot.config.core.throttle_join)
channels_joined = 0
for channel in bot.config.core.get_list('channels'):
channels_joined += 1
if not channels_joined % throttle_rate:
time.sleep(1)
bot.join(channel)
else:
for channel in bot.config.core.get_list('channels'):
bot.join(channel)
@event('477')
@rule('.*')
@priority('high')
def retry_join(bot, trigger):
"""Give NickServer enough time to identify on a +R channel.
Give NickServ enough time to identify, and retry rejoining an
identified-only (+R) channel. Maximum of ten rejoin attempts.
"""
channel = trigger.args[1]
if channel in bot.memory['retry_join'].keys():
bot.memory['retry_join'][channel] += 1
if bot.memory['retry_join'][channel] > 10:
LOGGER.warning('Failed to join %s after 10 attempts.', channel)
return
else:
bot.memory['retry_join'][channel] = 0
bot.join(channel)
return
time.sleep(6)
bot.join(channel)
# Functions to maintain a list of chanops in all of lpbot's channels.
@rule('(.*)')
@event('353')
@priority('high')
@thread(False)
@unblockable
def handle_names(bot, trigger):
"""Handle NAMES response, happens when joining to channels."""
names = trigger.split()
channels = re.search('(#\S*)', trigger.raw)
if not channels:
return
channel = Identifier(channels.group(1))
if channel not in bot.privileges:
bot.privileges[channel] = dict()
bot.init_ops_list(channel)
# This could probably be made flexible in the future, but I don't think
# it'd be worth it.
mapping = {'+': lpbot.module.VOICE,
'%': lpbot.module.HALFOP,
'@': lpbot.module.OP,
'&': lpbot.module.ADMIN,
'~': lpbot.module.OWNER}
for name in names:
priv = 0
for prefix, value in iteritems(mapping):
if prefix in name:
priv = priv | value
nick = Identifier(name.lstrip(''.join(mapping.keys())))
bot.privileges[channel][nick] = priv
# Old op list maintenance is down here, and should be removed at some
# point
if '@' in name or '~' in name or '&' in name:
bot.add_op(channel, name.lstrip('@&%+~'))
bot.add_halfop(channel, name.lstrip('@&%+~'))
bot.add_voice(channel, name.lstrip('@&%+~'))
elif '%' in name:
bot.add_halfop(channel, name.lstrip('@&%+~'))
bot.add_voice(channel, name.lstrip('@&%+~'))
elif '+' in name:
bot.add_voice(channel, name.lstrip('@&%+~'))
@rule('(.*)')
@event('MODE')
@priority('high')
@thread(False)
@unblockable
def track_modes(bot, trigger):
"""Track usermode changes and keep our lists of ops up to date."""
# Mode message format: <channel> *( ( "-" / "+" ) *<modes> *<modeparams> )
channel = Identifier(trigger.args[0])
line = trigger.args[1:]
# If the first character of where the mode is being set isn't a #
# then it's a user mode, not a channel mode, so we'll ignore it.
if channel.is_nick():
return
def handle_old_modes(nick, mode):
#Old mode maintenance. Drop this crap in 5.0.
if mode[1] == 'o' or mode[1] == 'q' or mode[1] == 'a':
if mode[0] == '+':
bot.add_op(channel, nick)
else:
bot.del_op(channel, nick)
elif mode[1] == 'h': # Halfop
if mode[0] == '+':
bot.add_halfop(channel, nick)
else:
bot.del_halfop(channel, nick)
elif mode[1] == 'v':
if mode[0] == '+':
bot.add_voice(channel, nick)
else:
bot.del_voice(channel, nick)
mapping = {'v': lpbot.module.VOICE,
'h': lpbot.module.HALFOP,
'o': lpbot.module.OP,
'a': lpbot.module.ADMIN,
'q': lpbot.module.OWNER}
modes = []
for arg in line:
if len(arg) == 0:
continue
if arg[0] in '+-':
# There was a comment claiming IRC allows e.g. MODE +aB-c foo, but
# I don't see it in any RFCs. Leaving in the extra parsing for now.
sign = ''
modes = []
for char in arg:
if char == '+' or char == '-':
sign = char
else:
modes.append(sign + char)
else:
arg = Identifier(arg)
for mode in modes:
priv = bot.privileges[channel].get(arg, 0)
value = mapping.get(mode[1])
if value is not None:
if mode[0] == '+':
priv = priv | value
else:
priv = priv & ~value
bot.privileges[channel][arg] = priv
handle_old_modes(arg, mode)
@rule('.*')
@event('NICK')
@priority('high')
@thread(False)
@unblockable
def track_nicks(bot, trigger):
"""Track nickname changes and maintain our chanops list accordingly."""
old = trigger.nick
new = Identifier(trigger)
if old == bot.config.core.owner:
bot.memory['owner_auth']=False
# Give debug mssage, and PM the owner, if the bot's own nick changes.
if old == bot.nick:
privmsg = "Hi, I'm your bot, %s." + \
" Something has made my nick change." + \
" This can cause some problems for me," + \
" and make me do weird things." + \
" You'll probably want to restart me," + \
" and figure out what made that happen" + \
" so you can stop it happening again." + \
" (Usually, it means you tried to give me a nick" + \
" that's protected by NickServ.)" % bot.nick
debug_msg = "Nick changed by server." + \
" This can cause unexpected behavior. Please restart the bot."
LOGGER.critical(debug_msg)
bot.msg(bot.config.core.owner, privmsg)
return
for channel in bot.privileges:
channel = Identifier(channel)
if old in bot.privileges[channel]:
value = bot.privileges[channel].pop(old)
bot.privileges[channel][new] = value
# Old privilege maintenance
for channel in bot.halfplus:
if old in bot.halfplus[channel]:
bot.del_halfop(channel, old)
bot.add_halfop(channel, new)
for channel in bot.ops:
if old in bot.ops[channel]:
bot.del_op(channel, old)
bot.add_op(channel, new)
for channel in bot.voices:
if old in bot.voices[channel]:
bot.del_voice(channel, old)
bot.add_voice(channel, new)
@rule('(.*)')
@event('PART')
@priority('high')
@thread(False)
@unblockable
def track_part(bot, trigger):
if trigger.nick == bot.nick:
bot.channels.remove(trigger.sender)
del bot.privileges[trigger.sender]
else:
if trigger.nick == bot.config.core.owner:
bot.memory['owner_auth'] = False
try:
del bot.privileges[trigger.sender][trigger.nick]
except KeyError:
pass
@rule('.*')
@event('KICK')
@priority('high')
@thread(False)
@unblockable
def track_kick(bot, trigger):
nick = Identifier(trigger.args[1])
if nick == bot.nick:
bot.channels.remove(trigger.sender)
del bot.privileges[trigger.sender]
else:
# Temporary fix to stop KeyErrors from being sent to channel
# The privileges dict may not have all nicks stored at all times
# causing KeyErrors
if trigger.nick == bot.config.core.owner:
bot.memory['owner_auth'] = False
try:
del bot.privileges[trigger.sender][nick]
except KeyError:
pass
@rule('.*')
@event('JOIN')
@priority('high')
@thread(False)
@unblockable
def track_join(bot, trigger):
if trigger.nick == bot.nick and trigger.sender not in bot.channels:
bot.channels.append(trigger.sender)
bot.privileges[trigger.sender] = dict()
bot.privileges[trigger.sender][trigger.nick] = 0
@rule('.*')
@event('QUIT')
@priority('high')
@thread(False)
@unblockable
def track_quit(bot, trigger):
for chanprivs in bot.privileges.values():
if trigger.nick in chanprivs:
del chanprivs[trigger.nick]
if trigger.nick == bot.config.core.owner:
bot.memory['owner_auth'] = False
@rule('.*')
@event('CAP')
@thread(False)
@priority('high')
@unblockable
def recieve_cap_list(bot, trigger):
# Server is listing capabilites
if trigger.args[1] == 'LS':
recieve_cap_ls_reply(bot, trigger)
# Server denied CAP REQ
elif trigger.args[1] == 'NAK':
entry = bot._cap_reqs.get(trigger, None)
# If it was requested with bot.cap_req
if entry:
for req in entry:
# And that request was mandatory/prohibit, and a callback was
# provided
if req[0] and req[2]:
# Call it.
req[2](bot, req[0] + trigger)
# Server is acknowledinge SASL for us.
elif (trigger.args[0] == bot.nick and trigger.args[1] == 'ACK' and
'sasl' in trigger.args[2]):
recieve_cap_ack_sasl(bot)
def recieve_cap_ls_reply(bot, trigger):
if bot.server_capabilities:
# We've already seen the results, so someone sent CAP LS from a module.
# We're too late to do SASL, and we don't want to send CAP END before
# the module has done what it needs to, so just return
return
bot.server_capabilities = set(trigger.split(' '))
# If some other module requests it, we don't need to add another request.
# If some other module prohibits it, we shouldn't request it.
if 'multi-prefix' not in bot._cap_reqs:
# Whether or not the server supports multi-prefix doesn't change how we
# parse it, so we don't need to worry if it fails.
bot._cap_reqs['multi-prefix'] = (['', 'coretasks', None],)
for cap, reqs in iteritems(bot._cap_reqs):
# At this point, we know mandatory and prohibited don't co-exist, but
# we need to call back for optionals if they're also prohibited
prefix = ''
for entry in reqs:
if prefix == '-' and entry[0] != '-':
entry[2](bot, entry[0] + cap)
continue
if entry[0]:
prefix = entry[0]
# It's not required, or it's supported, so we can request it
if prefix != '=' or cap in bot.server_capabilities:
# REQs fail as a whole, so we send them one capability at a time
bot.write(('CAP', 'REQ', entry[0] + cap))
elif reqs[2]:
# Server is going to fail on it, so we call the failure function
reqs[2](bot, entry[0] + cap)
# If we want to do SASL, we have to wait before we can send CAP END. So if
# we are, wait on 903 (SASL successful) to send it.
if bot.config.core.sasl_password:
bot.write(('CAP', 'REQ', 'sasl'))
else:
bot.write(('CAP', 'END'))
def recieve_cap_ack_sasl(bot):
# Presumably we're only here if we said we actually *want* sasl, but still
# check anyway.
if not bot.config.core.sasl_password:
return
mech = bot.config.core.sasl_mechanism or 'PLAIN'
bot.write(('AUTHENTICATE', mech))
@event('AUTHENTICATE')
@rule('.*')
def auth_proceed(bot, trigger):
if trigger.args[0] != '+':
# How did we get here? I am not good with computer.
return
# Is this right?
if bot.config.core.sasl_username:
sasl_username = bot.config.core.sasl_username
else:
sasl_username = bot.nick
sasl_token = '\0'.join((sasl_username, sasl_username,
bot.config.core.sasl_password))
# Spec says we do a base 64 encode on the SASL stuff
bot.write(('AUTHENTICATE', base64.b64encode(sasl_token)))
@event('903')
@rule('.*')
def sasl_success(bot, trigger):
bot.write(('CAP', 'END'))
|
[
"lpbot.tools.iteritems",
"lpbot.logger.get_logger",
"time.sleep",
"lpbot.module.rule",
"base64.b64encode",
"lpbot.tools.Identifier",
"lpbot.module.priority",
"lpbot.module.event",
"lpbot.module.thread",
"re.search"
] |
[((481, 501), 'lpbot.logger.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (491, 501), False, 'from lpbot.logger import get_logger\n'), ((505, 524), 'lpbot.module.event', 'event', (['"""001"""', '"""251"""'], {}), "('001', '251')\n", (510, 524), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((526, 536), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (530, 536), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((538, 551), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (544, 551), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((2532, 2544), 'lpbot.module.event', 'event', (['"""477"""'], {}), "('477')\n", (2537, 2544), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((2546, 2556), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (2550, 2556), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((2558, 2574), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (2566, 2574), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((3298, 3310), 'lpbot.module.rule', 'rule', (['"""(.*)"""'], {}), "('(.*)')\n", (3302, 3310), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((3312, 3324), 'lpbot.module.event', 'event', (['"""353"""'], {}), "('353')\n", (3317, 3324), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((3326, 3342), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (3334, 3342), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((3344, 3357), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (3350, 3357), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((4847, 
4859), 'lpbot.module.rule', 'rule', (['"""(.*)"""'], {}), "('(.*)')\n", (4851, 4859), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((4861, 4874), 'lpbot.module.event', 'event', (['"""MODE"""'], {}), "('MODE')\n", (4866, 4874), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((4876, 4892), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (4884, 4892), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((4894, 4907), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (4900, 4907), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((7158, 7168), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (7162, 7168), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((7170, 7183), 'lpbot.module.event', 'event', (['"""NICK"""'], {}), "('NICK')\n", (7175, 7183), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((7185, 7201), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (7193, 7201), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((7203, 7216), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (7209, 7216), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((9037, 9049), 'lpbot.module.rule', 'rule', (['"""(.*)"""'], {}), "('(.*)')\n", (9041, 9049), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((9051, 9064), 'lpbot.module.event', 'event', (['"""PART"""'], {}), "('PART')\n", (9056, 9064), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((9066, 9082), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (9074, 9082), False, 'from lpbot.module import event, rule, thread, unblockable, 
event, priority\n'), ((9084, 9097), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (9090, 9097), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((9485, 9495), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (9489, 9495), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((9497, 9510), 'lpbot.module.event', 'event', (['"""KICK"""'], {}), "('KICK')\n", (9502, 9510), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((9512, 9528), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (9520, 9528), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((9530, 9543), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (9536, 9543), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10124, 10134), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (10128, 10134), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10136, 10149), 'lpbot.module.event', 'event', (['"""JOIN"""'], {}), "('JOIN')\n", (10141, 10149), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10151, 10167), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (10159, 10167), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10169, 10182), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (10175, 10182), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10446, 10456), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (10450, 10456), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10458, 10471), 'lpbot.module.event', 'event', (['"""QUIT"""'], {}), "('QUIT')\n", (10463, 10471), False, 'from lpbot.module 
import event, rule, thread, unblockable, event, priority\n'), ((10473, 10489), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (10481, 10489), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10491, 10504), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (10497, 10504), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10766, 10776), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (10770, 10776), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10778, 10790), 'lpbot.module.event', 'event', (['"""CAP"""'], {}), "('CAP')\n", (10783, 10790), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10792, 10805), 'lpbot.module.thread', 'thread', (['(False)'], {}), '(False)\n', (10798, 10805), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((10807, 10823), 'lpbot.module.priority', 'priority', (['"""high"""'], {}), "('high')\n", (10815, 10823), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((13737, 13758), 'lpbot.module.event', 'event', (['"""AUTHENTICATE"""'], {}), "('AUTHENTICATE')\n", (13742, 13758), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((13760, 13770), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (13764, 13770), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((14305, 14317), 'lpbot.module.event', 'event', (['"""903"""'], {}), "('903')\n", (14310, 14317), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((14319, 14329), 'lpbot.module.rule', 'rule', (['""".*"""'], {}), "('.*')\n", (14323, 14329), False, 'from lpbot.module import event, rule, thread, unblockable, event, priority\n'), ((3187, 3200), 'time.sleep', 'time.sleep', (['(6)'], 
{}), '(6)\n', (3197, 3200), False, 'import time\n'), ((3514, 3547), 're.search', 're.search', (['"""(#\\\\S*)"""', 'trigger.raw'], {}), "('(#\\\\S*)', trigger.raw)\n", (3523, 3547), False, 'import re\n'), ((5116, 5143), 'lpbot.tools.Identifier', 'Identifier', (['trigger.args[0]'], {}), '(trigger.args[0])\n', (5126, 5143), False, 'from lpbot.tools import Identifier, iteritems\n'), ((7370, 7389), 'lpbot.tools.Identifier', 'Identifier', (['trigger'], {}), '(trigger)\n', (7380, 7389), False, 'from lpbot.tools import Identifier, iteritems\n'), ((9598, 9625), 'lpbot.tools.Identifier', 'Identifier', (['trigger.args[1]'], {}), '(trigger.args[1])\n', (9608, 9625), False, 'from lpbot.tools import Identifier, iteritems\n'), ((12390, 12414), 'lpbot.tools.iteritems', 'iteritems', (['bot._cap_reqs'], {}), '(bot._cap_reqs)\n', (12399, 12414), False, 'from lpbot.tools import Identifier, iteritems\n'), ((4106, 4124), 'lpbot.tools.iteritems', 'iteritems', (['mapping'], {}), '(mapping)\n', (4115, 4124), False, 'from lpbot.tools import Identifier, iteritems\n'), ((8392, 8411), 'lpbot.tools.Identifier', 'Identifier', (['channel'], {}), '(channel)\n', (8402, 8411), False, 'from lpbot.tools import Identifier, iteritems\n'), ((6712, 6727), 'lpbot.tools.Identifier', 'Identifier', (['arg'], {}), '(arg)\n', (6722, 6727), False, 'from lpbot.tools import Identifier, iteritems\n'), ((14271, 14299), 'base64.b64encode', 'base64.b64encode', (['sasl_token'], {}), '(sasl_token)\n', (14287, 14299), False, 'import base64\n'), ((2384, 2397), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2394, 2397), False, 'import time\n')]
|
import os
import pytest
from unittest.mock import patch, MagicMock
import tempfile
from click.testing import CliRunner
from paths_cli.commands.visit_all import *
import openpathsampling as paths
# patch with this for testing
# patch with this for testing
def print_test(output_storage, states, engine, initial_frame):
    """Stand-in for ``visit_all_main`` used in tests.

    Prints (one per line): whether the storage is a ``paths.Storage``, the
    sorted state UUIDs, the engine UUID, and the initial frame UUID, so the
    test can compare captured stdout against expected values.
    """
    print(isinstance(output_storage, paths.Storage))
    state_uuids = [state.__uuid__ for state in states]
    print(sorted(state_uuids))
    print(engine.__uuid__)
    print(initial_frame.__uuid__)
@pytest.fixture()
def visit_all_fixture(tps_fixture):
    """Unpack the TPS fixture into (sorted states, engine, initial frame)."""
    scheme, network, engine, init_conds = tps_fixture
    ordered_states = sorted(network.all_states, key=lambda state: state.__uuid__)
    first_frame = init_conds[0].trajectory[0]
    return ordered_states, engine, first_frame
@patch('paths_cli.commands.visit_all.visit_all_main', print_test)
def test_visit_all(visit_all_fixture):
    """Integration test of the ``visit_all`` CLI command.

    ``visit_all_main`` is patched with ``print_test`` so that the command's
    stdout is a deterministic dump of the objects the CLI resolved from the
    storage file; the test then checks that dump against the fixture objects.
    """
    # this is an integration test; testing integration click & parameters
    states, engine, init_frame = visit_all_fixture
    runner = CliRunner()
    with runner.isolated_filesystem():
        # build the setup storage the CLI will read its inputs from
        storage = paths.Storage("setup.nc", 'w')
        for obj in visit_all_fixture:
            storage.save(obj)
        storage.tags['initial_snapshot'] = init_frame
        storage.close()
        results = runner.invoke(
            visit_all,
            ["setup.nc", '-o', 'foo.nc', '-s', 'A', '-s', 'B',
             '-e', 'flat', '-f', 'initial_snapshot']
        )
        # expected output mirrors print_test: isinstance flag, sorted state
        # UUIDs, then engine and initial-frame UUIDs, one per line
        expected_output = ("True\n[" + str(states[0].__uuid__) + ", "
                           + str(states[1].__uuid__) + "]\n")
        expected_output += "\n".join(str(obj.__uuid__)
                                     for obj in [engine, init_frame]) + "\n"
        assert results.exit_code == 0
        assert results.output == expected_output
def test_visit_all_main(visit_all_fixture):
    """Smoke test for ``visit_all_main``.

    The original cleanup was fragile: ``storage.close()`` ran only on the
    success path, so a failed assertion left the file open while ``finally``
    tried to ``os.remove``/``os.rmdir`` it. Using ``TemporaryDirectory`` plus
    a ``finally`` close guarantees cleanup on every path.
    """
    # just a smoke test here
    states, engine, init_frame = visit_all_fixture
    with tempfile.TemporaryDirectory() as tempdir:
        store_name = os.path.join(tempdir, "visit_all.nc")
        storage = paths.Storage(store_name, mode='w')
        try:
            traj, foo = visit_all_main(storage, states, engine, init_frame)
            assert isinstance(traj, paths.Trajectory)
            assert foo is None
            assert len(storage.trajectories) == 1
        finally:
            # always release the file handle before the directory is removed
            storage.close()
|
[
"os.remove",
"os.path.join",
"pytest.fixture",
"unittest.mock.patch",
"tempfile.mkdtemp",
"os.rmdir",
"click.testing.CliRunner",
"openpathsampling.Storage"
] |
[((457, 473), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (471, 473), False, 'import pytest\n'), ((716, 780), 'unittest.mock.patch', 'patch', (['"""paths_cli.commands.visit_all.visit_all_main"""', 'print_test'], {}), "('paths_cli.commands.visit_all.visit_all_main', print_test)\n", (721, 780), False, 'from unittest.mock import patch, MagicMock\n'), ((958, 969), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (967, 969), False, 'from click.testing import CliRunner\n'), ((1803, 1821), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1819, 1821), False, 'import tempfile\n'), ((1027, 1057), 'openpathsampling.Storage', 'paths.Storage', (['"""setup.nc"""', '"""w"""'], {}), "('setup.nc', 'w')\n", (1040, 1057), True, 'import openpathsampling as paths\n'), ((1852, 1889), 'os.path.join', 'os.path.join', (['tempdir', '"""visit_all.nc"""'], {}), "(tempdir, 'visit_all.nc')\n", (1864, 1889), False, 'import os\n'), ((1908, 1943), 'openpathsampling.Storage', 'paths.Storage', (['store_name'], {'mode': '"""w"""'}), "(store_name, mode='w')\n", (1921, 1943), True, 'import openpathsampling as paths\n'), ((2239, 2260), 'os.remove', 'os.remove', (['store_name'], {}), '(store_name)\n', (2248, 2260), False, 'import os\n'), ((2269, 2286), 'os.rmdir', 'os.rmdir', (['tempdir'], {}), '(tempdir)\n', (2277, 2286), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 19 11:25:16 2017
@author: flwe6397
"""
import scipy
import statsmodels.api as sm
import matplotlib
# apply the global font size before pyplot is imported so all figures use it
matplotlib.rcParams.update({'font.size': 12})
from matplotlib import pyplot
import numpy as np
from pylab import rcParams
# default figure size in inches (8 x 6); individual plots may override this
rcParams['figure.figsize'] = 16/2,12/2
import statistics
def CalcNonParametric_MannWhitneyWithShapiro(list1, list2, my_alternative='two-sided', printHint=False, printNorm=False):
    """Run a Mann-Whitney U test on two samples and print summary statistics.

    Prints the Mann-Whitney U result, then per-sample central tendency,
    standard deviation, N, and [min; max]. Returns a 4-tuple
    ``(center1, stdev1, center2, stdev2)`` where ``center`` is the median
    (the hard-coded ``if (True)`` toggle below selects median over mean).

    :param list1: first sample (sequence of numbers)
    :param list2: second sample (sequence of numbers)
    :param my_alternative: alternative hypothesis for mannwhitneyu
        ('two-sided', 'less', or 'greater')
    :param printHint: also print interpretation hints for the p-values
    :param printNorm: also print the sample sizes
    """
    # NOTE(review): hard-coded debug toggle — Shapiro normality check disabled
    if (False):
        print ('\t\tnormality list1 (Shapiro) = ' + str(scipy.stats.shapiro(list1)))
        print ('\t\tnormality list2 (Shapiro) = ' + str(scipy.stats.shapiro(list2)))
    print('\t\t' + str(scipy.stats.mannwhitneyu(list1, list2, alternative=my_alternative)))
    prefix = ''
    # hard-coded toggle: True -> report medians, False branch (mean) is dead code
    if (True):
        val1= statistics.median(list1)
        val2 = statistics.median(list2)
        prefix = 'Median'
    else:
        val1= statistics.mean(list1)
        val2 = statistics.mean(list2)
        prefix = 'Mean'
    stdevl1 = statistics.stdev(list1)
    stdevl2 = statistics.stdev(list2)
    # list[np.argmin(list)] / list[np.argmax(list)] print the sample min / max
    print ('\t\t'+prefix+' l1 : ' + str(val1) + '\tStDev l1: ' + str(statistics.stdev(list1)) + "\tN = "+str(len(list1)) + ' [' +str(list1[np.argmin(list1)]) + ';' + str(list1[np.argmax(list1)]) + ']')
    print ('\t\t'+prefix+' l2 : ' + str(val2) + '\tStDev l2: ' + str(statistics.stdev(list2)) + "\tN = "+str(len(list2)) + ' [' +str(list2[np.argmin(list2)]) + ';' + str(list2[np.argmax(list2)]) + ']')
    if (printNorm):
        print (str(len(list1)))
        print (str(len(list2)))
    if(printHint):
        print('\t\tMann-Whitney:' + 'If P <= 0.05, we are confident that the distributions significantly differ')
        print('\t\tShapiro :' + 'If P > 0.05, it may be assumed that the data have a normal distribution.')
    #http://www.randalolson.com/2012/08/06/statistical-analysis-made-easy-in-python/
    return val1, stdevl1, val2, stdevl2
def CalcParametric_WelshWithShapiroAndLevene(list1, list2, printHint=False, printNorm=False):
    """Run an independent-samples t-test, choosing Welch's variant as needed.

    Levene's test decides equality of variances: if its p-value is below
    0.05 (variances differ) a Welch t-test (``equal_var=False``) is used,
    otherwise a standard Student t-test. Shapiro normality results and the
    per-sample mean/stdev/N are printed along the way.

    Returns ``(mean1, stdev1, mean2, stdev2)``.

    :param list1: first sample (sequence of numbers)
    :param list2: second sample (sequence of numbers)
    :param printHint: also print interpretation hints for the p-values
    :param printNorm: unused here — kept for signature symmetry with the
        non-parametric variant
    """
    # hard-coded toggle: normality check currently always printed
    if (True):
        print ('\t\tnormality list1 (Shapiro) = ' + str(scipy.stats.shapiro(list1)))
        print ('\t\tnormality list2 (Shapiro) = ' + str(scipy.stats.shapiro(list2)) )
    #print (str(len(list1)))
    #print (str(len(list2)))
    equalvar = scipy.stats.levene(list1, list2, center='mean')
    # Levene p < 0.05 -> variances unequal -> Welch's t-test
    if (equalvar[1] < 0.05):
        print ('\t\t' + str(scipy.stats.ttest_ind(list1, list2, equal_var=False)) + '; Welch: '+ str(equalvar[1]) ) #Welch
    else:
        print ('\t\t' + str(scipy.stats.ttest_ind(list1, list2, equal_var=True))+ '; t-test: '+str(equalvar[1]) )
    # a negative sign implies tahat the sample mean is less than the hypothesized mean
    meanl1 = statistics.mean(list1)
    meanl2 = statistics.mean(list2)
    stdevl1 = statistics.stdev(list1)
    stdevl2 = statistics.stdev(list2)
    print ('\t\tmean l1 : ' + str(statistics.mean(list1)) + '\tStDev l1: ' + str(statistics.stdev(list1))+ "\tN = "+str(len(list1)))
    print ('\t\tmean l2 : ' + str(statistics.mean(list2))+ '\tStDev l2: ' + str(statistics.stdev(list2))+ "\tN = "+str(len(list2)))
    if (printHint):
        print ('\t\tLevene : If p < 0.05 indicates a violation of the assumption that variance is equal across groups. ')
        print ('\t\tT-Test : If p < 0.05, then we can reject the null hypothesis of identical average scores. (they differ)')
        print ('\t\tShapiro : If P > 0.05, it may be assumed that the data have a normal distribution.')
    return meanl1, stdevl1, meanl2, stdevl2
def PrintQQPlot(list1):
    """Display a Q-Q plot of the sample against a normal distribution."""
    sample = np.array(list1)
    figure = sm.qqplot(sample)
    pyplot.show()
def plotBarChartWithStDev(means, stdev):
    """Draw one colored bar per entry of ``means`` with ``stdev`` error bars.

    X tick labels are the fixed set ('ul', 'uc', 'ur', 'dr', 't'); at most
    five bars are supported by the color palette.
    """
    positions = np.arange(len(means))
    bar_width = 0.35
    palette = ['red', 'blue', 'green', 'yellow', 'orange']
    pyplot.figure()
    #pyplot.title('Average Age')
    for idx, mean_val in enumerate(means):
        pyplot.bar(positions[idx], mean_val, bar_width, color=palette[idx],
                   align='center', yerr=stdev[idx], ecolor='k')
    pyplot.ylabel('bla')
    pyplot.xticks(positions, ('ul', 'uc', 'ur', 'dr', 't'))
def plotBarChartWithStdDevDouble(n, means1, means2, stdev1, stdev2, axislist, axistitle = '', newwidth=.4, vlabeloffset=2, x=16, y = 9, bLog = False, pos='best', dolabel=False):
    """Draw a grouped bar chart comparing two series ('2D' vs 'S3D').

    :param n: number of groups along the x axis
    :param means1: bar heights for the first series ('2D')
    :param means2: bar heights for the second series ('S3D')
    :param stdev1: unused here — error bars are not wired to the bars.
        NOTE(review): looks like these were meant to feed ``yerr``; confirm.
    :param stdev2: unused here (see stdev1)
    :param axislist: x tick labels, one per group
    :param axistitle: y axis label
    :param newwidth: bar width
    :param vlabeloffset: multiplier positioning the x ticks between the bars
    :param x: figure width in inches, halved below
    :param y: figure height in inches, halved below
    :param bLog: use a symlog y axis
    :param pos: legend location
    :param dolabel: print each bar's integer height above it
    """
    N = n
    ind = np.arange(N)  # the x locations for the groups
    width = newwidth  # the width of the bars
    rcParams['figure.figsize'] = x/2,y/2
    fig = pyplot.figure()
    ax = fig.add_subplot(111)
    yvals = means1#[4, 9,6,9,2]
    # NOTE(review): the 'left=' keyword was removed in matplotlib >= 3.0 in
    # favor of positional 'x' — this code targets an older matplotlib; confirm.
    rects1 = ax.bar(left=ind+width, height=yvals, width=newwidth, ecolor='black', error_kw=dict(lw=1, capsize=2, capthick=1), color='#4472c4',edgecolor='none',)
    zvals = means2#[1,2,21,1,2]
    rects2 = ax.bar(left=ind+width*2, height=zvals, width=newwidth, ecolor='black',error_kw=dict(lw=1, capsize=3, capthick=1), color = '#ed7d31', edgecolor='none')#color='#D3D3D3')#, hatch='..')
    ax.set_ylabel(axistitle)
    ax.set_xticks(ind+width*vlabeloffset)
    ax.set_xticklabels( axislist )
    ax.legend( (rects1[0], rects2[0]), ('2D', 'S3D') ,loc=pos )
    if (bLog):
        # symlog tolerates zero/negative values unlike a plain log scale
        ax.set_yscale('symlog')
        #pyplot.yscale('log',nonposx='clip')
        #pyplot.ylim( (pow(-10,1),pow(10,2)) )
    def autolabel(rects):
        # annotate each bar with its integer height, centered above the bar
        for rect in rects:
            h = rect.get_height()
            ax.text(rect.get_x()+rect.get_width()/2., 2.05*h, '%d'%int(h),
                    ha='center', va='bottom')
    if (dolabel):
        autolabel(rects1)
        autolabel(rects2)
    #fig.autofmt_xdate()
    # ax.set_xticklabels(ax.xaxis.get_majorticklabels(), rotation=45)
|
[
"matplotlib.pyplot.show",
"statistics.median",
"scipy.stats.shapiro",
"numpy.argmax",
"scipy.stats.mannwhitneyu",
"statistics.stdev",
"matplotlib.rcParams.update",
"matplotlib.pyplot.bar",
"scipy.stats.levene",
"scipy.stats.ttest_ind",
"numpy.argmin",
"matplotlib.pyplot.figure",
"numpy.array",
"statistics.mean",
"statsmodels.api.qqplot",
"numpy.arange",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xticks"
] |
[((153, 198), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'font.size': 12}"], {}), "({'font.size': 12})\n", (179, 198), False, 'import matplotlib\n'), ((1008, 1031), 'statistics.stdev', 'statistics.stdev', (['list1'], {}), '(list1)\n', (1024, 1031), False, 'import statistics\n'), ((1046, 1069), 'statistics.stdev', 'statistics.stdev', (['list2'], {}), '(list2)\n', (1062, 1069), False, 'import statistics\n'), ((2320, 2367), 'scipy.stats.levene', 'scipy.stats.levene', (['list1', 'list2'], {'center': '"""mean"""'}), "(list1, list2, center='mean')\n", (2338, 2367), False, 'import scipy\n'), ((2763, 2785), 'statistics.mean', 'statistics.mean', (['list1'], {}), '(list1)\n', (2778, 2785), False, 'import statistics\n'), ((2799, 2821), 'statistics.mean', 'statistics.mean', (['list2'], {}), '(list2)\n', (2814, 2821), False, 'import statistics\n'), ((2841, 2864), 'statistics.stdev', 'statistics.stdev', (['list1'], {}), '(list1)\n', (2857, 2864), False, 'import statistics\n'), ((2879, 2902), 'statistics.stdev', 'statistics.stdev', (['list2'], {}), '(list2)\n', (2895, 2902), False, 'import statistics\n'), ((3660, 3675), 'numpy.array', 'np.array', (['list1'], {}), '(list1)\n', (3668, 3675), True, 'import numpy as np\n'), ((3686, 3700), 'statsmodels.api.qqplot', 'sm.qqplot', (['res'], {}), '(res)\n', (3695, 3700), True, 'import statsmodels.api as sm\n'), ((3705, 3718), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (3716, 3718), False, 'from matplotlib import pyplot\n'), ((3879, 3894), 'matplotlib.pyplot.figure', 'pyplot.figure', ([], {}), '()\n', (3892, 3894), False, 'from matplotlib import pyplot\n'), ((4063, 4083), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""bla"""'], {}), "('bla')\n", (4076, 4083), False, 'from matplotlib import pyplot\n'), ((4088, 4137), 'matplotlib.pyplot.xticks', 'pyplot.xticks', (['ind', "('ul', 'uc', 'ur', 'dr', 't')"], {}), "(ind, ('ul', 'uc', 'ur', 'dr', 't'))\n", (4101, 4137), False, 'from matplotlib import 
pyplot\n'), ((4332, 4344), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (4341, 4344), True, 'import numpy as np\n'), ((4483, 4498), 'matplotlib.pyplot.figure', 'pyplot.figure', ([], {}), '()\n', (4496, 4498), False, 'from matplotlib import pyplot\n'), ((789, 813), 'statistics.median', 'statistics.median', (['list1'], {}), '(list1)\n', (806, 813), False, 'import statistics\n'), ((829, 853), 'statistics.median', 'statistics.median', (['list2'], {}), '(list2)\n', (846, 853), False, 'import statistics\n'), ((904, 926), 'statistics.mean', 'statistics.mean', (['list1'], {}), '(list1)\n', (919, 926), False, 'import statistics\n'), ((942, 964), 'statistics.mean', 'statistics.mean', (['list2'], {}), '(list2)\n', (957, 964), False, 'import statistics\n'), ((3968, 4069), 'matplotlib.pyplot.bar', 'pyplot.bar', (['ind[i]', 'means[i]', 'width'], {'color': 'colours[i]', 'align': '"""center"""', 'yerr': 'stdev[i]', 'ecolor': '"""k"""'}), "(ind[i], means[i], width, color=colours[i], align='center', yerr=\n stdev[i], ecolor='k')\n", (3978, 4069), False, 'from matplotlib import pyplot\n'), ((675, 741), 'scipy.stats.mannwhitneyu', 'scipy.stats.mannwhitneyu', (['list1', 'list2'], {'alternative': 'my_alternative'}), '(list1, list2, alternative=my_alternative)\n', (699, 741), False, 'import scipy\n'), ((533, 559), 'scipy.stats.shapiro', 'scipy.stats.shapiro', (['list1'], {}), '(list1)\n', (552, 559), False, 'import scipy\n'), ((618, 644), 'scipy.stats.shapiro', 'scipy.stats.shapiro', (['list2'], {}), '(list2)\n', (637, 644), False, 'import scipy\n'), ((2124, 2150), 'scipy.stats.shapiro', 'scipy.stats.shapiro', (['list1'], {}), '(list1)\n', (2143, 2150), False, 'import scipy\n'), ((2209, 2235), 'scipy.stats.shapiro', 'scipy.stats.shapiro', (['list2'], {}), '(list2)\n', (2228, 2235), False, 'import scipy\n'), ((1262, 1278), 'numpy.argmax', 'np.argmax', (['list1'], {}), '(list1)\n', (1271, 1278), True, 'import numpy as np\n'), ((1464, 1480), 'numpy.argmax', 'np.argmax', (['list2'], 
{}), '(list2)\n', (1473, 1480), True, 'import numpy as np\n'), ((2996, 3019), 'statistics.stdev', 'statistics.stdev', (['list1'], {}), '(list1)\n', (3012, 3019), False, 'import statistics\n'), ((3128, 3151), 'statistics.stdev', 'statistics.stdev', (['list2'], {}), '(list2)\n', (3144, 3151), False, 'import statistics\n'), ((2432, 2484), 'scipy.stats.ttest_ind', 'scipy.stats.ttest_ind', (['list1', 'list2'], {'equal_var': '(False)'}), '(list1, list2, equal_var=False)\n', (2453, 2484), False, 'import scipy\n'), ((2567, 2618), 'scipy.stats.ttest_ind', 'scipy.stats.ttest_ind', (['list1', 'list2'], {'equal_var': '(True)'}), '(list1, list2, equal_var=True)\n', (2588, 2618), False, 'import scipy\n'), ((1225, 1241), 'numpy.argmin', 'np.argmin', (['list1'], {}), '(list1)\n', (1234, 1241), True, 'import numpy as np\n'), ((1427, 1443), 'numpy.argmin', 'np.argmin', (['list2'], {}), '(list2)\n', (1436, 1443), True, 'import numpy as np\n'), ((2949, 2971), 'statistics.mean', 'statistics.mean', (['list1'], {}), '(list1)\n', (2964, 2971), False, 'import statistics\n'), ((3082, 3104), 'statistics.mean', 'statistics.mean', (['list2'], {}), '(list2)\n', (3097, 3104), False, 'import statistics\n'), ((1155, 1178), 'statistics.stdev', 'statistics.stdev', (['list1'], {}), '(list1)\n', (1171, 1178), False, 'import statistics\n'), ((1357, 1380), 'statistics.stdev', 'statistics.stdev', (['list2'], {}), '(list2)\n', (1373, 1380), False, 'import statistics\n')]
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import json
import os
from unittest import TestCase
from unittest.mock import patch, MagicMock
from functionsTests.helpers.sample_lambda_events import http_event
# The lambda module reads its configuration and builds AWS clients at import
# time, so the env vars and the DynamoDB/S3 factories must be patched BEFORE
# the import below runs.
with patch.dict(os.environ, {
    'ACHIEVEMENTS_TABLE_NAME': 'gamekit_dev_foogamename_game_achievements',
    'ACHIEVEMENTS_BUCKET_NAME': 'gamekit-dev-uswe2-abcd123-foogamename-achievements'
}) as mock_env:
    with patch("gamekithelpers.ddb.get_table") as mock_ddb_get_table:
        with patch("gamekithelpers.s3.get_s3_client") as mock_get_s3_client:
            from functions.achievements.AdminAddAchievements import index
# fixture icon URLs: 'NEW_*' simulate freshly uploaded icons, 'OLD_*' icons
# already stored on the achievement record
NEW_UNLOCKED_ICON = 'https://mygame.cloudfront.net/achievements/icon/new_unlocked_fc98837a-99bb-4eae-a69c-d5b965878164.png'
NEW_LOCKED_ICON = 'https://mygame.cloudfront.net/achievements/icon/new_locked_f44bfb64-6d92-427e-ac82-24ee96476913.png'
OLD_UNLOCKED_ICON = 'https://mygame.cloudfront.net/achievements/icon/old_unlocked_5f8c4f2d-9002-488b-92c6-8feaca433816.png'
OLD_LOCKED_ICON = 'https://mygame.cloudfront.net/achievements/icon/old_locked_2dc7f9a7-a7cd-4278-aa8b-efe75343ba86.png'
class TestIndex(TestCase):
    """Unit tests for the AdminAddAchievements lambda handler."""

    def setUp(self) -> None:
        """Replace the module-level AWS clients with fresh mocks per test."""
        index.s3_client = MagicMock()
        index.achievements_table = MagicMock()

    def test_lambda_returns_a_400_error_code_when_body_is_empty(self):
        """A request with no body is rejected without touching DDB or S3."""
        # Arrange
        event = self.get_lambda_event()
        event['body'] = None
        # Act
        result = index.lambda_handler(event, None)
        # Assert
        self.assertEqual(400, result['statusCode'])
        index.achievements_table.update_item.assert_not_called()
        index.s3_client.delete_objects.assert_not_called()

    def test_lambda_returns_a_400_error_code_when_achievements_key_is_missing(self):
        """A body without the 'achievements' key is rejected."""
        # Arrange
        event = self.get_lambda_event()
        event['body'] = '{}'
        # Act
        result = index.lambda_handler(event, None)
        # Assert
        self.assertEqual(400, result['statusCode'])
        index.achievements_table.update_item.assert_not_called()
        index.s3_client.delete_objects.assert_not_called()

    def test_lambda_returns_a_400_error_code_when_achievements_array_is_empty(self):
        """An empty 'achievements' array is rejected."""
        # Arrange
        event = self.get_lambda_event()
        event['body'] = '{"achievements": []}'
        # Act
        result = index.lambda_handler(event, None)
        # Assert
        self.assertEqual(400, result['statusCode'])
        index.achievements_table.update_item.assert_not_called()
        index.s3_client.delete_objects.assert_not_called()

    def test_lambda_returns_a_200_success_code_when_achievements_passed_in_body(self):
        """A valid new achievement is stored and echoed back in the response."""
        # Arrange
        event = self.get_lambda_event()
        # no pre-existing record -> nothing to clean up in S3
        index.achievements_table.get_item.return_value = {'Item': None}
        index.achievements_table.update_item.return_value = self.get_achievement_update_value()
        # Act
        result = index.lambda_handler(event, None)
        # Assert
        self.assertEqual(200, result['statusCode'])
        achievements = json.loads(result['body']).get('data').get('achievements')
        self.assertIsNotNone(achievements)
        self.assertEqual(1, len(achievements))
        index.achievements_table.update_item.assert_called_once()
        index.achievements_table.get_item.assert_called_once()
        index.s3_client.delete_objects.assert_not_called()

    def test_lambda_only_deletes_old_icons(self):
        """Old icon objects are deleted from S3 only when replaced by new ones.

        Each case is (name, old_locked, new_locked, old_unlocked,
        new_unlocked, expect_delete).
        """
        test_cases = [
            ('existing icons - both icons new', OLD_LOCKED_ICON, NEW_LOCKED_ICON, OLD_UNLOCKED_ICON, NEW_UNLOCKED_ICON, True),
            ('existing icons - unlocked icon new', OLD_LOCKED_ICON, OLD_LOCKED_ICON, OLD_UNLOCKED_ICON, NEW_UNLOCKED_ICON, True),
            ('existing icons - locked icon new', OLD_LOCKED_ICON, NEW_LOCKED_ICON, OLD_UNLOCKED_ICON, OLD_UNLOCKED_ICON, True),
            ('existing icons - no new icons', OLD_LOCKED_ICON, OLD_LOCKED_ICON, OLD_UNLOCKED_ICON, OLD_UNLOCKED_ICON, False),
            ('no unlocked icon - new unlocked icon', OLD_LOCKED_ICON, NEW_LOCKED_ICON, '', NEW_UNLOCKED_ICON, True),
            ('no locked icon - new locked icon', '', NEW_LOCKED_ICON, OLD_UNLOCKED_ICON, NEW_UNLOCKED_ICON, True),
            ('no icons - both new icons', '', NEW_LOCKED_ICON, '', NEW_UNLOCKED_ICON, False)
        ]
        for test_name, old_locked_url, new_locked_url, old_unlocked_url, new_unlocked_url, should_delete in test_cases:
            with self.subTest(test_name):
                # Arrange
                # Reset all mocks between subtests - this isn't done automatically
                self.setUp()
                event = self.get_lambda_event(new_locked_url, new_unlocked_url)
                index.achievements_table.get_item.return_value = {'Item': {
                    'unlocked_icon_url': old_unlocked_url,
                    'locked_icon_url': old_locked_url
                }}
                index.achievements_table.update_item.return_value = self.get_achievement_update_value(new_locked_url, new_unlocked_url)
                # Act
                result = index.lambda_handler(event, None)
                # Assert
                self.assertEqual(200, result['statusCode'])
                achievements = json.loads(result['body']).get('data').get('achievements')
                self.assertIsNotNone(achievements)
                self.assertEqual(1, len(achievements))
                index.achievements_table.update_item.assert_called_once()
                index.achievements_table.get_item.assert_called_once()
                if should_delete:
                    index.s3_client.delete_objects.assert_called_once()
                    # Expect old icons to be deleted if a new icon was uploaded
                    def append_if_should_delete(items, old_url, new_url):
                        if old_url and old_url != new_url:
                            items.append({'Key': old_url})
                    deleted_objects = []
                    append_if_should_delete(deleted_objects, old_locked_url, new_locked_url)
                    append_if_should_delete(deleted_objects, old_unlocked_url, new_unlocked_url)
                    delete_args = index.s3_client.delete_objects.call_args_list[0][1]['Delete']
                    self.assertEqual(delete_args, {'Objects': deleted_objects})
                else:
                    index.s3_client.delete_objects.assert_not_called()

    @staticmethod
    def get_achievement_update_value(locked_icon=NEW_LOCKED_ICON, unlocked_icon=NEW_UNLOCKED_ICON):
        """Build a DDB ``update_item`` response fixture.

        NOTE(review): 'unlocked_icon_url' is populated from *locked_icon* and
        'locked_icon_url' from *unlocked_icon* — the keys look swapped
        relative to the parameter names; confirm against the real schema.
        """
        return {
            'Attributes': {
                'created_at': '2021-07-27T00:38:42.927855+00:00',
                'locked_description': 'Eat 1,000 bananas',
                'achievement_id': 'EAT_THOUSAND_BANANAS',
                'unlocked_icon_url': locked_icon,
                'max_value': 1000,
                'unlocked_description': 'You ate 1,000 bananas!',
                'is_secret': False,
                'locked_icon_url': unlocked_icon,
                'updated_at': '2021-07-27T17:25:39.302081+00:00',
                'is_stateful': True,
                'points': 10,
                'order_number': 1,
                'is_hidden': False,
                'title': 'Hangry Chicken'
            }
        }

    @staticmethod
    def get_lambda_event(locked_icon=NEW_LOCKED_ICON, unlocked_icon=NEW_UNLOCKED_ICON):
        """Build an API-Gateway-style event carrying one achievement."""
        body = {
            "achievements": [{
                "achievement_id": "EAT_THOUSAND_BANANAS",
                "title": "Hangry Chicken",
                "locked_description": "Eat 1,000 bananas",
                "unlocked_description": "You ate 1,000 bananas!",
                "locked_icon_url": locked_icon,
                "unlocked_icon_url": unlocked_icon,
                "points": 10, "is_stateful": True, "max_value": 1000, "is_secret": False,
                "is_hidden": False, "order_number": 1
            }]
        }
        return http_event(body=json.dumps(body))
|
[
"functions.achievements.AdminAddAchievements.index.achievements_table.update_item.assert_called_once",
"unittest.mock.MagicMock",
"json.loads",
"functions.achievements.AdminAddAchievements.index.achievements_table.get_item.assert_called_once",
"functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_called_once",
"unittest.mock.patch.dict",
"functions.achievements.AdminAddAchievements.index.lambda_handler",
"unittest.mock.patch",
"functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_not_called",
"json.dumps",
"functions.achievements.AdminAddAchievements.index.achievements_table.update_item.assert_not_called"
] |
[((277, 463), 'unittest.mock.patch.dict', 'patch.dict', (['os.environ', "{'ACHIEVEMENTS_TABLE_NAME': 'gamekit_dev_foogamename_game_achievements',\n 'ACHIEVEMENTS_BUCKET_NAME':\n 'gamekit-dev-uswe2-abcd123-foogamename-achievements'}"], {}), "(os.environ, {'ACHIEVEMENTS_TABLE_NAME':\n 'gamekit_dev_foogamename_game_achievements', 'ACHIEVEMENTS_BUCKET_NAME':\n 'gamekit-dev-uswe2-abcd123-foogamename-achievements'})\n", (287, 463), False, 'from unittest.mock import patch, MagicMock\n'), ((488, 525), 'unittest.mock.patch', 'patch', (['"""gamekithelpers.ddb.get_table"""'], {}), "('gamekithelpers.ddb.get_table')\n", (493, 525), False, 'from unittest.mock import patch, MagicMock\n'), ((1275, 1286), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1284, 1286), False, 'from unittest.mock import patch, MagicMock\n'), ((1322, 1333), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1331, 1333), False, 'from unittest.mock import patch, MagicMock\n'), ((1525, 1558), 'functions.achievements.AdminAddAchievements.index.lambda_handler', 'index.lambda_handler', (['event', 'None'], {}), '(event, None)\n', (1545, 1558), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((1637, 1693), 'functions.achievements.AdminAddAchievements.index.achievements_table.update_item.assert_not_called', 'index.achievements_table.update_item.assert_not_called', ([], {}), '()\n', (1691, 1693), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((1702, 1752), 'functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_not_called', 'index.s3_client.delete_objects.assert_not_called', ([], {}), '()\n', (1750, 1752), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((1958, 1991), 'functions.achievements.AdminAddAchievements.index.lambda_handler', 'index.lambda_handler', (['event', 'None'], {}), '(event, None)\n', (1978, 1991), False, 'from functions.achievements.AdminAddAchievements import 
index\n'), ((2070, 2126), 'functions.achievements.AdminAddAchievements.index.achievements_table.update_item.assert_not_called', 'index.achievements_table.update_item.assert_not_called', ([], {}), '()\n', (2124, 2126), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((2135, 2185), 'functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_not_called', 'index.s3_client.delete_objects.assert_not_called', ([], {}), '()\n', (2183, 2185), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((2409, 2442), 'functions.achievements.AdminAddAchievements.index.lambda_handler', 'index.lambda_handler', (['event', 'None'], {}), '(event, None)\n', (2429, 2442), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((2521, 2577), 'functions.achievements.AdminAddAchievements.index.achievements_table.update_item.assert_not_called', 'index.achievements_table.update_item.assert_not_called', ([], {}), '()\n', (2575, 2577), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((2586, 2636), 'functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_not_called', 'index.s3_client.delete_objects.assert_not_called', ([], {}), '()\n', (2634, 2636), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((2984, 3017), 'functions.achievements.AdminAddAchievements.index.lambda_handler', 'index.lambda_handler', (['event', 'None'], {}), '(event, None)\n', (3004, 3017), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((3268, 3325), 'functions.achievements.AdminAddAchievements.index.achievements_table.update_item.assert_called_once', 'index.achievements_table.update_item.assert_called_once', ([], {}), '()\n', (3323, 3325), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((3334, 3388), 'functions.achievements.AdminAddAchievements.index.achievements_table.get_item.assert_called_once', 
'index.achievements_table.get_item.assert_called_once', ([], {}), '()\n', (3386, 3388), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((3397, 3447), 'functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_not_called', 'index.s3_client.delete_objects.assert_not_called', ([], {}), '()\n', (3445, 3447), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((562, 602), 'unittest.mock.patch', 'patch', (['"""gamekithelpers.s3.get_s3_client"""'], {}), "('gamekithelpers.s3.get_s3_client')\n", (567, 602), False, 'from unittest.mock import patch, MagicMock\n'), ((5142, 5175), 'functions.achievements.AdminAddAchievements.index.lambda_handler', 'index.lambda_handler', (['event', 'None'], {}), '(event, None)\n', (5162, 5175), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((5475, 5532), 'functions.achievements.AdminAddAchievements.index.achievements_table.update_item.assert_called_once', 'index.achievements_table.update_item.assert_called_once', ([], {}), '()\n', (5530, 5532), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((5549, 5603), 'functions.achievements.AdminAddAchievements.index.achievements_table.get_item.assert_called_once', 'index.achievements_table.get_item.assert_called_once', ([], {}), '()\n', (5601, 5603), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((8021, 8037), 'json.dumps', 'json.dumps', (['body'], {}), '(body)\n', (8031, 8037), False, 'import json\n'), ((5659, 5710), 'functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_called_once', 'index.s3_client.delete_objects.assert_called_once', ([], {}), '()\n', (5708, 5710), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((6435, 6485), 'functions.achievements.AdminAddAchievements.index.s3_client.delete_objects.assert_not_called', 'index.s3_client.delete_objects.assert_not_called', ([], {}), 
'()\n', (6483, 6485), False, 'from functions.achievements.AdminAddAchievements import index\n'), ((3111, 3137), 'json.loads', 'json.loads', (["result['body']"], {}), "(result['body'])\n", (3121, 3137), False, 'import json\n'), ((5293, 5319), 'json.loads', 'json.loads', (["result['body']"], {}), "(result['body'])\n", (5303, 5319), False, 'import json\n')]
|
import json
import os
import shutil
from unittest import TestCase
from flask import Flask
from micro.core.params import Params
class TestAPIRestEndpoints(TestCase):
    @classmethod
    def setUpClass(cls):
        """Point the micro framework at test resources before importing it.

        The env vars and folders must exist before ``micro.api.apirest`` is
        imported, because the module reads them at import time; hence the
        local import after the setup.
        """
        parent = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                               os.path.pardir))
        plugins = os.path.join(parent, "resources", "plugin")
        os.environ["MICRO_PLUGIN_PATH"] = plugins
        # [env var name, folder path] pairs; torn down in tearDownClass
        cls.test_folders = [
            ["MICRO_LOG_FOLDER_PATH", "/tmp/micro_apirest_logs"],
            ["MICRO_PID_FOLDER_PATH", "/tmp/micro_apirest_pids"]
        ]
        for f in cls.test_folders:
            os.environ[f[0]] = f[1]
            os.makedirs(f[1], exist_ok=True)
        Params(setall=True).set_params()
        from micro.api.apirest import endpoints
        cls.app = Flask("micro_test")
        cls.app.register_blueprint(endpoints)
@classmethod
def tearDownClass(cls):
for f in cls.test_folders:
del os.environ[f[0]]
shutil.rmtree(f[1])
def test_plugins_api(self):
resp = [{
"name": "Example Plugin",
"version": None,
"description": "A very simple example plugin"
}]
with self.app.test_client() as client:
response = client.get("/plugins")
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertEquals(json.loads(response.data), resp)
def test_info(self):
resp = {"error": "plugin not found"}
with self.app.test_client() as client:
response = client.get("/info/not-existent-plugin")
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertEquals(json.loads(response.data), resp)
long_description = "This plugin is a very simple example, " + \
"for that reason, we don't have a long description"
resp = {
"name": "Example Plugin",
"version": None,
"url": None,
"author": "<NAME>",
"author_email": None,
"description": "A very simple example plugin",
"long_description": long_description
}
with self.app.test_client() as client:
response = client.get("/info/Example%20Plugin")
self.assertTrue(True)
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertEquals(json.loads(response.data), resp)
def test_help(self):
resp = {"error": "plugin not found"}
with self.app.test_client() as client:
response = client.get("/help/not-existent-plugin")
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertEquals(json.loads(response.data), resp)
resp = {
"name": "Example Plugin",
"version": None,
"help": "Params: name type string; A name to greet"
}
with self.app.test_client() as client:
response = client.get("/help/Example%20Plugin")
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertEquals(json.loads(response.data), resp)
def test_run(self):
data = {"wrong_arg": "World"}
resp = {"error": "plugin not found"}
with self.app.test_client() as client:
response = client.post("/run/not-existent-plugin",
data=json.dumps(data),
content_type="application/json")
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertEquals(json.loads(response.data), resp)
resp = {
"error": "run() got an unexpected keyword argument 'wrong_arg'"
}
with self.app.test_client() as client:
response = client.post("/run/Example%20Plugin",
data=json.dumps(data),
content_type="application/json")
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertEquals(json.loads(response.data), resp)
data = {"name": "World"}
with self.app.test_client() as client:
response = client.post("/run/Example%20Plugin",
data=json.dumps(data),
content_type="application/json")
self.assertEquals(response.status_code, 200)
self.assertEquals(response.mimetype, "application/json")
self.assertDictEqual(json.loads(response.data),
{"msg": "Hello World!!!"})
|
[
"micro.core.params.Params",
"os.makedirs",
"json.loads",
"os.path.dirname",
"flask.Flask",
"json.dumps",
"shutil.rmtree",
"os.path.join"
] |
[((363, 406), 'os.path.join', 'os.path.join', (['parent', '"""resources"""', '"""plugin"""'], {}), "(parent, 'resources', 'plugin')\n", (375, 406), False, 'import os\n'), ((851, 870), 'flask.Flask', 'Flask', (['"""micro_test"""'], {}), "('micro_test')\n", (856, 870), False, 'from flask import Flask\n'), ((710, 742), 'os.makedirs', 'os.makedirs', (['f[1]'], {'exist_ok': '(True)'}), '(f[1], exist_ok=True)\n', (721, 742), False, 'import os\n'), ((1043, 1062), 'shutil.rmtree', 'shutil.rmtree', (['f[1]'], {}), '(f[1])\n', (1056, 1062), False, 'import shutil\n'), ((255, 280), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (270, 280), False, 'import os\n'), ((752, 771), 'micro.core.params.Params', 'Params', ([], {'setall': '(True)'}), '(setall=True)\n', (758, 771), False, 'from micro.core.params import Params\n'), ((1499, 1524), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (1509, 1524), False, 'import json\n'), ((1869, 1894), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (1879, 1894), False, 'import json\n'), ((2644, 2669), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (2654, 2669), False, 'import json\n'), ((3014, 3039), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (3024, 3039), False, 'import json\n'), ((3469, 3494), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (3479, 3494), False, 'import json\n'), ((4002, 4027), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (4012, 4027), False, 'import json\n'), ((4528, 4553), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (4538, 4553), False, 'import json\n'), ((4987, 5012), 'json.loads', 'json.loads', (['response.data'], {}), '(response.data)\n', (4997, 5012), False, 'import json\n'), ((3760, 3776), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3770, 3776), False, 'import json\n'), ((4286, 4302), 
'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4296, 4302), False, 'import json\n'), ((4742, 4758), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4752, 4758), False, 'import json\n')]
|
import os
import numpy as np
import logging
from pystella.model.sn_eve import PreSN
from pystella.util.phys_var import phys
logger = logging.getLogger(__name__)
try:
import matplotlib.pyplot as plt
from matplotlib import gridspec
is_matplotlib = True
except ImportError:
logging.debug('matplotlib failed to import', exc_info=True)
is_matplotlib = False
pass
# logger.setLevel(logging.INFO)
# logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
__author__ = 'bakl'
# NOTE(review): this re-binds the module logger already created above.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# Example header of a SNEC chem file: nzon nelements, then the A and Z rows.
# 681 15
# 1.0d0 1.0d0 4.0d0 12.0d0 16.0d0 20.0d0 24.0d0 28.0d0 32.0d0 36.0d0 40.0d0 44.0d0 48.0d0 52.0d0 56.0d0
# 0.0d0 1.0d0 2.0d0 6.0d0 8.0d0 10.0d0 12.0d0 14.0d0 16.0d0 18.0d0 20.0d0 22.0d0 24.0d0 26.0d0 28.0d0
# Element symbols in SNEC column order ('NN' is a placeholder species).
snec_elements = "NN H He C O Ne Mg Si S Ar Ca Ti Cr Fe Ni".split()
# Atomic numbers (Z), aligned position-by-position with snec_elements.
snec_elements_Z_str = "0.0 1.0 2.0 6.0 8.0 10.0 12.0 14.0 16.0 18.0 20.0 22.0 24.0 26.0 28.0"
snec_elements_Z = [float(s) for s in snec_elements_Z_str.split()]
# Mass numbers (A), aligned position-by-position with snec_elements.
snec_elements_A_str = "1.0 1.0 4.0 12.0 16.0 20.0 24.0 28.0 32.0 36.0 40.0 44.0 48.0 52.0 56.0"
snec_elements_A = [float(s) for s in snec_elements_A_str.split()]
# Plot color per element symbol.
snec_el_colors = dict(NN="yellow", H="blue", He="cyan", C="darkorange",
                      O="violet", Ne="green", Mg="skyblue", Si="olive",
                      S="indigo", Ar="brown", Ca="purple", Ti="hotpink",
                      Cr="m", Fe='maroon', Ni='magenta')
# Line styles: dashed by default, solid for selected species below.
snec_el_lntypes = dict((k, '--') for k, v in snec_el_colors.items())  # no y-shift
snec_el_lntypes['H'] = '-'
snec_el_lntypes['He'] = '-'
snec_el_lntypes['O'] = '-'
snec_el_lntypes['C'] = '-'
snec_el_lntypes['Ni56'] = '-'  # NOTE(review): 'Ni56' is not in snec_elements — confirm intended
# Column names of a SNEC hydro profile file.
snec_profile_cols = "i M R T Rho V".split()
class Snec:
    """SNEC (SuperNova Explosion Code) initial conditions.

    Holds two independently loaded structured numpy arrays:

    * ``profile`` — hydrodynamic structure, columns ``snec_profile_cols``
      (zone index, mass, radius, temperature, density, velocity);
    * ``chem`` — chemical composition: mass, radius plus the abundances
      of the elements listed in ``snec_elements``.

    Provides readers/writers for both files, a plotting helper and a
    converter from a PreSN model.

    Fix: ``plot_chem`` previously computed the automatic upper y-limit
    from the list of per-element *minima* (``np.max(y_min)``); it now
    uses the collected maxima (``np.max(y_max)``).
    """

    def __init__(self, name):
        """Creates a Problem instance. It's initial conditions for SNEC. Required parameters: name."""
        self._name = name
        self._chem_file = None     # path the chem table was loaded from
        self._chem = None          # structured array with chem data
        self._profile_file = None  # path the hydro profile was loaded from
        self._profile = None       # structured array with profile data

    @property
    def Name(self):
        """Model name."""
        return self._name

    @property
    def chem_file(self):
        """Path of the loaded chem file (None before load_chem)."""
        return self._chem_file

    @property
    def r(self):
        """radius (from the chem table)"""
        return self._chem[PreSN.sR]

    @property
    def nzon(self):
        """Number of zones"""
        return len(self.r)

    @property
    def m(self):
        """Mass (from the chem table)"""
        return self._chem[PreSN.sM]

    @property
    def is_chem_load(self):
        """Check if chem data has been loaded."""
        return self._chem is not None

    @property
    def chem(self):
        """Full chem data (structured array)."""
        return self._chem

    # Profile structure
    @property
    def profile_file(self):
        """Path of the loaded profile file (None before load_profile)."""
        return self._profile_file

    @property
    def profile(self):
        """Full profile data (structured array)."""
        return self._profile

    @property
    def is_profile_load(self):
        """Check if profile data has been loaded."""
        return self._profile is not None

    @property
    def pmass(self):
        """Mass column of the profile."""
        return self.hyd(PreSN.sM)

    @property
    def pradius(self):
        """Radius column of the profile."""
        return self.hyd(PreSN.sR)

    @property
    def ptemp(self):
        """Temperature column of the profile."""
        return self.hyd(PreSN.sT)

    @property
    def prho(self):
        """Density column of the profile."""
        return self.hyd(PreSN.sRho)

    @property
    def pvel(self):
        """Velocity column of the profile."""
        return self.hyd(PreSN.sV)

    @property
    def Elements(self):
        """Elements present in the loaded chem table (subset of snec_elements)."""
        els = []
        keys = self.chem.dtype.names
        for el in snec_elements:
            if el in keys:
                els.append(el)
        return els

    def hyd(self, v):
        """Return profile column *v* (must be one of snec_profile_cols)."""
        if v not in snec_profile_cols:
            raise ValueError("There is no information about the parameter [%s]. You should set it." % v)
        return self._profile[v]

    def load_profile(self, fname):
        """Load the hydro profile from *fname*. Returns self, or None if the file is missing."""
        if not os.path.isfile(fname):
            logger.error(' No snec profile-data for %s' % fname)
            return None
        self._profile_file = fname
        logger.info('Load profile data from %s' % self.profile_file)
        use_cols = list(range(0, len(snec_profile_cols)))
        dtype = np.dtype({'names': snec_profile_cols, 'formats': [np.float64] * len(snec_profile_cols)})
        self._profile = np.loadtxt(self.profile_file, skiprows=1, dtype=dtype, usecols=use_cols)
        return self

    def write_profile(self, fname):
        """
        Write profile to file
        Format:
        #  ibuffer, pmass(i), pradius(i), ptemp(i), prho(i), pvel(i)
        #    1  1.04019E+31  7.94499E+06  1.00140E+10  4.91485E+09 -1.21857E+07  4.57036E-01  0.00000E+00
        :return: True if fname exists
        """
        logger.info(' Write profile-data to %s' % fname)
        zones = range(1, self.nzon + 1)
        with open(fname, 'w') as f:
            # first line: number of zones
            f.write('{:6d}\n'.format(self.nzon))
            for _ in zip(zones, self.pmass, self.pradius, self.ptemp, self.prho, self.pvel):
                f.write('%4d  %12.5e  %12.5e  %12.5e %12.5e  %12.5e\n' % _)
        return os.path.isfile(fname)

    def el(self, el):
        """Return the abundance array for element *el* from the chem table."""
        if el not in snec_elements:
            raise ValueError("There is no such element [%s]." % el)
        if not self.is_chem_load:
            raise Exception("SNEC chem-data has not been loaded. Check and load from %s" % self._chem_file)
        return self._chem[el]

    def set_el(self, el, data):
        """Replace the abundance array for element *el*; data length must equal nzon."""
        if el not in snec_elements:
            raise ValueError("There is no such element [%s]." % el)
        if not self.is_chem_load:
            raise Exception("SNEC chem-data has not been created.")
        if self.nzon != len(data):
            raise ValueError("The data(len={}) should be have the same nzon={} as SNEC. ".format(len(data), self.nzon))
        self._chem[el] = data

    def load_chem(self, fname):
        """Load the chemical composition from *fname*. Returns self, or None if missing."""
        if not os.path.isfile(fname):
            logger.error(' No snec chem-data for %s' % fname)
            return None
        self._chem_file = fname
        logger.info('Load chemical data from %s' % self.chem_file)
        names = [PreSN.sM, PreSN.sR] + snec_elements
        print("Names: %s" % ' '.join(names))
        dtype = np.dtype({'names': names, 'formats': [np.float64] * len(names)})
        self._chem = np.loadtxt(fname, skiprows=3, dtype=dtype, comments='#')
        return self

    def write_chem(self, fname, is_header=True):
        """
        Write data to file in iso.dat format
        :return:
        """
        logger.info(' Write chem-data to %s' % fname)
        with open(fname, 'w') as f:
            # write nzon nElements
            if is_header:
                f.write('{:d}   {:d}\n'.format(self.nzon, len(snec_elements)))
                f.write('{}\n'.format(snec_elements_A_str))
                f.write('{}\n'.format(snec_elements_Z_str))
            for i in range(self.nzon):
                s = '{:.5e}  {:.5e}'.format(self.pmass[i], self.pradius[i])
                for el in snec_elements:
                    s += '  {:.5e}'.format(self.el(el)[i])
                f.write('{}\n'.format(s))
        return os.path.isfile(fname)

    # Plotting
    def plot_chem(self, x='m', ax=None, xlim=None, ylim=None, **kwargs):
        """Plot element abundances vs mass ('m') or radius ('r').

        kwargs: elements, lntypes, colors, leg_loc, font_size, leg_ncol,
        lw, is_save, alpha. Returns the matplotlib axes.
        """
        elements = kwargs.get('elements', snec_elements)
        lntypes = kwargs.get('lntypes', snec_el_lntypes)
        if isinstance(lntypes, str):
            # one common line style for every element
            lntypes = {el: lntypes for el in elements}
        colors = kwargs.get('colors', snec_el_colors)
        loc = kwargs.get('leg_loc', 3)
        font_size = kwargs.get('font_size', 14)
        leg_ncol = kwargs.get('leg_ncol', 4)
        lw = kwargs.get('lw', 2)
        is_save = kwargs.get('is_save', False)
        alpha = kwargs.get('alpha', 1.)

        is_new_plot = ax is None
        # setup figure
        if is_new_plot:
            plt.matplotlib.rcParams.update({'font.size': font_size})
            fig = plt.figure(num=None, figsize=(12, 12), dpi=100, facecolor='w', edgecolor='k')

            gs1 = gridspec.GridSpec(1, 1)
            gs1.update(wspace=0.1, hspace=0.1, top=None, left=0.1, right=0.98)
            ax = fig.add_subplot(gs1[0, 0])

            if x == 'r':
                ax.set_xlabel(r'R [cm]')
            elif x == 'm':
                ax.set_xlabel(r'M [$M_\odot$]')
            else:
                ax.set_xlabel(r'R [cm]')
                ax.set_xscale('log')

        is_x_lim = xlim is not None
        is_y_lim = ylim is not None

        if x == 'r':
            x = self.r
        elif x == 'm':
            x = self.m / phys.M_sun
        else:
            x = self.r

        y_min = []
        y_max = []
        for el in elements:
            y = self.el(el)
            ax.semilogy(x, y, label='%s' % el, color=colors[el], ls=lntypes[el], linewidth=lw, alpha=alpha)

            if not is_y_lim:
                y_min.append(np.min(y))
                y_max.append(np.max(y))

        if not is_y_lim:
            # Bug fix: the upper limit was previously taken from y_min too.
            ylim = [np.min(y_min), np.max(y_max)]

        if not is_x_lim:
            xlim = np.min(x), np.max(x)

        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
        ax.set_ylabel(r'$X_i$')

        if is_new_plot:
            ax.legend(prop={'size': 9}, loc=loc, ncol=leg_ncol, fancybox=False, frameon=True)

        if is_save:
            fsave = os.path.join(os.path.expanduser('~/'), 'chem_%s.pdf' % self._name)
            logger.info(" Save plot to %s " % fsave)
            ax.get_figure().savefig(fsave, bbox_inches='tight', format='pdf')

        return ax

    @staticmethod
    def presn2snec(presn):
        """Build a Snec instance from a PreSN model (copies hydro + chem columns)."""
        snec = Snec(presn.Name)
        # Create profile
        dtype = [('i', '<f8'), (PreSN.sM, '<f8'), (PreSN.sR, '<f8'), (PreSN.sT, '<f8'),
                 (PreSN.sRho, '<f8'), (PreSN.sV, '<f8')]
        aprofile = np.zeros((presn.nzon,), dtype=dtype)
        # Fill profile
        aprofile[PreSN.sM] = presn.m
        aprofile[PreSN.sR] = presn.r
        aprofile[PreSN.sT] = presn.T
        aprofile[PreSN.sRho] = presn.rho
        aprofile[PreSN.sV] = presn.V
        snec._profile = aprofile

        # Create chemical composition
        dtype = [(PreSN.sM, '<f8'), (PreSN.sR, '<f8'), ('NN', '<f8'), ('H', '<f8'), ('He', '<f8'),
                 ('C', '<f8'), ('O', '<f8'), ('Ne', '<f8'), ('Mg', '<f8'), ('Si', '<f8'),
                 ('S', '<f8'), ('Ar', '<f8'), ('Ca', '<f8'), ('Ti', '<f8'), ('Cr', '<f8'),
                 ('Fe', '<f8'), ('Ni', '<f8')]
        achem = np.zeros((presn.nzon,), dtype=dtype)
        # Fill
        achem[PreSN.sM] = presn.m
        achem[PreSN.sR] = presn.r
        for e in presn.Elements:
            if e in snec_elements:
                achem[e] = presn.el(e)
        snec._chem = achem

        return snec
class ParserXg:
    # Empty placeholder — no functionality implemented yet.
    pass
def to_presn(p):
    """Convert a loaded Snec problem *p* into a PreSN model.

    Copies the hydro columns and every element the PreSN model knows;
    elements absent from SNEC are filled with zeros.
    Raises ValueError when the SNEC profile has not been loaded.
    """
    if not p.is_profile_load:
        raise ValueError("There are no data in SNEC problem. "
                         "Probably, You should run: load_profile and load_chem.")

    presn = PreSN(p.Name, p.nzon)
    # Hydro structure columns.
    for column in ('R', 'M', 'T', 'Rho', 'V'):
        presn.set_hyd(column, p.hyd(column))

    # Chemical composition: take SNEC abundances where available, zeros otherwise.
    for element in presn.Elements:
        values = p.el(element) if element in snec_elements else np.zeros(presn.nzon)
        presn.set_chem(element, values)

    # todo check with Viktoriya: in SNEC Ni used Ni as Ni56
    presn.set_chem('Ni56', presn.el('Ni'))
    return presn
|
[
"os.path.expanduser",
"logging.debug",
"numpy.zeros",
"os.path.isfile",
"matplotlib.pyplot.figure",
"numpy.min",
"numpy.loadtxt",
"numpy.max",
"matplotlib.gridspec.GridSpec",
"matplotlib.pyplot.matplotlib.rcParams.update",
"pystella.model.sn_eve.PreSN",
"logging.getLogger"
] |
[((136, 163), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (153, 163), False, 'import logging\n'), ((513, 540), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (530, 540), False, 'import logging\n'), ((11263, 11284), 'pystella.model.sn_eve.PreSN', 'PreSN', (['p.Name', 'p.nzon'], {}), '(p.Name, p.nzon)\n', (11268, 11284), False, 'from pystella.model.sn_eve import PreSN\n'), ((292, 351), 'logging.debug', 'logging.debug', (['"""matplotlib failed to import"""'], {'exc_info': '(True)'}), "('matplotlib failed to import', exc_info=True)\n", (305, 351), False, 'import logging\n'), ((4329, 4401), 'numpy.loadtxt', 'np.loadtxt', (['self.profile_file'], {'skiprows': '(1)', 'dtype': 'dtype', 'usecols': 'use_cols'}), '(self.profile_file, skiprows=1, dtype=dtype, usecols=use_cols)\n', (4339, 4401), True, 'import numpy as np\n'), ((5143, 5164), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (5157, 5164), False, 'import os\n'), ((6346, 6402), 'numpy.loadtxt', 'np.loadtxt', (['fname'], {'skiprows': '(3)', 'dtype': 'dtype', 'comments': '"""#"""'}), "(fname, skiprows=3, dtype=dtype, comments='#')\n", (6356, 6402), True, 'import numpy as np\n'), ((7178, 7199), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (7192, 7199), False, 'import os\n'), ((10088, 10124), 'numpy.zeros', 'np.zeros', (['(presn.nzon,)'], {'dtype': 'dtype'}), '((presn.nzon,), dtype=dtype)\n', (10096, 10124), True, 'import numpy as np\n'), ((10754, 10790), 'numpy.zeros', 'np.zeros', (['(presn.nzon,)'], {'dtype': 'dtype'}), '((presn.nzon,), dtype=dtype)\n', (10762, 10790), True, 'import numpy as np\n'), ((3924, 3945), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (3938, 3945), False, 'import os\n'), ((5936, 5957), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (5950, 5957), False, 'import os\n'), ((7894, 7950), 'matplotlib.pyplot.matplotlib.rcParams.update', 
'plt.matplotlib.rcParams.update', (["{'font.size': font_size}"], {}), "({'font.size': font_size})\n", (7924, 7950), True, 'import matplotlib.pyplot as plt\n'), ((7969, 8046), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': 'None', 'figsize': '(12, 12)', 'dpi': '(100)', 'facecolor': '"""w"""', 'edgecolor': '"""k"""'}), "(num=None, figsize=(12, 12), dpi=100, facecolor='w', edgecolor='k')\n", (7979, 8046), True, 'import matplotlib.pyplot as plt\n'), ((8066, 8089), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(1)', '(1)'], {}), '(1, 1)\n', (8083, 8089), False, 'from matplotlib import gridspec\n'), ((9161, 9174), 'numpy.min', 'np.min', (['y_min'], {}), '(y_min)\n', (9167, 9174), True, 'import numpy as np\n'), ((9176, 9189), 'numpy.max', 'np.max', (['y_min'], {}), '(y_min)\n', (9182, 9189), True, 'import numpy as np\n'), ((9236, 9245), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (9242, 9245), True, 'import numpy as np\n'), ((9247, 9256), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (9253, 9256), True, 'import numpy as np\n'), ((9616, 9640), 'os.path.expanduser', 'os.path.expanduser', (['"""~/"""'], {}), "('~/')\n", (9634, 9640), False, 'import os\n'), ((11528, 11548), 'numpy.zeros', 'np.zeros', (['presn.nzon'], {}), '(presn.nzon)\n', (11536, 11548), True, 'import numpy as np\n'), ((9064, 9073), 'numpy.min', 'np.min', (['y'], {}), '(y)\n', (9070, 9073), True, 'import numpy as np\n'), ((9104, 9113), 'numpy.max', 'np.max', (['y'], {}), '(y)\n', (9110, 9113), True, 'import numpy as np\n')]
|
from pathlib import Path
from collections import defaultdict
import json
import subprocess
from multiprocessing import Process, Queue, current_process, cpu_count
import shutil
import os
import glob
# Source videos (Kinetics dataset) on an external drive.
DATASET_ROOT = Path("/media/gitumarkk/Seagate Backup Plus Drive//Dancelogue/DATASETS/Kinetics/")
# Destination root for the generated gifs.
DATA_GIF_ROOT = Path("/home/gitumarkk/Desktop/WORK_DIR/kinetics_gif/")
# NOTE: side effect at import time — creates the output directory.
DATA_GIF_ROOT.mkdir(exist_ok=True, parents=True)
class Pool:
    """A fixed set of worker processes fed from a shared request queue."""

    def __init__(self):
        # Two workers per CPU; the queue is bounded to throttle the feeder.
        self.num_workers = 2 * cpu_count()
        self.workers = []
        self.videos_queue = Queue(100)

    def start_workers(self):
        """Spawn num_workers processes, each consuming from videos_queue."""
        for _ in range(self.num_workers):
            proc = Process(target=video_worker, args=(self.videos_queue,))
            proc.start()
            self.workers.append(proc)

    def stop_workers(self):
        """
        Stop all workers.
        :return: None.
        """
        # One None sentinel per worker unblocks each pending queue.get().
        for _ in self.workers:
            self.videos_queue.put(None)
        # wait for the processes to finish
        for proc in self.workers:
            proc.join()

    def feed_videos(self):
        """Walk the dataset tree and enqueue one request per video file."""
        read_files(self.videos_queue)
def video_worker(videos_queue):
    """Consume conversion requests from *videos_queue* until a None sentinel arrives."""
    while True:
        item = videos_queue.get(timeout=60)
        if item is None:
            # Sentinel from Pool.stop_workers(): shut this worker down.
            return
        src_path, out_dir, idx, split_name, png_parent = item
        generate_gif(src_path, out_dir, idx, split_name, png_parent)
def generate_gif(filename, output_path, index, folder, png_parent):
    """Convert one .mp4 into an animated gif via an intermediate png sequence.

    filename: Path of the source video (.mp4 only; others are skipped).
    output_path: directory that receives <stem>.gif.
    index / folder: used only for progress printing.
    png_parent: staging directory for the temporary png frames.
    """
    output_gif = output_path / "{}.gif".format(filename.stem)

    # Skip already-converted clips and non-mp4 inputs.
    if output_gif.exists() or filename.suffix != '.mp4':
        print('iter {} - skipping {} of {} - {}'.format(index, filename.stem, filename.parent.stem, folder))
        return

    png_file = str(png_parent / "{}_%06d.png".format(output_gif.stem))

    # Pass 1: sample the video at 4 fps, scaled to 180 px wide, into numbered pngs.
    subprocess.call([
        'ffmpeg',
        '-i', str(filename),
        '-filter_complex',
        "[0:v] fps=4,scale={scale}:-1".format(scale='180'),
        '-f', 'image2',
        '-loglevel', 'warning',
        '-y',
        png_file
    ])

    # Pass 2: stitch the png sequence into the final gif.
    subprocess.call([
        'ffmpeg',
        '-i', png_file,
        '-f', 'gif',
        '-framerate', '16',
        '-loglevel', 'warning',
        '-y',
        str(output_gif)
    ])

    print('iter {} - {} - {} - completed of {}'.format(index, filename.stem, filename.parent.stem, folder))

    # Clean up the temporary frames belonging to this clip.
    for png in glob.glob(str(png_parent / "{}_*".format(output_gif.stem))):
        os.remove(png)
def read_files(videos_queue):
    """Walk DATASET_ROOT's 'train' split and enqueue one gif-conversion request per video.

    Each queue item is (video_path, output_dir, index, split_name, png_parent),
    the tuple shape video_worker() expects.

    Returns an always-empty dict — the dict-populating code was dead
    (commented out); the return is kept for backward compatibility with
    callers that expect a mapping.
    """
    file_dict = defaultdict(dict)
    for split_dir in DATASET_ROOT.iterdir():
        if split_dir.stem not in ['train']:
            continue

        # Mirror the split under DATA_GIF_ROOT plus a shared png staging dir.
        folder_path = DATA_GIF_ROOT / split_dir.stem
        folder_path.mkdir(exist_ok=True, parents=True)
        png_parent = folder_path / 'png'
        png_parent.mkdir(exist_ok=True, parents=True)

        for class_dir in split_dir.iterdir():
            if not class_dir.is_dir():
                continue
            out_dir = folder_path / class_dir.stem
            out_dir.mkdir(exist_ok=True, parents=True)
            for index, video in enumerate(class_dir.iterdir()):
                videos_queue.put((video, out_dir, index, split_dir.stem, png_parent))
    return file_dict
def clean_png():
    """Delete the temporary png staging directory under the 'val' split, if present."""
    for split_dir in DATASET_ROOT.iterdir():
        if split_dir.stem != 'val':
            continue
        png_parent = DATA_GIF_ROOT / split_dir.stem / 'png'
        if png_parent.is_dir():
            shutil.rmtree(png_parent)
def run():
    # Create the pool, start the workers, enqueue every video, then
    # send shutdown sentinels and join.
    pool = Pool()
    pool.start_workers()
    pool.feed_videos()
    pool.stop_workers()

# NOTE(review): executed at import time — this module is meant to be run as a script.
run()
|
[
"os.remove",
"collections.defaultdict",
"pathlib.Path",
"multiprocessing.Queue",
"shutil.rmtree",
"multiprocessing.Process",
"multiprocessing.cpu_count"
] |
[((214, 305), 'pathlib.Path', 'Path', (['"""/media/gitumarkk/Seagate Backup Plus Drive//Dancelogue/DATASETS/Kinetics/"""'], {}), "(\n '/media/gitumarkk/Seagate Backup Plus Drive//Dancelogue/DATASETS/Kinetics/'\n )\n", (218, 305), False, 'from pathlib import Path\n'), ((312, 366), 'pathlib.Path', 'Path', (['"""/home/gitumarkk/Desktop/WORK_DIR/kinetics_gif/"""'], {}), "('/home/gitumarkk/Desktop/WORK_DIR/kinetics_gif/')\n", (316, 366), False, 'from pathlib import Path\n'), ((2393, 2410), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (2404, 2410), False, 'from collections import defaultdict\n'), ((538, 548), 'multiprocessing.Queue', 'Queue', (['(100)'], {}), '(100)\n', (543, 548), False, 'from multiprocessing import Process, Queue, current_process, cpu_count\n'), ((2333, 2347), 'os.remove', 'os.remove', (['png'], {}), '(png)\n', (2342, 2347), False, 'import os\n'), ((476, 487), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (485, 487), False, 'from multiprocessing import Process, Queue, current_process, cpu_count\n'), ((630, 685), 'multiprocessing.Process', 'Process', ([], {'target': 'video_worker', 'args': '(self.videos_queue,)'}), '(target=video_worker, args=(self.videos_queue,))\n', (637, 685), False, 'from multiprocessing import Process, Queue, current_process, cpu_count\n'), ((3534, 3559), 'shutil.rmtree', 'shutil.rmtree', (['png_parent'], {}), '(png_parent)\n', (3547, 3559), False, 'import shutil\n')]
|
import sys

from PyQt5 import QtCore, QtGui, QtWidgets

from AddStudent import Ui_AddStudent
from ViewStudents import Ui_ViewStudents
from Reports import Ui_Reports
class Ui_AdminHome(object):
    """Admin home screen: three clickable image tiles (add students,
    view students, reports) laid out on a QDialog."""

    def __init__(self, Dialog, unm):
        # Dialog: the parent dialog widget; unm: username forwarded to
        # the Reports screen.
        self.dialog = Dialog
        self.unm = unm

    def addstdnts(self, event):
        """Mouse handler for the 'Add Students' tile: opens the add-student dialog."""
        try:
            self.adstdnt = QtWidgets.QDialog()
            self.ui1 = Ui_AddStudent()
            self.ui1.setupUi(self.adstdnt)
            self.adstdnt.show()
        except Exception as e:
            print(e.args[0])
            # NOTE(review): `sys` is imported only inside the __main__ guard
            # below; this line raises NameError when the module is imported.
            tb = sys.exc_info()[2]
            print(tb.tb_lineno)
        event.accept()

    def viewstdents(self, event):
        """Mouse handler for the 'View Students' tile: opens the student list dialog."""
        try:
            self.viewstdnt = QtWidgets.QDialog()
            self.ui1 = Ui_ViewStudents()
            self.ui1.setupUi(self.viewstdnt)
            self.ui1.studentdetails()
            self.viewstdnt.show()
        except Exception as e:
            print(e.args[0])
            # NOTE(review): same missing `sys` import as in addstdnts.
            tb = sys.exc_info()[2]
            print(tb.tb_lineno)
        event.accept()

    def reports(self, event):
        """Mouse handler for the 'Reports' tile: opens the reports dialog for self.unm."""
        try:
            self.reprts = QtWidgets.QDialog()
            self.ui1 = Ui_Reports(self.reprts, self.unm)
            self.ui1.setupUi(self.reprts)
            self.reprts.show()
            event.accept()
        except Exception as e:
            print("Error=" + e.args[0])
            # NOTE(review): same missing `sys` import as in addstdnts.
            tb = sys.exc_info()[2]
            print(tb.tb_lineno)

    def setupUi(self, Dialog):
        """Build the widget tree: three image labels wired to the handlers above,
        caption labels beneath them and a page title."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(1000, 600)
        Dialog.setStyleSheet("background-color: rgb(144, 179, 212);")
        # 'Add Students' tile.
        self.addstdnt = QtWidgets.QLabel(Dialog)
        self.addstdnt.setGeometry(QtCore.QRect(45, 200, 221, 201))
        self.addstdnt.setStyleSheet("image: url(../AttendanceSystem/images/addstudent2.png);")
        self.addstdnt.setText("")
        self.addstdnt.setObjectName("addstdnt")
        self.addstdnt.mousePressEvent = self.addstdnts
        # 'View Students' tile.
        self.label_2 = QtWidgets.QLabel(Dialog)
        self.label_2.setGeometry(QtCore.QRect(360, 210, 211, 181))
        self.label_2.setStyleSheet("image: url(../AttendanceSystem/images/addstudents.png);")
        self.label_2.setText("")
        self.label_2.setObjectName("label_2")
        self.label_2.mousePressEvent = self.viewstdents
        # Caption under the 'Add Students' tile.
        self.label = QtWidgets.QLabel(Dialog)
        self.label.setGeometry(QtCore.QRect(60, 430, 251, 101))
        self.label.setStyleSheet("color: rgb(25, 25, 112);\n"
"font: 75 18pt \"garamond\";")
        self.label.setObjectName("label")
        # Caption under the 'View Students' tile.
        self.label_3 = QtWidgets.QLabel(Dialog)
        self.label_3.setGeometry(QtCore.QRect(380, 430, 251, 101))
        self.label_3.setStyleSheet("color: rgb(25, 25, 112);\n"
"font: 75 18pt \"garamond\";")
        self.label_3.setObjectName("label_3")
        # 'Reports' tile.
        self.label_4 = QtWidgets.QLabel(Dialog)
        self.label_4.setGeometry(QtCore.QRect(700, 195, 201, 211))
        self.label_4.setStyleSheet("image: url(../AttendanceSystem/images/adminreport.png);")
        self.label_4.setText("")
        self.label_4.setObjectName("label_4")
        self.label_4.mousePressEvent = self.reports
        # Caption under the 'Reports' tile.
        self.label_5 = QtWidgets.QLabel(Dialog)
        self.label_5.setGeometry(QtCore.QRect(750, 430, 201, 101))
        self.label_5.setStyleSheet("color: rgb(25, 25, 112);\n"
"font: 75 18pt \"garamond\";")
        self.label_5.setObjectName("label_5")
        # Page title.
        self.label_6 = QtWidgets.QLabel(Dialog)
        self.label_6.setGeometry(QtCore.QRect(300, 40, 661, 101))
        self.label_6.setStyleSheet("color: rgb(25, 25, 112);\n"
"font-weight: bold;\n"
"font: 30pt \"garamond\";")
        self.label_6.setObjectName("label_6")

        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Set all translatable texts (window title, captions, page title)."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "AdminHome"))
        self.label.setText(_translate("Dialog", "Add Students"))
        self.label_3.setText(_translate("Dialog", "View Students"))
        self.label_5.setText(_translate("Dialog", "Reports"))
        self.label_6.setText(_translate("Dialog", "Admin Home"))
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Dialog = QtWidgets.QDialog()
    # Bug fix: `Ui_Dialog` was undefined (NameError at runtime) —
    # instantiate the class defined in this module instead.
    ui = Ui_AdminHome(Dialog, None)
    ui.setupUi(Dialog)
    Dialog.show()
    sys.exit(app.exec_())
|
[
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtCore.QRect",
"ViewStudents.Ui_ViewStudents",
"PyQt5.QtWidgets.QDialog",
"Reports.Ui_Reports",
"AddStudent.Ui_AddStudent",
"sys.exc_info",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QApplication"
] |
[((4222, 4254), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (4244, 4254), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4268, 4287), 'PyQt5.QtWidgets.QDialog', 'QtWidgets.QDialog', ([], {}), '()\n', (4285, 4287), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1618, 1642), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (1634, 1642), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1965, 1989), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (1981, 1989), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2307, 2331), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (2323, 2331), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2554, 2578), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (2570, 2578), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2810, 2834), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (2826, 2834), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3150, 3174), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (3166, 3174), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3407, 3431), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['Dialog'], {}), '(Dialog)\n', (3423, 3431), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3704, 3749), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['Dialog'], {}), '(Dialog)\n', (3741, 3749), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((343, 362), 'PyQt5.QtWidgets.QDialog', 'QtWidgets.QDialog', ([], {}), '()\n', (360, 362), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((386, 401), 'AddStudent.Ui_AddStudent', 'Ui_AddStudent', ([], {}), '()\n', (399, 401), False, 'from AddStudent import Ui_AddStudent\n'), ((704, 723), 
'PyQt5.QtWidgets.QDialog', 'QtWidgets.QDialog', ([], {}), '()\n', (721, 723), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((747, 764), 'ViewStudents.Ui_ViewStudents', 'Ui_ViewStudents', ([], {}), '()\n', (762, 764), False, 'from ViewStudents import Ui_ViewStudents\n'), ((1103, 1122), 'PyQt5.QtWidgets.QDialog', 'QtWidgets.QDialog', ([], {}), '()\n', (1120, 1122), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1146, 1179), 'Reports.Ui_Reports', 'Ui_Reports', (['self.reprts', 'self.unm'], {}), '(self.reprts, self.unm)\n', (1156, 1179), False, 'from Reports import Ui_Reports\n'), ((1677, 1708), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(45)', '(200)', '(221)', '(201)'], {}), '(45, 200, 221, 201)\n', (1689, 1708), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2023, 2055), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(360)', '(210)', '(211)', '(181)'], {}), '(360, 210, 211, 181)\n', (2035, 2055), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2363, 2394), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(60)', '(430)', '(251)', '(101)'], {}), '(60, 430, 251, 101)\n', (2375, 2394), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2612, 2644), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(380)', '(430)', '(251)', '(101)'], {}), '(380, 430, 251, 101)\n', (2624, 2644), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2868, 2900), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(700)', '(195)', '(201)', '(211)'], {}), '(700, 195, 201, 211)\n', (2880, 2900), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3208, 3240), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(750)', '(430)', '(201)', '(101)'], {}), '(750, 430, 201, 101)\n', (3220, 3240), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3465, 3496), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(300)', '(40)', '(661)', '(101)'], {}), '(300, 40, 661, 101)\n', (3477, 3496), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((554, 568), 'sys.exc_info', 'sys.exc_info', 
([], {}), '()\n', (566, 568), False, 'import sys\n'), ((960, 974), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (972, 974), False, 'import sys\n'), ((1369, 1383), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1381, 1383), False, 'import sys\n')]
|
from multiprocessing import Pool
from Model import Model
from util.Document import Document
from util.Sentence import Sentence
from util.Word import Word
from util.data import parse_doc
import time
import argparse
def label_doc(document):
sentences_to_process = []
doc_output = document.meta + "\n"
for sentence in document.sentences:
tokens = [word.FORM for word in sentence.words_conll]
sentences_to_process.append({'tokens': tokens})
labeled_doc = model.label_structured_with_probs(sentences_to_process)
for sentence_index, sentence in enumerate(document.sentences):
labeled_sentence = labeled_doc[sentence_index]
doc_output += "\n" + sentence.sent_id + "\n"
doc_output += sentence.text + "\n"
for word_index, word in enumerate(sentence.words_conll):
try:
dictionary = labeled_sentence[word_index]
word.ARGUMENT = dictionary["label"]
word.CONFIDENCE = dictionary["prob"]
doc_output += word.get_CONLL_row() + "\n"
except Exception as e:
print(e)
pass
return doc_output
def initializer(path):
global model
start_time = time.time()
model = Model(path)
print("model loaded in ", time.time() - start_time)
def write(text, filename):
with open(filename, "a") as myfile:
myfile.write("\n")
myfile.write(text)
if __name__ == '__main__':
time_start = time.time()
parser = argparse.ArgumentParser(description='Argument labeling')
parser.add_argument('--input',
required=True,
help='input data',
default='input')
parser.add_argument('--model',
required=True,
help='model to use',
default='model.h5')
parser.add_argument('--output',
required=True,
help='labeled data',
default='output')
parser.add_argument('--workers',
type=int,
required=False,
help='labeled data',
default=1)
args = parser.parse_args()
with open(args.output, "w") as myfile:
myfile.write("")
texts_to_label = []
with open(args.input) as f:
data = f.read()
splt = data.split('# newdoc')
for sp in splt:
if len(sp.strip()) > 0:
doc = parse_doc(sp)
if doc != None:
texts_to_label.append(doc)
with Pool(processes = args.workers, initializer = initializer, initargs = [args.model]) as p:
for result in p.map(label_doc, texts_to_label):
write(result, args.output)
print("Time: ", time.time() - time_start)
|
[
"argparse.ArgumentParser",
"Model.Model",
"time.time",
"multiprocessing.Pool",
"util.data.parse_doc"
] |
[((1225, 1236), 'time.time', 'time.time', ([], {}), '()\n', (1234, 1236), False, 'import time\n'), ((1249, 1260), 'Model.Model', 'Model', (['path'], {}), '(path)\n', (1254, 1260), False, 'from Model import Model\n'), ((1485, 1496), 'time.time', 'time.time', ([], {}), '()\n', (1494, 1496), False, 'import time\n'), ((1511, 1567), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Argument labeling"""'}), "(description='Argument labeling')\n", (1534, 1567), False, 'import argparse\n'), ((2649, 2725), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'args.workers', 'initializer': 'initializer', 'initargs': '[args.model]'}), '(processes=args.workers, initializer=initializer, initargs=[args.model])\n', (2653, 2725), False, 'from multiprocessing import Pool\n'), ((1291, 1302), 'time.time', 'time.time', ([], {}), '()\n', (1300, 1302), False, 'import time\n'), ((2855, 2866), 'time.time', 'time.time', ([], {}), '()\n', (2864, 2866), False, 'import time\n'), ((2546, 2559), 'util.data.parse_doc', 'parse_doc', (['sp'], {}), '(sp)\n', (2555, 2559), False, 'from util.data import parse_doc\n')]
|
#encoding: utf8
import datetime
import json
from unittest import TestCase
import mock
from dateutil.parser import parse as date_parse
from tests import BaseTestCase
from redash import models
from redash.utils import gen_query_hash, utcnow
class DashboardTest(BaseTestCase):
def test_appends_suffix_to_slug_when_duplicate(self):
d1 = self.factory.create_dashboard()
self.assertEquals(d1.slug, 'test')
d2 = self.factory.create_dashboard(user=d1.user)
self.assertNotEquals(d1.slug, d2.slug)
d3 = self.factory.create_dashboard(user=d1.user)
self.assertNotEquals(d1.slug, d3.slug)
self.assertNotEquals(d2.slug, d3.slug)
class QueryTest(BaseTestCase):
def test_changing_query_text_changes_hash(self):
q = self.factory.create_query()
old_hash = q.query_hash
q.update_instance(query="SELECT 2;")
q = models.Query.get_by_id(q.id)
self.assertNotEquals(old_hash, q.query_hash)
def test_search_finds_in_name(self):
q1 = self.factory.create_query(name=u"Testing seåřċħ")
q2 = self.factory.create_query(name=u"Testing seåřċħing")
q3 = self.factory.create_query(name=u"Testing seå řċħ")
queries = models.Query.search(u"seåřċħ", [self.factory.default_group])
self.assertIn(q1, queries)
self.assertIn(q2, queries)
self.assertNotIn(q3, queries)
def test_search_finds_in_description(self):
q1 = self.factory.create_query(description=u"Testing seåřċħ")
q2 = self.factory.create_query(description=u"Testing seåřċħing")
q3 = self.factory.create_query(description=u"Testing seå řċħ")
queries = models.Query.search(u"seåřċħ", [self.factory.default_group])
self.assertIn(q1, queries)
self.assertIn(q2, queries)
self.assertNotIn(q3, queries)
def test_search_by_id_returns_query(self):
q1 = self.factory.create_query(description="Testing search")
q2 = self.factory.create_query(description="Testing searching")
q3 = self.factory.create_query(description="Testing sea rch")
queries = models.Query.search(str(q3.id), [self.factory.default_group])
self.assertIn(q3, queries)
self.assertNotIn(q1, queries)
self.assertNotIn(q2, queries)
def test_search_respects_groups(self):
other_group = models.Group.create(org=self.factory.org, name="Other Group")
ds = self.factory.create_data_source(group=other_group)
q1 = self.factory.create_query(description="Testing search", data_source=ds)
q2 = self.factory.create_query(description="Testing searching")
q3 = self.factory.create_query(description="Testing sea rch")
queries = models.Query.search("Testing", [self.factory.default_group])
self.assertNotIn(q1, queries)
self.assertIn(q2, queries)
self.assertIn(q3, queries)
queries = models.Query.search("Testing", [other_group, self.factory.default_group])
self.assertIn(q1, queries)
self.assertIn(q2, queries)
self.assertIn(q3, queries)
queries = models.Query.search("Testing", [other_group])
self.assertIn(q1, queries)
self.assertNotIn(q2, queries)
self.assertNotIn(q3, queries)
def test_returns_each_query_only_once(self):
other_group = self.factory.create_group()
second_group = self.factory.create_group()
ds = self.factory.create_data_source(group=other_group)
ds.add_group(second_group, False)
q1 = self.factory.create_query(description="Testing search", data_source=ds)
queries = list(models.Query.search("Testing", [self.factory.default_group, other_group, second_group]))
self.assertEqual(1, len(queries))
def test_save_creates_default_visualization(self):
q = self.factory.create_query()
self.assertEquals(q.visualizations.count(), 1)
def test_save_updates_updated_at_field(self):
# This should be a test of ModelTimestampsMixin, but it's easier to test in context of existing model... :-\
one_day_ago = datetime.datetime.today() - datetime.timedelta(days=1)
q = self.factory.create_query(created_at=one_day_ago, updated_at=one_day_ago)
q.save()
self.assertNotEqual(q.updated_at, one_day_ago)
class QueryRecentTest(BaseTestCase):
def test_global_recent(self):
q1 = self.factory.create_query()
q2 = self.factory.create_query()
models.Event.create(org=self.factory.org, user=self.factory.user, action="edit",
object_type="query", object_id=q1.id)
recent = models.Query.recent([self.factory.default_group])
self.assertIn(q1, recent)
self.assertNotIn(q2, recent)
def test_recent_for_user(self):
q1 = self.factory.create_query()
q2 = self.factory.create_query()
models.Event.create(org=self.factory.org, user=self.factory.user, action="edit",
object_type="query", object_id=q1.id)
recent = models.Query.recent([self.factory.default_group], user_id=self.factory.user.id)
self.assertIn(q1, recent)
self.assertNotIn(q2, recent)
recent = models.Query.recent([self.factory.default_group], user_id=self.factory.user.id + 1)
self.assertNotIn(q1, recent)
self.assertNotIn(q2, recent)
def test_respects_groups(self):
q1 = self.factory.create_query()
ds = self.factory.create_data_source(group=self.factory.create_group())
q2 = self.factory.create_query(data_source=ds)
models.Event.create(org=self.factory.org, user=self.factory.user, action="edit",
object_type="query", object_id=q1.id)
models.Event.create(org=self.factory.org, user=self.factory.user, action="edit",
object_type="query", object_id=q2.id)
recent = models.Query.recent([self.factory.default_group])
self.assertIn(q1, recent)
self.assertNotIn(q2, recent)
class ShouldScheduleNextTest(TestCase):
def test_interval_schedule_that_needs_reschedule(self):
now = datetime.datetime.now()
two_hours_ago = now - datetime.timedelta(hours=2)
self.assertTrue(models.should_schedule_next(two_hours_ago, now, "3600"))
def test_interval_schedule_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
half_an_hour_ago = now - datetime.timedelta(minutes=30)
self.assertFalse(models.should_schedule_next(half_an_hour_ago, now, "3600"))
def test_exact_time_that_needs_reschedule(self):
now = datetime.datetime.now()
yesterday = now - datetime.timedelta(days=1)
scheduled_datetime = now - datetime.timedelta(hours=3)
scheduled_time = "{:02d}:00".format(scheduled_datetime.hour)
self.assertTrue(models.should_schedule_next(yesterday, now, scheduled_time))
def test_exact_time_that_doesnt_need_reschedule(self):
now = date_parse("2015-10-16 20:10")
yesterday = date_parse("2015-10-15 23:07")
schedule = "23:00"
self.assertFalse(models.should_schedule_next(yesterday, now, schedule))
def test_exact_time_with_day_change(self):
now = datetime.datetime.now().replace(hour=0, minute=1)
previous = (now - datetime.timedelta(days=2)).replace(hour=23, minute=59)
schedule = "23:59".format(now.hour + 3)
self.assertTrue(models.should_schedule_next(previous, now, schedule))
class QueryOutdatedQueriesTest(BaseTestCase):
# TODO: this test can be refactored to use mock version of should_schedule_next to simplify it.
def test_outdated_queries_skips_unscheduled_queries(self):
query = self.factory.create_query(schedule=None)
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_ttl_based_schedule(self):
two_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=2)
query = self.factory.create_query(schedule="3600")
query_result = self.factory.create_query_result(query=query, retrieved_at=two_hours_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
def test_skips_fresh_queries(self):
half_an_hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=30)
query = self.factory.create_query(schedule="3600")
query_result = self.factory.create_query_result(query=query, retrieved_at=half_an_hour_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_specific_time_schedule(self):
half_an_hour_ago = utcnow() - datetime.timedelta(minutes=30)
query = self.factory.create_query(schedule=half_an_hour_ago.strftime('%H:%M'))
query_result = self.factory.create_query_result(query=query, retrieved_at=half_an_hour_ago - datetime.timedelta(days=1))
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
class QueryArchiveTest(BaseTestCase):
def setUp(self):
super(QueryArchiveTest, self).setUp()
def test_archive_query_sets_flag(self):
query = self.factory.create_query()
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEquals(query.is_archived, True)
def test_archived_query_doesnt_return_in_all(self):
query = self.factory.create_query(schedule="1")
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
query_result, _ = models.QueryResult.store_result(query.org, query.data_source.id, query.query_hash, query.query, "1",
123, yesterday)
query.latest_query_data = query_result
query.save()
self.assertIn(query, models.Query.all_queries(query.groups.keys()))
self.assertIn(query, models.Query.outdated_queries())
query.archive()
self.assertNotIn(query, models.Query.all_queries(query.groups.keys()))
self.assertNotIn(query, models.Query.outdated_queries())
def test_removes_associated_widgets_from_dashboards(self):
widget = self.factory.create_widget()
query = widget.visualization.query
query.archive()
self.assertRaises(models.Widget.DoesNotExist, models.Widget.get_by_id, widget.id)
def test_removes_scheduling(self):
query = self.factory.create_query(schedule="1")
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEqual(None, query.schedule)
class DataSourceTest(BaseTestCase):
def test_get_schema(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
schema = self.factory.data_source.get_schema()
self.assertEqual(return_value, schema)
def test_get_schema_uses_cache(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
self.factory.data_source.get_schema()
schema = self.factory.data_source.get_schema()
self.assertEqual(return_value, schema)
self.assertEqual(patched_get_schema.call_count, 1)
def test_get_schema_skips_cache_with_refresh_true(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
self.factory.data_source.get_schema()
new_return_value = [{'name': 'new_table', 'columns': []}]
patched_get_schema.return_value = new_return_value
schema = self.factory.data_source.get_schema(refresh=True)
self.assertEqual(new_return_value, schema)
self.assertEqual(patched_get_schema.call_count, 2)
class QueryResultTest(BaseTestCase):
def setUp(self):
super(QueryResultTest, self).setUp()
def test_get_latest_returns_none_if_not_found(self):
found_query_result = models.QueryResult.get_latest(self.factory.data_source, "SELECT 1", 60)
self.assertIsNone(found_query_result)
def test_get_latest_returns_when_found(self):
qr = self.factory.create_query_result()
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, 60)
self.assertEqual(qr, found_query_result)
def test_get_latest_works_with_data_source_id(self):
qr = self.factory.create_query_result()
found_query_result = models.QueryResult.get_latest(qr.data_source.id, qr.query, 60)
self.assertEqual(qr, found_query_result)
def test_get_latest_doesnt_return_query_from_different_data_source(self):
qr = self.factory.create_query_result()
data_source = self.factory.create_data_source()
found_query_result = models.QueryResult.get_latest(data_source, qr.query, 60)
self.assertIsNone(found_query_result)
def test_get_latest_doesnt_return_if_ttl_expired(self):
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
qr = self.factory.create_query_result(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=60)
self.assertIsNone(found_query_result)
def test_get_latest_returns_if_ttl_not_expired(self):
yesterday = datetime.datetime.now() - datetime.timedelta(seconds=30)
qr = self.factory.create_query_result(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=120)
self.assertEqual(found_query_result, qr)
def test_get_latest_returns_the_most_recent_result(self):
yesterday = datetime.datetime.now() - datetime.timedelta(seconds=30)
old_qr = self.factory.create_query_result(retrieved_at=yesterday)
qr = self.factory.create_query_result()
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, 60)
self.assertEqual(found_query_result.id, qr.id)
def test_get_latest_returns_the_last_cached_result_for_negative_ttl(self):
yesterday = datetime.datetime.now() + datetime.timedelta(days=-100)
very_old = self.factory.create_query_result(retrieved_at=yesterday)
yesterday = datetime.datetime.now() + datetime.timedelta(days=-1)
qr = self.factory.create_query_result(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, -1)
self.assertEqual(found_query_result.id, qr.id)
class TestUnusedQueryResults(BaseTestCase):
def test_returns_only_unused_query_results(self):
two_weeks_ago = datetime.datetime.now() - datetime.timedelta(days=14)
qr = self.factory.create_query_result()
query = self.factory.create_query(latest_query_data=qr)
unused_qr = self.factory.create_query_result(retrieved_at=two_weeks_ago)
self.assertIn(unused_qr, models.QueryResult.unused())
self.assertNotIn(qr, models.QueryResult.unused())
def test_returns_only_over_a_week_old_results(self):
two_weeks_ago = datetime.datetime.now() - datetime.timedelta(days=14)
unused_qr = self.factory.create_query_result(retrieved_at=two_weeks_ago)
new_unused_qr = self.factory.create_query_result()
self.assertIn(unused_qr, models.QueryResult.unused())
self.assertNotIn(new_unused_qr, models.QueryResult.unused())
class TestQueryAll(BaseTestCase):
def test_returns_only_queries_in_given_groups(self):
ds1 = self.factory.create_data_source()
ds2 = self.factory.create_data_source()
group1 = models.Group.create(name="g1", org=ds1.org)
group2 = models.Group.create(name="g2", org=ds1.org)
models.DataSourceGroup.create(group=group1, data_source=ds1, permissions=['create', 'view'])
models.DataSourceGroup.create(group=group2, data_source=ds2, permissions=['create', 'view'])
q1 = self.factory.create_query(data_source=ds1)
q2 = self.factory.create_query(data_source=ds2)
self.assertIn(q1, models.Query.all_queries([group1]))
self.assertNotIn(q2, models.Query.all_queries([group1]))
self.assertIn(q1, models.Query.all_queries([group1, group2]))
self.assertIn(q2, models.Query.all_queries([group1, group2]))
class TestUser(BaseTestCase):
def test_default_group_always_added(self):
user = self.factory.create_user()
user.update_group_assignments(["g_unknown"])
self.assertItemsEqual([user.org.default_group.id], user.groups)
def test_update_group_assignments(self):
user = self.factory.user
new_group = models.Group.create(id='999', name="g1", org=user.org)
user.update_group_assignments(["g1"])
self.assertItemsEqual([user.org.default_group.id, new_group.id], user.groups)
class TestGroup(BaseTestCase):
def test_returns_groups_with_specified_names(self):
org1 = self.factory.create_org()
org2 = self.factory.create_org()
matching_group1 = models.Group.create(id='999', name="g1", org=org1)
matching_group2 = models.Group.create(id='888', name="g2", org=org1)
non_matching_group = models.Group.create(id='777', name="g1", org=org2)
groups = models.Group.find_by_name(org1, ["g1", "g2"])
self.assertIn(matching_group1, groups)
self.assertIn(matching_group2, groups)
self.assertNotIn(non_matching_group, groups)
def test_returns_no_groups(self):
org1 = self.factory.create_org()
models.Group.create(id='999', name="g1", org=org1)
self.assertEqual([], models.Group.find_by_name(org1, ["non-existing"]))
class TestQueryResultStoreResult(BaseTestCase):
def setUp(self):
super(TestQueryResultStoreResult, self).setUp()
self.data_source = self.factory.data_source
self.query = "SELECT 1"
self.query_hash = gen_query_hash(self.query)
self.runtime = 123
self.utcnow = utcnow()
self.data = "data"
def test_stores_the_result(self):
query_result, _ = models.QueryResult.store_result(self.data_source.org_id, self.data_source.id, self.query_hash,
self.query,
self.data, self.runtime, self.utcnow)
self.assertEqual(query_result.data, self.data)
self.assertEqual(query_result.runtime, self.runtime)
self.assertEqual(query_result.retrieved_at, self.utcnow)
self.assertEqual(query_result.query, self.query)
self.assertEqual(query_result.query_hash, self.query_hash)
self.assertEqual(query_result.data_source, self.data_source)
def test_updates_existing_queries(self):
query1 = self.factory.create_query(query=self.query)
query2 = self.factory.create_query(query=self.query)
query3 = self.factory.create_query(query=self.query)
query_result, _ = models.QueryResult.store_result(self.data_source.org_id, self.data_source.id, self.query_hash,
self.query, self.data,
self.runtime, self.utcnow)
self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
self.assertEqual(models.Query.get_by_id(query2.id)._data['latest_query_data'], query_result.id)
self.assertEqual(models.Query.get_by_id(query3.id)._data['latest_query_data'], query_result.id)
def test_doesnt_update_queries_with_different_hash(self):
query1 = self.factory.create_query(query=self.query)
query2 = self.factory.create_query(query=self.query)
query3 = self.factory.create_query(query=self.query + "123")
query_result, _ = models.QueryResult.store_result(self.data_source.org_id, self.data_source.id, self.query_hash,
self.query, self.data,
self.runtime, self.utcnow)
self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
self.assertEqual(models.Query.get_by_id(query2.id)._data['latest_query_data'], query_result.id)
self.assertNotEqual(models.Query.get_by_id(query3.id)._data['latest_query_data'], query_result.id)
def test_doesnt_update_queries_with_different_data_source(self):
query1 = self.factory.create_query(query=self.query)
query2 = self.factory.create_query(query=self.query)
query3 = self.factory.create_query(query=self.query, data_source=self.factory.create_data_source())
query_result, _ = models.QueryResult.store_result(self.data_source.org_id, self.data_source.id, self.query_hash,
self.query, self.data,
self.runtime, self.utcnow)
self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
self.assertEqual(models.Query.get_by_id(query2.id)._data['latest_query_data'], query_result.id)
self.assertNotEqual(models.Query.get_by_id(query3.id)._data['latest_query_data'], query_result.id)
class TestEvents(BaseTestCase):
def raw_event(self):
timestamp = 1411778709.791
user = self.factory.user
created_at = datetime.datetime.utcfromtimestamp(timestamp)
raw_event = {"action": "view",
"timestamp": timestamp,
"object_type": "dashboard",
"user_id": user.id,
"object_id": 1,
"org_id": 1}
return raw_event, user, created_at
def test_records_event(self):
raw_event, user, created_at = self.raw_event()
event = models.Event.record(raw_event)
self.assertEqual(event.user, user)
self.assertEqual(event.action, "view")
self.assertEqual(event.object_type, "dashboard")
self.assertEqual(event.object_id, 1)
self.assertEqual(event.created_at, created_at)
def test_records_additional_properties(self):
raw_event, _, _ = self.raw_event()
additional_properties = {'test': 1, 'test2': 2, 'whatever': "abc"}
raw_event.update(additional_properties)
event = models.Event.record(raw_event)
self.assertDictEqual(json.loads(event.additional_properties), additional_properties)
class TestWidgetDeleteInstance(BaseTestCase):
def test_delete_removes_from_layout(self):
widget = self.factory.create_widget()
widget2 = self.factory.create_widget(dashboard=widget.dashboard)
widget.dashboard.layout = json.dumps([[widget.id, widget2.id]])
widget.dashboard.save()
widget.delete_instance()
self.assertEquals(json.dumps([[widget2.id]]), widget.dashboard.layout)
def test_delete_removes_empty_rows(self):
widget = self.factory.create_widget()
widget2 = self.factory.create_widget(dashboard=widget.dashboard)
widget.dashboard.layout = json.dumps([[widget.id, widget2.id]])
widget.dashboard.save()
widget.delete_instance()
widget2.delete_instance()
self.assertEquals("[]", widget.dashboard.layout)
def _set_up_dashboard_test(d):
d.g1 = d.factory.create_group(name='First')
d.g2 = d.factory.create_group(name='Second')
d.ds1 = d.factory.create_data_source()
d.ds2 = d.factory.create_data_source()
d.u1 = d.factory.create_user(groups=[d.g1.id])
d.u2 = d.factory.create_user(groups=[d.g2.id])
models.DataSourceGroup.create(group=d.g1, data_source=d.ds1, permissions=['create', 'view'])
models.DataSourceGroup.create(group=d.g2, data_source=d.ds2, permissions=['create', 'view'])
d.q1 = d.factory.create_query(data_source=d.ds1)
d.q2 = d.factory.create_query(data_source=d.ds2)
d.v1 = d.factory.create_visualization(query=d.q1)
d.v2 = d.factory.create_visualization(query=d.q2)
d.w1 = d.factory.create_widget(visualization=d.v1)
d.w2 = d.factory.create_widget(visualization=d.v2)
d.w3 = d.factory.create_widget(visualization=d.v2, dashboard=d.w2.dashboard)
d.w4 = d.factory.create_widget(visualization=d.v2)
d.w5 = d.factory.create_widget(visualization=d.v1, dashboard=d.w4.dashboard)
class TestDashboardAll(BaseTestCase):
def setUp(self):
super(TestDashboardAll, self).setUp()
_set_up_dashboard_test(self)
def test_requires_group_or_user_id(self):
d1 = self.factory.create_dashboard()
self.assertNotIn(d1, models.Dashboard.all(d1.user.org, d1.user.groups, None))
self.assertIn(d1, models.Dashboard.all(d1.user.org, [0], d1.user.id))
def test_returns_dashboards_based_on_groups(self):
self.assertIn(self.w1.dashboard, models.Dashboard.all(self.u1.org, self.u1.groups, None))
self.assertIn(self.w2.dashboard, models.Dashboard.all(self.u2.org, self.u2.groups, None))
self.assertNotIn(self.w1.dashboard, models.Dashboard.all(self.u2.org, self.u2.groups, None))
self.assertNotIn(self.w2.dashboard, models.Dashboard.all(self.u1.org, self.u1.groups, None))
def test_returns_each_dashboard_once(self):
dashboards = list(models.Dashboard.all(self.u2.org, self.u2.groups, None))
self.assertEqual(len(dashboards), 2)
def test_returns_dashboard_you_have_partial_access_to(self):
self.assertIn(self.w5.dashboard, models.Dashboard.all(self.u1.org, self.u1.groups, None))
def test_returns_dashboards_created_by_user(self):
d1 = self.factory.create_dashboard(user=self.u1)
self.assertIn(d1, models.Dashboard.all(self.u1.org, self.u1.groups, self.u1.id))
self.assertIn(d1, models.Dashboard.all(self.u1.org, [0], self.u1.id))
self.assertNotIn(d1, models.Dashboard.all(self.u2.org, self.u2.groups, self.u2.id))
def test_returns_dashboards_with_text_widgets(self):
w1 = self.factory.create_widget(visualization=None)
self.assertIn(w1.dashboard, models.Dashboard.all(self.u1.org, self.u1.groups, None))
self.assertIn(w1.dashboard, models.Dashboard.all(self.u2.org, self.u2.groups, None))
def test_returns_dashboards_from_current_org_only(self):
w1 = self.factory.create_widget(visualization=None)
user = self.factory.create_user(org=self.factory.create_org())
self.assertIn(w1.dashboard, models.Dashboard.all(self.u1.org, self.u1.groups, None))
self.assertNotIn(w1.dashboard, models.Dashboard.all(user.org, user.groups, None))
class TestDashboardRecent(BaseTestCase):
def setUp(self):
super(TestDashboardRecent, self).setUp()
_set_up_dashboard_test(self)
def test_returns_recent_dashboards_basic(self):
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=self.w1.dashboard.id)
self.assertIn(self.w1.dashboard, models.Dashboard.recent(self.u1.org, self.u1.groups, None))
self.assertNotIn(self.w2.dashboard, models.Dashboard.recent(self.u1.org, self.u1.groups, None))
self.assertNotIn(self.w1.dashboard, models.Dashboard.recent(self.u1.org, self.u2.groups, None))
def test_returns_recent_dashboards_created_by_user(self):
d1 = self.factory.create_dashboard(user=self.u1)
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=d1.id)
self.assertIn(d1, models.Dashboard.recent(self.u1.org, [0], self.u1.id))
self.assertNotIn(self.w2.dashboard, models.Dashboard.recent(self.u1.org, [0], self.u1.id))
self.assertNotIn(d1, models.Dashboard.recent(self.u2.org, [0], self.u2.id))
def test_returns_recent_dashboards_with_no_visualizations(self):
w1 = self.factory.create_widget(visualization=None)
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=w1.dashboard.id)
self.assertIn(w1.dashboard, models.Dashboard.recent(self.u1.org, [0], self.u1.id))
self.assertNotIn(self.w2.dashboard, models.Dashboard.recent(self.u1.org, [0], self.u1.id))
def test_restricts_dashboards_for_user(self):
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=self.w1.dashboard.id)
models.Event.create(org=self.factory.org, user=self.u2, action="view",
object_type="dashboard", object_id=self.w2.dashboard.id)
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=self.w5.dashboard.id)
models.Event.create(org=self.factory.org, user=self.u2, action="view",
object_type="dashboard", object_id=self.w5.dashboard.id)
self.assertIn(self.w1.dashboard, models.Dashboard.recent(self.u1.org, self.u1.groups, self.u1.id, for_user=True))
self.assertIn(self.w2.dashboard, models.Dashboard.recent(self.u2.org, self.u2.groups, self.u2.id, for_user=True))
self.assertNotIn(self.w1.dashboard, models.Dashboard.recent(self.u2.org, self.u2.groups, self.u2.id, for_user=True))
self.assertNotIn(self.w2.dashboard, models.Dashboard.recent(self.u1.org, self.u1.groups, self.u1.id, for_user=True))
self.assertIn(self.w5.dashboard, models.Dashboard.recent(self.u1.org, self.u1.groups, self.u1.id, for_user=True))
self.assertIn(self.w5.dashboard, models.Dashboard.recent(self.u2.org, self.u2.groups, self.u2.id, for_user=True))
def test_returns_each_dashboard_once(self):
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=self.w1.dashboard.id)
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=self.w1.dashboard.id)
dashboards = list(models.Dashboard.recent(self.u1.org, self.u1.groups, None))
self.assertEqual(len(dashboards), 1)
def test_returns_dashboards_from_current_org_only(self):
w1 = self.factory.create_widget(visualization=None)
models.Event.create(org=self.factory.org, user=self.u1, action="view",
object_type="dashboard", object_id=w1.dashboard.id)
user = self.factory.create_user(org=self.factory.create_org())
self.assertIn(w1.dashboard, models.Dashboard.recent(self.u1.org, self.u1.groups, None))
self.assertNotIn(w1.dashboard, models.Dashboard.recent(user.org, user.groups, None))
|
[
"redash.utils.utcnow",
"json.dumps",
"redash.models.Query.all_queries",
"redash.models.Dashboard.recent",
"redash.models.Group.find_by_name",
"redash.models.QueryResult.get_latest",
"json.loads",
"redash.models.Event.create",
"datetime.datetime.utcfromtimestamp",
"redash.models.DataSourceGroup.create",
"redash.models.Group.create",
"datetime.timedelta",
"datetime.datetime.now",
"redash.models.QueryResult.store_result",
"redash.models.Query.search",
"dateutil.parser.parse",
"datetime.datetime.today",
"redash.models.Event.record",
"mock.patch",
"redash.models.Dashboard.all",
"redash.models.QueryResult.unused",
"redash.models.should_schedule_next",
"redash.utils.gen_query_hash",
"redash.models.Query.recent",
"redash.models.Query.outdated_queries",
"redash.models.Query.get_by_id"
] |
[((24364, 24461), 'redash.models.DataSourceGroup.create', 'models.DataSourceGroup.create', ([], {'group': 'd.g1', 'data_source': 'd.ds1', 'permissions': "['create', 'view']"}), "(group=d.g1, data_source=d.ds1, permissions=[\n 'create', 'view'])\n", (24393, 24461), False, 'from redash import models\n'), ((24461, 24558), 'redash.models.DataSourceGroup.create', 'models.DataSourceGroup.create', ([], {'group': 'd.g2', 'data_source': 'd.ds2', 'permissions': "['create', 'view']"}), "(group=d.g2, data_source=d.ds2, permissions=[\n 'create', 'view'])\n", (24490, 24558), False, 'from redash import models\n'), ((896, 924), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['q.id'], {}), '(q.id)\n', (918, 924), False, 'from redash import models\n'), ((1233, 1293), 'redash.models.Query.search', 'models.Query.search', (['u"""seåřċħ"""', '[self.factory.default_group]'], {}), "(u'seåřċħ', [self.factory.default_group])\n", (1252, 1293), False, 'from redash import models\n'), ((1685, 1745), 'redash.models.Query.search', 'models.Query.search', (['u"""seåřċħ"""', '[self.factory.default_group]'], {}), "(u'seåřċħ', [self.factory.default_group])\n", (1704, 1745), False, 'from redash import models\n'), ((2373, 2434), 'redash.models.Group.create', 'models.Group.create', ([], {'org': 'self.factory.org', 'name': '"""Other Group"""'}), "(org=self.factory.org, name='Other Group')\n", (2392, 2434), False, 'from redash import models\n'), ((2746, 2806), 'redash.models.Query.search', 'models.Query.search', (['"""Testing"""', '[self.factory.default_group]'], {}), "('Testing', [self.factory.default_group])\n", (2765, 2806), False, 'from redash import models\n'), ((2935, 3008), 'redash.models.Query.search', 'models.Query.search', (['"""Testing"""', '[other_group, self.factory.default_group]'], {}), "('Testing', [other_group, self.factory.default_group])\n", (2954, 3008), False, 'from redash import models\n'), ((3133, 3178), 'redash.models.Query.search', 'models.Query.search', 
(['"""Testing"""', '[other_group]'], {}), "('Testing', [other_group])\n", (3152, 3178), False, 'from redash import models\n'), ((4509, 4632), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.factory.user', 'action': '"""edit"""', 'object_type': '"""query"""', 'object_id': 'q1.id'}), "(org=self.factory.org, user=self.factory.user, action=\n 'edit', object_type='query', object_id=q1.id)\n", (4528, 4632), False, 'from redash import models\n'), ((4674, 4723), 'redash.models.Query.recent', 'models.Query.recent', (['[self.factory.default_group]'], {}), '([self.factory.default_group])\n', (4693, 4723), False, 'from redash import models\n'), ((4924, 5047), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.factory.user', 'action': '"""edit"""', 'object_type': '"""query"""', 'object_id': 'q1.id'}), "(org=self.factory.org, user=self.factory.user, action=\n 'edit', object_type='query', object_id=q1.id)\n", (4943, 5047), False, 'from redash import models\n'), ((5089, 5168), 'redash.models.Query.recent', 'models.Query.recent', (['[self.factory.default_group]'], {'user_id': 'self.factory.user.id'}), '([self.factory.default_group], user_id=self.factory.user.id)\n', (5108, 5168), False, 'from redash import models\n'), ((5259, 5347), 'redash.models.Query.recent', 'models.Query.recent', (['[self.factory.default_group]'], {'user_id': '(self.factory.user.id + 1)'}), '([self.factory.default_group], user_id=self.factory.user\n .id + 1)\n', (5278, 5347), False, 'from redash import models\n'), ((5639, 5762), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.factory.user', 'action': '"""edit"""', 'object_type': '"""query"""', 'object_id': 'q1.id'}), "(org=self.factory.org, user=self.factory.user, action=\n 'edit', object_type='query', object_id=q1.id)\n", (5658, 5762), False, 'from redash import models\n'), ((5794, 5917), 
'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.factory.user', 'action': '"""edit"""', 'object_type': '"""query"""', 'object_id': 'q2.id'}), "(org=self.factory.org, user=self.factory.user, action=\n 'edit', object_type='query', object_id=q2.id)\n", (5813, 5917), False, 'from redash import models\n'), ((5959, 6008), 'redash.models.Query.recent', 'models.Query.recent', (['[self.factory.default_group]'], {}), '([self.factory.default_group])\n', (5978, 6008), False, 'from redash import models\n'), ((6197, 6220), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6218, 6220), False, 'import datetime\n'), ((6441, 6464), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6462, 6464), False, 'import datetime\n'), ((6682, 6705), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6703, 6705), False, 'import datetime\n'), ((7050, 7080), 'dateutil.parser.parse', 'date_parse', (['"""2015-10-16 20:10"""'], {}), "('2015-10-16 20:10')\n", (7060, 7080), True, 'from dateutil.parser import parse as date_parse\n'), ((7101, 7131), 'dateutil.parser.parse', 'date_parse', (['"""2015-10-15 23:07"""'], {}), "('2015-10-15 23:07')\n", (7111, 7131), True, 'from dateutil.parser import parse as date_parse\n'), ((7845, 7876), 'redash.models.Query.outdated_queries', 'models.Query.outdated_queries', ([], {}), '()\n', (7874, 7876), False, 'from redash import models\n'), ((8308, 8339), 'redash.models.Query.outdated_queries', 'models.Query.outdated_queries', ([], {}), '()\n', (8337, 8339), False, 'from redash import models\n'), ((8749, 8780), 'redash.models.Query.outdated_queries', 'models.Query.outdated_queries', ([], {}), '()\n', (8778, 8780), False, 'from redash import models\n'), ((9266, 9297), 'redash.models.Query.outdated_queries', 'models.Query.outdated_queries', ([], {}), '()\n', (9295, 9297), False, 'from redash import models\n'), ((9572, 9604), 'redash.models.Query.get_by_id', 
'models.Query.get_by_id', (['query.id'], {}), '(query.id)\n', (9594, 9604), False, 'from redash import models\n'), ((9869, 9990), 'redash.models.QueryResult.store_result', 'models.QueryResult.store_result', (['query.org', 'query.data_source.id', 'query.query_hash', 'query.query', '"""1"""', '(123)', 'yesterday'], {}), "(query.org, query.data_source.id, query.\n query_hash, query.query, '1', 123, yesterday)\n", (9900, 9990), False, 'from redash import models\n'), ((10826, 10858), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query.id'], {}), '(query.id)\n', (10848, 10858), False, 'from redash import models\n'), ((12620, 12691), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['self.factory.data_source', '"""SELECT 1"""', '(60)'], {}), "(self.factory.data_source, 'SELECT 1', 60)\n", (12649, 12691), False, 'from redash import models\n'), ((12866, 12925), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['qr.data_source', 'qr.query', '(60)'], {}), '(qr.data_source, qr.query, 60)\n', (12895, 12925), False, 'from redash import models\n'), ((13111, 13173), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['qr.data_source.id', 'qr.query', '(60)'], {}), '(qr.data_source.id, qr.query, 60)\n', (13140, 13173), False, 'from redash import models\n'), ((13436, 13492), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['data_source', 'qr.query', '(60)'], {}), '(data_source, qr.query, 60)\n', (13465, 13492), False, 'from redash import models\n'), ((13774, 13841), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['qr.data_source', 'qr.query'], {'max_age': '(60)'}), '(qr.data_source, qr.query, max_age=60)\n', (13803, 13841), False, 'from redash import models\n'), ((14125, 14193), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['qr.data_source', 'qr.query'], {'max_age': '(120)'}), '(qr.data_source, qr.query, 
max_age=120)\n', (14154, 14193), False, 'from redash import models\n'), ((14536, 14595), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['qr.data_source', 'qr.query', '(60)'], {}), '(qr.data_source, qr.query, 60)\n', (14565, 14595), False, 'from redash import models\n'), ((15058, 15117), 'redash.models.QueryResult.get_latest', 'models.QueryResult.get_latest', (['qr.data_source', 'qr.query', '(-1)'], {}), '(qr.data_source, qr.query, -1)\n', (15087, 15117), False, 'from redash import models\n'), ((16281, 16324), 'redash.models.Group.create', 'models.Group.create', ([], {'name': '"""g1"""', 'org': 'ds1.org'}), "(name='g1', org=ds1.org)\n", (16300, 16324), False, 'from redash import models\n'), ((16342, 16385), 'redash.models.Group.create', 'models.Group.create', ([], {'name': '"""g2"""', 'org': 'ds1.org'}), "(name='g2', org=ds1.org)\n", (16361, 16385), False, 'from redash import models\n'), ((16395, 16492), 'redash.models.DataSourceGroup.create', 'models.DataSourceGroup.create', ([], {'group': 'group1', 'data_source': 'ds1', 'permissions': "['create', 'view']"}), "(group=group1, data_source=ds1, permissions=[\n 'create', 'view'])\n", (16424, 16492), False, 'from redash import models\n'), ((16496, 16593), 'redash.models.DataSourceGroup.create', 'models.DataSourceGroup.create', ([], {'group': 'group2', 'data_source': 'ds2', 'permissions': "['create', 'view']"}), "(group=group2, data_source=ds2, permissions=[\n 'create', 'view'])\n", (16525, 16593), False, 'from redash import models\n'), ((17316, 17370), 'redash.models.Group.create', 'models.Group.create', ([], {'id': '"""999"""', 'name': '"""g1"""', 'org': 'user.org'}), "(id='999', name='g1', org=user.org)\n", (17335, 17370), False, 'from redash import models\n'), ((17702, 17752), 'redash.models.Group.create', 'models.Group.create', ([], {'id': '"""999"""', 'name': '"""g1"""', 'org': 'org1'}), "(id='999', name='g1', org=org1)\n", (17721, 17752), False, 'from redash import models\n'), ((17779, 
17829), 'redash.models.Group.create', 'models.Group.create', ([], {'id': '"""888"""', 'name': '"""g2"""', 'org': 'org1'}), "(id='888', name='g2', org=org1)\n", (17798, 17829), False, 'from redash import models\n'), ((17859, 17909), 'redash.models.Group.create', 'models.Group.create', ([], {'id': '"""777"""', 'name': '"""g1"""', 'org': 'org2'}), "(id='777', name='g1', org=org2)\n", (17878, 17909), False, 'from redash import models\n'), ((17928, 17973), 'redash.models.Group.find_by_name', 'models.Group.find_by_name', (['org1', "['g1', 'g2']"], {}), "(org1, ['g1', 'g2'])\n", (17953, 17973), False, 'from redash import models\n'), ((18210, 18260), 'redash.models.Group.create', 'models.Group.create', ([], {'id': '"""999"""', 'name': '"""g1"""', 'org': 'org1'}), "(id='999', name='g1', org=org1)\n", (18229, 18260), False, 'from redash import models\n'), ((18578, 18604), 'redash.utils.gen_query_hash', 'gen_query_hash', (['self.query'], {}), '(self.query)\n', (18592, 18604), False, 'from redash.utils import gen_query_hash, utcnow\n'), ((18654, 18662), 'redash.utils.utcnow', 'utcnow', ([], {}), '()\n', (18660, 18662), False, 'from redash.utils import gen_query_hash, utcnow\n'), ((18755, 18904), 'redash.models.QueryResult.store_result', 'models.QueryResult.store_result', (['self.data_source.org_id', 'self.data_source.id', 'self.query_hash', 'self.query', 'self.data', 'self.runtime', 'self.utcnow'], {}), '(self.data_source.org_id, self.data_source.\n id, self.query_hash, self.query, self.data, self.runtime, self.utcnow)\n', (18786, 18904), False, 'from redash import models\n'), ((19647, 19796), 'redash.models.QueryResult.store_result', 'models.QueryResult.store_result', (['self.data_source.org_id', 'self.data_source.id', 'self.query_hash', 'self.query', 'self.data', 'self.runtime', 'self.utcnow'], {}), '(self.data_source.org_id, self.data_source.\n id, self.query_hash, self.query, self.data, self.runtime, self.utcnow)\n', (19678, 19796), False, 'from redash import models\n'), 
((20502, 20651), 'redash.models.QueryResult.store_result', 'models.QueryResult.store_result', (['self.data_source.org_id', 'self.data_source.id', 'self.query_hash', 'self.query', 'self.data', 'self.runtime', 'self.utcnow'], {}), '(self.data_source.org_id, self.data_source.\n id, self.query_hash, self.query, self.data, self.runtime, self.utcnow)\n', (20533, 20651), False, 'from redash import models\n'), ((21406, 21555), 'redash.models.QueryResult.store_result', 'models.QueryResult.store_result', (['self.data_source.org_id', 'self.data_source.id', 'self.query_hash', 'self.query', 'self.data', 'self.runtime', 'self.utcnow'], {}), '(self.data_source.org_id, self.data_source.\n id, self.query_hash, self.query, self.data, self.runtime, self.utcnow)\n', (21437, 21555), False, 'from redash import models\n'), ((22131, 22176), 'datetime.datetime.utcfromtimestamp', 'datetime.datetime.utcfromtimestamp', (['timestamp'], {}), '(timestamp)\n', (22165, 22176), False, 'import datetime\n'), ((22578, 22608), 'redash.models.Event.record', 'models.Event.record', (['raw_event'], {}), '(raw_event)\n', (22597, 22608), False, 'from redash import models\n'), ((23091, 23121), 'redash.models.Event.record', 'models.Event.record', (['raw_event'], {}), '(raw_event)\n', (23110, 23121), False, 'from redash import models\n'), ((23464, 23501), 'json.dumps', 'json.dumps', (['[[widget.id, widget2.id]]'], {}), '([[widget.id, widget2.id]])\n', (23474, 23501), False, 'import json\n'), ((23847, 23884), 'json.dumps', 'json.dumps', (['[[widget.id, widget2.id]]'], {}), '([[widget.id, widget2.id]])\n', (23857, 23884), False, 'import json\n'), ((27558, 27689), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'self.w1.dashboard.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=self.w1.dashboard.id)\n", (27577, 27689), False, 'from redash 
import models\n'), ((28152, 28268), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'd1.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=d1.id)\n", (28171, 28268), False, 'from redash import models\n'), ((28696, 28822), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'w1.dashboard.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=w1.dashboard.id)\n", (28715, 28822), False, 'from redash import models\n'), ((29097, 29228), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'self.w1.dashboard.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=self.w1.dashboard.id)\n", (29116, 29228), False, 'from redash import models\n'), ((29261, 29392), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u2', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'self.w2.dashboard.id'}), "(org=self.factory.org, user=self.u2, action='view',\n object_type='dashboard', object_id=self.w2.dashboard.id)\n", (29280, 29392), False, 'from redash import models\n'), ((29425, 29556), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'self.w5.dashboard.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=self.w5.dashboard.id)\n", (29444, 29556), False, 'from redash import models\n'), ((29589, 29720), 'redash.models.Event.create', 'models.Event.create', 
([], {'org': 'self.factory.org', 'user': 'self.u2', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'self.w5.dashboard.id'}), "(org=self.factory.org, user=self.u2, action='view',\n object_type='dashboard', object_id=self.w5.dashboard.id)\n", (29608, 29720), False, 'from redash import models\n'), ((30541, 30672), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'self.w1.dashboard.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=self.w1.dashboard.id)\n", (30560, 30672), False, 'from redash import models\n'), ((30705, 30836), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'self.w1.dashboard.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=self.w1.dashboard.id)\n", (30724, 30836), False, 'from redash import models\n'), ((31123, 31249), 'redash.models.Event.create', 'models.Event.create', ([], {'org': 'self.factory.org', 'user': 'self.u1', 'action': '"""view"""', 'object_type': '"""dashboard"""', 'object_id': 'w1.dashboard.id'}), "(org=self.factory.org, user=self.u1, action='view',\n object_type='dashboard', object_id=w1.dashboard.id)\n", (31142, 31249), False, 'from redash import models\n'), ((3657, 3748), 'redash.models.Query.search', 'models.Query.search', (['"""Testing"""', '[self.factory.default_group, other_group, second_group]'], {}), "('Testing', [self.factory.default_group, other_group,\n second_group])\n", (3676, 3748), False, 'from redash import models\n'), ((4130, 4155), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (4153, 4155), False, 'import datetime\n'), ((4158, 4184), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4176, 
4184), False, 'import datetime\n'), ((6251, 6278), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (6269, 6278), False, 'import datetime\n'), ((6303, 6358), 'redash.models.should_schedule_next', 'models.should_schedule_next', (['two_hours_ago', 'now', '"""3600"""'], {}), "(two_hours_ago, now, '3600')\n", (6330, 6358), False, 'from redash import models\n'), ((6498, 6528), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(30)'}), '(minutes=30)\n', (6516, 6528), False, 'import datetime\n'), ((6554, 6612), 'redash.models.should_schedule_next', 'models.should_schedule_next', (['half_an_hour_ago', 'now', '"""3600"""'], {}), "(half_an_hour_ago, now, '3600')\n", (6581, 6612), False, 'from redash import models\n'), ((6732, 6758), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (6750, 6758), False, 'import datetime\n'), ((6794, 6821), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(3)'}), '(hours=3)\n', (6812, 6821), False, 'import datetime\n'), ((6915, 6974), 'redash.models.should_schedule_next', 'models.should_schedule_next', (['yesterday', 'now', 'scheduled_time'], {}), '(yesterday, now, scheduled_time)\n', (6942, 6974), False, 'from redash import models\n'), ((7184, 7237), 'redash.models.should_schedule_next', 'models.should_schedule_next', (['yesterday', 'now', 'schedule'], {}), '(yesterday, now, schedule)\n', (7211, 7237), False, 'from redash import models\n'), ((7505, 7557), 'redash.models.should_schedule_next', 'models.should_schedule_next', (['previous', 'now', 'schedule'], {}), '(previous, now, schedule)\n', (7532, 7557), False, 'from redash import models\n'), ((8011, 8034), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8032, 8034), False, 'import datetime\n'), ((8037, 8064), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (8055, 8064), False, 'import datetime\n'), ((8446, 8469), 'datetime.datetime.now', 
'datetime.datetime.now', ([], {}), '()\n', (8467, 8469), False, 'import datetime\n'), ((8472, 8502), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(30)'}), '(minutes=30)\n', (8490, 8502), False, 'import datetime\n'), ((8921, 8929), 'redash.utils.utcnow', 'utcnow', ([], {}), '()\n', (8927, 8929), False, 'from redash.utils import gen_query_hash, utcnow\n'), ((8932, 8962), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(30)'}), '(minutes=30)\n', (8950, 8962), False, 'import datetime\n'), ((9790, 9813), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (9811, 9813), False, 'import datetime\n'), ((9816, 9842), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (9834, 9842), False, 'import datetime\n'), ((10216, 10247), 'redash.models.Query.outdated_queries', 'models.Query.outdated_queries', ([], {}), '()\n', (10245, 10247), False, 'from redash import models\n'), ((10386, 10417), 'redash.models.Query.outdated_queries', 'models.Query.outdated_queries', ([], {}), '()\n', (10415, 10417), False, 'from redash import models\n'), ((11047, 11105), 'mock.patch', 'mock.patch', (['"""redash.query_runner.pg.PostgreSQL.get_schema"""'], {}), "('redash.query_runner.pg.PostgreSQL.get_schema')\n", (11057, 11105), False, 'import mock\n'), ((11414, 11472), 'mock.patch', 'mock.patch', (['"""redash.query_runner.pg.PostgreSQL.get_schema"""'], {}), "('redash.query_runner.pg.PostgreSQL.get_schema')\n", (11424, 11472), False, 'import mock\n'), ((11913, 11971), 'mock.patch', 'mock.patch', (['"""redash.query_runner.pg.PostgreSQL.get_schema"""'], {}), "('redash.query_runner.pg.PostgreSQL.get_schema')\n", (11923, 11971), False, 'import mock\n'), ((13621, 13644), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (13642, 13644), False, 'import datetime\n'), ((13647, 13673), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (13665, 13673), False, 'import datetime\n'), ((13968, 
13991), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (13989, 13991), False, 'import datetime\n'), ((13994, 14024), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (14012, 14024), False, 'import datetime\n'), ((14327, 14350), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14348, 14350), False, 'import datetime\n'), ((14353, 14383), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (14371, 14383), False, 'import datetime\n'), ((14752, 14775), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14773, 14775), False, 'import datetime\n'), ((14778, 14807), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(-100)'}), '(days=-100)\n', (14796, 14807), False, 'import datetime\n'), ((14905, 14928), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14926, 14928), False, 'import datetime\n'), ((14931, 14958), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(-1)'}), '(days=-1)\n', (14949, 14958), False, 'import datetime\n'), ((15298, 15321), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15319, 15321), False, 'import datetime\n'), ((15324, 15351), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(14)'}), '(days=14)\n', (15342, 15351), False, 'import datetime\n'), ((15579, 15606), 'redash.models.QueryResult.unused', 'models.QueryResult.unused', ([], {}), '()\n', (15604, 15606), False, 'from redash import models\n'), ((15637, 15664), 'redash.models.QueryResult.unused', 'models.QueryResult.unused', ([], {}), '()\n', (15662, 15664), False, 'from redash import models\n'), ((15748, 15771), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15769, 15771), False, 'import datetime\n'), ((15774, 15801), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(14)'}), '(days=14)\n', (15792, 15801), False, 'import datetime\n'), ((15976, 16003), 
'redash.models.QueryResult.unused', 'models.QueryResult.unused', ([], {}), '()\n', (16001, 16003), False, 'from redash import models\n'), ((16045, 16072), 'redash.models.QueryResult.unused', 'models.QueryResult.unused', ([], {}), '()\n', (16070, 16072), False, 'from redash import models\n'), ((16729, 16763), 'redash.models.Query.all_queries', 'models.Query.all_queries', (['[group1]'], {}), '([group1])\n', (16753, 16763), False, 'from redash import models\n'), ((16794, 16828), 'redash.models.Query.all_queries', 'models.Query.all_queries', (['[group1]'], {}), '([group1])\n', (16818, 16828), False, 'from redash import models\n'), ((16856, 16898), 'redash.models.Query.all_queries', 'models.Query.all_queries', (['[group1, group2]'], {}), '([group1, group2])\n', (16880, 16898), False, 'from redash import models\n'), ((16926, 16968), 'redash.models.Query.all_queries', 'models.Query.all_queries', (['[group1, group2]'], {}), '([group1, group2])\n', (16950, 16968), False, 'from redash import models\n'), ((18290, 18339), 'redash.models.Group.find_by_name', 'models.Group.find_by_name', (['org1', "['non-existing']"], {}), "(org1, ['non-existing'])\n", (18315, 18339), False, 'from redash import models\n'), ((23152, 23191), 'json.loads', 'json.loads', (['event.additional_properties'], {}), '(event.additional_properties)\n', (23162, 23191), False, 'import json\n'), ((23594, 23620), 'json.dumps', 'json.dumps', (['[[widget2.id]]'], {}), '([[widget2.id]])\n', (23604, 23620), False, 'import json\n'), ((25361, 25416), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['d1.user.org', 'd1.user.groups', 'None'], {}), '(d1.user.org, d1.user.groups, None)\n', (25381, 25416), False, 'from redash import models\n'), ((25444, 25494), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['d1.user.org', '[0]', 'd1.user.id'], {}), '(d1.user.org, [0], d1.user.id)\n', (25464, 25494), False, 'from redash import models\n'), ((25593, 25648), 'redash.models.Dashboard.all', 
'models.Dashboard.all', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (25613, 25648), False, 'from redash import models\n'), ((25691, 25746), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u2.org', 'self.u2.groups', 'None'], {}), '(self.u2.org, self.u2.groups, None)\n', (25711, 25746), False, 'from redash import models\n'), ((25792, 25847), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u2.org', 'self.u2.groups', 'None'], {}), '(self.u2.org, self.u2.groups, None)\n', (25812, 25847), False, 'from redash import models\n'), ((25893, 25948), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (25913, 25948), False, 'from redash import models\n'), ((26025, 26080), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u2.org', 'self.u2.groups', 'None'], {}), '(self.u2.org, self.u2.groups, None)\n', (26045, 26080), False, 'from redash import models\n'), ((26234, 26289), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (26254, 26289), False, 'from redash import models\n'), ((26431, 26492), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u1.org', 'self.u1.groups', 'self.u1.id'], {}), '(self.u1.org, self.u1.groups, self.u1.id)\n', (26451, 26492), False, 'from redash import models\n'), ((26520, 26570), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u1.org', '[0]', 'self.u1.id'], {}), '(self.u1.org, [0], self.u1.id)\n', (26540, 26570), False, 'from redash import models\n'), ((26601, 26662), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u2.org', 'self.u2.groups', 'self.u2.id'], {}), '(self.u2.org, self.u2.groups, self.u2.id)\n', (26621, 26662), False, 'from redash import models\n'), ((26819, 26874), 'redash.models.Dashboard.all', 'models.Dashboard.all', 
(['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (26839, 26874), False, 'from redash import models\n'), ((26912, 26967), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u2.org', 'self.u2.groups', 'None'], {}), '(self.u2.org, self.u2.groups, None)\n', (26932, 26967), False, 'from redash import models\n'), ((27200, 27255), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (27220, 27255), False, 'from redash import models\n'), ((27296, 27345), 'redash.models.Dashboard.all', 'models.Dashboard.all', (['user.org', 'user.groups', 'None'], {}), '(user.org, user.groups, None)\n', (27316, 27345), False, 'from redash import models\n'), ((27756, 27814), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (27779, 27814), False, 'from redash import models\n'), ((27860, 27918), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (27883, 27918), False, 'from redash import models\n'), ((27964, 28022), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u2.groups', 'None'], {}), '(self.u1.org, self.u2.groups, None)\n', (27987, 28022), False, 'from redash import models\n'), ((28320, 28373), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', '[0]', 'self.u1.id'], {}), '(self.u1.org, [0], self.u1.id)\n', (28343, 28373), False, 'from redash import models\n'), ((28419, 28472), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', '[0]', 'self.u1.id'], {}), '(self.u1.org, [0], self.u1.id)\n', (28442, 28472), False, 'from redash import models\n'), ((28503, 28556), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u2.org', '[0]', 'self.u2.id'], 
{}), '(self.u2.org, [0], self.u2.id)\n', (28526, 28556), False, 'from redash import models\n'), ((28884, 28937), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', '[0]', 'self.u1.id'], {}), '(self.u1.org, [0], self.u1.id)\n', (28907, 28937), False, 'from redash import models\n'), ((28983, 29036), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', '[0]', 'self.u1.id'], {}), '(self.u1.org, [0], self.u1.id)\n', (29006, 29036), False, 'from redash import models\n'), ((29787, 29866), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u1.groups', 'self.u1.id'], {'for_user': '(True)'}), '(self.u1.org, self.u1.groups, self.u1.id, for_user=True)\n', (29810, 29866), False, 'from redash import models\n'), ((29909, 29988), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u2.org', 'self.u2.groups', 'self.u2.id'], {'for_user': '(True)'}), '(self.u2.org, self.u2.groups, self.u2.id, for_user=True)\n', (29932, 29988), False, 'from redash import models\n'), ((30034, 30113), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u2.org', 'self.u2.groups', 'self.u2.id'], {'for_user': '(True)'}), '(self.u2.org, self.u2.groups, self.u2.id, for_user=True)\n', (30057, 30113), False, 'from redash import models\n'), ((30159, 30238), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u1.groups', 'self.u1.id'], {'for_user': '(True)'}), '(self.u1.org, self.u1.groups, self.u1.id, for_user=True)\n', (30182, 30238), False, 'from redash import models\n'), ((30281, 30360), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u1.groups', 'self.u1.id'], {'for_user': '(True)'}), '(self.u1.org, self.u1.groups, self.u1.id, for_user=True)\n', (30304, 30360), False, 'from redash import models\n'), ((30403, 30482), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u2.org', 'self.u2.groups', 'self.u2.id'], 
{'for_user': '(True)'}), '(self.u2.org, self.u2.groups, self.u2.id, for_user=True)\n', (30426, 30482), False, 'from redash import models\n'), ((30888, 30946), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (30911, 30946), False, 'from redash import models\n'), ((31383, 31441), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['self.u1.org', 'self.u1.groups', 'None'], {}), '(self.u1.org, self.u1.groups, None)\n', (31406, 31441), False, 'from redash import models\n'), ((31482, 31534), 'redash.models.Dashboard.recent', 'models.Dashboard.recent', (['user.org', 'user.groups', 'None'], {}), '(user.org, user.groups, None)\n', (31505, 31534), False, 'from redash import models\n'), ((7301, 7324), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (7322, 7324), False, 'import datetime\n'), ((7377, 7403), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(2)'}), '(days=2)\n', (7395, 7403), False, 'import datetime\n'), ((9151, 9177), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (9169, 9177), False, 'import datetime\n'), ((19934, 19967), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query1.id'], {}), '(query1.id)\n', (19956, 19967), False, 'from redash import models\n'), ((20038, 20071), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query2.id'], {}), '(query2.id)\n', (20060, 20071), False, 'from redash import models\n'), ((20142, 20175), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query3.id'], {}), '(query3.id)\n', (20164, 20175), False, 'from redash import models\n'), ((20789, 20822), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query1.id'], {}), '(query1.id)\n', (20811, 20822), False, 'from redash import models\n'), ((20893, 20926), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query2.id'], {}), '(query2.id)\n', 
(20915, 20926), False, 'from redash import models\n'), ((21000, 21033), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query3.id'], {}), '(query3.id)\n', (21022, 21033), False, 'from redash import models\n'), ((21693, 21726), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query1.id'], {}), '(query1.id)\n', (21715, 21726), False, 'from redash import models\n'), ((21797, 21830), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query2.id'], {}), '(query2.id)\n', (21819, 21830), False, 'from redash import models\n'), ((21904, 21937), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query3.id'], {}), '(query3.id)\n', (21926, 21937), False, 'from redash import models\n')]
|
from py_sexpr.terms import *
from py_sexpr.stack_vm.emit import module_code
import dis
main = define("main", [], const(1))
assert eval(module_code(main))() == 1
main = define(
"main",
[],
block(
call(
var("print"), ite(cmp(const(1), Compare.GT, const(2)), const(1), const(2))
),
const(1),
),
)
assert eval(module_code(main))() == 1
main = call(
var("f"),
block(
call(var("print"), call(var("g"), var("x"))),
assign("x", call(var("add"), var("x"), const((1,)))),
call(var("print"), call(var("g"), var("x"))),
assign("x", call(var("add"), var("x"), const((1,)))),
var("x"),
),
)
add1 = module_code(
define(
None, ["x"], set_item(var("x"), 0, binop(get_item(var("x"), 0), BinOp.ADD, 1))
)
)
x = [5]
f = lambda x: "result {}".format(x)
assert eval(
module_code(main), dict(f=f, add=lambda a, b: a + list(b), x=list(x), g=eval(add1))
) == f([x[0] + 2, 1, 1])
main = block(
define(
"MyType",
["x", "y", "this"],
block(
set_item(var("this"), const("x"), var("x")),
set_item(var("this"), const("y"), var("y")),
var("this"),
),
),
assign_star("inst", new(var("MyType"), const(1), const(2))),
isa(var("inst"), var("MyType")),
)
code = module_code(main)
assert eval(code) is True
xs = []
main = for_range("a", 1, 10, call(var("print"), var("a")))
exec(module_code(main), dict(print=xs.append))
assert xs == [1, 2, 3, 4, 5, 6, 7, 8, 9]
main = for_in("a", record(("a", 1), ("b", "bb")), call(var("print"), var("a")))
xs = []
exec(module_code(main), dict(print=xs.append))
assert set(xs) == {"a", "b"}
main = call((define(None, ["x"], ret(mktuple(var("x"), var("x"))))), 1)
assert eval(module_code(main)) == (1, 1)
main = record()
assert eval(module_code(main)) == {}
RES = None
try:
exec(module_code(throw(call(var("Exception"), "abc"))))
except Exception as e:
RES = e.args[0]
assert RES == "abc"
main = for_in("a", record(("a", const(1)), ("b", const("bb"))), block())
exec(module_code(main))
main = define("a", [], block(ret()))
assert eval(module_code(main))() is None
main = define("a", [], block(ret(5)))
assert eval(module_code(main))() == 5
main = define("a", [], metadata(2, 3, "a.txt", ret(const(2))))
assert eval(module_code(main))() == 2
main = define(
None, ["x"], define(None, ["y"], binop(var("x"), BinOp.MULTIPLY, var("y")))
)
code = module_code(main)
# dis.dis(code)
assert eval(code)(7)(3) == 21
main = define(
None,
["x"],
define(None, ["y"], define(None, ["z"], mktuple(var("x"), var("y"), var("z")))),
)
code = module_code(main)
# dis.dis(code)
assert eval(code)(7)(0)(2) == (7, 0, 2)
main = define(
None,
["x"],
block(
assign("x'", var("x")),
define(
None, ["y"], define(None, ["z"], mktuple(var("x'"), var("y"), var("z")))
),
),
)
code = module_code(main)
# dis.dis(code)
assert eval(code)(7)(0)(2) == (7, 0, 2)
main = define(
None,
["x"],
block(
assign("x", var("x")),
define(None, ["y"], define(None, ["z"], mktuple(var("x"), var("y"), var("z")))),
),
)
code = module_code(main)
# dis.dis(code)
assert eval(code)(7)(0)(2) == (7, 0, 2)
main = define(None, ["x"], block(define("f", [], var("x")), var("f")))
code = module_code(main)
# dis.dis(code)
assert eval(code)(1)() == 1
main = document(
"the doc",
define(
None,
["x"],
block(
assign_star("y", binop(var("x"), BinOp.MODULO, 17)),
assign_star("k", binop(var("y"), BinOp.MULTIPLY, 3)),
var("k"),
),
),
)
code = module_code(main)
# dis.dis(code)
f = eval(code)
assert f.__doc__ == "the doc"
assert f(25) == 24
main = block(
assign_star("x", record(a=1)),
assign_star("y", lens(var("x"), record(a=2, b=3))),
var("y"),
)
code = module_code(main)
# dis.dis(code)
assert eval(code) == dict(a=2, b=3)
main = block(
assign_star("x", record(a=1)),
assign_star("y", lens(var("x"), record(b=3))),
var("y"),
)
code = module_code(main)
# dis.dis(code)
assert eval(code) == dict(a=1, b=3)
main = set_attr(var("o"), "h", 1)
code = module_code(main)
# dis.dis(code)
class O:
pass
exec(code, dict(o=O))
assert getattr(O, "h") == 1
main = define(
None,
["cond"],
loop(
cmp(var("cond"), Compare.GT, 0),
block(assign("cond", binop(var("cond"), BinOp.SUBTRACT, 5)), var("cond")),
),
)
code = module_code(main)
assert eval(code)(12) == -3
main = define(
None,
["xyz"],
mktuple(
define("y", [], define("y-", [], var("xyz"))),
define("z", [], define("z-", [], var("xyz"))),
),
)
code = module_code(main)
x, y = eval(code, {})(1)
assert x()() == y()() == 1
main = define(
None,
["xyz"],
define(
"y",
[],
define(
"yy",
[],
mktuple(define("y-", [], var("xyz")), define("z-", [], var("xyz")),),
),
),
)
code = module_code(main)
x, y = eval(code, {})(1)()()
assert x() == y() == 1
main = uop(UOp.INVERT, const(5))
code = module_code(main)
assert eval(code) == ~5
main = define(None, ["x"], var("x"), [1])
code = module_code(main)
f = eval(code)
assert f() == 1
main = block(define("f", [], var("f")), var("f"))
code = module_code(main)
f = eval(code)
assert f() == f
def deep_ite(depth):
expr = "good"
for each in range(depth):
expr = ite(False, each, expr)
return expr
main = deep_ite(100)
code = module_code(main)
assert eval(code) == "good"
|
[
"py_sexpr.stack_vm.emit.module_code"
] |
[((1343, 1360), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (1354, 1360), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((2479, 2496), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (2490, 2496), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((2674, 2691), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (2685, 2691), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((2956, 2973), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (2967, 2973), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((3214, 3231), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (3225, 3231), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((3367, 3384), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (3378, 3384), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((3699, 3716), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (3710, 3716), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((3926, 3943), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (3937, 3943), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((4120, 4137), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (4131, 4137), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((4232, 4249), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (4243, 4249), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((4528, 4545), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (4539, 4545), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((4754, 4771), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (4765, 4771), False, 'from 
py_sexpr.stack_vm.emit import module_code\n'), ((5060, 5077), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (5071, 5077), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((5172, 5189), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (5183, 5189), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((5266, 5283), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (5277, 5283), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((5374, 5391), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (5385, 5391), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((5579, 5596), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (5590, 5596), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((1461, 1478), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (1472, 1478), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((1639, 1656), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (1650, 1656), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((2097, 2114), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (2108, 2114), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((877, 894), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (888, 894), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((1795, 1812), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (1806, 1812), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((1853, 1870), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (1864, 1870), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((137, 154), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), 
'(main)\n', (148, 154), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((363, 380), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (374, 380), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((2166, 2183), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (2177, 2183), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((2246, 2263), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (2257, 2263), False, 'from py_sexpr.stack_vm.emit import module_code\n'), ((2348, 2365), 'py_sexpr.stack_vm.emit.module_code', 'module_code', (['main'], {}), '(main)\n', (2359, 2365), False, 'from py_sexpr.stack_vm.emit import module_code\n')]
|
import os
from typing import List
from enum import IntEnum
import cv2 as cv
import numpy as np
from pydicom import dcmread
from pydicom.dataset import Dataset
from pydicom.sequence import Sequence
from rt_utils.utils import ROIData, SOPClassUID
def load_sorted_image_series(dicom_series_path: str):
"""
File contains helper methods for loading / formatting DICOM images and contours
"""
series_data = load_dcm_images_from_path(dicom_series_path)
if len(series_data) == 0:
raise Exception("No DICOM Images found in input path")
# Sort slices in ascending order
series_data.sort(key=lambda ds: ds.ImagePositionPatient[2], reverse=False)
return series_data
def load_dcm_images_from_path(dicom_series_path: str) -> List[Dataset]:
series_data = []
for root, _, files in os.walk(dicom_series_path):
for file in files:
try:
ds = dcmread(os.path.join(root, file))
if hasattr(ds, 'pixel_array'):
series_data.append(ds)
except Exception:
# Not a valid DICOM file
continue
return series_data
def get_contours_coords(mask_slice: np.ndarray, series_slice: Dataset, roi_data: ROIData):
# Create pin hole mask if specified
if roi_data.use_pin_hole:
mask_slice = create_pin_hole_mask(mask_slice, roi_data.approximate_contours)
# Get contours from mask
contours, _ = find_mask_contours(mask_slice, roi_data.approximate_contours)
validate_contours(contours)
# Format for DICOM
formatted_contours = []
for contour in contours:
contour = np.array(contour) # Type cannot be a list
translated_contour = translate_contour_to_data_coordinants(contour, series_slice)
dicom_formatted_contour = format_contour_for_dicom(translated_contour, series_slice)
formatted_contours.append(dicom_formatted_contour)
return formatted_contours
def find_mask_contours(mask: np.ndarray, approximate_contours: bool):
approximation_method = cv.CHAIN_APPROX_SIMPLE if approximate_contours else cv.CHAIN_APPROX_NONE
contours, hierarchy = cv.findContours(mask.astype(np.uint8), cv.RETR_TREE, approximation_method)
# Format extra array out of data
for i, contour in enumerate(contours):
contours[i] = [[pos[0][0], pos[0][1]] for pos in contour]
hierarchy = hierarchy[0] # Format extra array out of data
return contours, hierarchy
def create_pin_hole_mask(mask: np.ndarray, approximate_contours: bool):
"""
Creates masks with pin holes added to contour regions with holes.
This is done so that a given region can be represented by a single contour.
"""
contours, hierarchy = find_mask_contours(mask, approximate_contours)
pin_hole_mask = mask.copy()
# Iterate through the hierarchy, for child nodes, draw a line upwards from the first point
for i, array in enumerate(hierarchy):
parent_contour_index = array[Hierarchy.parent_node]
if parent_contour_index == -1: continue # Contour is not a child
child_contour = contours[i]
line_start = tuple(child_contour[0])
pin_hole_mask = draw_line_upwards_from_point(pin_hole_mask, line_start, fill_value=0)
return pin_hole_mask
def draw_line_upwards_from_point(mask: np.ndarray, start, fill_value: int) -> np.ndarray:
line_width = 2
end = (start[0], start[1] - 1)
mask = mask.astype(np.uint8) # Type that OpenCV expects
# Draw one point at a time until we hit a point that already has the desired value
while mask[end] != fill_value:
cv.line(mask, start, end, fill_value, line_width)
# Update start and end to the next positions
start = end
end = (start[0], start[1] - line_width)
return mask.astype(bool)
def validate_contours(contours: list):
if len(contours) == 0:
raise Exception("Unable to find contour in non empty mask, please check your mask formatting")
def translate_contour_to_data_coordinants(contour, series_slice: Dataset):
offset = series_slice.ImagePositionPatient
spacing = series_slice.PixelSpacing
contour[:, 0] = (contour[:, 0]) * spacing[0] + offset[0]
contour[:, 1] = (contour[:, 1]) * spacing[1] + offset[1]
return contour
def translate_contour_to_pixel_coordinants(contour, series_slice: Dataset):
offset = series_slice.ImagePositionPatient
spacing = series_slice.PixelSpacing
contour[:, 0] = (contour[:, 0] - offset[0]) / spacing[0]
contour[:, 1] = (contour[:, 1] - + offset[1]) / spacing[1]
return contour
def format_contour_for_dicom(contour, series_slice: Dataset):
# DICOM uses a 1d array of x, y, z coords
z_indicies = np.ones((contour.shape[0], 1)) * series_slice.SliceLocation
contour = np.concatenate((contour, z_indicies), axis = 1)
contour = np.ravel(contour)
contour = contour.tolist()
return contour
def create_series_mask_from_contour_sequence(series_data, contour_sequence: Sequence):
mask = create_empty_series_mask(series_data)
# Iterate through each slice of the series, If it is a part of the contour, add the contour mask
for i, series_slice in enumerate(series_data):
slice_contour_data = get_slice_contour_data(series_slice, contour_sequence)
if len(slice_contour_data):
mask[:, :, i] = get_slice_mask_from_slice_contour_data(series_slice, slice_contour_data)
return mask
def get_slice_contour_data(series_slice: Dataset, contour_sequence: Sequence):
slice_contour_data = []
# Traverse through sequence data and get all contour data pertaining to the given slice
for contour in contour_sequence:
for contour_image in contour.ContourImageSequence:
if contour_image.ReferencedSOPInstanceUID == series_slice.SOPInstanceUID:
slice_contour_data.append(contour.ContourData)
return slice_contour_data
def get_slice_mask_from_slice_contour_data(series_slice: Dataset, slice_contour_data):
slice_mask = create_empty_slice_mask(series_slice)
for contour_coords in slice_contour_data:
fill_mask = get_contour_fill_mask(series_slice, contour_coords)
# Invert values in the region to be filled. This will create holes where needed if contours are stacked on top of each other
slice_mask[fill_mask == 1] = np.invert(slice_mask[fill_mask == 1])
return slice_mask
def get_contour_fill_mask(series_slice: Dataset, contour_coords):
# Format data
reshaped_contour_data = np.reshape(contour_coords, [len(contour_coords) // 3, 3])
translated_contour_data = translate_contour_to_pixel_coordinants(reshaped_contour_data, series_slice)
translated_contour_data = np.around(translated_contour_data)
polygon = [np.array([translated_contour_data[:, :2]], dtype=np.int32)]
# Create mask for the region. Fill with 1 for ROI
fill_mask = create_empty_slice_mask(series_slice).astype(np.uint8)
cv.fillPoly(img=fill_mask, pts=polygon, color=1)
return fill_mask
def create_empty_series_mask(series_data):
ref_dicom_image = series_data[0]
mask_dims = (int(ref_dicom_image.Columns), int(ref_dicom_image.Rows), len(series_data))
mask = np.zeros(mask_dims).astype(bool)
return mask
def create_empty_slice_mask(series_slice):
mask_dims = (int(series_slice.Columns), int(series_slice.Rows))
mask = np.zeros(mask_dims).astype(bool)
return mask
class Hierarchy(IntEnum):
"""
Enum class for what the positions in the OpenCV hierarchy array mean
"""
next_node = 0
previous_node = 1
first_child = 2
parent_node = 3
|
[
"cv2.line",
"numpy.invert",
"numpy.ravel",
"os.walk",
"numpy.zeros",
"numpy.ones",
"cv2.fillPoly",
"numpy.around",
"numpy.array",
"os.path.join",
"numpy.concatenate"
] |
[((823, 849), 'os.walk', 'os.walk', (['dicom_series_path'], {}), '(dicom_series_path)\n', (830, 849), False, 'import os\n'), ((4837, 4882), 'numpy.concatenate', 'np.concatenate', (['(contour, z_indicies)'], {'axis': '(1)'}), '((contour, z_indicies), axis=1)\n', (4851, 4882), True, 'import numpy as np\n'), ((4899, 4916), 'numpy.ravel', 'np.ravel', (['contour'], {}), '(contour)\n', (4907, 4916), True, 'import numpy as np\n'), ((6781, 6815), 'numpy.around', 'np.around', (['translated_contour_data'], {}), '(translated_contour_data)\n', (6790, 6815), True, 'import numpy as np\n'), ((7021, 7069), 'cv2.fillPoly', 'cv.fillPoly', ([], {'img': 'fill_mask', 'pts': 'polygon', 'color': '(1)'}), '(img=fill_mask, pts=polygon, color=1)\n', (7032, 7069), True, 'import cv2 as cv\n'), ((1654, 1671), 'numpy.array', 'np.array', (['contour'], {}), '(contour)\n', (1662, 1671), True, 'import numpy as np\n'), ((3649, 3698), 'cv2.line', 'cv.line', (['mask', 'start', 'end', 'fill_value', 'line_width'], {}), '(mask, start, end, fill_value, line_width)\n', (3656, 3698), True, 'import cv2 as cv\n'), ((4763, 4793), 'numpy.ones', 'np.ones', (['(contour.shape[0], 1)'], {}), '((contour.shape[0], 1))\n', (4770, 4793), True, 'import numpy as np\n'), ((6413, 6450), 'numpy.invert', 'np.invert', (['slice_mask[fill_mask == 1]'], {}), '(slice_mask[fill_mask == 1])\n', (6422, 6450), True, 'import numpy as np\n'), ((6831, 6889), 'numpy.array', 'np.array', (['[translated_contour_data[:, :2]]'], {'dtype': 'np.int32'}), '([translated_contour_data[:, :2]], dtype=np.int32)\n', (6839, 6889), True, 'import numpy as np\n'), ((7276, 7295), 'numpy.zeros', 'np.zeros', (['mask_dims'], {}), '(mask_dims)\n', (7284, 7295), True, 'import numpy as np\n'), ((7449, 7468), 'numpy.zeros', 'np.zeros', (['mask_dims'], {}), '(mask_dims)\n', (7457, 7468), True, 'import numpy as np\n'), ((924, 948), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (936, 948), False, 'import os\n')]
|
from ISR.utils.image_processing import process_array, process_output
class ImageModel:
"""ISR models parent class.
Contains functions that are common across the super-scaling models.
"""
def predict(self, input_image_array):
"""
Processes the image array into a suitable format
and transforms the network output in a suitable image format.
Args:
input_image_array: input image array.
Returns:
sr_img: image output.
"""
lr_img = process_array(input_image_array)
sr_img = self.model.predict(lr_img)
sr_img = process_output(sr_img)
return sr_img
|
[
"ISR.utils.image_processing.process_array",
"ISR.utils.image_processing.process_output"
] |
[((529, 561), 'ISR.utils.image_processing.process_array', 'process_array', (['input_image_array'], {}), '(input_image_array)\n', (542, 561), False, 'from ISR.utils.image_processing import process_array, process_output\n'), ((623, 645), 'ISR.utils.image_processing.process_output', 'process_output', (['sr_img'], {}), '(sr_img)\n', (637, 645), False, 'from ISR.utils.image_processing import process_array, process_output\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import mock
import unittest
from contextlib import contextmanager
from six import BytesIO
from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo
class FakeEntryPoint(object):
def __init__(self, name, goodLoad=True):
self.name = name
self.goodLoad = goodLoad
def load(self):
if not self.goodLoad:
raise SystemError('failed')
class FindEntryPointPluginsTestCase(unittest.TestCase):
@mock.patch('girder.utility.plugin_utilities.iter_entry_points')
def testFindEntryPointPluginsNone(self, iter_entry_points):
iter_entry_points.return_value = []
plugins = {}
findEntryPointPlugins(plugins)
iter_entry_points.assert_called_once_with(group='girder.plugin')
self.assertFalse(plugins)
@mock.patch('pkg_resources.resource_exists')
@mock.patch('girder.utility.plugin_utilities.iter_entry_points')
def testFindEntryPointPluginsNoConfig(self, iter_entry_points, resource_exists):
iter_entry_points.return_value = [FakeEntryPoint(name='entry point plugin')]
resource_exists.return_value = False
plugins = {}
findEntryPointPlugins(plugins)
iter_entry_points.assert_called_once_with(group='girder.plugin')
self.assertIn('entry point plugin', plugins)
@mock.patch('pkg_resources.resource_stream')
@mock.patch('pkg_resources.resource_exists')
@mock.patch('girder.utility.plugin_utilities.iter_entry_points')
def testFindEntryPointPluginsJSONConfig(self, iter_entry_points, resource_exists,
resource_stream):
iter_entry_points.return_value = [FakeEntryPoint(name='entry_point_plugin_json')]
# Load as JSON
resource_exists.return_value = True
@contextmanager
def resource_stream_json_value():
yield BytesIO(b'{"name": "Plugin name from JSON", "description": "Plugin description"}')
resource_stream.return_value = resource_stream_json_value()
plugins = {}
findEntryPointPlugins(plugins)
iter_entry_points.assert_called_once_with(group='girder.plugin')
self.assertIn('entry_point_plugin_json', plugins)
self.assertEqual(plugins['entry_point_plugin_json']['name'], 'Plugin name from JSON')
self.assertEqual(plugins['entry_point_plugin_json']['description'], 'Plugin description')
@mock.patch('pkg_resources.resource_stream')
@mock.patch('pkg_resources.resource_exists')
@mock.patch('girder.utility.plugin_utilities.iter_entry_points')
def testFindEntryPointPluginsBadJSONConfig(self, iter_entry_points, resource_exists,
resource_stream):
iter_entry_points.return_value = [FakeEntryPoint(name='entry_point_plugin_bad_json')]
# Load as JSON
resource_exists.return_value = True
@contextmanager
def resource_stream_json_value():
yield BytesIO(b'{"name": "Plugin name from JSON", bad_json')
resource_stream.return_value = resource_stream_json_value()
plugins = {}
findEntryPointPlugins(plugins)
iter_entry_points.assert_called_once_with(group='girder.plugin')
self.assertIn('entry_point_plugin_bad_json', plugins)
failures = getPluginFailureInfo()
self.assertIn('entry_point_plugin_bad_json', failures)
self.assertIn('traceback', failures['entry_point_plugin_bad_json'])
self.assertIn('ValueError', failures['entry_point_plugin_bad_json']['traceback'])
@mock.patch('pkg_resources.resource_stream')
@mock.patch('pkg_resources.resource_exists')
@mock.patch('girder.utility.plugin_utilities.iter_entry_points')
def testFindEntryPointPluginsYAMLConfig(self, iter_entry_points, resource_exists,
resource_stream):
iter_entry_points.return_value = [FakeEntryPoint(name='entry_point_plugin_yaml')]
# Load as YAML
resource_exists.side_effect = [False, True]
@contextmanager
def resource_stream_yml_value():
yield BytesIO(b'"name": "Plugin name from YAML"\n"description": "Plugin description"')
resource_stream.return_value = resource_stream_yml_value()
plugins = {}
findEntryPointPlugins(plugins)
iter_entry_points.assert_called_once_with(group='girder.plugin')
self.assertIn('entry_point_plugin_yaml', plugins)
self.assertEqual(plugins['entry_point_plugin_yaml']['name'], 'Plugin name from YAML')
self.assertEqual(plugins['entry_point_plugin_yaml']['description'], 'Plugin description')
@mock.patch('pkg_resources.resource_stream')
@mock.patch('pkg_resources.resource_exists')
@mock.patch('girder.utility.plugin_utilities.iter_entry_points')
def testFindEntryPointPluginsBadYAMLConfig(self, iter_entry_points, resource_exists,
resource_stream):
iter_entry_points.return_value = [FakeEntryPoint(name='entry_point_plugin_bad_yaml')]
# Load as YAML
resource_exists.side_effect = [False, True]
@contextmanager
def resource_stream_yaml_value():
yield BytesIO(b'"name": "Plugin name from YAML"\nbad_yaml\n}')
resource_stream.return_value = resource_stream_yaml_value()
plugins = {}
findEntryPointPlugins(plugins)
iter_entry_points.assert_called_once_with(group='girder.plugin')
self.assertIn('entry_point_plugin_bad_yaml', plugins)
failures = getPluginFailureInfo()
self.assertIn('entry_point_plugin_bad_yaml', failures)
self.assertIn('traceback', failures['entry_point_plugin_bad_yaml'])
self.assertIn('ScannerError', failures['entry_point_plugin_bad_yaml']['traceback'])
@mock.patch('girder.utility.plugin_utilities._clearPluginFailureInfo')
@mock.patch('pkg_resources.resource_exists')
@mock.patch('girder.utility.plugin_utilities.iter_entry_points')
def testFindEntryPointPluginsBadLoad(self, iter_entry_points, resource_exists,
_clearPluginFailureInfo):
iter_entry_points.return_value = [FakeEntryPoint(
name='entry_point_plugin_bad_load', goodLoad=False)]
resource_exists.return_value = False
_clearPluginFailureInfo.return_value = None
plugins = {}
findEntryPointPlugins(plugins)
iter_entry_points.assert_called_once_with(group='girder.plugin')
self.assertIn('entry_point_plugin_bad_load', plugins)
failures = getPluginFailureInfo()
self.assertIn('entry_point_plugin_bad_load', failures)
self.assertIn('traceback', failures['entry_point_plugin_bad_load'])
self.assertIn('SystemError', failures['entry_point_plugin_bad_load']['traceback'])
|
[
"six.BytesIO",
"girder.utility.plugin_utilities.getPluginFailureInfo",
"mock.patch",
"girder.utility.plugin_utilities.findEntryPointPlugins"
] |
[((1258, 1321), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities.iter_entry_points"""'], {}), "('girder.utility.plugin_utilities.iter_entry_points')\n", (1268, 1321), False, 'import mock\n'), ((1606, 1649), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_exists"""'], {}), "('pkg_resources.resource_exists')\n", (1616, 1649), False, 'import mock\n'), ((1655, 1718), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities.iter_entry_points"""'], {}), "('girder.utility.plugin_utilities.iter_entry_points')\n", (1665, 1718), False, 'import mock\n'), ((2129, 2172), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_stream"""'], {}), "('pkg_resources.resource_stream')\n", (2139, 2172), False, 'import mock\n'), ((2178, 2221), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_exists"""'], {}), "('pkg_resources.resource_exists')\n", (2188, 2221), False, 'import mock\n'), ((2227, 2290), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities.iter_entry_points"""'], {}), "('girder.utility.plugin_utilities.iter_entry_points')\n", (2237, 2290), False, 'import mock\n'), ((3226, 3269), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_stream"""'], {}), "('pkg_resources.resource_stream')\n", (3236, 3269), False, 'import mock\n'), ((3275, 3318), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_exists"""'], {}), "('pkg_resources.resource_exists')\n", (3285, 3318), False, 'import mock\n'), ((3324, 3387), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities.iter_entry_points"""'], {}), "('girder.utility.plugin_utilities.iter_entry_points')\n", (3334, 3387), False, 'import mock\n'), ((4389, 4432), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_stream"""'], {}), "('pkg_resources.resource_stream')\n", (4399, 4432), False, 'import mock\n'), ((4438, 4481), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_exists"""'], {}), "('pkg_resources.resource_exists')\n", (4448, 4481), False, 'import 
mock\n'), ((4487, 4550), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities.iter_entry_points"""'], {}), "('girder.utility.plugin_utilities.iter_entry_points')\n", (4497, 4550), False, 'import mock\n'), ((5490, 5533), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_stream"""'], {}), "('pkg_resources.resource_stream')\n", (5500, 5533), False, 'import mock\n'), ((5539, 5582), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_exists"""'], {}), "('pkg_resources.resource_exists')\n", (5549, 5582), False, 'import mock\n'), ((5588, 5651), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities.iter_entry_points"""'], {}), "('girder.utility.plugin_utilities.iter_entry_points')\n", (5598, 5651), False, 'import mock\n'), ((6665, 6734), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities._clearPluginFailureInfo"""'], {}), "('girder.utility.plugin_utilities._clearPluginFailureInfo')\n", (6675, 6734), False, 'import mock\n'), ((6740, 6783), 'mock.patch', 'mock.patch', (['"""pkg_resources.resource_exists"""'], {}), "('pkg_resources.resource_exists')\n", (6750, 6783), False, 'import mock\n'), ((6789, 6852), 'mock.patch', 'mock.patch', (['"""girder.utility.plugin_utilities.iter_entry_points"""'], {}), "('girder.utility.plugin_utilities.iter_entry_points')\n", (6799, 6852), False, 'import mock\n'), ((1460, 1490), 'girder.utility.plugin_utilities.findEntryPointPlugins', 'findEntryPointPlugins', (['plugins'], {}), '(plugins)\n', (1481, 1490), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((1964, 1994), 'girder.utility.plugin_utilities.findEntryPointPlugins', 'findEntryPointPlugins', (['plugins'], {}), '(plugins)\n', (1985, 1994), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((2864, 2894), 'girder.utility.plugin_utilities.findEntryPointPlugins', 'findEntryPointPlugins', (['plugins'], {}), '(plugins)\n', (2885, 2894), 
False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((3943, 3973), 'girder.utility.plugin_utilities.findEntryPointPlugins', 'findEntryPointPlugins', (['plugins'], {}), '(plugins)\n', (3964, 3973), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((4131, 4153), 'girder.utility.plugin_utilities.getPluginFailureInfo', 'getPluginFailureInfo', ([], {}), '()\n', (4151, 4153), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((5128, 5158), 'girder.utility.plugin_utilities.findEntryPointPlugins', 'findEntryPointPlugins', (['plugins'], {}), '(plugins)\n', (5149, 5158), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((6217, 6247), 'girder.utility.plugin_utilities.findEntryPointPlugins', 'findEntryPointPlugins', (['plugins'], {}), '(plugins)\n', (6238, 6247), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((6405, 6427), 'girder.utility.plugin_utilities.getPluginFailureInfo', 'getPluginFailureInfo', ([], {}), '()\n', (6425, 6427), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((7254, 7284), 'girder.utility.plugin_utilities.findEntryPointPlugins', 'findEntryPointPlugins', (['plugins'], {}), '(plugins)\n', (7275, 7284), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((7442, 7464), 'girder.utility.plugin_utilities.getPluginFailureInfo', 'getPluginFailureInfo', ([], {}), '()\n', (7462, 7464), False, 'from girder.utility.plugin_utilities import findEntryPointPlugins, getPluginFailureInfo\n'), ((2682, 2769), 'six.BytesIO', 'BytesIO', (['b\'{"name": "Plugin name from JSON", "description": "Plugin description"}\''], {}), '(\n b\'{"name": "Plugin name from JSON", "description": "Plugin description"}\')\n', (2689, 2769), 
False, 'from six import BytesIO\n'), ((3789, 3843), 'six.BytesIO', 'BytesIO', (['b\'{"name": "Plugin name from JSON", bad_json\''], {}), '(b\'{"name": "Plugin name from JSON", bad_json\')\n', (3796, 3843), False, 'from six import BytesIO\n'), ((4949, 5034), 'six.BytesIO', 'BytesIO', (['b\'"name": "Plugin name from YAML"\\n"description": "Plugin description"\''], {}), '(b\'"name": "Plugin name from YAML"\\n"description": "Plugin description"\'\n )\n', (4956, 5034), False, 'from six import BytesIO\n'), ((6061, 6117), 'six.BytesIO', 'BytesIO', (['b\'"name": "Plugin name from YAML"\\nbad_yaml\\n}\''], {}), '(b\'"name": "Plugin name from YAML"\\nbad_yaml\\n}\')\n', (6068, 6117), False, 'from six import BytesIO\n')]
|
# Unit tests for c11.py
# IMPORTS
from c11 import Employee
from c11 import Manager
import unittest
# main
class EmployeeTests(unittest.TestCase):
    """Unit tests covering the Employee class from c11."""

    def setUp(self):
        # A fresh, default-constructed Employee for every test case.
        self.e = Employee()

    def test_get_name(self):
        """A default Employee starts with an empty name."""
        self.assertEqual("", self.e.get_name())

    def test_get_salary(self):
        """A default Employee starts with a zero salary."""
        self.assertEqual(0, self.e.get_salary())

    def test_set_name(self):
        """set_name stores the value later returned by get_name."""
        self.e.set_name("Regular")
        self.assertEqual("Regular", self.e.get_name())

    def test_set_salary(self):
        """set_base_salary stores the value later returned by get_salary."""
        self.e.set_base_salary(70)
        self.assertEqual(70, self.e.get_salary())

    def test_full_constructor(self):
        """The two-argument constructor sets both name and salary."""
        employee = Employee("John", 60)
        self.assertEqual("John", employee.get_name())
        self.assertEqual(60, employee.get_salary())
class ManagerTests(unittest.TestCase):
    """Unit tests covering the Manager subclass from c11."""

    def setUp(self):
        # A fresh, default-constructed Manager for every test case.
        self.m = Manager()

    def test_get_name(self):
        """A default Manager starts with an empty name."""
        self.assertEqual("", self.m.get_name())

    def test_get_salary(self):
        """A default Manager starts with a zero salary."""
        self.assertEqual(0, self.m.get_salary())

    def test_get_bonus(self):
        """A default Manager starts with a zero bonus."""
        self.assertEqual(0, self.m.get_bonus())

    def test_full_constructor(self):
        """The three-argument constructor sets name, salary and bonus."""
        manager = Manager("Michael", 70, 20)
        self.assertEqual("Michael", manager.get_name())
        self.assertEqual(70, manager.get_salary())
        self.assertEqual(20, manager.get_bonus())
# PROGRAM RUN
if __name__ == '__main__':
    # Discover and run every TestCase defined in this module.
    unittest.main()
|
[
"unittest.main",
"c11.Manager",
"c11.Employee"
] |
[((1347, 1362), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1360, 1362), False, 'import unittest\n'), ((188, 198), 'c11.Employee', 'Employee', ([], {}), '()\n', (196, 198), False, 'from c11 import Employee\n'), ((645, 665), 'c11.Employee', 'Employee', (['"""John"""', '(60)'], {}), "('John', 60)\n", (653, 665), False, 'from c11 import Employee\n'), ((837, 846), 'c11.Manager', 'Manager', ([], {}), '()\n', (844, 846), False, 'from c11 import Manager\n'), ((1135, 1161), 'c11.Manager', 'Manager', (['"""Michael"""', '(70)', '(20)'], {}), "('Michael', 70, 20)\n", (1142, 1161), False, 'from c11 import Manager\n')]
|
from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction
from PySide2.QtGui import QIcon, QDrag
from PySide2.QtCore import Signal, QMimeData, Qt, QEvent, QByteArray
import json
from custom_src.custom_list_widgets.ListWidget_NameLineEdit import ListWidget_NameLineEdit
from custom_src.global_tools.strings import shorten
from custom_src.EditVal_Dialog import EditVal_Dialog
class VarsList_VarWidget(QWidget):
    """Single variable representing component for VariablesListWidget.
    See VariablesListWidget for further info."""

    # Emitted once per completed edit of the name line edit (see the
    # re-entrancy guard in name_line_edit_editing_finished).
    name_LE_editing_finished = Signal()

    def __init__(self, vars_list_widget, vars_manager, var):
        """Build the row widget for one variable.

        vars_list_widget: the owning list widget (used for delete/rename).
        vars_manager: manager used to store an edited value.
        var: the variable object being displayed (exposes .name and .val).
        """
        super(VarsList_VarWidget, self).__init__()

        self.vars_manager = vars_manager
        self.var = var
        self.vars_list_widget = vars_list_widget
        # Guard so name_line_edit_editing_finished only runs once even though
        # both editingFinished and unfocused can fire for the same edit.
        self.ignore_name_line_edit_signal = False


        # UI
        main_layout = QHBoxLayout()

        # create icon via label
        variable_icon = QIcon('../resources/pics/variable_picture.png')
        icon_label = QLabel()
        icon_label.setFixedSize(15, 15)
        icon_label.setStyleSheet('border:none;')
        icon_label.setPixmap(variable_icon.pixmap(15, 15))
        main_layout.addWidget(icon_label)

        # create name and data_type line edits
        self.name_line_edit = ListWidget_NameLineEdit(var.name, self)
        self.name_line_edit.setPlaceholderText('name')
        # Disabled until the user double-clicks it (see mouseDoubleClickEvent).
        self.name_line_edit.setEnabled(False)
        self.name_line_edit.editingFinished.connect(self.name_line_edit_editing_finished)
        self.name_line_edit.unfocused.connect(self.name_line_edit_editing_finished)

        name_type_layout = QVBoxLayout()
        name_type_layout.addWidget(self.name_line_edit)
        main_layout.addLayout(name_type_layout)

        self.setLayout(main_layout)


    def mouseDoubleClickEvent(self, event):
        """Double-clicking the name field switches it into edit mode."""
        if event.button() == Qt.LeftButton:
            if self.name_line_edit.geometry().contains(event.pos()):
                self.name_line_edit_double_clicked()
                return


    def mousePressEvent(self, event):
        """Start a drag on left press, carrying this variable as JSON text
        (see get_drag_data for the payload format)."""
        if event.button() == Qt.LeftButton:
            drag = QDrag(self)
            mime_data = QMimeData()
            data_text = self.get_drag_data()
            data = QByteArray(bytes(data_text, 'utf-8'))
            mime_data.setData('text/plain', data)
            drag.setMimeData(mime_data)
            drop_action = drag.exec_()
            return


    def event(self, event):
        """Intercept tooltip requests to show the variable's type and
        (shortened) value; everything else is delegated to QWidget."""
        if event.type() == QEvent.ToolTip:
            val_str = ''
            try:
                val_str = str(self.var.val)
            except Exception as e:
                # str() on arbitrary values may raise; show a placeholder.
                val_str = 'couldn\'t stringify value'
            self.setToolTip('val type: '+str(type(self.var.val))+'\nval: '+shorten(val_str, 3000, line_break=True))

        return QWidget.event(self, event)


    def contextMenuEvent(self, event):
        """Right-click menu offering 'delete' and 'edit value' actions."""
        menu: QMenu = QMenu(self)

        delete_action = QAction('delete')
        delete_action.triggered.connect(self.action_delete_triggered)
        edit_value_action = QAction('edit value')
        edit_value_action.triggered.connect(self.action_edit_val_triggered)

        actions = [delete_action, edit_value_action]
        for a in actions:
            menu.addAction(a)

        menu.exec_(event.globalPos())


    def action_delete_triggered(self):
        """Ask the owning list widget to remove this variable and row."""
        self.vars_list_widget.del_variable(self.var, self)


    def action_edit_val_triggered(self):
        """Open the value-edit dialog; on accept, store the new value via
        the variables manager."""
        edit_var_val_dialog = EditVal_Dialog(self, self.var.val)
        accepted = edit_var_val_dialog.exec_()
        if accepted:
            self.vars_manager.set_var(self.var.name, edit_var_val_dialog.get_val())


    def name_line_edit_double_clicked(self):
        """Enable and focus the name editor; remember which variable is
        currently being renamed."""
        self.name_line_edit.setEnabled(True)
        self.name_line_edit.setFocus()
        self.name_line_edit.selectAll()

        self.vars_list_widget.currently_edited_var = self.var


    def get_drag_data(self):
        """Serialize this variable as JSON text for drag & drop."""
        data = {'type': 'variable',
                'name': self.var.name,
                'value': self.var.val}  # value is probably unnecessary
        data_text = json.dumps(data)
        return data_text


    def name_line_edit_editing_finished(self):
        """Emit name_LE_editing_finished exactly once per edit (guarded
        against re-entry while listeners run)."""
        if self.ignore_name_line_edit_signal:
            return

        self.ignore_name_line_edit_signal = True

        self.name_LE_editing_finished.emit()

        self.ignore_name_line_edit_signal = False
|
[
"PySide2.QtGui.QDrag",
"PySide2.QtWidgets.QWidget.event",
"PySide2.QtWidgets.QMenu",
"PySide2.QtWidgets.QLabel",
"custom_src.custom_list_widgets.ListWidget_NameLineEdit.ListWidget_NameLineEdit",
"PySide2.QtGui.QIcon",
"PySide2.QtWidgets.QVBoxLayout",
"json.dumps",
"PySide2.QtCore.QMimeData",
"custom_src.global_tools.strings.shorten",
"PySide2.QtCore.Signal",
"custom_src.EditVal_Dialog.EditVal_Dialog",
"PySide2.QtWidgets.QAction",
"PySide2.QtWidgets.QHBoxLayout"
] |
[((595, 603), 'PySide2.QtCore.Signal', 'Signal', ([], {}), '()\n', (601, 603), False, 'from PySide2.QtCore import Signal, QMimeData, Qt, QEvent, QByteArray\n'), ((919, 932), 'PySide2.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (930, 932), False, 'from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction\n'), ((990, 1037), 'PySide2.QtGui.QIcon', 'QIcon', (['"""../resources/pics/variable_picture.png"""'], {}), "('../resources/pics/variable_picture.png')\n", (995, 1037), False, 'from PySide2.QtGui import QIcon, QDrag\n'), ((1059, 1067), 'PySide2.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (1065, 1067), False, 'from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction\n'), ((1336, 1375), 'custom_src.custom_list_widgets.ListWidget_NameLineEdit.ListWidget_NameLineEdit', 'ListWidget_NameLineEdit', (['var.name', 'self'], {}), '(var.name, self)\n', (1359, 1375), False, 'from custom_src.custom_list_widgets.ListWidget_NameLineEdit import ListWidget_NameLineEdit\n'), ((1679, 1692), 'PySide2.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (1690, 1692), False, 'from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction\n'), ((2851, 2877), 'PySide2.QtWidgets.QWidget.event', 'QWidget.event', (['self', 'event'], {}), '(self, event)\n', (2864, 2877), False, 'from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction\n'), ((2941, 2952), 'PySide2.QtWidgets.QMenu', 'QMenu', (['self'], {}), '(self)\n', (2946, 2952), False, 'from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction\n'), ((2978, 2995), 'PySide2.QtWidgets.QAction', 'QAction', (['"""delete"""'], {}), "('delete')\n", (2985, 2995), False, 'from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction\n'), ((3095, 3116), 'PySide2.QtWidgets.QAction', 'QAction', (['"""edit value"""'], {}), "('edit value')\n", (3102, 3116), False, 
'from PySide2.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QMenu, QAction\n'), ((3515, 3549), 'custom_src.EditVal_Dialog.EditVal_Dialog', 'EditVal_Dialog', (['self', 'self.var.val'], {}), '(self, self.var.val)\n', (3529, 3549), False, 'from custom_src.EditVal_Dialog import EditVal_Dialog\n'), ((4133, 4149), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4143, 4149), False, 'import json\n'), ((2173, 2184), 'PySide2.QtGui.QDrag', 'QDrag', (['self'], {}), '(self)\n', (2178, 2184), False, 'from PySide2.QtGui import QIcon, QDrag\n'), ((2209, 2220), 'PySide2.QtCore.QMimeData', 'QMimeData', ([], {}), '()\n', (2218, 2220), False, 'from PySide2.QtCore import Signal, QMimeData, Qt, QEvent, QByteArray\n'), ((2794, 2833), 'custom_src.global_tools.strings.shorten', 'shorten', (['val_str', '(3000)'], {'line_break': '(True)'}), '(val_str, 3000, line_break=True)\n', (2801, 2833), False, 'from custom_src.global_tools.strings import shorten\n')]
|
import os
import json
import pickle
from datetime import datetime
import torch
import numpy as np
from pathlib import Path
from mushroom_rl.core import Serializable
from mushroom_rl.core.logger import ConsoleLogger
class BenchmarkLogger(ConsoleLogger):
    """
    Class to handle all interactions with the log directory.

    All results of a benchmark run are stored under <log_dir>/<log_id>/,
    using the fixed file names declared in the constructor.
    """

    def __init__(self, log_dir=None, log_id=None, use_timestamp=True):
        """
        Constructor.

        Args:
            log_dir (str, None): path to the log directory, if not specified defaults to ./logs or to
                /work/scratch/$USER if the second directory exists;
            log_id (str, None): log id, if not specified defaults to: benchmark[_YY-mm-dd-HH-MM-SS];
            use_timestamp (bool, True): select if a timestamp should be appended to the log id.

        """
        # Fixed file names used inside <log_dir>/<log_id>/.
        self._file_J = 'J.pkl'
        self._file_R = 'R.pkl'
        self._file_V = 'V.pkl'
        self._file_entropy = 'entropy.pkl'
        self._file_best_agent = 'best_agent.msh'
        self._file_last_agent = 'last_agent.msh'
        self._file_env_builder = 'environment_builder.pkl'
        self._file_agent_builder = 'agent_builder.pkl'
        self._file_config = 'config.json'
        self._file_stats = 'stats.json'
        self._log_dir = ''
        self._log_id = ''

        # Set and create log directories
        self.set_log_dir(log_dir)
        self.set_log_id(log_id, use_timestamp=use_timestamp)

        super().__init__(self._log_id, Path(self.get_path()), log_file_name='console')

    @staticmethod
    def _ensure_dir(path):
        """Create *path* (with parents) when it does not exist yet and raise
        NotADirectoryError when the path exists but is not a directory."""
        if not os.path.exists(path):
            Path(path).mkdir(parents=True, exist_ok=True)
        if not os.path.isdir(path):
            raise NotADirectoryError("Path to save builders is not valid")

    def set_log_dir(self, log_dir):
        """Select and create the base log directory.

        Falls back to /work/scratch/$USER/logs when that scratch directory
        exists, otherwise to ./logs.
        """
        if log_dir is None:
            default_dir = './logs'
            # Bug fix: os.getenv('USER') returns None when the variable is
            # unset (e.g. on Windows), which made os.path.join raise a
            # TypeError. Default to '' instead.
            scratch_dir = os.path.join('/work', 'scratch', os.getenv('USER', ''))
            if Path(scratch_dir).is_dir():
                log_dir = os.path.join(scratch_dir, 'logs')
            else:
                log_dir = default_dir

        self._ensure_dir(log_dir)
        self._log_dir = log_dir

    def get_log_dir(self):
        """Return the base log directory."""
        return self._log_dir

    def set_log_id(self, log_id, use_timestamp=True):
        """Select and create the run directory <log_dir>/<log_id>/."""
        if log_id is None:
            log_id = 'benchmark'
        if use_timestamp:
            log_id += '_{}'.format(datetime.now().strftime('%Y-%m-%d-%H-%M-%S'))
        self._ensure_dir(os.path.join(self._log_dir, log_id, ''))
        self._log_id = log_id

    def get_log_id(self):
        """Return the run id (directory name under the log directory)."""
        return self._log_id

    def get_path(self, filename=''):
        """Return <log_dir>/<log_id>/<filename> (run directory if empty)."""
        return os.path.join(self._log_dir, self._log_id, filename)

    def get_figure_path(self, filename='', subfolder=None):
        """Return (and create) the path for a plot file under .../plots/."""
        figure_dir = Path(self._log_dir) / self._log_id / 'plots'
        if subfolder is not None:
            figure_dir = figure_dir / subfolder
        if not figure_dir.exists():
            figure_dir.mkdir(parents=True, exist_ok=True)
        return str(figure_dir / filename)

    def save_J(self, J):
        """Persist the list of cumulative discounted rewards."""
        self._save_pickle(self.get_path(self._file_J), J)

    def load_J(self):
        """Load the list of cumulative discounted rewards."""
        return self._load_pickle(self.get_path(self._file_J))

    def save_R(self, R):
        """Persist the list of cumulative rewards."""
        self._save_pickle(self.get_path(self._file_R), R)

    def load_R(self):
        """Load the list of cumulative rewards."""
        return self._load_pickle(self.get_path(self._file_R))

    def save_V(self, V):
        """Persist the list of value-function estimates."""
        self._save_pickle(self.get_path(self._file_V), V)

    def load_V(self):
        """Load the list of value-function estimates."""
        return self._load_pickle(self.get_path(self._file_V))

    def save_entropy(self, entropy):
        """Persist the policy-entropy curve."""
        self._save_pickle(self.get_path(self._file_entropy), entropy)

    def load_entropy(self):
        """Load the policy-entropy curve, or None when it was never saved."""
        path = self.get_path(self._file_entropy)
        if os.path.exists(path):
            return self._load_pickle(path)
        else:
            return None

    def exists_policy_entropy(self):
        """Return True when an entropy file was saved for this run."""
        return Path(self.get_path(self._file_entropy)).exists()

    def save_best_agent(self, agent):
        """Persist the best agent snapshot (mushroom_rl .msh format)."""
        agent.save(self.get_path(self._file_best_agent))

    def save_last_agent(self, agent):
        """Persist the final agent snapshot (mushroom_rl .msh format)."""
        agent.save(self.get_path(self._file_last_agent))

    def load_best_agent(self):
        """Load the best agent snapshot."""
        return Serializable.load(self.get_path(self._file_best_agent))

    def load_last_agent(self):
        """Load the final agent snapshot."""
        return Serializable.load(self.get_path(self._file_last_agent))

    def save_environment_builder(self, env_builder):
        """Persist the environment builder used for this run."""
        self._save_pickle(self.get_path(self._file_env_builder), env_builder)

    def load_environment_builder(self):
        """Load the environment builder used for this run."""
        return self._load_pickle(self.get_path(self._file_env_builder))

    def save_agent_builder(self, agent_builder):
        """Persist the agent builder used for this run."""
        self._save_pickle(self.get_path(self._file_agent_builder), agent_builder)

    def load_agent_builder(self):
        """Load the agent builder used for this run."""
        return self._load_pickle(self.get_path(self._file_agent_builder))

    def save_config(self, config):
        """Persist the run configuration as JSON."""
        self._save_json(self.get_path(self._file_config), config)

    def load_config(self):
        """Load the run configuration from JSON."""
        return self._load_json(self.get_path(self._file_config))

    def save_stats(self, stats):
        """Persist the run statistics as JSON."""
        self._save_json(self.get_path(self._file_stats), stats)

    def load_stats(self):
        """Load the run statistics from JSON."""
        return self._load_json(self.get_path(self._file_stats))

    def save_figure(self, figure, figname, subfolder=None):
        """Save a matplotlib figure as <figname>.pdf under .../plots/."""
        figure.savefig(self.get_figure_path(figname + ".pdf", subfolder))

    @staticmethod
    def _save_pickle(path, obj):
        with Path(path).open('wb') as f:
            pickle.dump(obj, f, protocol=pickle.HIGHEST_PROTOCOL)

    @staticmethod
    def _save_numpy(path, obj):
        with Path(path).open('wb') as f:
            np.save(f, obj)

    @staticmethod
    def _save_torch(path, obj):
        torch.save(obj, path)

    @staticmethod
    def _save_json(path, obj):
        # Explicit encoding so the file is portable across locales.
        with Path(path).open('w', encoding='utf-8') as f:
            json.dump(obj, f, indent=2)

    @staticmethod
    def _load_pickle(path):
        with Path(path).open('rb') as f:
            return pickle.load(f)

    @staticmethod
    def _load_numpy(path):
        with Path(path).open('rb') as f:
            return np.load(f)

    @staticmethod
    def _load_torch(path):
        return torch.load(path)

    @staticmethod
    def _load_json(path):
        with Path(path).open('r', encoding='utf-8') as f:
            return json.load(f)

    @classmethod
    def from_path(cls, path):
        """
        Method to create a BenchmarkLogger from a path.
        """
        path = Path(path)
        return cls(path.parent, path.name, False)
|
[
"json.dump",
"pickle.dump",
"numpy.save",
"numpy.load",
"json.load",
"os.path.isdir",
"torch.load",
"os.path.exists",
"datetime.datetime.now",
"torch.save",
"pathlib.Path",
"pickle.load",
"os.path.join",
"os.getenv"
] |
[((2434, 2473), 'os.path.join', 'os.path.join', (['self._log_dir', 'log_id', '""""""'], {}), "(self._log_dir, log_id, '')\n", (2446, 2473), False, 'import os\n'), ((2818, 2869), 'os.path.join', 'os.path.join', (['self._log_dir', 'self._log_id', 'filename'], {}), '(self._log_dir, self._log_id, filename)\n', (2830, 2869), False, 'import os\n'), ((3920, 3940), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3934, 3940), False, 'import os\n'), ((5874, 5895), 'torch.save', 'torch.save', (['obj', 'path'], {}), '(obj, path)\n', (5884, 5895), False, 'import torch\n'), ((6338, 6354), 'torch.load', 'torch.load', (['path'], {}), '(path)\n', (6348, 6354), False, 'import torch\n'), ((6620, 6630), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (6624, 6630), False, 'from pathlib import Path\n'), ((1908, 1931), 'os.path.exists', 'os.path.exists', (['log_dir'], {}), '(log_dir)\n', (1922, 1931), False, 'import os\n'), ((2009, 2031), 'os.path.isdir', 'os.path.isdir', (['log_dir'], {}), '(log_dir)\n', (2022, 2031), False, 'import os\n'), ((2489, 2509), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2503, 2509), False, 'import os\n'), ((2584, 2603), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2597, 2603), False, 'import os\n'), ((5633, 5686), 'pickle.dump', 'pickle.dump', (['obj', 'f'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(obj, f, protocol=pickle.HIGHEST_PROTOCOL)\n', (5644, 5686), False, 'import pickle\n'), ((5795, 5810), 'numpy.save', 'np.save', (['f', 'obj'], {}), '(f, obj)\n', (5802, 5810), True, 'import numpy as np\n'), ((6002, 6029), 'json.dump', 'json.dump', (['obj', 'f'], {'indent': '(2)'}), '(obj, f, indent=2)\n', (6011, 6029), False, 'import json\n'), ((6137, 6151), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (6148, 6151), False, 'import pickle\n'), ((6262, 6272), 'numpy.load', 'np.load', (['f'], {}), '(f)\n', (6269, 6272), True, 'import numpy as np\n'), ((6463, 6475), 'json.load', 'json.load', (['f'], 
{}), '(f)\n', (6472, 6475), False, 'import json\n'), ((1715, 1732), 'os.getenv', 'os.getenv', (['"""USER"""'], {}), "('USER')\n", (1724, 1732), False, 'import os\n'), ((1803, 1836), 'os.path.join', 'os.path.join', (['scratch_dir', '"""logs"""'], {}), "(scratch_dir, 'logs')\n", (1815, 1836), False, 'import os\n'), ((2952, 2971), 'pathlib.Path', 'Path', (['self._log_dir'], {}), '(self._log_dir)\n', (2956, 2971), False, 'from pathlib import Path\n'), ((1749, 1766), 'pathlib.Path', 'Path', (['scratch_dir'], {}), '(scratch_dir)\n', (1753, 1766), False, 'from pathlib import Path\n'), ((1945, 1958), 'pathlib.Path', 'Path', (['log_dir'], {}), '(log_dir)\n', (1949, 1958), False, 'from pathlib import Path\n'), ((2523, 2533), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (2527, 2533), False, 'from pathlib import Path\n'), ((5593, 5603), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (5597, 5603), False, 'from pathlib import Path\n'), ((5755, 5765), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (5759, 5765), False, 'from pathlib import Path\n'), ((5963, 5973), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (5967, 5973), False, 'from pathlib import Path\n'), ((6090, 6100), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (6094, 6100), False, 'from pathlib import Path\n'), ((6215, 6225), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (6219, 6225), False, 'from pathlib import Path\n'), ((6417, 6427), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (6421, 6427), False, 'from pathlib import Path\n'), ((2373, 2387), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2385, 2387), False, 'from datetime import datetime\n')]
|
import math
import numpy as np
def _is_in_china(func):
    """Decorator: only apply *func* when the [lng, lat] pair lies inside
    China's bounding box; points outside are returned unchanged, because the
    GCJ-02 obfuscation is only applied within mainland China."""
    def wrapper(cls, lnglat):
        if 72.004 < lnglat[0] < 137.8347 and .8293 < lnglat[1] < 55.8271:
            return func(cls, lnglat)
        return lnglat
    return wrapper


class Convert:
    """Conversions between the WGS-84, GCJ-02 ("Mars") and BD-09 (Baidu)
    coordinate systems, plus Web-Mercator / map-tile helpers and small
    formatting utilities. Coordinates are [lng, lat] lists."""

    _XPI = math.pi * 3000 / 180          # constant of the BD-09 transform
    _PI = math.pi
    _A = 6378245                         # GCJ-02 ellipsoid semi-major axis (m)
    _EE = .00669342162296594323          # first eccentricity squared
    _MERCATOR = 20037508.34 / 180        # degrees -> Web-Mercator meters
    _SIZE = 78271516                     # base size for tile_size_by_zoom

    @classmethod
    def _transform_lng(cls, lng: float, lat: float) -> float:
        # Longitude perturbation polynomial of the GCJ-02 obfuscation.
        ret = 300 + lng + 2 * lat + .1 * lng * lng + \
              .1 * lng * lat + .1 * math.sqrt(math.fabs(lng))
        ret += (20 * math.sin(6.0 * lng * cls._PI) + 20 *
                math.sin(2 * lng * cls._PI)) * 2 / 3
        ret += (20 * math.sin(lng * cls._PI) + 40 *
                math.sin(lng / 3 * cls._PI)) * 2 / 3
        ret += (150 * math.sin(lng / 12 * cls._PI) + 300 *
                math.sin(lng / 30 * cls._PI)) * 2 / 3
        return ret

    @classmethod
    def _transform_lat(cls, lng: float, lat: float) -> float:
        # Latitude perturbation polynomial of the GCJ-02 obfuscation.
        ret = -100 + 2 * lng + 3 * lat + .2 * lat * lat + \
              .1 * lng * lat + .2 * math.sqrt(math.fabs(lng))
        ret += (20 * math.sin(6.0 * lng * cls._PI) + 20 *
                math.sin(2 * lng * cls._PI)) * 2 / 3
        ret += (20 * math.sin(lat * cls._PI) + 40 *
                math.sin(lat / 3 * cls._PI)) * 2 / 3
        ret += (160 * math.sin(lat / 12 * cls._PI) + 320 *
                math.sin(lat * cls._PI / 30)) * 2 / 3
        return ret

    @classmethod
    @_is_in_china
    def wgs84togcj02(cls, lnglat: list) -> list:
        """Convert a WGS-84 [lng, lat] pair to GCJ-02 (Mars) coordinates."""
        dlng = cls._transform_lng(lnglat[0] - 105, lnglat[1] - 35)
        dlat = cls._transform_lat(lnglat[0] - 105, lnglat[1] - 35)
        radlat = lnglat[1] / 180 * cls._PI
        magic = math.sin(radlat)
        magic = 1 - cls._EE * magic * magic
        sqrtmagic = math.sqrt(magic)
        dlat = (dlat * 180) / ((cls._A * (1 - cls._EE)) / (magic * sqrtmagic) * cls._PI)
        dlng = (dlng * 180) / (cls._A / sqrtmagic * math.cos(radlat) * cls._PI)
        mglat = lnglat[1] + dlat
        mglng = lnglat[0] + dlng
        return [mglng, mglat]

    @classmethod
    @_is_in_china
    def wgs84tobd09(cls, lnglat: list) -> list:
        """Convert a WGS-84 [lng, lat] pair to BD-09 (Baidu) coordinates."""
        lnglat = cls.wgs84togcj02(lnglat)
        return cls.gcj02tobd09(lnglat)

    @classmethod
    @_is_in_china
    def gcj02towgs84(cls, lnglat: list) -> list:
        """Convert a GCJ-02 (Mars) [lng, lat] pair back to WGS-84.

        Inverse of wgs84togcj02 (approximate: applies the forward offset in
        reverse)."""
        dlat = cls._transform_lat(lnglat[0] - 105, lnglat[1] - 35)
        dlng = cls._transform_lng(lnglat[0] - 105, lnglat[1] - 35)
        radlat = lnglat[1] / 180.0 * cls._PI
        magic = math.sin(radlat)
        magic = 1 - cls._EE * magic * magic
        sqrtmagic = math.sqrt(magic)
        dlat = (dlat * 180) / ((cls._A * (1 - cls._EE)) / (magic * sqrtmagic) * cls._PI)
        dlng = (dlng * 180) / (cls._A / sqrtmagic * math.cos(radlat) * cls._PI)
        mglat = lnglat[1] + dlat
        mglng = lnglat[0] + dlng
        return [lnglat[0] * 2 - mglng, lnglat[1] * 2 - mglat]

    @classmethod
    @_is_in_china
    def gcj02tobd09(cls, lnglat: list) -> list:
        """Convert a GCJ-02 (Mars) [lng, lat] pair to BD-09 (Baidu)."""
        z = math.sqrt(lnglat[0] * lnglat[0] + lnglat[1] * lnglat[1]) + .00002 * math.sin(lnglat[1] * cls._XPI)
        theta = math.atan2(lnglat[1], lnglat[0]) + .000003 * math.cos(lnglat[0] * cls._XPI)
        bd_lng = z * math.cos(theta) + .0065
        bd_lat = z * math.sin(theta) + .006
        return [bd_lng, bd_lat]

    @classmethod
    @_is_in_china
    def bd09towgs84(cls, lnglat: list) -> list:
        """Convert a BD-09 (Baidu) [lng, lat] pair to WGS-84."""
        lnglat = cls.bd09togcj02(lnglat)
        return cls.gcj02towgs84(lnglat)

    @classmethod
    def bd09togcj02(cls, lnglat: list) -> list:
        """Convert a BD-09 (Baidu) [lng, lat] pair to GCJ-02 (Mars)."""
        x = lnglat[0] - .0065
        y = lnglat[1] - .006
        z = math.sqrt(x * x + y * y) - .00002 * math.sin(y * cls._XPI)
        theta = math.atan2(y, x) - .000003 * math.cos(x * cls._XPI)
        gcj_lng = z * math.cos(theta)
        gcj_lat = z * math.sin(theta)
        return [gcj_lng, gcj_lat]

    @classmethod
    def lnglat_to_mercator(
            cls,
            lnglat: list,
            reference_position=(0, 0),
            convert_rate=(1, 1),
            unit='mm'
    ) -> list:
        """Project a [lng, lat] pair onto a flat Web-Mercator plane.

        Args:
            lnglat: [lng, lat] in degrees.
            reference_position: lng/lat origin (e.g. a city or site center).
            convert_rate: per-axis scale factors applied to the result.
            unit: 'mm' multiplies the meter result by 1000.

        Returns:
            [x, y] planar coordinates.
        """
        x = lnglat[0] - reference_position[0]
        y = lnglat[1] - reference_position[1]
        x = x * cls._MERCATOR
        y = math.log(math.tan((90 + y) * cls._PI / 360)) / (cls._PI / 180)
        y = y * cls._MERCATOR
        if unit == 'mm':
            x *= 1000
            y *= 1000
        return [x * convert_rate[0], y * convert_rate[1]]

    @classmethod
    def mercator_to_lnglat(
            cls,
            mercator,
            reference_position=(0, 0),
            convert_rate=(1, 1)
    ) -> list:
        """Unproject planar Web-Mercator coordinates back to [lng, lat].

        Args:
            mercator: [x, y] Mercator coordinates in meters.
            reference_position: lng/lat origin used for the projection.
            convert_rate: per-axis scale factors used for the projection.

        Returns:
            [lng, lat] in degrees.

        NOTE(review): there is no inverse of lnglat_to_mercator's 'mm'
        scaling here — confirm callers pass meters, not millimeters.
        """
        x, y = mercator[0] / convert_rate[0], mercator[1] / convert_rate[1]
        x, y = x / cls._MERCATOR, y / cls._MERCATOR
        y = 180 / cls._PI * (2 * math.atan(math.exp(y * cls._PI / 180)) - cls._PI / 2)
        x += reference_position[0]
        y += reference_position[1]
        return [x, y]

    @classmethod
    def lnglat_to_tile_index(cls, lnglat: list, level: int) -> list:
        """Return the [x, y, level] slippy-map tile index containing *lnglat*."""
        n = 2 ** level
        x = int((lnglat[0] + 180.0) / 360.0 * n)
        lat_rad = math.radians(lnglat[1])
        y = int((1.0 - math.asinh(math.tan(lat_rad)) / cls._PI) / 2.0 * n)
        return [x, y, level]

    @staticmethod
    def tile_index_to_lnglat(tiles) -> list:
        """Return the [lng, lat] of the north-west corner of tile [x, y, level]."""
        n = 2 ** tiles[2]
        lng = tiles[0] / n * 360.0 - 180.0
        lat_rad = math.atan(math.sinh(math.pi * (1 - 2 * tiles[1] / n)))
        lat = math.degrees(lat_rad)
        return [lng, lat]

    @classmethod
    def tile_size_by_zoom(cls, level: int, unit='mm'):
        """Return the standard tile edge length at the given zoom level
        ('mm' by default, otherwise meters)."""
        a = cls._SIZE * 2 ** (- level - 1)
        return a * 1000 if unit == 'mm' else a

    @staticmethod
    def rgb_to_hex(rgb: tuple) -> str:
        """Format an (r, g, b) tuple as a '#rrggbb' hex color string."""
        return '#%02x%02x%02x' % tuple(rgb)

    @staticmethod
    def hex_to_rgb(hex: str) -> tuple:
        """Parse a hex color string into an (r, g, b) tuple.

        Bug fix: accepts both '#rrggbb' (as produced by rgb_to_hex) and
        bare 'rrggbb'; previously the leading '#' made parsing fail.
        """
        hex = hex.lstrip('#')
        return tuple(int(hex[i:i+2], 16) for i in (0, 2, 4))

    @staticmethod
    def to_list(location: str) -> list:
        """Parse a 'lng,lat' string into a [lng, lat] list.

        e.g. '123.456, 123.456' -> [123.456, 123.456]

        NOTE: uses eval(); only call this on trusted input.
        """
        return list(eval(location))

    @staticmethod
    def to_string(location: tuple) -> str:
        """Format a [lng, lat] sequence as a 'lng,lat' string for requests.

        e.g. [123.456, 123.456] -> '123.456,123.456'
        """
        return ','.join(list(map(str, location)))

    @staticmethod
    def stringtolist(string, reverse=False):
        """Parse a ';'-separated list of 'lng,lat' pairs into a list of
        [lng, lat] pairs ([lat, lng] when *reverse* is True).

        e.g. "113.5,22.1;113.6,22.2" -> [[113.5, 22.1], [113.6, 22.2]]

        NOTE: uses eval() on each pair; only call this on trusted input.
        """
        ls = string.split(';')
        c = list(map(eval, ls))
        d = np.array(c)
        if reverse:
            d = np.flip(d, axis=1)
        return d.tolist()
|
[
"math.exp",
"numpy.flip",
"math.sqrt",
"math.atan2",
"math.radians",
"math.tan",
"math.fabs",
"math.sin",
"numpy.array",
"math.cos",
"math.sinh",
"math.degrees"
] |
[((1975, 1991), 'math.sin', 'math.sin', (['radlat'], {}), '(radlat)\n', (1983, 1991), False, 'import math\n'), ((2056, 2072), 'math.sqrt', 'math.sqrt', (['magic'], {}), '(magic)\n', (2065, 2072), False, 'import math\n'), ((3034, 3050), 'math.sin', 'math.sin', (['radlat'], {}), '(radlat)\n', (3042, 3050), False, 'import math\n'), ((3115, 3131), 'math.sqrt', 'math.sqrt', (['magic'], {}), '(magic)\n', (3124, 3131), False, 'import math\n'), ((6360, 6383), 'math.radians', 'math.radians', (['lnglat[1]'], {}), '(lnglat[1])\n', (6372, 6383), False, 'import math\n'), ((6708, 6729), 'math.degrees', 'math.degrees', (['lat_rad'], {}), '(lat_rad)\n', (6720, 6729), False, 'import math\n'), ((8600, 8611), 'numpy.array', 'np.array', (['c'], {}), '(c)\n', (8608, 8611), True, 'import numpy as np\n'), ((3648, 3704), 'math.sqrt', 'math.sqrt', (['(lnglat[0] * lnglat[0] + lnglat[1] * lnglat[1])'], {}), '(lnglat[0] * lnglat[0] + lnglat[1] * lnglat[1])\n', (3657, 3704), False, 'import math\n'), ((3763, 3795), 'math.atan2', 'math.atan2', (['lnglat[1]', 'lnglat[0]'], {}), '(lnglat[1], lnglat[0])\n', (3773, 3795), False, 'import math\n'), ((4507, 4531), 'math.sqrt', 'math.sqrt', (['(x * x + y * y)'], {}), '(x * x + y * y)\n', (4516, 4531), False, 'import math\n'), ((4582, 4598), 'math.atan2', 'math.atan2', (['y', 'x'], {}), '(y, x)\n', (4592, 4598), False, 'import math\n'), ((4656, 4671), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (4664, 4671), False, 'import math\n'), ((4694, 4709), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (4702, 4709), False, 'import math\n'), ((6649, 6692), 'math.sinh', 'math.sinh', (['(math.pi * (1 - 2 * tiles[1] / n))'], {}), '(math.pi * (1 - 2 * tiles[1] / n))\n', (6658, 6692), False, 'import math\n'), ((8649, 8667), 'numpy.flip', 'np.flip', (['d'], {'axis': '(1)'}), '(d, axis=1)\n', (8656, 8667), True, 'import numpy as np\n'), ((3716, 3746), 'math.sin', 'math.sin', (['(lnglat[1] * cls._XPI)'], {}), '(lnglat[1] * cls._XPI)\n', (3724, 3746), 
False, 'import math\n'), ((3808, 3838), 'math.cos', 'math.cos', (['(lnglat[0] * cls._XPI)'], {}), '(lnglat[0] * cls._XPI)\n', (3816, 3838), False, 'import math\n'), ((3860, 3875), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (3868, 3875), False, 'import math\n'), ((3905, 3920), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (3913, 3920), False, 'import math\n'), ((4543, 4565), 'math.sin', 'math.sin', (['(y * cls._XPI)'], {}), '(y * cls._XPI)\n', (4551, 4565), False, 'import math\n'), ((4611, 4633), 'math.cos', 'math.cos', (['(x * cls._XPI)'], {}), '(x * cls._XPI)\n', (4619, 4633), False, 'import math\n'), ((5285, 5319), 'math.tan', 'math.tan', (['((90 + y) * cls._PI / 360)'], {}), '((90 + y) * cls._PI / 360)\n', (5293, 5319), False, 'import math\n'), ((597, 611), 'math.fabs', 'math.fabs', (['lng'], {}), '(lng)\n', (606, 611), False, 'import math\n'), ((1181, 1195), 'math.fabs', 'math.fabs', (['lng'], {}), '(lng)\n', (1190, 1195), False, 'import math\n'), ((2215, 2231), 'math.cos', 'math.cos', (['radlat'], {}), '(radlat)\n', (2223, 2231), False, 'import math\n'), ((3274, 3290), 'math.cos', 'math.cos', (['radlat'], {}), '(radlat)\n', (3282, 3290), False, 'import math\n'), ((638, 667), 'math.sin', 'math.sin', (['(6.0 * lng * cls._PI)'], {}), '(6.0 * lng * cls._PI)\n', (646, 667), False, 'import math\n'), ((695, 722), 'math.sin', 'math.sin', (['(2 * lng * cls._PI)'], {}), '(2 * lng * cls._PI)\n', (703, 722), False, 'import math\n'), ((757, 780), 'math.sin', 'math.sin', (['(lng * cls._PI)'], {}), '(lng * cls._PI)\n', (765, 780), False, 'import math\n'), ((808, 835), 'math.sin', 'math.sin', (['(lng / 3 * cls._PI)'], {}), '(lng / 3 * cls._PI)\n', (816, 835), False, 'import math\n'), ((871, 899), 'math.sin', 'math.sin', (['(lng / 12 * cls._PI)'], {}), '(lng / 12 * cls._PI)\n', (879, 899), False, 'import math\n'), ((928, 956), 'math.sin', 'math.sin', (['(lng / 30 * cls._PI)'], {}), '(lng / 30 * cls._PI)\n', (936, 956), False, 'import math\n'), ((1222, 1251), 
'math.sin', 'math.sin', (['(6.0 * lng * cls._PI)'], {}), '(6.0 * lng * cls._PI)\n', (1230, 1251), False, 'import math\n'), ((1279, 1306), 'math.sin', 'math.sin', (['(2 * lng * cls._PI)'], {}), '(2 * lng * cls._PI)\n', (1287, 1306), False, 'import math\n'), ((1341, 1364), 'math.sin', 'math.sin', (['(lat * cls._PI)'], {}), '(lat * cls._PI)\n', (1349, 1364), False, 'import math\n'), ((1392, 1419), 'math.sin', 'math.sin', (['(lat / 3 * cls._PI)'], {}), '(lat / 3 * cls._PI)\n', (1400, 1419), False, 'import math\n'), ((1455, 1483), 'math.sin', 'math.sin', (['(lat / 12 * cls._PI)'], {}), '(lat / 12 * cls._PI)\n', (1463, 1483), False, 'import math\n'), ((1512, 1540), 'math.sin', 'math.sin', (['(lat * cls._PI / 30)'], {}), '(lat * cls._PI / 30)\n', (1520, 1540), False, 'import math\n'), ((6046, 6073), 'math.exp', 'math.exp', (['(y * cls._PI / 180)'], {}), '(y * cls._PI / 180)\n', (6054, 6073), False, 'import math\n'), ((6418, 6435), 'math.tan', 'math.tan', (['lat_rad'], {}), '(lat_rad)\n', (6426, 6435), False, 'import math\n')]
|
from __future__ import print_function
import findspark
findspark.init()
from pyspark import SparkConf,SparkContext
from pyspark.streaming import StreamingContext
from pyspark.sql import Row,SQLContext
import sys
import requests
'''def most_hashtags(new_state,):
final_hash = defaultdict(lambda: 0)
hashtags = tweet.split(',')
for i in hashtags:
final_hash[i] +=1
sorted_hash = sorted(final_hash,key = d.get, reverse = True)[:3]
'''
# input
def line_split(x):
    """Extract the hashtag field (field index 7) from a ';'-delimited tweet
    record and split it into the individual comma-separated hashtags."""
    fields = x.split(";")
    return fields[7].split(",")
'''def tmp(x):
for i in x.split(','):
return(i,1)
def printrdd(rdd):
line = 0
for i in rdd.collect():
if(i[0]):
if(line != 4):
#print(5)
print(i[0],end = ",")
line += 1
else:
print(i[0])
break'''
def printrdd2(rdd):
    """Print one comma-separated line with the names of the (up to five)
    most frequent hashtags in *rdd*.

    rdd holds (hashtag, count) pairs. They are sorted by count descending,
    then hashtag ascending, and empty-string hashtags are dropped. Nothing
    is printed when no hashtags remain.

    Bug fix: the original indexed positions 0..4 unconditionally and raised
    IndexError whenever fewer than five hashtags survived the filter.
    """
    ranked = rdd.sortBy(lambda a: (-a[1], a[0])).filter(lambda x: x[0] != '')
    top = ranked.collect()[:5]
    if top:
        print(",".join(pair[0] for pair in top))
# Window length (seconds) and batch interval (seconds) from the CLI.
window_size = int(sys.argv[1])
batch = int(sys.argv[2])
# Spark setup: streaming context fed by a local tweet socket on port 9009.
conf=SparkConf()
conf.setAppName("BigData")
sc=SparkContext(conf=conf)
ssc=StreamingContext(sc,batch)
ssc.checkpoint("~/checkpoint_BIGDATA")
dataStream=ssc.socketTextStream("localhost",9009)
# Sliding window over the stream (slides every 1 s); count each hashtag.
tweets = dataStream.window(window_size,1)
flat_tweets = tweets.flatMap(line_split).map(lambda w:(w, 1))
reduced_tweets = flat_tweets.reduceByKey(lambda x,y:int(x)+int(y))
# Print the current top-5 hashtags for every windowed RDD.
reduced_tweets.foreachRDD(printrdd2)
# Run for at most 60 seconds, then shut down.
ssc.start()
ssc.awaitTermination(60)
ssc.stop()
|
[
"pyspark.SparkContext",
"pyspark.SparkConf",
"pyspark.streaming.StreamingContext",
"findspark.init"
] |
[((55, 71), 'findspark.init', 'findspark.init', ([], {}), '()\n', (69, 71), False, 'import findspark\n'), ((1084, 1095), 'pyspark.SparkConf', 'SparkConf', ([], {}), '()\n', (1093, 1095), False, 'from pyspark import SparkConf, SparkContext\n'), ((1126, 1149), 'pyspark.SparkContext', 'SparkContext', ([], {'conf': 'conf'}), '(conf=conf)\n', (1138, 1149), False, 'from pyspark import SparkConf, SparkContext\n'), ((1155, 1182), 'pyspark.streaming.StreamingContext', 'StreamingContext', (['sc', 'batch'], {}), '(sc, batch)\n', (1171, 1182), False, 'from pyspark.streaming import StreamingContext\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from flask import Flask, render_template, jsonify
from redis_conn import redis_conn_pool
app = Flask(__name__)
redis_conn = redis_conn_pool()
@app.route('/')
def index():
return render_template("bigdata.html")
def get_chart1_data():
chart1_data_list,chart1_city_list,chart1_info = [],[],{}
city_data = json.loads(redis_conn.get("ncovcity_data"))
for city in city_data["newslist"][:5]:
chart1_dict = {}
chart1_dict["name"] = city["provinceShortName"]
chart1_dict["value"] = city["confirmedCount"]
chart1_data_list.append(chart1_dict)
chart1_city_list.append(city["provinceShortName"])
chart1_info["names"] = chart1_city_list
chart1_info["data"] = chart1_data_list
return chart1_info
def get_chart2_data():
chart2_info,chart2_dict = {},{}
city_data = json.loads(redis_conn.get("ncovcity_data"))
for city in city_data["newslist"]:
chart2_dict[city["provinceShortName"]] = city["deadCount"]
chart2_data_list = sorted(chart2_dict.items(), key=lambda x: x[1], reverse=True)
chart2_city_list = [x[0] for x in chart2_data_list[:5]]
chart2_info["names"] = chart2_city_list
chart2_info["data"] = chart2_data_list[:5]
return chart2_info
def get_chart3_1_data():
chart3_1_list = []
ncov_data = json.loads(redis_conn.get("ncov_data"))
ncov = ncov_data["newslist"][0]["desc"]
confirm = {"value":ncov["confirmedCount"], "name":"确诊"}
dead = {"value":ncov["deadCount"], "name":"死亡"}
chart3_1_list.append(confirm)
chart3_1_list.append(dead)
return chart3_1_list
def get_chart3_2_data():
chart3_2_list = []
ncov_data = json.loads(redis_conn.get("ncov_data"))
ncov = ncov_data["newslist"][0]["desc"]
confirm = {"value":ncov["confirmedCount"], "name":"确诊"}
cured = {"value":ncov["curedCount"], "name":"治愈"}
chart3_2_list.append(confirm)
chart3_2_list.append(cured)
return chart3_2_list
def get_chart3_3_data():
chart3_3_list = []
ncov_data = json.loads(redis_conn.get("ncov_data"))
ncov = ncov_data["newslist"][0]["desc"]
cured = {"value":ncov["curedCount"], "name":"治愈"}
dead = {"value":ncov["deadCount"], "name":"死亡"}
chart3_3_list.append(cured)
chart3_3_list.append(dead)
return chart3_3_list
def get_chart4_data():
chart4_info,chart4_date_list,chart4_confirm_list,chart4_suspect_list,chart4_heal_list,chart4_dead_list = {},[],[],[],[],[]
trend_data = json.loads(redis_conn.get("trend"))
for data in trend_data["data"]["chinaDayList"]:
chart4_date_list.append(data["date"].split("2020-")[1])
chart4_confirm_list.append(data["total"]["confirm"])
chart4_suspect_list.append(data["total"]["suspect"])
chart4_heal_list.append(data["total"]["heal"])
chart4_dead_list.append(data["total"]["dead"])
chart4_info["names"] = chart4_date_list[-20:]
chart4_info["confirm"] = chart4_confirm_list[-20:]
chart4_info["suspect"] = chart4_suspect_list[-20:]
chart4_info["heal"] = chart4_heal_list[-20:]
chart4_info["dead"] = chart4_dead_list[-20:]
return chart4_info
def get_chart5_data():
chart5_info,chart5_dict = {},{}
city_data = json.loads(redis_conn.get("ncovcity_data"))
for city in city_data["newslist"]:
chart5_dict[city["provinceShortName"]] = city["curedCount"]
chart5_data_list = sorted(chart5_dict.items(), key=lambda x: x[1], reverse=True)
chart5_city_list = [x[0] for x in chart5_data_list[:5]]
chart5_info["names"] = chart5_city_list
chart5_info["data"] = chart5_data_list[:5]
return chart5_info
def get_chart5_1_data():
chart5_1_info,chart5_1_dict = {},{}
trend_data = json.loads(redis_conn.get("trend"))
for data in trend_data["data"]["areaTree"][1:]:
chart5_1_dict[data["name"]] = data["total"]["confirm"]
chart5_1_data_list = sorted(chart5_1_dict.items(), key=lambda x: x[1], reverse=True)
chart5_country_list = [x[0] for x in chart5_1_data_list[:5]]
chart5_1_info["names"] = chart5_country_list
chart5_1_info["data"] = chart5_1_data_list[:5]
return chart5_1_info
def get_chart_map_data():
map_chart_list = []
map_data = json.loads(redis_conn.get("ncovcity_data"))
for data in map_data["newslist"]:
map_chart_dict = {}
map_chart_dict["name"] = data["provinceShortName"]
map_chart_dict["value"] = data["confirmedCount"]
map_chart_list.append(map_chart_dict)
return map_chart_list
@app.route("/get_ncov_totalcount")
def get_ncov_totalcount():
ncov_data = redis_conn.get("ncov_data")
result_ncov = json.loads(ncov_data)
confirmedCount = result_ncov["newslist"][0]["desc"]["confirmedCount"]
suspectedCount = result_ncov["newslist"][0]["desc"]["suspectedCount"]
return jsonify({"confirmedCount":confirmedCount, "suspectedCount":suspectedCount})
@app.route("/get_chart_data")
def get_chart_data():
chart_info = {}
chart1_data = get_chart1_data()
chart2_data = get_chart2_data()
chart3_1_data = get_chart3_1_data()
chart3_2_data = get_chart3_2_data()
chart3_3_data = get_chart3_3_data()
chart4_data = get_chart4_data()
chart5_data = get_chart5_data()
chart5_1_data = get_chart5_1_data()
chart_info["chart1"] = chart1_data
chart_info["chart2"] = chart2_data
chart_info["chart3_1"] = chart3_1_data
chart_info["chart3_2"] = chart3_2_data
chart_info["chart3_3"] = chart3_3_data
chart_info["chart4"] = chart4_data
chart_info["chart5"] = chart5_data
chart_info["chart5_1"] = chart5_1_data
return jsonify(chart_info)
@app.route("/get_map_data")
def get_map_data():
map_data = get_chart_map_data()
return jsonify(map_data)
if __name__ == "__main__":
app.run(debug=True, host="localhost")
|
[
"json.loads",
"flask.Flask",
"flask.jsonify",
"flask.render_template",
"redis_conn.redis_conn_pool"
] |
[((156, 171), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (161, 171), False, 'from flask import Flask, render_template, jsonify\n'), ((185, 202), 'redis_conn.redis_conn_pool', 'redis_conn_pool', ([], {}), '()\n', (200, 202), False, 'from redis_conn import redis_conn_pool\n'), ((244, 275), 'flask.render_template', 'render_template', (['"""bigdata.html"""'], {}), "('bigdata.html')\n", (259, 275), False, 'from flask import Flask, render_template, jsonify\n'), ((4717, 4738), 'json.loads', 'json.loads', (['ncov_data'], {}), '(ncov_data)\n', (4727, 4738), False, 'import json\n'), ((4899, 4976), 'flask.jsonify', 'jsonify', (["{'confirmedCount': confirmedCount, 'suspectedCount': suspectedCount}"], {}), "({'confirmedCount': confirmedCount, 'suspectedCount': suspectedCount})\n", (4906, 4976), False, 'from flask import Flask, render_template, jsonify\n'), ((5695, 5714), 'flask.jsonify', 'jsonify', (['chart_info'], {}), '(chart_info)\n', (5702, 5714), False, 'from flask import Flask, render_template, jsonify\n'), ((5813, 5830), 'flask.jsonify', 'jsonify', (['map_data'], {}), '(map_data)\n', (5820, 5830), False, 'from flask import Flask, render_template, jsonify\n')]
|
# pylint: disable=missing-docstring, protected-access
# type: ignore
# TODO remove it later
# pylint: disable=invalid-name
import unittest
from exonum_client.crypto import Hash
from exonum_client.proofs.list_proof import ListProof
from exonum_client.proofs.list_proof.key import ProofListKey
from exonum_client.proofs.list_proof.list_proof import HashedEntry
from exonum_client.proofs.list_proof.errors import MalformedListProofError, ListProofVerificationError
def _to_bytes(hex_data: str) -> bytes:
return bytes.fromhex(hex_data)
def _parse_hash(hex_data: str) -> Hash:
return Hash(_to_bytes(hex_data))
class TestListProofParse(unittest.TestCase):
def setUp(self):
self.HASH_A_HEX = "2dc17ca9c00d29ecff475d92f9b0c8885350d7b783e703b8ad21ae331d134496"
self.HASH_B_HEX = "c6f5873ab0f93c8be05e4e412cfc307fd98e58c9da9e6f582130882e672eb742"
self.HASH_A = _parse_hash(self.HASH_A_HEX)
self.HASH_B = _parse_hash(self.HASH_B_HEX)
def test_parse_hashed_entry(self):
entry_json = {"index": 0, "height": 0, "hash": self.HASH_A_HEX}
entry = HashedEntry.parse(entry_json)
self.assertEqual(entry, HashedEntry(ProofListKey(0, 0), self.HASH_A))
malformed_entries = [
{"index": 0, "hash": self.HASH_A_HEX},
{"height": 0, "hash": self.HASH_A_HEX},
{"index": 0, "height": 0},
{"index": "abc", "height": 0, "hash": self.HASH_A_HEX},
{"index": 0, "height": "cde", "hash": self.HASH_A_HEX},
{"index": 0, "height": 0, "hash": 123},
]
for malformed_entry in malformed_entries:
with self.assertRaises(MalformedListProofError):
HashedEntry.parse(malformed_entry)
def test_parse_proof(self):
json_proof = {"proof": [], "entries": [], "length": 0}
proof = ListProof.parse(json_proof, _to_bytes)
self.assertEqual(proof._proof, [])
self.assertEqual(proof._entries, [])
self.assertEqual(proof._length, 0)
self.assertEqual(proof._value_to_bytes, _to_bytes)
json_proof = {
"proof": [{"index": 1, "height": 1, "hash": self.HASH_A_HEX}],
"entries": [[0, self.HASH_B_HEX]],
"length": 2,
}
proof = ListProof.parse(json_proof, _to_bytes)
self.assertEqual(proof._proof, [HashedEntry(ProofListKey(1, 1), self.HASH_A)])
self.assertEqual(proof._entries, [(0, self.HASH_B_HEX)])
self.assertEqual(proof._length, 2)
self.assertEqual(proof._value_to_bytes, _to_bytes)
def test_parse_malformed_raises(self):
malformed_proofs = [
{"malformed": "proof"},
{"proof": [], "length": 0},
{"entries": [], "length": 0},
{"proof": [], "entries": []},
{"proof": [123], "entries": [], "length": 0},
{"proof": [], "entries": [123], "length": 0},
]
for malformed_proof in malformed_proofs:
with self.assertRaises(MalformedListProofError):
ListProof.parse(malformed_proof, _to_bytes)
class TestListProof(unittest.TestCase):
def test_proof_simple(self):
stored_val = "6b70d869aeed2fe090e708485d9f4b4676ae6984206cf05efc136d663610e5c9"
proof_json = {
"proof": [
{"index": 1, "height": 1, "hash": "eae60adeb5c681110eb5226a4ef95faa4f993c4a838d368b66f7c98501f2c8f9"}
],
"entries": [[0, "6b70d869aeed2fe090e708485d9f4b4676ae6984206cf05efc136d663610e5c9"]],
"length": 2,
}
expected_hash = "07df67b1a853551eb05470a03c9245483e5a3731b4b558e634908ff356b69857"
proof = ListProof.parse(proof_json)
result = proof.validate(_parse_hash(expected_hash))
self.assertEqual(result, [(0, stored_val)])
def test_incorrect_proof_raises(self):
# Test that an incorrect proof verification will raise an error:
incorrect_stored_val = "DEADBEEFaeed2fe090e708485d9f4b4676ae6984206cf05efc136d663610e5c9"
incorrect_proof_json = {
"proof": [
{"index": 1, "height": 1, "hash": "eae60adeb5c681110eb5226a4ef95faa4f993c4a838d368b66f7c98501f2c8f9"}
],
"entries": [[0, incorrect_stored_val]],
"length": 2,
}
expected_hash = "07df67b1a853551eb05470a03c9245483e5a3731b4b558e634908ff356b69857"
proof = ListProof.parse(incorrect_proof_json)
with self.assertRaises(ListProofVerificationError):
_result = proof.validate(_parse_hash(expected_hash))
# Test that verification of a proof against an incorrect hash will raise an error:
stored_val = "6b70d869aeed2fe090e708485d9f4b4676ae6984206cf05efc136d663610e5c9"
proof_json = {
"proof": [
{"index": 1, "height": 1, "hash": "eae60adeb5c681110eb5226a4ef95faa4f993c4a838d368b66f7c98501f2c8f9"}
],
"entries": [[0, stored_val]],
"length": 2,
}
incorrect_expected_hash = "DEADBEEFa853551eb05470a03c9245483e5a3731b4b558e634908ff356b69857"
proof = ListProof.parse(proof_json)
with self.assertRaises(ListProofVerificationError):
_result = proof.validate(_parse_hash(incorrect_expected_hash))
def test_proof_range(self):
proof_json = proof_json = {
"proof": [],
"entries": [
[0, "4507b25b6c91cbeba4320ac641728a92f4c085674e11c96b5a5830eddfe7a07a"],
[1, "17c18e8cfbba5cd179cb9067f28e5a6dc8aeb2a66a7cd7237746f891a2e125b7"],
[2, "183c6af10407efd8ab875cdf372a5e5893e2527f77fec4bbbcf14f2dd5c22340"],
[3, "378ec583913aad58f857fa016fbe0b0fccede49454e9e4bd574e6234a620869f"],
[4, "8021361a8e6cd5fbd5edef78140117a0802b3dc187388037345b8b65835382b2"],
[5, "8d8b0adab49c2568c2b62ba0ab51ac2a6961b73c3f3bb1b596dd62a0a9971aac"],
],
"length": 6,
}
expected_hash = "3bb680f61d358cc208003e7b42f077402fdb05388dc0e7f3099551e4f86bb70a"
proof = ListProof.parse(proof_json)
res = proof.validate(_parse_hash(expected_hash))
self.assertEqual(
res,
[
(0, "4507b25b6c91cbeba4320ac641728a92f4c085674e11c96b5a5830eddfe7a07a"),
(1, "17c18e8cfbba5cd179cb9067f28e5a6dc8aeb2a66a7cd7237746f891a2e125b7"),
(2, "183c6af10407efd8ab875cdf372a5e5893e2527f77fec4bbbcf14f2dd5c22340"),
(3, "378ec583913aad58f857fa016fbe0b0fccede49454e9e4bd574e6234a620869f"),
(4, "8021361a8e6cd5fbd5edef78140117a0802b3dc187388037345b8b65835382b2"),
(5, "8d8b0adab49c2568c2b62ba0ab51ac2a6961b73c3f3bb1b596dd62a0a9971aac"),
],
)
def test_proof_of_absence(self):
expected_hash = "07df67b1a853551eb05470a03c9245483e5a3731b4b558e634908ff356b69857"
proof_json = {
"proof": [
{"index": 0, "height": 2, "hash": "34e927df0267eac2dbd7e27f0ad9de2b3dba7af7c1c84b9cab599b8048333c3b"}
],
"entries": [],
"length": 2,
}
proof = ListProof.parse(proof_json)
res = proof.validate(_parse_hash(expected_hash))
self.assertEqual(res, [])
|
[
"exonum_client.proofs.list_proof.list_proof.HashedEntry.parse",
"exonum_client.proofs.list_proof.ListProof.parse",
"exonum_client.proofs.list_proof.key.ProofListKey"
] |
[((1105, 1134), 'exonum_client.proofs.list_proof.list_proof.HashedEntry.parse', 'HashedEntry.parse', (['entry_json'], {}), '(entry_json)\n', (1122, 1134), False, 'from exonum_client.proofs.list_proof.list_proof import HashedEntry\n'), ((1861, 1899), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['json_proof', '_to_bytes'], {}), '(json_proof, _to_bytes)\n', (1876, 1899), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((2289, 2327), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['json_proof', '_to_bytes'], {}), '(json_proof, _to_bytes)\n', (2304, 2327), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((3697, 3724), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['proof_json'], {}), '(proof_json)\n', (3712, 3724), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((4440, 4477), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['incorrect_proof_json'], {}), '(incorrect_proof_json)\n', (4455, 4477), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((5160, 5187), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['proof_json'], {}), '(proof_json)\n', (5175, 5187), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((6136, 6163), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['proof_json'], {}), '(proof_json)\n', (6151, 6163), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((7227, 7254), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['proof_json'], {}), '(proof_json)\n', (7242, 7254), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((1180, 1198), 'exonum_client.proofs.list_proof.key.ProofListKey', 'ProofListKey', (['(0)', '(0)'], {}), '(0, 0)\n', (1192, 1198), False, 'from exonum_client.proofs.list_proof.key import ProofListKey\n'), ((1713, 1747), 
'exonum_client.proofs.list_proof.list_proof.HashedEntry.parse', 'HashedEntry.parse', (['malformed_entry'], {}), '(malformed_entry)\n', (1730, 1747), False, 'from exonum_client.proofs.list_proof.list_proof import HashedEntry\n'), ((3069, 3112), 'exonum_client.proofs.list_proof.ListProof.parse', 'ListProof.parse', (['malformed_proof', '_to_bytes'], {}), '(malformed_proof, _to_bytes)\n', (3084, 3112), False, 'from exonum_client.proofs.list_proof import ListProof\n'), ((2381, 2399), 'exonum_client.proofs.list_proof.key.ProofListKey', 'ProofListKey', (['(1)', '(1)'], {}), '(1, 1)\n', (2393, 2399), False, 'from exonum_client.proofs.list_proof.key import ProofListKey\n')]
|
"""
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
# Write your code here
import sys
from collections import defaultdict
sys.setrecursionlimit(100000)
k = int(input())
n = int(input())
if n == 1:
print(0)
elif k == 1:
vertices = input()
print(n)
else:
vertices = list(map(int, input().strip().split()))
res = 0
def dfs(adj, visited, i, k):
global res
des = 1
visited[i] = True
for j in adj[i]:
if not visited[j]:
des += dfs(adj, visited, j, k)
if des >= k:
res += 1
des = 0
return des
graph = defaultdict(list)
for i in range(2, len(vertices) + 2):
v = vertices[i - 2]
graph[i].append(v)
graph[v].append(i)
seen = [False] * (n + 1)
dfs(graph, seen, 1, k)
print(res)
|
[
"collections.defaultdict",
"sys.setrecursionlimit"
] |
[((325, 354), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(100000)'], {}), '(100000)\n', (346, 354), False, 'import sys\n'), ((830, 847), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (841, 847), False, 'from collections import defaultdict\n')]
|
# Generated by Django 3.2 on 2022-02-04 03:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Apps', '0002_diaries'),
]
operations = [
migrations.AddField(
model_name='user',
name='remark',
field=models.CharField(max_length=50, null=True, verbose_name='备注'),
),
]
|
[
"django.db.models.CharField"
] |
[((316, 377), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'null': '(True)', 'verbose_name': '"""备注"""'}), "(max_length=50, null=True, verbose_name='备注')\n", (332, 377), False, 'from django.db import migrations, models\n')]
|
import datetime
from flask import current_app, jsonify
from http import HTTPStatus
from sqlalchemy.orm import Session
import werkzeug.exceptions
from app.controllers.sales.helpers_sales import (
helper_verify_quatity_product_stock,
)
from app.models.product.products_model import ProductModel
from app.decorators import verify_payload
from app.models.orders_has_products.orders_has_products import OrdersHasProductsModel
from app.models.orders_sellers.orders_seller import OrdersModel
from app.models import ClientModel
from app.models.types_sales.type_sale import TypeSaleModel
@verify_payload(
fields_and_types={
"id_seller": int,
"id_client": int,
"id_store": int,
"products": list,
"id_type_sale": int,
}
)
def create_sale(data: dict):
try:
session: Session = current_app.db.session
orders_products = []
date_now = datetime.date.today()
list_products = data.pop("products")
if not list_products:
return {"erro": "list of products empty."}, HTTPStatus.BAD_REQUEST
ClientModel.query.get_or_404(data["id_client"], description="client")
type_sale: TypeSaleModel = TypeSaleModel.query.get_or_404(
data["id_type_sale"], description="type sale"
)
sale_finish = (
{"sale_finish": True}
if type_sale.name == "Loja"
else {"sale_finish": False}
)
new_order = OrdersModel(**{**data, **sale_finish})
orders_products = []
for product in list_products:
if type(product["size"]) == str:
product["size"] = product["size"].upper()
product_stock: ProductModel = ProductModel.query.get_or_404(
product["id_product"],
description=f"id_product: {product['id_product']}",
)
if not (
helper_verify_quatity_product_stock(product, product_stock.variations)
):
return {
"erro": f"id_product:{product['id_product']} size:{product['size']} quantity greater than stock!"
}, HTTPStatus.BAD_REQUEST
product_stock.sale_product(product)
date_start = product_stock.date_start
date_end = product_stock.date_end
sale_value = 0
if date_now >= date_start and date_now <= date_end:
sale_value = product_stock.sale_value_promotion
elif date_now >= date_start and date_now <= date_end:
sale_value = product_stock.sale_value_promotion
else:
sale_value = product_stock.sale_value_varejo
orders_products.append(
OrdersHasProductsModel(
**{
"sale_value": sale_value,
"quantity": product["quantity"],
"color": product["color"],
"size": product["size"],
"id_product": product["id_product"],
"id_order": new_order.id_order,
}
)
)
new_order.orders_has_products = orders_products
session.add(new_order)
session.commit()
return jsonify(new_order), HTTPStatus.CREATED
except werkzeug.exceptions.NotFound as e:
return {"erro": f"{e.description} Not found."}, HTTPStatus.NOT_FOUND
except InterruptedError as e:
return {"erro": f"{e.args[0]}"}, HTTPStatus.BAD_REQUEST
except Exception as e:
raise e
|
[
"app.controllers.sales.helpers_sales.helper_verify_quatity_product_stock",
"app.models.orders_sellers.orders_seller.OrdersModel",
"app.models.orders_has_products.orders_has_products.OrdersHasProductsModel",
"app.decorators.verify_payload",
"datetime.date.today",
"app.models.types_sales.type_sale.TypeSaleModel.query.get_or_404",
"app.models.ClientModel.query.get_or_404",
"flask.jsonify",
"app.models.product.products_model.ProductModel.query.get_or_404"
] |
[((590, 719), 'app.decorators.verify_payload', 'verify_payload', ([], {'fields_and_types': "{'id_seller': int, 'id_client': int, 'id_store': int, 'products': list,\n 'id_type_sale': int}"}), "(fields_and_types={'id_seller': int, 'id_client': int,\n 'id_store': int, 'products': list, 'id_type_sale': int})\n", (604, 719), False, 'from app.decorators import verify_payload\n'), ((906, 927), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (925, 927), False, 'import datetime\n'), ((1092, 1161), 'app.models.ClientModel.query.get_or_404', 'ClientModel.query.get_or_404', (["data['id_client']"], {'description': '"""client"""'}), "(data['id_client'], description='client')\n", (1120, 1161), False, 'from app.models import ClientModel\n'), ((1198, 1275), 'app.models.types_sales.type_sale.TypeSaleModel.query.get_or_404', 'TypeSaleModel.query.get_or_404', (["data['id_type_sale']"], {'description': '"""type sale"""'}), "(data['id_type_sale'], description='type sale')\n", (1228, 1275), False, 'from app.models.types_sales.type_sale import TypeSaleModel\n'), ((1467, 1505), 'app.models.orders_sellers.orders_seller.OrdersModel', 'OrdersModel', ([], {}), '(**{**data, **sale_finish})\n', (1478, 1505), False, 'from app.models.orders_sellers.orders_seller import OrdersModel\n'), ((1720, 1829), 'app.models.product.products_model.ProductModel.query.get_or_404', 'ProductModel.query.get_or_404', (["product['id_product']"], {'description': 'f"""id_product: {product[\'id_product\']}"""'}), '(product[\'id_product\'], description=\n f"id_product: {product[\'id_product\']}")\n', (1749, 1829), False, 'from app.models.product.products_model import ProductModel\n'), ((3299, 3317), 'flask.jsonify', 'jsonify', (['new_order'], {}), '(new_order)\n', (3306, 3317), False, 'from flask import current_app, jsonify\n'), ((1910, 1980), 'app.controllers.sales.helpers_sales.helper_verify_quatity_product_stock', 'helper_verify_quatity_product_stock', (['product', 'product_stock.variations'], {}), 
'(product, product_stock.variations)\n', (1945, 1980), False, 'from app.controllers.sales.helpers_sales import helper_verify_quatity_product_stock\n'), ((2745, 2960), 'app.models.orders_has_products.orders_has_products.OrdersHasProductsModel', 'OrdersHasProductsModel', ([], {}), "(**{'sale_value': sale_value, 'quantity': product[\n 'quantity'], 'color': product['color'], 'size': product['size'],\n 'id_product': product['id_product'], 'id_order': new_order.id_order})\n", (2767, 2960), False, 'from app.models.orders_has_products.orders_has_products import OrdersHasProductsModel\n')]
|
#!/usr/bin/env python3
#
# Advent of Code 2017 - Day 21
#
import logging
logging.basicConfig(format="%(asctime)s %(message)s", level=logging.INFO)
logger = logging.getLogger()
INPUTFILE = 'input.txt'
START = ['.#.', '..#', '###']
def sample_input():
return """
../.# => ##./#../...
.#./..#/### => #..#/..../..../#..#
"""
# Utility functions
def load_input(infile):
lines = []
with open(infile, 'r') as fp:
for line in fp:
line = line.strip()
if line:
lines.append(line)
return lines
def split_nonblank_lines(text):
lines = []
for line in text.splitlines():
line = line.strip()
if line:
lines.append(line)
return lines
# Solution
def load_rule(input0, output):
"""Load a single enhancement rule.
A dict mapping all matching input patterns to the given output
pattern is returned.
"""
result = {input0: output}
pix = input0.split('/')
if len(pix) == 2:
for _ in range(3):
pix = [pix[0][1] + pix[1][1], pix[0][0] + pix[1][0]]
result["/".join(pix)] = output
vflip = [pix[1][0] + pix[1][1], pix[0][0] + pix[0][1]]
result["/".join(vflip)] = output
hflip = [pix[0][1] + pix[0][0], pix[1][1] + pix[1][0]]
result["/".join(hflip)] = output
elif len(pix) == 3:
for _ in range(3):
pix = [pix[0][2] + pix[1][2] + pix[2][2],
pix[0][1] + pix[1][1] + pix[2][1],
pix[0][0] + pix[1][0] + pix[2][0]]
result["/".join(pix)] = output
vflip = [pix[2][0] + pix[2][1] + pix[2][2],
pix[1][0] + pix[1][1] + pix[1][2],
pix[0][0] + pix[0][1] + pix[0][2]]
result["/".join(vflip)] = output
hflip = [pix[0][2] + pix[0][1] + pix[0][0],
pix[1][2] + pix[1][1] + pix[1][0],
pix[2][2] + pix[2][1] + pix[2][0]]
result["/".join(hflip)] = output
return result
def load_rulebook(lines):
"""Load a table of enhancement rules.
A dict mapping the input and output patterns is returned.
"""
result = dict()
for line in lines:
input_pattern, output_pattern = line.split(' => ')
result.update(load_rule(input_pattern, output_pattern))
return result
def enhance(pattern, rules):
"""Apply the given rules to the input pattern (a list of strings.)
The output pattern is returned.
"""
assert len(pattern) == max([len(row) for row in pattern])
assert len(pattern) == min([len(row) for row in pattern])
if len(pattern) % 2 == 0:
unit = 2
nunit = len(pattern) // 2
else:
unit = 3
nunit = len(pattern) // 3
result = []
for iunit in range(nunit):
inputs = ["/".join(rows) for rows in zip(
*[[pattern[i][junit*unit:(junit+1)*unit]
for junit in range(nunit)]
for i in range(iunit*unit, (iunit+1)*unit)])]
outputs = ["".join(segs) for segs in zip(
*[rules[pat].split('/') for pat in inputs])]
result.extend(outputs)
return result
def solve(lines, n):
"""Apply the rules specified in the input lines to the starting
pattern for n iterations.
The number of lit pixels in the final pattern is returned.
"""
rules = load_rulebook(lines)
pattern = START
for _ in range(n):
pattern = enhance(pattern, rules)
return sum([row.count('#') for row in pattern])
# PART 1
def example():
lines = split_nonblank_lines(sample_input())
expected = 12
result = solve(lines, 2)
logger.info("result is {} (expected {})".format(result, expected))
assert result == expected
logger.info('= ' * 32)
def part1(lines):
result = solve(lines, 5)
logger.info("result is {}".format(result))
logger.info('= ' * 32)
# PART 2
def part2(lines):
result = solve(lines, 18)
logger.info("result is {}".format(result))
logger.info('= ' * 32)
if __name__ == '__main__':
example()
input = load_input(INPUTFILE)
part1(input)
part2(input)
|
[
"logging.getLogger",
"logging.basicConfig"
] |
[((75, 148), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(message)s"""', 'level': 'logging.INFO'}), "(format='%(asctime)s %(message)s', level=logging.INFO)\n", (94, 148), False, 'import logging\n'), ((158, 177), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (175, 177), False, 'import logging\n')]
|
"""
Some codes from https://github.com/Newmu/dcgan_code
"""
from __future__ import division
import math
import json
import random
import pprint
import scipy.misc
import numpy as np
import os
from time import gmtime, strftime
#pp = pprint.PrettyPrinter()
#get_stddev = lambda x, k_h, k_w: 1/math.sqrt(k_w*k_h*x.get_shape()[-1])
def load_data(image_path, flip=False, is_test=False, image_size = 128):
img = load_image(image_path)
img = preprocess_img(img, img_size=image_size, flip=flip, is_test=is_test)
img = img/127.5 - 1.
if len(img.shape)<3:
img = np.expand_dims(img, axis=2)
return img
def load_image(image_path):
img = imread(image_path)
return img
def preprocess_img(img, img_size=128, flip=False, is_test=False):
img = scipy.misc.imresize(img, [img_size, img_size])
if (not is_test) and flip and np.random.random() > 0.5:
img = np.fliplr(img)
return img
def get_image(image_path, image_size, is_crop=True, resize_w=64, is_grayscale = False):
return transform(imread(image_path, is_grayscale), image_size, is_crop, resize_w)
def save_images(images, size, image_path):
dir = os.path.dirname(image_path)
if not os.path.exists(dir):
os.makedirs(dir)
return imsave(inverse_transform(images), size, image_path)
def imread(path, is_grayscale = False):
if (is_grayscale):
return scipy.misc.imread(path, flatten = True)#.astype(np.float)
else:
return scipy.misc.imread(path)#.astype(np.float)
def merge_images(images, size):
return inverse_transform(images)
def merge(images, size):
h, w = images.shape[1], images.shape[2]
if len(images.shape) < 4:
img = np.zeros((h * size[0], w * size[1], 1))
images = np.expand_dims(images, axis = 3)
else:
img = np.zeros((h * size[0], w * size[1], images.shape[3]))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j*h:j*h+h, i*w:i*w+w, :] = image
if images.shape[3] ==1:
return np.concatenate([img,img,img],axis=2)
else:
return img.astype(np.uint8)
def imsave(images, size, path):
return scipy.misc.imsave(path, merge(images, size))
def transform(image, npx=64, is_crop=True, resize_w=64):
# npx : # of pixels width/height of image
if is_crop:
cropped_image = center_crop(image, npx, resize_w=resize_w)
else:
cropped_image = image
return np.array(cropped_image)/127.5 - 1.
def inverse_transform(images):
return ((images+1.)*127.5)
|
[
"os.makedirs",
"os.path.dirname",
"os.path.exists",
"numpy.expand_dims",
"numpy.zeros",
"numpy.fliplr",
"numpy.random.random",
"numpy.array",
"numpy.concatenate"
] |
[((1157, 1184), 'os.path.dirname', 'os.path.dirname', (['image_path'], {}), '(image_path)\n', (1172, 1184), False, 'import os\n'), ((584, 611), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(2)'}), '(img, axis=2)\n', (598, 611), True, 'import numpy as np\n'), ((898, 912), 'numpy.fliplr', 'np.fliplr', (['img'], {}), '(img)\n', (907, 912), True, 'import numpy as np\n'), ((1196, 1215), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (1210, 1215), False, 'import os\n'), ((1225, 1241), 'os.makedirs', 'os.makedirs', (['dir'], {}), '(dir)\n', (1236, 1241), False, 'import os\n'), ((1693, 1732), 'numpy.zeros', 'np.zeros', (['(h * size[0], w * size[1], 1)'], {}), '((h * size[0], w * size[1], 1))\n', (1701, 1732), True, 'import numpy as np\n'), ((1750, 1780), 'numpy.expand_dims', 'np.expand_dims', (['images'], {'axis': '(3)'}), '(images, axis=3)\n', (1764, 1780), True, 'import numpy as np\n'), ((1807, 1860), 'numpy.zeros', 'np.zeros', (['(h * size[0], w * size[1], images.shape[3])'], {}), '((h * size[0], w * size[1], images.shape[3]))\n', (1815, 1860), True, 'import numpy as np\n'), ((2043, 2082), 'numpy.concatenate', 'np.concatenate', (['[img, img, img]'], {'axis': '(2)'}), '([img, img, img], axis=2)\n', (2057, 2082), True, 'import numpy as np\n'), ((858, 876), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (874, 876), True, 'import numpy as np\n'), ((2453, 2476), 'numpy.array', 'np.array', (['cropped_image'], {}), '(cropped_image)\n', (2461, 2476), True, 'import numpy as np\n')]
|
#!/usr/bin/env
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import argparse
import datetime
import logging.config
import pytz
from campbellsciparser import cr
from tasks import insert_to_daily_single_measurements_by_sensor
from tasks import insert_to_hourly_single_measurements_by_sensor
from tasks import insert_to_thirty_min_single_measurements_by_sensor
from tasks import insert_to_twenty_min_single_measurements_by_sensor
from tasks import insert_to_fifteen_min_single_measurements_by_sensor
from tasks import insert_to_ten_min_single_measurements_by_sensor
from tasks import insert_to_five_min_single_measurements_by_sensor
from tasks import insert_to_one_min_single_measurements_by_sensor
from tasks import insert_to_one_sec_single_measurements_by_sensor
from tasks import insert_to_daily_profile_measurements_by_sensor
from tasks import insert_to_hourly_profile_measurements_by_sensor
from tasks import insert_to_thirty_min_profile_measurements_by_sensor
from tasks import insert_to_twenty_min_profile_measurements_by_sensor
from tasks import insert_to_fifteen_min_profile_measurements_by_sensor
from tasks import insert_to_ten_min_profile_measurements_by_sensor
from tasks import insert_to_five_min_profile_measurements_by_sensor
from tasks import insert_to_one_min_profile_measurements_by_sensor
from tasks import insert_to_one_sec_profile_measurements_by_sensor
import utils
# Resolve paths relative to the project root (one level above this script).
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
APP_CONFIG_PATH = os.path.join(BASE_DIR, 'cfg/appconfig.yaml')
LOGGING_CONFIG_PATH = os.path.join(BASE_DIR, 'cfg/logging.yaml')
# Configure logging from the YAML file before any logger is used.
logging_conf = utils.load_config(LOGGING_CONFIG_PATH)
logging.config.dictConfig(logging_conf)
logger_info = logging.getLogger('cassandra_storage_info')
logger_debug = logging.getLogger('cassandra_storage_debug')
def process_daily_profile_measurements_by_sensor(station, file):
    """Route a daily file to the processor matching its 'source' key.

    :param station: station identifier, forwarded to the concrete processor.
    :param file: file configuration dict; 'source' must be 'profiles' or
        'parameters'.
    :return: number of rows read from the input file.
    :raises TypeError: if 'source' is neither 'profiles' nor 'parameters'.
    """
    source = file.get('source')
    if source == 'profiles':
        return process_daily_profile_measurements_by_sensor_profile_source(station, file)
    if source == 'parameters':
        return process_daily_parameters_to_profile_measurements_by_sensor(station, file)
    raise TypeError("source must be either profiles or parameters, got {}".format(source))
def process_daily_profile_measurements_by_sensor_profile_source(station, file):
    """Read a daily 'profiles'-source table file and enqueue its rows.

    For each configured parameter, extracts the timestamp, vertical position
    and value columns, formats one tuple per row and enqueues the batch via
    the daily-profile Celery task.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options,
        vertical position handling and the parameters mapping).
    :return: number of rows read from the input file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Truncate to midnight; daily rows are keyed on the day.
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                # Snap the position to the nearest configured increment.
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, year, int(day.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_daily_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_daily_parameters_to_profile_measurements_by_sensor(station, file):
    """Read a daily 'parameters'-source table file and enqueue its rows.

    Unlike the 'profiles' variant, the vertical position is fixed per
    parameter (taken from the config) rather than read from a data column.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options
        and the parameters mapping; each parameter carries its own
        'vertical_position').
    :return: number of rows read from the input file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Truncate to midnight; daily rows are keyed on the day.
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, year, int(day.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_daily_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_hourly_profile_measurements_by_sensor(station, file):
    """Route an hourly file to the processor matching its 'source' key.

    :param station: station identifier, forwarded to the concrete processor.
    :param file: file configuration dict; 'source' must be 'profiles' or
        'parameters'.
    :return: number of rows read from the input file.
    :raises TypeError: if 'source' is neither 'profiles' nor 'parameters'.
    """
    handlers = {
        'profiles': process_hourly_profile_measurements_by_sensor_profile_source,
        'parameters': process_hourly_parameters_to_profile_measurements_by_sensor,
    }
    source = file.get('source')
    if source not in handlers:
        raise TypeError("source must be either profiles or parameters, got {}".format(source))
    return handlers[source](station, file)
def process_hourly_profile_measurements_by_sensor_profile_source(station, file):
    """Read an hourly 'profiles'-source table file and enqueue its rows.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options,
        vertical position handling and the parameters mapping).
    :return: number of rows read from the input file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            # NOTE: despite the name, 'hour' keeps minute/second resolution.
            hour = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            value_type = param_info.get('value_type')
            if vertical_position_correction_factor is not None:
                # Snap the position to the nearest configured increment.
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, year, int(hour.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_hourly_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_hourly_parameters_to_profile_measurements_by_sensor(station, file):
    """Read an hourly 'parameters'-source table file and enqueue its rows.

    The vertical position is fixed per parameter (from the config) instead of
    being read from a data column.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options
        and the parameters mapping with per-parameter 'vertical_position').
    :return: number of rows read from the input file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    time_columns = file.get('time_columns')
    to_utc = file.get('to_utc')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            # NOTE: despite the name, 'hour' keeps minute/second resolution.
            hour = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, year, int(hour.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_hourly_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_thirty_min_profile_measurements_by_sensor(station, file):
    """Route a 30-minute file to the processor matching its 'source' key.

    :param station: station identifier, forwarded to the concrete processor.
    :param file: file configuration dict; 'source' must be 'profiles' or
        'parameters'.
    :return: number of rows read from the input file.
    :raises TypeError: if 'source' is neither 'profiles' nor 'parameters'.
    """
    source = file.get('source')
    if source == 'profiles':
        return process_thirty_min_profile_measurements_by_sensor_profile_source(station, file)
    if source == 'parameters':
        return process_thirty_min_parameters_to_profile_measurements_by_sensor(station, file)
    raise TypeError("source must be either profiles or parameters, got {}".format(source))
def process_thirty_min_profile_measurements_by_sensor_profile_source(station, file):
    """Read a 30-minute 'profiles'-source table file and enqueue its rows.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options,
        vertical position handling and the parameters mapping).
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                # Snap the position to the nearest configured increment.
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # BUG FIX: previously used the undefined name 'hour' (NameError);
            # use the parsed profile timestamp (epoch milliseconds) instead,
            # matching the parameters-source variant.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_thirty_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_thirty_min_parameters_to_profile_measurements_by_sensor(station, file):
    """Read a 30-minute 'parameters'-source table file and enqueue its rows.

    The vertical position is fixed per parameter (from the config) instead of
    being read from a data column.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options
        and the parameters mapping with per-parameter 'vertical_position').
    :return: number of rows read from the input file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    time_columns = file.get('time_columns')
    to_utc = file.get('to_utc')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_thirty_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_twenty_min_profile_measurements_by_sensor(station, file):
    """Route a 20-minute file to the processor matching its 'source' key.

    :param station: station identifier, forwarded to the concrete processor.
    :param file: file configuration dict; 'source' must be 'profiles' or
        'parameters'.
    :return: number of rows read from the input file.
    :raises TypeError: if 'source' is neither 'profiles' nor 'parameters'.
    """
    handlers = {
        'profiles': process_twenty_min_profile_measurements_by_sensor_profile_source,
        'parameters': process_twenty_min_parameters_to_profile_measurements_by_sensor,
    }
    source = file.get('source')
    if source not in handlers:
        raise TypeError("source must be either profiles or parameters, got {}".format(source))
    return handlers[source](station, file)
def process_twenty_min_profile_measurements_by_sensor_profile_source(station, file):
    """Read a 20-minute 'profiles'-source table file and enqueue its rows.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options,
        vertical position handling and the parameters mapping).
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                # Snap the position to the nearest configured increment.
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # BUG FIX: previously used the undefined name 'hour' (NameError);
            # use the parsed profile timestamp (epoch milliseconds) instead,
            # matching the parameters-source variant.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_twenty_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_twenty_min_parameters_to_profile_measurements_by_sensor(station, file):
    """Read a 20-minute 'parameters'-source table file and enqueue its rows.

    The vertical position is fixed per parameter (from the config) instead of
    being read from a data column.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options
        and the parameters mapping with per-parameter 'vertical_position').
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    time_columns = file.get('time_columns')
    to_utc = file.get('to_utc')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        # BUG FIX: previously called the nonexistent name
        # 'insert_to_twenty_profile_measurements_by_sensor' (NameError);
        # the imported Celery task is the one below.
        insert_to_twenty_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_fifteen_min_profile_measurements_by_sensor(station, file):
    """Route a 15-minute file to the processor matching its 'source' key.

    :param station: station identifier, forwarded to the concrete processor.
    :param file: file configuration dict; 'source' must be 'profiles' or
        'parameters'.
    :return: number of rows read from the input file.
    :raises TypeError: if 'source' is neither 'profiles' nor 'parameters'.
    """
    source = file.get('source')
    if source == 'profiles':
        return process_fifteen_min_profile_measurements_by_sensor_profile_source(station, file)
    if source == 'parameters':
        return process_fifteen_min_parameters_to_profile_measurements_by_sensor(station, file)
    raise TypeError("source must be either profiles or parameters, got {}".format(source))
def process_fifteen_min_profile_measurements_by_sensor_profile_source(station, file):
    """Read a 15-minute 'profiles'-source table file and enqueue its rows.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options,
        vertical position handling and the parameters mapping).
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                # Snap the position to the nearest configured increment.
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # BUG FIX: previously used the undefined name 'hour' (NameError);
            # use the parsed profile timestamp (epoch milliseconds) instead,
            # matching the parameters-source variant.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_fifteen_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_fifteen_min_parameters_to_profile_measurements_by_sensor(station, file):
    """Read a 15-minute 'parameters'-source table file and enqueue its rows.

    The vertical position is fixed per parameter (from the config) instead of
    being read from a data column.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options
        and the parameters mapping with per-parameter 'vertical_position').
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    time_columns = file.get('time_columns')
    to_utc = file.get('to_utc')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        # BUG FIX: previously called the nonexistent name
        # 'insert_to_fifteen_profile_measurements_by_sensor' (NameError);
        # the imported Celery task is the one below.
        insert_to_fifteen_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_ten_min_profile_measurements_by_sensor(station, file):
    """Route a 10-minute file to the processor matching its 'source' key.

    :param station: station identifier, forwarded to the concrete processor.
    :param file: file configuration dict; 'source' must be 'profiles' or
        'parameters'.
    :return: number of rows read from the input file.
    :raises TypeError: if 'source' is neither 'profiles' nor 'parameters'.
    """
    handlers = {
        'profiles': process_ten_min_profile_measurements_by_sensor_profile_source,
        'parameters': process_ten_min_parameters_to_profile_measurements_by_sensor,
    }
    source = file.get('source')
    if source not in handlers:
        raise TypeError("source must be either profiles or parameters, got {}".format(source))
    return handlers[source](station, file)
def process_ten_min_profile_measurements_by_sensor_profile_source(station, file):
    """Read a 10-minute 'profiles'-source table file and enqueue its rows.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options,
        vertical position handling and the parameters mapping).
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                # Snap the position to the nearest configured increment.
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # BUG FIX: previously used the undefined name 'hour' (NameError);
            # use the parsed profile timestamp (epoch milliseconds) instead,
            # matching the parameters-source variant.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_ten_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_ten_min_parameters_to_profile_measurements_by_sensor(station, file):
    """Read a 10-minute 'parameters'-source table file and enqueue its rows.

    The vertical position is fixed per parameter (from the config) instead of
    being read from a data column.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options
        and the parameters mapping with per-parameter 'vertical_position').
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    time_columns = file.get('time_columns')
    to_utc = file.get('to_utc')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        # BUG FIX: previously called the nonexistent name
        # 'insert_to_ten_profile_measurements_by_sensor' (NameError);
        # the imported Celery task is the one below.
        insert_to_ten_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_five_min_profile_measurements_by_sensor(station, file):
    """Route a 5-minute file to the processor matching its 'source' key.

    :param station: station identifier, forwarded to the concrete processor.
    :param file: file configuration dict; 'source' must be 'profiles' or
        'parameters'.
    :return: number of rows read from the input file.
    :raises TypeError: if 'source' is neither 'profiles' nor 'parameters'.
    """
    source = file.get('source')
    if source == 'profiles':
        return process_five_min_profile_measurements_by_sensor_profile_source(station, file)
    if source == 'parameters':
        return process_five_min_parameters_to_profile_measurements_by_sensor(station, file)
    raise TypeError("source must be either profiles or parameters, got {}".format(source))
def process_five_min_profile_measurements_by_sensor_profile_source(station, file):
    """Read a 5-minute 'profiles'-source table file and enqueue its rows.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options,
        vertical position handling and the parameters mapping).
    :return: number of rows read from the input file.
    """
    path = file.get('path')
    # BUG FIX: was 'eader_row=...', which left 'header_row' undefined and
    # raised NameError at the cr.read_table_data call below.
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                # Snap the position to the nearest configured increment.
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_five_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_five_min_parameters_to_profile_measurements_by_sensor(station, file):
    """Read a 5-minute 'parameters'-source table file and enqueue its rows.

    The vertical position is fixed per parameter (from the config) instead of
    being read from a data column.

    :param station: station identifier (not used in this function).
    :param file: file configuration dict (path, header/time parsing options
        and the parameters mapping with per-parameter 'vertical_position').
    :return: number of rows read from the input file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Partition key: first day of the month the measurement falls in.
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching value_type is populated; the rest stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamp stored in epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_five_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_one_min_profile_measurements_by_sensor(station, file):
    """Dispatch one-minute profile processing based on the file's 'source' key.

    Args:
        station: Station identifier, forwarded to the selected processor.
        file: Per-file configuration dict; 'source' selects the processor.

    Returns:
        Number of rows processed by the selected processor.

    Raises:
        TypeError: If 'source' is neither 'profiles' nor 'parameters'.
    """
    source = file.get('source')
    if source == 'profiles':
        return process_one_min_profile_measurements_by_sensor_profile_source(station, file)
    if source == 'parameters':
        return process_one_min_parameters_to_profile_measurements_by_sensor(station, file)
    raise TypeError("source must be either profiles or parameters, got {}".format(source))
def process_one_min_profile_measurements_by_sensor_profile_source(station, file):
    """Read one-minute profile data whose vertical position comes from a file
    column and queue the rows for asynchronous insertion (``.delay``) into
    one_min_profile_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options, the
            vertical-position column settings and a 'parameters' mapping.

    Returns:
        Number of data rows read from the source file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            year, week_number, weekday = ts.isocalendar()
            # Monday of the ISO week containing ts, as "YYYY-MM-DD".
            week_first_day = (datetime.datetime.strptime('{} {} 1'.format(year, week_number), '%Y %W %w')).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching this parameter's value_type is filled.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # BUG FIX: the original referenced an undefined name 'hour' here,
            # which raised NameError at runtime; the intended timestamp is
            # profile_ts (epoch milliseconds), matching the sibling
            # parameters-source processor.
            param_formatted_data.append((sensor_id, parameter_id, 0, week_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_one_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_one_min_parameters_to_profile_measurements_by_sensor(station, file):
    """Read one-minute data where each parameter carries a fixed vertical
    position in its configuration and queue the rows for asynchronous
    insertion (``.delay``) into one_min_profile_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    time_columns = file.get('time_columns')
    to_utc = file.get('to_utc')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        # Vertical position is fixed per parameter (config-driven), unlike
        # the 'profiles' source where it is read from a file column.
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            year, week_number, weekday = ts.isocalendar()  # weekday unused
            # Monday of the ISO week containing ts, as "YYYY-MM-DD".
            week_first_day = (datetime.datetime.strptime('{} {} 1'.format(year, week_number), '%Y %W %w')).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching this parameter's value_type is filled;
            # the other two stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamps are stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, week_first_day, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_one_min_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_one_sec_profile_measurements_by_sensor(station, file):
    """Dispatch one-second profile processing based on the file's 'source' key.

    Args:
        station: Station identifier, forwarded to the selected processor.
        file: Per-file configuration dict; 'source' selects the processor.

    Returns:
        Number of rows processed by the selected processor.

    Raises:
        TypeError: If 'source' is neither 'profiles' nor 'parameters'.
    """
    source = file.get('source')
    if source == 'profiles':
        return process_one_sec_profile_measurements_by_sensor_profile_source(station, file)
    if source == 'parameters':
        return process_one_sec_parameters_to_profile_measurements_by_sensor(station, file)
    raise TypeError("source must be either profiles or parameters, got {}".format(source))
def process_one_sec_profile_measurements_by_sensor_profile_source(station, file):
    """Read one-second profile data whose vertical position comes from a file
    column and queue the rows for asynchronous insertion (``.delay``) into
    one_sec_profile_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options, the
            vertical-position column settings and a 'parameters' mapping.

    Returns:
        Number of data rows read from the source file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    vertical_position_column = file.get('vertical_position_column')
    vertical_position_correction_factor = file.get('vertical_position_correction_factor')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", vertical_position_column, param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Calendar date of the reading, as "YYYY-MM-DD".
            date_dt = datetime.datetime(ts.year, ts.month, ts.day).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            vertical_position = float(row.get(vertical_position_column))
            if vertical_position_correction_factor is not None:
                vertical_position = utils.round_of_rating(vertical_position, vertical_position_correction_factor)
            # Only the slot matching this parameter's value_type is filled.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # BUG FIX: the original referenced an undefined name 'hour' here,
            # which raised NameError at runtime; the intended timestamp is
            # profile_ts (epoch milliseconds), matching the sibling
            # parameters-source processor.
            param_formatted_data.append((sensor_id, parameter_id, 0, date_dt, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_one_sec_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_one_sec_parameters_to_profile_measurements_by_sensor(station, file):
    """Read one-second data where each parameter carries a fixed vertical
    position in its configuration and queue the rows for asynchronous
    insertion (``.delay``) into one_sec_profile_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    time_columns = file.get('time_columns')
    to_utc = file.get('to_utc')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    logger_debug.debug(file)
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        # Vertical position is fixed per parameter (config-driven), unlike
        # the 'profiles' source where it is read from a file column.
        vertical_position = float(param_info.get('vertical_position'))
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # Calendar date of the reading, as "YYYY-MM-DD".
            date_dt = datetime.datetime(ts.year, ts.month, ts.day).strftime("%Y-%m-%d")
            profile_ts = datetime.datetime(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
            # Only the slot matching this parameter's value_type is filled;
            # the other two stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Timestamps are stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, date_dt, int(profile_ts.timestamp()) * 1e3, vertical_position, min_value, avg_value, max_value, unit))
        insert_to_one_sec_profile_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_daily_single_measurements_by_sensor(station, file):
    """Read daily single-measurement data and queue the rows for asynchronous
    insertion (``.delay``) into daily_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    parameters = file.get('parameters')
    data = cr.read_table_data(
        infile_path=file.get('path'),
        header_row=file.get('header_row'),
        first_line_num=file.get('first_line_num', 0),
        parse_time_columns=file.get('parse_time_columns'),
        time_zone=file.get('time_zone'),
        time_format_args_library=file.get('time_format_args_library'),
        time_parsed_column="timestamp",
        time_columns=file.get('time_columns'),
        to_utc=file.get('to_utc'),
    )
    num_of_new_rows = len(data)
    for param, info in parameters.items():
        parameter_id = info.get('parameter_id')
        unit = info.get('unit')
        sensor_id = info.get('sensor_id')
        value_type = info.get('value_type')
        batch = []
        for row in cr.extract_columns_data(data, "timestamp", param):
            ts = row.get('timestamp')
            # Midnight of the reading's day; its epoch-ms value is stored.
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            # Only the slot matching this parameter's value_type is filled.
            values = {'min_value': None, 'avg_value': None, 'max_value': None}
            if value_type in values:
                values[value_type] = float(row.get(param))
            batch.append((sensor_id, parameter_id, 0, year,
                          int(day.timestamp()) * 1e3,
                          values['min_value'], values['avg_value'],
                          values['max_value'], unit))
        insert_to_daily_single_measurements_by_sensor.delay(batch)
    return num_of_new_rows
def process_hourly_single_measurements_by_sensor(station, file):
    """Read hourly single-measurement data and queue the rows for asynchronous
    insertion (``.delay``) into hourly_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # 'day' exists only to derive the year bucket below.
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            # Only the slot matching this parameter's value_type is filled;
            # the other two stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Full reading timestamp is stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, year, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
        insert_to_hourly_single_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_thirty_min_single_measurements_by_sensor(station, file):
    """Read thirty-minute single-measurement data and queue the rows for
    asynchronous insertion (``.delay``) into
    thirty_min_single_measurements_by_sensor.

    BUG FIX: this function was originally misnamed
    process_twenty_min_single_measurements_by_sensor, duplicating (and being
    shadowed by) the real twenty-minute processor defined directly below it,
    while run_update's call to process_thirty_min_single_measurements_by_sensor
    raised NameError. The body inserts into the thirty-minute table, so it is
    the thirty-minute processor and is renamed accordingly.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path = file.get('path')
    header_row = file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # 'day' exists only to derive the year bucket below.
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            # Only the slot matching this parameter's value_type is filled.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Full reading timestamp is stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, year, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
        insert_to_thirty_min_single_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_twenty_min_single_measurements_by_sensor(station, file):
    """Read twenty-minute single-measurement data and queue the rows for
    asynchronous insertion (``.delay``) into
    twenty_min_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # 'day' exists only to derive the year bucket below.
            day = datetime.datetime(ts.year, ts.month, ts.day)
            year = int(day.strftime("%Y"))
            # Only the slot matching this parameter's value_type is filled;
            # the other two stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Full reading timestamp is stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, year, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
        insert_to_twenty_min_single_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_fifteen_min_single_measurements_by_sensor(station, file):
    """Read fifteen-minute single-measurement data and queue the rows for
    asynchronous insertion (``.delay``) into
    fifteen_min_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # First day of the month containing ts, as "YYYY-MM-DD".
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            # Only the slot matching this parameter's value_type is filled;
            # the other two stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Full reading timestamp is stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
        insert_to_fifteen_min_single_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_ten_min_single_measurements_by_sensor(station, file):
    """Read ten-minute single-measurement data and queue the rows for
    asynchronous insertion (``.delay``) into
    ten_min_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    parameters = file.get('parameters')
    data = cr.read_table_data(
        infile_path=file.get('path'),
        header_row=file.get('header_row'),
        first_line_num=file.get('first_line_num', 0),
        parse_time_columns=file.get('parse_time_columns'),
        time_zone=file.get('time_zone'),
        time_format_args_library=file.get('time_format_args_library'),
        time_parsed_column="timestamp",
        time_columns=file.get('time_columns'),
        to_utc=file.get('to_utc'),
    )
    num_of_new_rows = len(data)
    for param, info in parameters.items():
        parameter_id = info.get('parameter_id')
        unit = info.get('unit')
        sensor_id = info.get('sensor_id')
        value_type = info.get('value_type')
        batch = []
        for row in cr.extract_columns_data(data, "timestamp", param):
            ts = row.get('timestamp')
            # First day of the month containing ts, as "YYYY-MM-DD".
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            # Only the slot matching this parameter's value_type is filled.
            values = {'min_value': None, 'avg_value': None, 'max_value': None}
            if value_type in values:
                values[value_type] = float(row.get(param))
            batch.append((sensor_id, parameter_id, 0, month_first_day,
                          int(ts.timestamp()) * 1e3,
                          values['min_value'], values['avg_value'],
                          values['max_value'], unit))
        insert_to_ten_min_single_measurements_by_sensor.delay(batch)
    return num_of_new_rows
def process_five_min_single_measurements_by_sensor(station, file):
    """Read five-minute single-measurement data and queue the rows for
    asynchronous insertion (``.delay``) into
    five_min_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            # First day of the month containing ts, as "YYYY-MM-DD".
            month_first_day = datetime.datetime(ts.year, ts.month, 1).strftime("%Y-%m-%d")
            # Only the slot matching this parameter's value_type is filled;
            # the other two stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Full reading timestamp is stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, month_first_day, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
        insert_to_five_min_single_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_one_min_single_measurements_by_sensor(station, file):
    """Read one-minute single-measurement data and queue the rows for
    asynchronous insertion (``.delay``) into
    one_min_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    path=file.get('path')
    #sensor_id = file.get('sensor_id')
    header_row=file.get('header_row')
    first_line_num = file.get('first_line_num', 0)
    time_format_args_library = file.get('time_format_args_library')
    time_zone = file.get('time_zone')
    to_utc = file.get('to_utc')
    time_columns = file.get('time_columns')
    parse_time_columns = file.get('parse_time_columns')
    parameters = file.get('parameters')
    data = cr.read_table_data(infile_path=path, header_row=header_row, first_line_num=first_line_num, parse_time_columns=parse_time_columns, time_zone=time_zone, time_format_args_library=time_format_args_library, time_parsed_column="timestamp", time_columns=time_columns, to_utc=to_utc)
    num_of_new_rows = len(data)
    for param, param_info in parameters.items():
        parameter_id = param_info.get('parameter_id')
        unit = param_info.get('unit')
        sensor_id = param_info.get('sensor_id')
        value_type = param_info.get('value_type')
        param_data = cr.extract_columns_data(data, "timestamp", param)
        param_formatted_data = []
        for row in param_data:
            ts = row.get('timestamp')
            year, week_number, weekday = ts.isocalendar()  # weekday unused
            # Monday of the ISO week containing ts, as "YYYY-MM-DD".
            week_first_day = (datetime.datetime.strptime('{} {} 1'.format(year, week_number), '%Y %W %w')).strftime("%Y-%m-%d")
            # Only the slot matching this parameter's value_type is filled;
            # the other two stay None.
            min_value, avg_value, max_value = None, None, None
            if value_type == 'min_value':
                min_value = float(row.get(param))
            elif value_type == 'avg_value':
                avg_value = float(row.get(param))
            elif value_type == 'max_value':
                max_value = float(row.get(param))
            # Full reading timestamp is stored as epoch milliseconds.
            param_formatted_data.append((sensor_id, parameter_id, 0, week_first_day, int(ts.timestamp()) * 1e3, min_value, avg_value, max_value, unit))
        insert_to_one_min_single_measurements_by_sensor.delay(param_formatted_data)
    return num_of_new_rows
def process_one_sec_single_measurements_by_sensor(station, file):
    """Read one-second single-measurement data and queue the rows for
    asynchronous insertion (``.delay``) into
    one_sec_single_measurements_by_sensor.

    Args:
        station: Station identifier (unused in the body; kept for the uniform
            processing-function signature).
        file: Per-file configuration dict with path/time-parsing options and a
            'parameters' mapping of column name -> parameter settings.

    Returns:
        Number of data rows read from the source file.
    """
    parameters = file.get('parameters')
    data = cr.read_table_data(
        infile_path=file.get('path'),
        header_row=file.get('header_row'),
        first_line_num=file.get('first_line_num', 0),
        parse_time_columns=file.get('parse_time_columns'),
        time_zone=file.get('time_zone'),
        time_format_args_library=file.get('time_format_args_library'),
        time_parsed_column="timestamp",
        time_columns=file.get('time_columns'),
        to_utc=file.get('to_utc'),
    )
    num_of_new_rows = len(data)
    for param, info in parameters.items():
        parameter_id = info.get('parameter_id')
        unit = info.get('unit')
        sensor_id = info.get('sensor_id')
        value_type = info.get('value_type')
        batch = []
        for row in cr.extract_columns_data(data, "timestamp", param):
            ts = row.get('timestamp')
            # Calendar date of the reading, as "YYYY-MM-DD".
            date_dt = datetime.datetime(ts.year, ts.month, ts.day).strftime("%Y-%m-%d")
            # Only the slot matching this parameter's value_type is filled.
            values = {'min_value': None, 'avg_value': None, 'max_value': None}
            if value_type in values:
                values[value_type] = float(row.get(param))
            batch.append((sensor_id, parameter_id, 0, date_dt,
                          int(ts.timestamp()) * 1e3,
                          values['min_value'], values['avg_value'],
                          values['max_value'], unit))
        insert_to_one_sec_single_measurements_by_sensor.delay(batch)
    return num_of_new_rows
def run_update(config_file, args):
    """Look up the configured entry for the requested station/file, dispatch
    it to the processor matching its 'table' key, and (with --track) record
    how far into the file processing has advanced.

    Args:
        config_file: Full application config mapping station -> file -> settings.
        args: Parsed CLI namespace with .station, .file and .track.
    """
    file = config_file[args.station][args.file]
    num_of_new_rows = 0
    if (file.get('table') == 'daily_single_measurements_by_sensor'):
        num_of_new_rows = process_daily_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'hourly_single_measurements_by_sensor'):
        num_of_new_rows = process_hourly_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'thirty_min_single_measurements_by_sensor'):
        # NOTE(review): verify process_thirty_min_single_measurements_by_sensor
        # actually exists — in this module section the thirty-minute body is
        # misnamed process_twenty_min_single_measurements_by_sensor.
        num_of_new_rows = process_thirty_min_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'twenty_min_single_measurements_by_sensor'):
        num_of_new_rows = process_twenty_min_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'fifteen_min_single_measurements_by_sensor'):
        num_of_new_rows = process_fifteen_min_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'ten_min_single_measurements_by_sensor'):
        num_of_new_rows = process_ten_min_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'five_min_single_measurements_by_sensor'):
        num_of_new_rows = process_five_min_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'one_min_single_measurements_by_sensor'):
        num_of_new_rows = process_one_min_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'one_sec_single_measurements_by_sensor'):
        num_of_new_rows = process_one_sec_single_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'daily_profile_measurements_by_sensor'):
        num_of_new_rows = process_daily_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'hourly_profile_measurements_by_sensor'):
        num_of_new_rows = process_hourly_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'thirty_min_profile_measurements_by_sensor'):
        num_of_new_rows = process_thirty_min_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'twenty_min_profile_measurements_by_sensor'):
        num_of_new_rows = process_twenty_min_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'fifteen_profile_measurements_by_sensor'):
        # NOTE(review): key lacks '_min_' unlike the other fifteen-minute
        # entries — confirm the config really uses this spelling.
        num_of_new_rows = process_fifteen_min_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'ten_profile_measurements_by_sensor'):
        # NOTE(review): both this key and the callee name drop '_min_';
        # process_ten_profile_measurements_by_sensor is not defined in this
        # module section — verify it exists (vs
        # process_ten_min_profile_measurements_by_sensor).
        num_of_new_rows = process_ten_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'five_min_profile_measurements_by_sensor'):
        num_of_new_rows = process_five_min_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'one_min_profile_measurements_by_sensor'):
        num_of_new_rows = process_one_min_profile_measurements_by_sensor(args.station, file)
    elif (file.get('table') == 'one_sec_profile_measurements_by_sensor'):
        num_of_new_rows = process_one_sec_profile_measurements_by_sensor(args.station, file)
    if args.track:
        if num_of_new_rows > 0:
            # Advance the per-file line pointer so the next run skips what
            # was already processed.
            first_line_num = file.get('first_line_num', 0)
            new_line_num = first_line_num + num_of_new_rows
            logger_info.info("Updated up to line number {num}".format(num=new_line_num))
            config_file[args.station][args.file]['first_line_num'] = new_line_num
    logger_info.info("Done processing table {table}".format(table=file.get('table')))
    if args.track:
        # Persist the updated line pointer back to the config file on disk.
        logger_info.info("Updating config file.")
        utils.save_config(APP_CONFIG_PATH, config_file)
def main():
    """Parse and validate command-line arguments, then run the update."""
    arg_parser = argparse.ArgumentParser(
        prog='CassandraFormatter',
        description='Program for formatting and storing logger data to Cassandra database.'
    )
    arg_parser.add_argument('-s', '--station', action='store', dest='station',
                            help='Station to process.')
    arg_parser.add_argument('-f', '--file', action='store', dest='file',
                            help='File to process.')
    arg_parser.add_argument('-t', '--track', dest='track',
                            action='store_true', default=False,
                            help='Track file line number.')
    cli_args = arg_parser.parse_args()
    # Both selectors are mandatory; argparse cannot express this with the
    # short/long option style used here, so validate manually.
    if not cli_args.station or not cli_args.file:
        arg_parser.error("--station and --file is required.")
    app_cfg = utils.load_config(APP_CONFIG_PATH)
    run_update(app_cfg, cli_args)


if __name__ == '__main__':
    main()
|
[
"campbellsciparser.cr.extract_columns_data",
"argparse.ArgumentParser",
"tasks.insert_to_hourly_single_measurements_by_sensor.delay",
"utils.save_config",
"utils.load_config",
"tasks.insert_to_one_min_profile_measurements_by_sensor.delay",
"os.path.join",
"tasks.insert_to_one_min_single_measurements_by_sensor.delay",
"tasks.insert_to_daily_profile_measurements_by_sensor.delay",
"os.path.abspath",
"tasks.insert_to_twenty_min_single_measurements_by_sensor.delay",
"os.path.dirname",
"utils.round_of_rating",
"tasks.insert_to_daily_single_measurements_by_sensor.delay",
"tasks.insert_to_one_sec_profile_measurements_by_sensor.delay",
"tasks.insert_to_twenty_min_profile_measurements_by_sensor.delay",
"tasks.insert_to_thirty_min_profile_measurements_by_sensor.delay",
"tasks.insert_to_hourly_profile_measurements_by_sensor.delay",
"datetime.datetime",
"tasks.insert_to_ten_min_single_measurements_by_sensor.delay",
"tasks.insert_to_five_min_single_measurements_by_sensor.delay",
"campbellsciparser.cr.read_table_data",
"tasks.insert_to_one_sec_single_measurements_by_sensor.delay",
"tasks.insert_to_ten_min_profile_measurements_by_sensor.delay",
"tasks.insert_to_thirty_min_single_measurements_by_sensor.delay",
"tasks.insert_to_five_min_profile_measurements_by_sensor.delay",
"tasks.insert_to_fifteen_min_profile_measurements_by_sensor.delay",
"tasks.insert_to_fifteen_min_single_measurements_by_sensor.delay"
] |
[((1541, 1585), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""cfg/appconfig.yaml"""'], {}), "(BASE_DIR, 'cfg/appconfig.yaml')\n", (1553, 1585), False, 'import os\n'), ((1608, 1650), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""cfg/logging.yaml"""'], {}), "(BASE_DIR, 'cfg/logging.yaml')\n", (1620, 1650), False, 'import os\n'), ((1667, 1705), 'utils.load_config', 'utils.load_config', (['LOGGING_CONFIG_PATH'], {}), '(LOGGING_CONFIG_PATH)\n', (1684, 1705), False, 'import utils\n'), ((1496, 1521), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1511, 1521), False, 'import os\n'), ((3086, 3375), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (3104, 3375), False, 'from campbellsciparser import cr\n'), ((5374, 5663), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, 
to_utc=to_utc)\n", (5392, 5663), False, 'from campbellsciparser import cr\n'), ((8117, 8406), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (8135, 8406), False, 'from campbellsciparser import cr\n'), ((10499, 10788), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (10517, 10788), False, 'from campbellsciparser import cr\n'), ((13355, 13644), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, 
parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (13373, 13644), False, 'from campbellsciparser import cr\n'), ((15739, 16028), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (15757, 16028), False, 'from campbellsciparser import cr\n'), ((18607, 18896), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (18625, 18896), False, 'from campbellsciparser import cr\n'), ((20991, 21280), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 
'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (21009, 21280), False, 'from campbellsciparser import cr\n'), ((23859, 24148), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (23877, 24148), False, 'from campbellsciparser import cr\n'), ((26245, 26534), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (26263, 26534), False, 'from campbellsciparser import cr\n'), ((29098, 29387), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], 
{'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (29116, 29387), False, 'from campbellsciparser import cr\n'), ((31476, 31765), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (31494, 31765), False, 'from campbellsciparser import cr\n'), ((34328, 34617), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', 
time_columns=time_columns, to_utc=to_utc)\n", (34346, 34617), False, 'from campbellsciparser import cr\n'), ((36726, 37015), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (36744, 37015), False, 'from campbellsciparser import cr\n'), ((39580, 39869), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (39598, 39869), False, 'from campbellsciparser import cr\n'), ((42052, 42341), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, 
first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (42070, 42341), False, 'from campbellsciparser import cr\n'), ((44999, 45288), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (45017, 45288), False, 'from campbellsciparser import cr\n'), ((47366, 47655), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (47384, 47655), False, 'from campbellsciparser import cr\n'), ((49496, 49785), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 
'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (49514, 49785), False, 'from campbellsciparser import cr\n'), ((51441, 51730), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (51459, 51730), False, 'from campbellsciparser import cr\n'), ((53394, 53683), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (53412, 53683), False, 'from campbellsciparser import cr\n'), ((55347, 55636), 'campbellsciparser.cr.read_table_data', 
'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (55365, 55636), False, 'from campbellsciparser import cr\n'), ((57301, 57590), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (57319, 57590), False, 'from campbellsciparser import cr\n'), ((59239, 59528), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n 
time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (59257, 59528), False, 'from campbellsciparser import cr\n'), ((61182, 61471), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (61200, 61471), False, 'from campbellsciparser import cr\n'), ((63126, 63415), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), "(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (63144, 63415), False, 'from campbellsciparser import cr\n'), ((65154, 65443), 'campbellsciparser.cr.read_table_data', 'cr.read_table_data', ([], {'infile_path': 'path', 'header_row': 'header_row', 'first_line_num': 'first_line_num', 'parse_time_columns': 'parse_time_columns', 'time_zone': 'time_zone', 'time_format_args_library': 'time_format_args_library', 'time_parsed_column': '"""timestamp"""', 'time_columns': 'time_columns', 'to_utc': 'to_utc'}), 
"(infile_path=path, header_row=header_row, first_line_num=\n first_line_num, parse_time_columns=parse_time_columns, time_zone=\n time_zone, time_format_args_library=time_format_args_library,\n time_parsed_column='timestamp', time_columns=time_columns, to_utc=to_utc)\n", (65172, 65443), False, 'from campbellsciparser import cr\n'), ((70326, 70466), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""CassandraFormatter"""', 'description': '"""Program for formatting and storing logger data to Cassandra database."""'}), "(prog='CassandraFormatter', description=\n 'Program for formatting and storing logger data to Cassandra database.')\n", (70349, 70466), False, 'import argparse\n'), ((71046, 71080), 'utils.load_config', 'utils.load_config', (['APP_CONFIG_PATH'], {}), '(APP_CONFIG_PATH)\n', (71063, 71080), False, 'import utils\n'), ((3656, 3731), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (3679, 3731), False, 'from campbellsciparser import cr\n'), ((4706, 4780), 'tasks.insert_to_daily_profile_measurements_by_sensor.delay', 'insert_to_daily_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (4758, 4780), False, 'from tasks import insert_to_daily_profile_measurements_by_sensor\n'), ((6015, 6064), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (6038, 6064), False, 'from campbellsciparser import cr\n'), ((6788, 6862), 'tasks.insert_to_daily_profile_measurements_by_sensor.delay', 'insert_to_daily_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (6840, 6862), False, 'from tasks import insert_to_daily_profile_measurements_by_sensor\n'), ((8637, 8712), 'campbellsciparser.cr.extract_columns_data', 
'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (8660, 8712), False, 'from campbellsciparser import cr\n'), ((9837, 9912), 'tasks.insert_to_hourly_profile_measurements_by_sensor.delay', 'insert_to_hourly_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (9890, 9912), False, 'from tasks import insert_to_hourly_profile_measurements_by_sensor\n'), ((11140, 11189), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (11163, 11189), False, 'from campbellsciparser import cr\n'), ((12009, 12084), 'tasks.insert_to_hourly_profile_measurements_by_sensor.delay', 'insert_to_hourly_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (12062, 12084), False, 'from tasks import insert_to_hourly_profile_measurements_by_sensor\n'), ((13925, 14000), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (13948, 14000), False, 'from campbellsciparser import cr\n'), ((15073, 15152), 'tasks.insert_to_thirty_min_profile_measurements_by_sensor.delay', 'insert_to_thirty_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (15130, 15152), False, 'from tasks import insert_to_thirty_min_profile_measurements_by_sensor\n'), ((16380, 16429), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (16403, 16429), False, 'from campbellsciparser import cr\n'), ((17257, 17336), 'tasks.insert_to_thirty_min_profile_measurements_by_sensor.delay', 'insert_to_thirty_min_profile_measurements_by_sensor.delay', 
(['param_formatted_data'], {}), '(param_formatted_data)\n', (17314, 17336), False, 'from tasks import insert_to_thirty_min_profile_measurements_by_sensor\n'), ((19177, 19252), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (19200, 19252), False, 'from campbellsciparser import cr\n'), ((20325, 20404), 'tasks.insert_to_twenty_min_profile_measurements_by_sensor.delay', 'insert_to_twenty_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (20382, 20404), False, 'from tasks import insert_to_twenty_min_profile_measurements_by_sensor\n'), ((21632, 21681), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (21655, 21681), False, 'from campbellsciparser import cr\n'), ((24429, 24504), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (24452, 24504), False, 'from campbellsciparser import cr\n'), ((25577, 25662), 'tasks.insert_to_fifteen_min_profile_measurements_by_sensor.delay', 'insert_to_fifteen_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data\n )\n', (25635, 25662), False, 'from tasks import insert_to_fifteen_min_profile_measurements_by_sensor\n'), ((26886, 26935), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (26909, 26935), False, 'from campbellsciparser import cr\n'), ((29668, 29743), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, 
param)\n", (29691, 29743), False, 'from campbellsciparser import cr\n'), ((30816, 30892), 'tasks.insert_to_ten_min_profile_measurements_by_sensor.delay', 'insert_to_ten_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (30870, 30892), False, 'from tasks import insert_to_ten_min_profile_measurements_by_sensor\n'), ((32117, 32166), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (32140, 32166), False, 'from campbellsciparser import cr\n'), ((34898, 34973), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (34921, 34973), False, 'from campbellsciparser import cr\n'), ((36052, 36129), 'tasks.insert_to_five_min_profile_measurements_by_sensor.delay', 'insert_to_five_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (36107, 36129), False, 'from tasks import insert_to_five_min_profile_measurements_by_sensor\n'), ((37367, 37416), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (37390, 37416), False, 'from campbellsciparser import cr\n'), ((38244, 38321), 'tasks.insert_to_five_min_profile_measurements_by_sensor.delay', 'insert_to_five_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (38299, 38321), False, 'from tasks import insert_to_five_min_profile_measurements_by_sensor\n'), ((40150, 40225), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (40173, 40225), False, 'from campbellsciparser import cr\n'), ((41392, 41468), 
'tasks.insert_to_one_min_profile_measurements_by_sensor.delay', 'insert_to_one_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (41446, 41468), False, 'from tasks import insert_to_one_min_profile_measurements_by_sensor\n'), ((42693, 42742), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (42716, 42742), False, 'from campbellsciparser import cr\n'), ((43664, 43740), 'tasks.insert_to_one_min_profile_measurements_by_sensor.delay', 'insert_to_one_min_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (43718, 43740), False, 'from tasks import insert_to_one_min_profile_measurements_by_sensor\n'), ((45569, 45644), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'vertical_position_column', 'param'], {}), "(data, 'timestamp', vertical_position_column, param)\n", (45592, 45644), False, 'from campbellsciparser import cr\n'), ((46706, 46782), 'tasks.insert_to_one_sec_profile_measurements_by_sensor.delay', 'insert_to_one_sec_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (46760, 46782), False, 'from tasks import insert_to_one_sec_profile_measurements_by_sensor\n'), ((48007, 48056), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (48030, 48056), False, 'from campbellsciparser import cr\n'), ((48873, 48949), 'tasks.insert_to_one_sec_profile_measurements_by_sensor.delay', 'insert_to_one_sec_profile_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (48927, 48949), False, 'from tasks import insert_to_one_sec_profile_measurements_by_sensor\n'), ((50066, 50115), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', 
'"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (50089, 50115), False, 'from campbellsciparser import cr\n'), ((50820, 50893), 'tasks.insert_to_daily_single_measurements_by_sensor.delay', 'insert_to_daily_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (50871, 50893), False, 'from tasks import insert_to_daily_single_measurements_by_sensor\n'), ((52011, 52060), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (52034, 52060), False, 'from campbellsciparser import cr\n'), ((52764, 52838), 'tasks.insert_to_hourly_single_measurements_by_sensor.delay', 'insert_to_hourly_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (52816, 52838), False, 'from tasks import insert_to_hourly_single_measurements_by_sensor\n'), ((53964, 54013), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (53987, 54013), False, 'from campbellsciparser import cr\n'), ((54717, 54795), 'tasks.insert_to_thirty_min_single_measurements_by_sensor.delay', 'insert_to_thirty_min_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (54773, 54795), False, 'from tasks import insert_to_thirty_min_single_measurements_by_sensor\n'), ((55917, 55966), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (55940, 55966), False, 'from campbellsciparser import cr\n'), ((56670, 56748), 'tasks.insert_to_twenty_min_single_measurements_by_sensor.delay', 'insert_to_twenty_min_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (56726, 56748), False, 'from tasks import insert_to_twenty_min_single_measurements_by_sensor\n'), ((57871, 
57920), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (57894, 57920), False, 'from campbellsciparser import cr\n'), ((58620, 58699), 'tasks.insert_to_fifteen_min_single_measurements_by_sensor.delay', 'insert_to_fifteen_min_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (58677, 58699), False, 'from tasks import insert_to_fifteen_min_single_measurements_by_sensor\n'), ((59809, 59858), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (59832, 59858), False, 'from campbellsciparser import cr\n'), ((60558, 60633), 'tasks.insert_to_ten_min_single_measurements_by_sensor.delay', 'insert_to_ten_min_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (60611, 60633), False, 'from tasks import insert_to_ten_min_single_measurements_by_sensor\n'), ((61752, 61801), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (61775, 61801), False, 'from campbellsciparser import cr\n'), ((62501, 62577), 'tasks.insert_to_five_min_single_measurements_by_sensor.delay', 'insert_to_five_min_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (62555, 62577), False, 'from tasks import insert_to_five_min_single_measurements_by_sensor\n'), ((63696, 63745), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (63719, 63745), False, 'from campbellsciparser import cr\n'), ((64539, 64614), 'tasks.insert_to_one_min_single_measurements_by_sensor.delay', 'insert_to_one_min_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (64592, 
64614), False, 'from tasks import insert_to_one_min_single_measurements_by_sensor\n'), ((65724, 65773), 'campbellsciparser.cr.extract_columns_data', 'cr.extract_columns_data', (['data', '"""timestamp"""', 'param'], {}), "(data, 'timestamp', param)\n", (65747, 65773), False, 'from campbellsciparser import cr\n'), ((66462, 66537), 'tasks.insert_to_one_sec_single_measurements_by_sensor.delay', 'insert_to_one_sec_single_measurements_by_sensor.delay', (['param_formatted_data'], {}), '(param_formatted_data)\n', (66515, 66537), False, 'from tasks import insert_to_one_sec_single_measurements_by_sensor\n'), ((70187, 70234), 'utils.save_config', 'utils.save_config', (['APP_CONFIG_PATH', 'config_file'], {}), '(APP_CONFIG_PATH, config_file)\n', (70204, 70234), False, 'import utils\n'), ((109, 134), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (124, 134), False, 'import os\n'), ((3853, 3897), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (3870, 3897), False, 'import datetime\n'), ((6186, 6230), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (6203, 6230), False, 'import datetime\n'), ((8834, 8878), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (8851, 8878), False, 'import datetime\n'), ((8941, 9016), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (8958, 9016), False, 'import datetime\n'), ((11311, 11355), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (11328, 11355), False, 'import datetime\n'), ((11418, 11493), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, 
ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (11435, 11493), False, 'import datetime\n'), ((14220, 14295), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (14237, 14295), False, 'import datetime\n'), ((16649, 16724), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (16666, 16724), False, 'import datetime\n'), ((19472, 19547), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (19489, 19547), False, 'import datetime\n'), ((21901, 21976), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (21918, 21976), False, 'import datetime\n'), ((24724, 24799), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (24741, 24799), False, 'import datetime\n'), ((27155, 27230), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (27172, 27230), False, 'import datetime\n'), ((29963, 30038), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (29980, 30038), False, 'import datetime\n'), ((32386, 32461), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, 
ts.minute, ts.second)\n', (32403, 32461), False, 'import datetime\n'), ((35193, 35268), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (35210, 35268), False, 'import datetime\n'), ((37636, 37711), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (37653, 37711), False, 'import datetime\n'), ((40540, 40615), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (40557, 40615), False, 'import datetime\n'), ((43057, 43132), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (43074, 43132), False, 'import datetime\n'), ((45861, 45936), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (45878, 45936), False, 'import datetime\n'), ((48273, 48348), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day', 'ts.hour', 'ts.minute', 'ts.second'], {}), '(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)\n', (48290, 48348), False, 'import datetime\n'), ((50237, 50281), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (50254, 50281), False, 'import datetime\n'), ((52182, 52226), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (52199, 52226), False, 'import datetime\n'), ((54135, 54179), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], 
{}), '(ts.year, ts.month, ts.day)\n', (54152, 54179), False, 'import datetime\n'), ((56088, 56132), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (56105, 56132), False, 'import datetime\n'), ((4114, 4191), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (4135, 4191), False, 'import utils\n'), ((9244, 9321), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (9265, 9321), False, 'import utils\n'), ((14469, 14546), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (14490, 14546), False, 'import utils\n'), ((19721, 19798), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (19742, 19798), False, 'import utils\n'), ((24973, 25050), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (24994, 25050), False, 'import utils\n'), ((30212, 30289), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (30233, 30289), False, 'import utils\n'), ((35442, 35519), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (35463, 35519), False, 'import utils\n'), ((40789, 40866), 'utils.round_of_rating', 'utils.round_of_rating', 
(['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (40810, 40866), False, 'import utils\n'), ((46110, 46187), 'utils.round_of_rating', 'utils.round_of_rating', (['vertical_position', 'vertical_position_correction_factor'], {}), '(vertical_position, vertical_position_correction_factor)\n', (46131, 46187), False, 'import utils\n'), ((14134, 14173), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (14151, 14173), False, 'import datetime\n'), ((16563, 16602), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (16580, 16602), False, 'import datetime\n'), ((19386, 19425), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (19403, 19425), False, 'import datetime\n'), ((21815, 21854), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (21832, 21854), False, 'import datetime\n'), ((24638, 24677), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (24655, 24677), False, 'import datetime\n'), ((27069, 27108), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (27086, 27108), False, 'import datetime\n'), ((29877, 29916), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (29894, 29916), False, 'import datetime\n'), ((32300, 32339), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (32317, 32339), False, 'import datetime\n'), ((35107, 35146), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (35124, 35146), False, 'import datetime\n'), ((37550, 37589), 'datetime.datetime', 'datetime.datetime', 
(['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (37567, 37589), False, 'import datetime\n'), ((45770, 45814), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (45787, 45814), False, 'import datetime\n'), ((48182, 48226), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (48199, 48226), False, 'import datetime\n'), ((58054, 58093), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (58071, 58093), False, 'import datetime\n'), ((59992, 60031), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (60009, 60031), False, 'import datetime\n'), ((61935, 61974), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', '(1)'], {}), '(ts.year, ts.month, 1)\n', (61952, 61974), False, 'import datetime\n'), ((65899, 65943), 'datetime.datetime', 'datetime.datetime', (['ts.year', 'ts.month', 'ts.day'], {}), '(ts.year, ts.month, ts.day)\n', (65916, 65943), False, 'import datetime\n')]
|
import sys
def reverse_gcd(a, b, k):
    """Apply the reverse-Euclidean step (a, b) -> (a + b, a) exactly k + 1 times.

    Starting from the seed (1, 0) this walks up the Fibonacci sequence:
    after step k the pair holds (F(k + 2), F(k + 1)).
    Returns the final (a, b) tuple.
    """
    for _ in range(k + 1):
        a, b = a + b, a
    return a, b
def main():
    """Read an integer k from stdin and print reverse_gcd(1, 0, k)."""
    # int() ignores surrounding whitespace, so the trailing newline is harmless.
    k = int(sys.stdin.readline())
    print(reverse_gcd(1, 0, k))
# Entry-point guard: run main() only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|
[
"sys.stdin.readline"
] |
[((164, 184), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (182, 184), False, 'import sys\n')]
|
# allow us to mock behavior of django get database function.
# simulate behavior when database is available and not
# when we run our command
from unittest.mock import patch
# allow us to call the command in the source code
from django.core.management import call_command
# Error that django will throw when db is not available
from django.db.utils import OperationalError
from django.test import TestCase
class CommandTests(TestCase):
    """Tests for the custom `wait_for_db` management command."""

    # Dotted path patched in both tests: the connection lookup the command polls.
    _CONNECTION_GETITEM = 'django.db.utils.ConnectionHandler.__getitem__'

    def test_wait_for_db_ready(self):
        """Command returns immediately when the database answers first try."""
        with patch(self._CONNECTION_GETITEM) as mocked_getitem:
            mocked_getitem.return_value = True
            call_command('wait_for_db')
            # Exactly one probe: the command never had to retry.
            self.assertEqual(mocked_getitem.call_count, 1)

    # Patch time.sleep so the retry loop does not actually wait during the test.
    @patch('time.sleep', return_value=True)
    def test_wait_for_db(self, patched_sleep):
        """Command keeps retrying until the database finally answers."""
        with patch(self._CONNECTION_GETITEM) as mocked_getitem:
            # Fail five times with OperationalError, then succeed on attempt six.
            mocked_getitem.side_effect = [OperationalError] * 5 + [True]
            call_command('wait_for_db')
            self.assertEqual(mocked_getitem.call_count, 6)
|
[
"unittest.mock.patch",
"django.core.management.call_command"
] |
[((1582, 1620), 'unittest.mock.patch', 'patch', (['"""time.sleep"""'], {'return_value': '(True)'}), "('time.sleep', return_value=True)\n", (1587, 1620), False, 'from unittest.mock import patch\n'), ((826, 880), 'unittest.mock.patch', 'patch', (['"""django.db.utils.ConnectionHandler.__getitem__"""'], {}), "('django.db.utils.ConnectionHandler.__getitem__')\n", (831, 880), False, 'from unittest.mock import patch\n'), ((1081, 1108), 'django.core.management.call_command', 'call_command', (['"""wait_for_db"""'], {}), "('wait_for_db')\n", (1093, 1108), False, 'from django.core.management import call_command\n'), ((1737, 1791), 'unittest.mock.patch', 'patch', (['"""django.db.utils.ConnectionHandler.__getitem__"""'], {}), "('django.db.utils.ConnectionHandler.__getitem__')\n", (1742, 1791), False, 'from unittest.mock import patch\n'), ((1982, 2009), 'django.core.management.call_command', 'call_command', (['"""wait_for_db"""'], {}), "('wait_for_db')\n", (1994, 2009), False, 'from django.core.management import call_command\n')]
|
from direct.directnotify.DirectNotifyGlobal import directNotify
from direct.gui.DirectGui import *
from panda3d.core import *
from pirates.ai import HolidayGlobals
from pirates.battle import WeaponGlobals
from pirates.economy import EconomyGlobals
from pirates.economy.EconomyGlobals import ItemType
from pirates.holiday import CatalogHoliday
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui import GuiPanel, RedeemCodeGUI
from pirates.piratesgui import GuiButton, DialogButton
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import PLocalizer
from pirates.piratesgui import GuiButton
from pirates.pirate import DynamicHuman
from pirates.pirate import Human
from pirates.pirate import HumanDNA
from pirates.piratesgui.TabBar import LeftTab, TabBar
from direct.interval.IntervalGlobal import *
from pirates.makeapirate import ClothingGlobals
from pirates.makeapirate import TattooGlobals
from pirates.piratesgui.BorderFrame import BorderFrame
from pirates.uberdog.UberDogGlobals import InventoryId, InventoryType
from otp.otpbase import OTPGlobals
from otp.otpgui import OTPDialog
from pirates.piratesgui import PDialog
from direct.task import Task
import random
from pirates.piratesbase import Freebooter
from pirates.piratesgui.InventoryItemGui import InventoryItemGui
from pirates.inventory.InventoryGlobals import *
from pirates.uberdog.TradableInventoryBase import InvItem
from pirates.inventory import ItemGlobals, DropGlobals
from pirates.inventory import ItemConstants
from pirates.inventory import InventoryUIStoreContainer
from pirates.pirate import AvatarTypes
from math import sin
from math import cos
from math import pi
from pirates.uberdog.UberDogGlobals import InventoryType
from pirates.inventory import InventoryGlobals
class SimpleItem:
    """Lightweight, display-oriented description of one inventory item.

    Subclasses override the hook methods below (configureForPirate, apply,
    unapply, purchase, camera helpers) to preview and sell concrete item
    categories on an avatar model.
    """

    def __init__(self, uid):
        self.uid = uid
        # Localized display strings for this item id.
        self.itemName = PLocalizer.getItemName(uid)
        self.shortDesc = PLocalizer.getItemName(uid)
        self.longDesc = PLocalizer.getItemFlavorText(uid)
        # Preview/visual state; subclasses fill these in as needed.
        self.text = None
        self.modelId = None
        self.texId = None
        self.colorId = 0
        self.holidayId = None
        self.icon = None
        self.iconScale = 1.0
        self.iconHpr = (0, 0, 0)
        # Stackable items carry their stack size; everything else is one unit.
        self.quantity = EconomyGlobals.getItemQuantity(self.uid) if self.checkStackable() else 1

    def checkStackable(self):
        """Whether this uid stacks in inventory (uid-based lookup)."""
        return InventoryId.isStackable(self.uid)

    def configureForPirate(self, pirate):
        """Hook: tailor model/texture ids to *pirate*; base items need nothing."""
        pass

    def canBeUsed(self, pirate):
        """Hook: base items are always usable."""
        return True

    def apply(self, pirate):
        """Hook: preview the item on *pirate* (no-op here)."""
        pass

    def unapply(self, pirate, sourceStyle):
        """Hook: undo a preview, restoring *sourceStyle* (no-op here)."""
        pass

    def purchase(self, npc):
        """Hook: issue the purchase request for this item (no-op here)."""
        pass

    def getFlavorAnim(self):
        """Hook: animation name to play while previewing; none by default."""
        return ''

    def getCameraPos(self, pirate):
        """Hook: preview-camera position relative to *pirate*."""
        return (0, 0, 0)

    def getCameraLookAtPos(self, pirate):
        """Hook: preview-camera look-at point relative to *pirate*."""
        return (0, 0, 0)
class SimpleLootItem(SimpleItem):
    """Item whose data comes from the loot tables (ItemGlobals)."""

    # Subclasses point this at the model that holds their icon textures.
    Icons = None

    def __init__(self, uid):
        # itemClass must be set before SimpleItem.__init__ runs, because that
        # constructor calls checkStackable(), which (overridden below) reads it.
        self.itemClass = ItemGlobals.getClass(uid)
        self.itemType = ItemGlobals.getType(uid)
        self.cost = ItemGlobals.getGoldCost(uid)
        SimpleItem.__init__(self, uid)
        self.holidayId = ItemGlobals.getHoliday(uid)
        if self.Icons:
            iconName = ItemGlobals.getIcon(uid)
            self.icon = self.Icons.find('**/%s' % iconName)

    def checkStackable(self):
        """Loot stackability is decided by item class, not by uid."""
        return InventoryGlobals.isStackableType(self.itemClass)

    def configureForPirate(self, pirate):
        """Resolve the gender-specific model and texture ids for *pirate*."""
        if pirate.style.getGender() == 'm':
            self.modelId = ItemGlobals.getMaleModelId(self.uid)
            self.texId = ItemGlobals.getMaleTextureId(self.uid)
        else:
            self.modelId = ItemGlobals.getFemaleModelId(self.uid)
            self.texId = ItemGlobals.getFemaleTextureId(self.uid)

    def getQuantityInInventory(self):
        """How many of this item the local avatar currently owns."""
        inventory = base.localAvatar.getInventory()
        return inventory.getItemQuantity(self.itemClass, self.uid)
class SimpleClothingItem(SimpleLootItem):
    """Purchasable clothing piece that can be previewed on a pirate model."""

    Icons = loader.loadModel('models/gui/gui_icons_clothing')

    # Attributes copied back from sourceStyle when a preview is undone.
    # Note the last belt entry pairs beltTexture with sashColor (no beltColor),
    # exactly as the original hand-written assignments did.
    _RESTORED_CLOTHES_ATTRS = (
        'shirt', 'shirtTexture', 'shirtColor',
        'vest', 'vestTexture', 'vestColor',
        'pant', 'pantTexture', 'pantColor',
        'coat', 'coatTexture', 'coatColor',
        'shoe', 'shoeTexture', 'shoeColor',
        'belt', 'beltTexture', 'sashColor',
        'hat', 'hatTexture', 'hatColor')

    def __init__(self, uid):
        SimpleLootItem.__init__(self, uid)
        # For clothing, the loot item type doubles as the clothing slot index.
        self.clothingNumber = self.itemType
        self.clothingString = ClothingGlobals.CLOTHING_STRING[self.itemType]

    def canBeUsed(self, pirate):
        """Return True when a texture exists for *pirate*'s gender.

        Also resolves self.modelId/self.texId as a side effect.
        NOTE(review): the original contained a dead `if self.modelId >= 0: pass`
        branch (no effect) -- likely a decompiled remnant of
        `modelId >= 0 and texId >= 0`.  Behavior is preserved as-is (only texId
        is checked); confirm intent before tightening the check.
        """
        self.configureForPirate(pirate)
        return self.texId >= 0

    def apply(self, pirate):
        """Preview this garment on *pirate*; no-op when unusable."""
        if not self.canBeUsed(pirate):
            return None
        self.configureForPirate(pirate)
        typeString = ClothingGlobals.CLOTHING_STRING[self.itemType]
        pirate.setClothesByType(typeString, self.modelId, self.texId, self.colorId)
        pirate.model.handleClothesHiding()
        pirate.model.handleHeadHiding()

    def unapply(self, pirate, sourceStyle):
        """Undo a preview by restoring every clothes field from *sourceStyle*."""
        if not self.canBeUsed(pirate):
            return None
        for attr in self._RESTORED_CLOTHES_ATTRS:
            setattr(pirate.style.clothes, attr, getattr(sourceStyle.clothes, attr))
        pirate.model.handleClothesHiding()
        pirate.model.handleHeadHiding()

    def purchase(self, npc):
        """Send the accessories purchase request for this garment to *npc*."""
        location = 0
        purchaseArgs = [
            self.uid,
            self.colorId,
            self.clothingNumber,
            location]
        npc.sendRequestAccessories([
            purchaseArgs], [])

    def getFlavorAnim(self):
        """Pick a glance animation matching the clothing slot being previewed."""
        slot = self.clothingString
        if slot in ('SHIRT', 'COAT'):
            # Randomly glance at either arm.
            return 'map_look_arm_left' if random.randint(0, 1) == 0 else 'map_look_arm_right'
        if slot in ('PANT', 'BELT'):
            return 'map_look_pant_right'
        if slot == 'SHOE':
            return 'map_look_boot_left'
        return ''

    def getCameraPos(self, pirate):
        """Camera ten units in front of the pirate, at head height."""
        return (0, 10, pirate.headNode.getZ(pirate))

    def getCameraLookAtPos(self, pirate):
        """Look slightly below the head so the garment stays centered."""
        return (pirate.headNode.getX(pirate), pirate.headNode.getY(pirate), pirate.headNode.getZ(pirate) * 0.9)
class SimpleJewelryItem(SimpleLootItem):
    """Piercing/jewelry item; previews by writing into one of the avatar's
    eight jewelry zones (brows, ears, nose, mouth, hands)."""
    Icons = loader.loadModel('models/gui/gui_icons_jewelry')

    def __init__(self, uid):
        SimpleLootItem.__init__(self, uid)
        # Map the catalog item type onto a concrete jewelry-zone constant.
        self.jewelryType = SimpleJewelryItem.jewelryTypeFromItemType(self.itemType)

    def jewelryTypeFromItemType(cls, itemType):
        """Item type -> jewelry zone; paired slots (brow/ear/hand) default to
        the LEFT side.  Falls off the chain (returns None) for unknown types."""
        if itemType == ItemGlobals.BROW:
            return JewelryGlobals.LBROW
        elif itemType == ItemGlobals.EAR:
            return JewelryGlobals.LEAR
        elif itemType == ItemGlobals.NOSE:
            return JewelryGlobals.NOSE
        elif itemType == ItemGlobals.MOUTH:
            return JewelryGlobals.MOUTH
        elif itemType == ItemGlobals.HAND:
            return JewelryGlobals.LHAND

    # Decompiled style: classmethod applied by reassignment rather than decorator.
    jewelryTypeFromItemType = classmethod(jewelryTypeFromItemType)

    def itemTypeFromJewelryType(cls, jewelryType):
        """Inverse mapping: either side of a paired zone maps back to the one
        item type.  Returns None for unknown zones."""
        if jewelryType in (JewelryGlobals.LBROW, JewelryGlobals.RBROW):
            return ItemGlobals.BROW
        elif jewelryType in (JewelryGlobals.LEAR, JewelryGlobals.REAR):
            return ItemGlobals.EAR
        elif jewelryType == JewelryGlobals.NOSE:
            return ItemGlobals.NOSE
        elif jewelryType == JewelryGlobals.MOUTH:
            return ItemGlobals.MOUTH
        elif jewelryType in (JewelryGlobals.LHAND, JewelryGlobals.RHAND):
            return ItemGlobals.HAND

    itemTypeFromJewelryType = classmethod(itemTypeFromJewelryType)

    def apply(self, pirate):
        """Preview this jewelry on *pirate*'s matching zone.

        Gender is read from the *local* avatar (not *pirate*) -- presumably the
        preview model mirrors the local avatar; confirm before reusing elsewhere.
        """
        gender = localAvatar.style.getGender()
        if gender == 'm':
            idx = ItemGlobals.getMaleModelId(self.uid)
        else:
            idx = ItemGlobals.getFemaleModelId(self.uid)
        primaryColor = ItemGlobals.getPrimaryColor(self.uid)
        secondaryColor = ItemGlobals.getSecondaryColor(self.uid)
        # Dispatch to the zone-specific setter; zone numbering here follows the
        # setJewelryZoneN API, not the JewelryGlobals constant values.
        if self.jewelryType == JewelryGlobals.LBROW:
            pirate.setJewelryZone3(idx, primaryColor, secondaryColor)
        elif self.jewelryType == JewelryGlobals.RBROW:
            pirate.setJewelryZone4(idx, primaryColor, secondaryColor)
        elif self.jewelryType == JewelryGlobals.LEAR:
            pirate.setJewelryZone1(idx, primaryColor, secondaryColor)
        elif self.jewelryType == JewelryGlobals.REAR:
            pirate.setJewelryZone2(idx, primaryColor, secondaryColor)
        elif self.jewelryType == JewelryGlobals.NOSE:
            pirate.setJewelryZone5(idx, primaryColor, secondaryColor)
        elif self.jewelryType == JewelryGlobals.MOUTH:
            pirate.setJewelryZone6(idx, primaryColor, secondaryColor)
        elif self.jewelryType == JewelryGlobals.LHAND:
            pirate.setJewelryZone7(idx, primaryColor, secondaryColor)
        elif self.jewelryType == JewelryGlobals.RHAND:
            pirate.setJewelryZone8(idx, primaryColor, secondaryColor)
        pirate.model.handleJewelryHiding()

    def unapply(self, pirate, sourceStyle):
        """Undo a preview by restoring ALL eight zones from the local avatar.

        Note: *sourceStyle* is accepted for interface symmetry but unused here;
        the restore source is always localAvatar.style.
        """
        jewelryZone4 = list(localAvatar.style.getJewelryZone4())
        jewelryZone3 = list(localAvatar.style.getJewelryZone3())
        jewelryZone1 = list(localAvatar.style.getJewelryZone1())
        jewelryZone2 = list(localAvatar.style.getJewelryZone2())
        jewelryZone5 = list(localAvatar.style.getJewelryZone5())
        jewelryZone6 = list(localAvatar.style.getJewelryZone6())
        jewelryZone7 = list(localAvatar.style.getJewelryZone7())
        jewelryZone8 = list(localAvatar.style.getJewelryZone8())
        # Bail out if the preview model has already been torn down.
        if not hasattr(pirate, 'style'):
            return None
        # Each zone tuple is (modelId, primaryColor, secondaryColor) per the
        # 3-argument setters used below.
        pirate.style.setJewelryZone1(jewelryZone1[0], jewelryZone1[1], jewelryZone1[2])
        pirate.style.setJewelryZone2(jewelryZone2[0], jewelryZone2[1], jewelryZone2[2])
        pirate.style.setJewelryZone3(jewelryZone3[0], jewelryZone3[1], jewelryZone3[2])
        pirate.style.setJewelryZone4(jewelryZone4[0], jewelryZone4[1], jewelryZone4[2])
        pirate.style.setJewelryZone5(jewelryZone5[0], jewelryZone5[1], jewelryZone5[2])
        pirate.style.setJewelryZone6(jewelryZone6[0], jewelryZone6[1], jewelryZone6[2])
        pirate.style.setJewelryZone7(jewelryZone7[0], jewelryZone7[1], jewelryZone7[2])
        pirate.style.setJewelryZone8(jewelryZone8[0], jewelryZone8[1], jewelryZone8[2])
        pirate.model.handleJewelryHiding()

    def purchase(self, npc):
        """Send the jewelry purchase request for this item to *npc*."""
        location = 0
        purchaseArgs = [
            self.uid,
            location]
        npc.sendRequestJewelry([
            purchaseArgs], [])

    def getCameraPos(self, pirate):
        """Per-zone camera offsets, framed close to the pierced feature."""
        pz = pirate.headNode.getZ(pirate)
        if self.jewelryType == JewelryGlobals.LBROW:
            return (-1, 2, pz + 0.5)
        elif self.jewelryType == JewelryGlobals.RBROW:
            return (1, 2, pz + 0.5)
        elif self.jewelryType == JewelryGlobals.LEAR:
            return (-2, 2, pz + 0.25)
        elif self.jewelryType == JewelryGlobals.REAR:
            return (2, 2, pz + 0.25)
        elif self.jewelryType == JewelryGlobals.NOSE:
            return (0, 2, pz + 0.25)
        elif self.jewelryType == JewelryGlobals.MOUTH:
            return (0, 2, pz)
        elif self.jewelryType == JewelryGlobals.LHAND:
            return (-2, 2.5, pirate.leftHandNode.getZ(pirate))
        elif self.jewelryType == JewelryGlobals.RHAND:
            return (2, 2.5, pirate.rightHandNode.getZ(pirate))
        # Unknown zone: origin fallback.
        return (0, 0, 0)

    def getCameraLookAtPos(self, pirate):
        """Per-zone look-at points; head zones aim slightly above the head node,
        hand zones aim at the matching hand node."""
        px = pirate.headNode.getX(pirate)
        py = pirate.headNode.getY(pirate)
        pz = pirate.headNode.getZ(pirate)
        if self.jewelryType == JewelryGlobals.LBROW:
            return (px, py, pz * 1.1000000000000001)
        elif self.jewelryType == JewelryGlobals.RBROW:
            return (px, py, pz * 1.1000000000000001)
        elif self.jewelryType == JewelryGlobals.LEAR:
            return (px, py, pz * 1.1000000000000001)
        elif self.jewelryType == JewelryGlobals.REAR:
            return (px, py, pz * 1.1000000000000001)
        elif self.jewelryType == JewelryGlobals.NOSE:
            return (px, py, pz * 1.1000000000000001)
        elif self.jewelryType == JewelryGlobals.MOUTH:
            return (px, py, pz * 1.075)
        elif self.jewelryType == JewelryGlobals.LHAND:
            return (pirate.leftHandNode.getX(pirate), pirate.leftHandNode.getY(pirate), pirate.leftHandNode.getZ(pirate) * 1.2)
        elif self.jewelryType == JewelryGlobals.RHAND:
            return (pirate.rightHandNode.getX(pirate), pirate.rightHandNode.getY(pirate), pirate.rightHandNode.getZ(pirate) * 1.2)
        # Unknown zone: origin fallback.
        return (0, 0, 0)
class SimpleTattooItem(SimpleLootItem):
    """Tattoo item; previews by writing a texture transform into one of the
    avatar model's tattoo zones (chest, arms, face)."""
    Icons = loader.loadModel('models/textureCards/tattooIcons')

    def __init__(self, uid):
        SimpleLootItem.__init__(self, uid)
        # Map the catalog item type onto a tattoo zone constant.
        self.zone = SimpleTattooItem.tattooTypeFromItemType(self.itemType)
        self.icon = ItemGlobals.getItemTattooImage(uid)[0]
        self.iconScale = 0.40000000000000002

    def tattooTypeFromItemType(cls, itemType):
        """Item type -> tattoo zone; ARM defaults to ZONE2 (one of the two arm
        zones).  Falls off the chain (returns None) for unknown types."""
        if itemType == ItemGlobals.CHEST:
            return TattooGlobals.ZONE1
        elif itemType == ItemGlobals.ARM:
            return TattooGlobals.ZONE2
        elif itemType == ItemGlobals.FACE:
            return TattooGlobals.ZONE4

    # Decompiled style: classmethod applied by reassignment rather than decorator.
    tattooTypeFromItemType = classmethod(tattooTypeFromItemType)

    def itemTypeFromTattooType(cls, tattooType):
        """Inverse mapping: both arm zones map back to the ARM item type."""
        if tattooType == TattooGlobals.ZONE1:
            return ItemGlobals.CHEST
        elif tattooType in (TattooGlobals.ZONE2, TattooGlobals.ZONE3):
            return ItemGlobals.ARM
        elif tattooType == TattooGlobals.ZONE4:
            return ItemGlobals.FACE

    itemTypeFromTattooType = classmethod(itemTypeFromTattooType)

    def apply(self, pirate):
        """Preview this tattoo: strip the torso clothing so skin is visible,
        then write the tattoo id and its texture transform into the zone."""
        # Remove shirt/coat/vest so the tattoo is not hidden under clothing.
        pirate.style.setClothesShirt(0)
        pirate.style.setClothesCoat(0)
        pirate.style.setClothesVest(0)
        pirate.model.handleClothesHiding()
        # Gender comes from the LOCAL avatar, not *pirate* -- presumably the
        # preview model mirrors the local avatar; confirm before reuse.
        gender = localAvatar.style.getGender()
        if gender == 'm':
            tattooId = ItemGlobals.getMaleModelId(self.uid)
            # ZONE3 (second arm) has its own orientation table entry.
            if self.zone == TattooGlobals.ZONE3:
                orientation = ItemGlobals.getMaleOrientation2(self.uid)
            else:
                orientation = ItemGlobals.getMaleOrientation(self.uid)
        else:
            tattooId = ItemGlobals.getFemaleModelId(self.uid)
            if self.zone == TattooGlobals.ZONE3:
                orientation = ItemGlobals.getFemaleOrientation2(self.uid)
            else:
                orientation = ItemGlobals.getFemaleOrientation(self.uid)
        (offsetx, offsety, scale, rotate) = ItemGlobals.getOrientation(orientation)
        # Bail out if the preview model has already been torn down.
        if not hasattr(pirate, 'model'):
            return None
        pirate.model.tattoos[self.zone][TattooGlobals.TYPE] = tattooId
        # Convert the designer offset/scale/rotation into the texture-space
        # transform stored per zone.  The math rotates the offset vector by
        # `rotate` degrees and rescales it, recentering around (0.5, 0.5) --
        # presumably UV-space; confirm against the model's updateTattoo code.
        S = Vec2(1 / float(scale), 1 / float(scale))
        Iv = Vec2(offsetx, offsety)
        Vm = Vec2(sin(rotate * pi / 180.0), cos(rotate * pi / 180.0))
        Vms = Vec2(Vm[0] * S[0], Vm[1] * S[1])
        # Vn is Vm rotated 90 degrees (perpendicular axis).
        Vn = Vec2(Vm[1], -Vm[0])
        Vns = Vec2(Vn[0] * S[0], Vn[1] * S[1])
        F = Vec2(-Vns.dot(Iv) + 0.5, -Vms.dot(Iv) + 0.5)
        pirate.model.tattoos[self.zone][TattooGlobals.OFFSETX] = F[0]
        pirate.model.tattoos[self.zone][TattooGlobals.OFFSETY] = F[1]
        pirate.model.tattoos[self.zone][TattooGlobals.SCALE] = S[0]
        pirate.model.tattoos[self.zone][TattooGlobals.ROTATE] = rotate
        pirate.model.updateTattoo(self.zone)

    def unapply(self, pirate, sourceStyle):
        """Undo a preview: restore the torso clothing and the zone's tattoo
        data from the local avatar.

        Note: *sourceStyle* is accepted for interface symmetry but unused;
        the restore source is always localAvatar.style.
        """
        pirate.style.clothes.shirt = localAvatar.style.clothes.shirt
        pirate.style.clothes.shirtTexture = localAvatar.style.clothes.shirtTexture
        pirate.style.clothes.shirtColor = localAvatar.style.clothes.shirtColor
        pirate.style.clothes.vest = localAvatar.style.clothes.vest
        pirate.style.clothes.vestTexture = localAvatar.style.clothes.vestTexture
        pirate.style.clothes.vestColor = localAvatar.style.clothes.vestColor
        pirate.style.clothes.coat = localAvatar.style.clothes.coat
        pirate.style.clothes.coatTexture = localAvatar.style.clothes.coatTexture
        pirate.style.clothes.coatColor = localAvatar.style.clothes.coatColor
        pirate.model.handleClothesHiding()
        # Restore only the zone this item touched.
        if self.zone == TattooGlobals.ZONE1:
            pirate.model.tattoos[self.zone] = list(localAvatar.style.getTattooChest())
        if self.zone == TattooGlobals.ZONE2:
            pirate.model.tattoos[self.zone] = list(localAvatar.style.getTattooZone2())
        if self.zone == TattooGlobals.ZONE3:
            pirate.model.tattoos[self.zone] = list(localAvatar.style.getTattooZone3())
        if self.zone == TattooGlobals.ZONE4:
            pirate.model.tattoos[self.zone] = list(localAvatar.style.getTattooZone4())
        pirate.model.updateTattoo(self.zone)

    def purchase(self, npc):
        """Send the tattoo purchase request for this item to *npc*."""
        location = 0
        purchaseArgs = [
            self.uid,
            location]
        npc.sendRequestTattoo([
            purchaseArgs], [])

    def getFlavorAnim(self):
        """Glance at the arm that carries the tattoo; no animation otherwise."""
        if self.zone == TattooGlobals.ZONE2:
            return 'map_look_arm_left'
        elif self.zone == TattooGlobals.ZONE3:
            return 'map_look_arm_right'
        return ''

    def getCameraPos(self, pirate):
        """Per-zone camera offsets framing chest, either arm, or face."""
        pz = pirate.headNode.getZ(pirate)
        if self.zone == TattooGlobals.ZONE1:
            return (0, 4, pz)
        elif self.zone == TattooGlobals.ZONE2:
            return (-5, 2, pz)
        elif self.zone == TattooGlobals.ZONE3:
            return (5, 2, pz)
        elif self.zone == TattooGlobals.ZONE4:
            return (0, 3, pz * 1.1000000000000001)
        # Unknown zone: origin fallback.
        return (0, 0, 0)

    def getCameraLookAtPos(self, pirate):
        """Per-zone look-at points relative to the head node."""
        px = pirate.headNode.getX(pirate)
        py = pirate.headNode.getY(pirate)
        pz = pirate.headNode.getZ(pirate)
        if self.zone == TattooGlobals.ZONE1:
            return (px, py, pz)
        elif self.zone == TattooGlobals.ZONE2:
            return (px, py, pz * 0.90000000000000002)
        elif self.zone == TattooGlobals.ZONE3:
            return (px, py, pz * 0.90000000000000002)
        elif self.zone == TattooGlobals.ZONE4:
            return (px, py, pz * 1.1000000000000001)
        # Unknown zone: origin fallback.
        return (0, 0, 0)
class SimpleWeaponItem(SimpleLootItem):
    """Purchasable weapon; always sold as a single unit."""

    Icons = loader.loadModel('models/gui/gui_icons_weapon')

    def __init__(self, uid):
        SimpleLootItem.__init__(self, uid)
        # Weapons never stack.
        self.quantity = 1

    def purchase(self, npc):
        """Send the weapon purchase request (uid + location slot 0) to *npc*."""
        npc.sendRequestWeapon([[self.uid, 0]], [])
class SimpleConsumableItem(SimpleLootItem):
    """Purchasable consumable, bought through the global 'makeSale' event."""

    Icons = loader.loadModel('models/textureCards/skillIcons')

    def __init__(self, uid):
        SimpleLootItem.__init__(self, uid)
        # Sold one at a time regardless of stackability.
        self.quantity = 1

    def purchase(self, npc):
        """Broadcast a sale request; *npc* is accepted but unused here."""
        messenger.send('makeSale', [[[self.uid, self.quantity]], []])

    def getQuantityInInventory(self):
        """Current count of this item in the local avatar's inventory."""
        return base.localAvatar.getInventory().getItemQuantity(self.itemClass, self.uid)
class SimpleEconomyItem(SimpleItem):
    """Item backed by the economy tables rather than the loot tables."""

    def __init__(self, uid):
        SimpleItem.__init__(self, uid)
        self.itemClass = EconomyGlobals.getItemCategory(uid)
        self.itemType = EconomyGlobals.getItemType(uid)
        # Prefer the economy price; fall back to the loot-table gold cost.
        self.cost = EconomyGlobals.getItemCost(uid)
        if not self.cost:
            self.cost = ItemGlobals.getGoldCost(uid)

    def makeButton(self, parent, pos, cellSizeX, cellSizeZ):
        """Build and return a DirectButton showing this item's icon geometry.

        Also caches the localized name/descriptions pulled from a throwaway
        SimpleItemGUI.  cellSizeX/cellSizeZ are accepted for interface
        compatibility but unused.

        NOTE(review): the original also computed an unused `data` list from
        InventoryId.isStackable/getCategory; that dead code has been removed.
        """
        simpleItemGui = SimpleItemGUI([
            self.uid,
            1], parent = parent, pos = pos)
        self.itemName = simpleItemGui.nameTag['text']
        self.shortDesc = self.itemName
        self.longDesc = self.itemName
        simpleItemGui.destroy()
        geomParams = InventoryItemGui.getGeomParams(self.uid)
        button = DirectButton(parent = parent, relief = None, rolloverSound = None, text = '', text_scale = 0.050000000000000003, textMayChange = 1, geom = geomParams['geom'], geom_pos = (0, 0, 0), geom_scale = geomParams['geom_scale'], pos = pos, extraArgs = [
            self])
        return button

    def purchase(self, npc):
        """Broadcast a stack-quantity sale request; *npc* is unused here."""
        purchaseArgs = [
            self.uid,
            self.quantity]
        messenger.send('makeSale', [
            [
                purchaseArgs],
            []])

    def getCameraPos(self, pirate):
        """Camera at head height, directly at the model origin."""
        pz = pirate.headNode.getZ(pirate)
        return (0, 0, pz)

    def getCameraLookAtPos(self, pirate):
        """Look straight at the pirate's head node."""
        px = pirate.headNode.getX(pirate)
        py = pirate.headNode.getY(pirate)
        pz = pirate.headNode.getZ(pirate)
        return (px, py, pz)

    def getQuantityInInventory(self):
        """Current stack count of this item in the local avatar's inventory."""
        inventory = base.localAvatar.getInventory()
        return inventory.getStackQuantity(self.uid)
class SimpleAmmoItem(SimpleEconomyItem):
    """Ammunition; its icon comes from the skill tied to the ammo's skill id."""

    Icons = loader.loadModel('models/textureCards/skillIcons')

    def __init__(self, uid):
        SimpleEconomyItem.__init__(self, uid)
        skillId = WeaponGlobals.getSkillIdForAmmoSkillId(uid)
        asset = WeaponGlobals.getSkillIcon(skillId) if skillId else None
        if asset:
            self.icon = self.Icons.find('**/%s' % asset)
            # Skill icons are drawn slightly enlarged and tilted 45 degrees.
            self.iconScale = 1.1000000000000001
            self.iconHpr = (0, 0, 45)

    def getQuantityInInventory(self):
        """Current stack count of this ammo in the local avatar's inventory."""
        return base.localAvatar.getInventory().getStackQuantity(self.uid)
class SimpleFishingLureItem(SimpleEconomyItem):
    """Fishing lure; icon looked up from a small uid -> texture-name table."""

    Icons = loader.loadModel('models/textureCards/fishing_icons')
    # Known lure uids and the icon texture each one uses.
    FishingItemTable = {
        InventoryType.RegularLure: 'pir_t_gui_fsh_lureReg',
        InventoryType.LegendaryLure: 'pir_t_gui_fsh_lureLegend' }

    def __init__(self, uid):
        SimpleEconomyItem.__init__(self, uid)
        iconName = self.FishingItemTable.get(uid)
        asset = self.Icons.find('**/%s' % iconName) if iconName else None
        if asset:
            self.icon = asset
            # Lure icons are drawn slightly enlarged and tilted 45 degrees.
            self.iconScale = 1.1000000000000001
            self.iconHpr = (0, 0, 45)

    def getQuantityInInventory(self):
        """Current stack count of this lure in the local avatar's inventory."""
        return base.localAvatar.getInventory().getStackQuantity(self.uid)
class SimplePouchItem(SimpleEconomyItem):
    """Ammo pouch upgrade; its display name is derived from the weapon class
    whose ammunition the pouch holds."""

    Icons = loader.loadModel('models/gui/gui_icons_weapon')

    def __init__(self, uid):
        SimpleEconomyItem.__init__(self, uid)
        self.icon = self.Icons.find('**/%s' % EconomyGlobals.getItemIcons(uid))
        # Pick the heading text by which pouch-uid band this item falls into.
        if uid in range(InventoryType.begin_PistolPouches, InventoryType.end_PistolPouches):
            heading = PLocalizer.InventoryItemClassNames.get(ItemType.PISTOL)
        elif uid in range(InventoryType.begin_DaggerPouches, InventoryType.end_DaggerPouches):
            heading = PLocalizer.InventoryItemClassNames.get(ItemType.DAGGER)
        elif uid in range(InventoryType.begin_GrenadePouches, InventoryType.end_GrenadePouches):
            heading = PLocalizer.GrenadeShort
        elif uid in range(InventoryType.begin_CannonPouches, InventoryType.end_CannonPouches):
            heading = PLocalizer.ShipCannonShort
        else:
            heading = self.itemType
        self.shortDesc = PLocalizer.makeHeadingString(heading, 1)
        # Pouches use the same text for name, short, and long descriptions.
        self.itemName = self.shortDesc
        self.longDesc = self.shortDesc
|
[
"pirates.piratesbase.PLocalizer.getItemName",
"pirates.inventory.ItemGlobals.getPrimaryColor",
"pirates.uberdog.UberDogGlobals.InventoryId.isStackable",
"pirates.piratesbase.PLocalizer.makeHeadingString",
"pirates.economy.EconomyGlobals.getItemCategory",
"pirates.economy.EconomyGlobals.getItemType",
"random.randint",
"pirates.inventory.ItemGlobals.getMaleOrientation2",
"pirates.economy.EconomyGlobals.getItemQuantity",
"pirates.inventory.ItemGlobals.getGoldCost",
"pirates.inventory.ItemGlobals.getItemTattooImage",
"math.cos",
"pirates.piratesbase.PLocalizer.getItemFlavorText",
"pirates.piratesbase.PLocalizer.InventoryItemClassNames.get",
"pirates.inventory.ItemGlobals.getClass",
"pirates.economy.EconomyGlobals.getItemIcons",
"pirates.inventory.ItemGlobals.getIcon",
"math.sin",
"pirates.inventory.ItemGlobals.getFemaleOrientation2",
"pirates.battle.WeaponGlobals.getSkillIdForAmmoSkillId",
"pirates.battle.WeaponGlobals.getSkillIcon",
"pirates.inventory.ItemGlobals.getSecondaryColor",
"pirates.inventory.ItemGlobals.getFemaleModelId",
"pirates.uberdog.UberDogGlobals.InventoryId.getCategory",
"pirates.inventory.ItemGlobals.getFemaleOrientation",
"pirates.economy.EconomyGlobals.getItemCost",
"pirates.inventory.ItemGlobals.getType",
"pirates.inventory.InventoryGlobals.isStackableType",
"pirates.piratesgui.InventoryItemGui.InventoryItemGui.getGeomParams",
"pirates.inventory.ItemGlobals.getOrientation",
"pirates.inventory.ItemGlobals.getMaleModelId",
"pirates.inventory.ItemGlobals.getMaleOrientation",
"pirates.inventory.ItemGlobals.getHoliday"
] |
[((1885, 1912), 'pirates.piratesbase.PLocalizer.getItemName', 'PLocalizer.getItemName', (['uid'], {}), '(uid)\n', (1907, 1912), False, 'from pirates.piratesbase import PLocalizer\n'), ((1938, 1965), 'pirates.piratesbase.PLocalizer.getItemName', 'PLocalizer.getItemName', (['uid'], {}), '(uid)\n', (1960, 1965), False, 'from pirates.piratesbase import PLocalizer\n'), ((1990, 2023), 'pirates.piratesbase.PLocalizer.getItemFlavorText', 'PLocalizer.getItemFlavorText', (['uid'], {}), '(uid)\n', (2018, 2023), False, 'from pirates.piratesbase import PLocalizer\n'), ((2443, 2476), 'pirates.uberdog.UberDogGlobals.InventoryId.isStackable', 'InventoryId.isStackable', (['self.uid'], {}), '(self.uid)\n', (2466, 2476), False, 'from pirates.uberdog.UberDogGlobals import InventoryId, InventoryType\n'), ((3062, 3087), 'pirates.inventory.ItemGlobals.getClass', 'ItemGlobals.getClass', (['uid'], {}), '(uid)\n', (3082, 3087), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((3112, 3136), 'pirates.inventory.ItemGlobals.getType', 'ItemGlobals.getType', (['uid'], {}), '(uid)\n', (3131, 3136), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((3157, 3185), 'pirates.inventory.ItemGlobals.getGoldCost', 'ItemGlobals.getGoldCost', (['uid'], {}), '(uid)\n', (3180, 3185), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((3250, 3277), 'pirates.inventory.ItemGlobals.getHoliday', 'ItemGlobals.getHoliday', (['uid'], {}), '(uid)\n', (3272, 3277), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((3437, 3485), 'pirates.inventory.InventoryGlobals.isStackableType', 'InventoryGlobals.isStackableType', (['self.itemClass'], {}), '(self.itemClass)\n', (3469, 3485), False, 'from pirates.inventory import InventoryGlobals\n'), ((9275, 9312), 'pirates.inventory.ItemGlobals.getPrimaryColor', 'ItemGlobals.getPrimaryColor', (['self.uid'], {}), '(self.uid)\n', (9302, 9312), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), 
((9338, 9377), 'pirates.inventory.ItemGlobals.getSecondaryColor', 'ItemGlobals.getSecondaryColor', (['self.uid'], {}), '(self.uid)\n', (9367, 9377), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((16115, 16154), 'pirates.inventory.ItemGlobals.getOrientation', 'ItemGlobals.getOrientation', (['orientation'], {}), '(orientation)\n', (16141, 16154), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((20948, 20983), 'pirates.economy.EconomyGlobals.getItemCategory', 'EconomyGlobals.getItemCategory', (['uid'], {}), '(uid)\n', (20978, 20983), False, 'from pirates.economy import EconomyGlobals\n'), ((21008, 21039), 'pirates.economy.EconomyGlobals.getItemType', 'EconomyGlobals.getItemType', (['uid'], {}), '(uid)\n', (21034, 21039), False, 'from pirates.economy import EconomyGlobals\n'), ((21060, 21091), 'pirates.economy.EconomyGlobals.getItemCost', 'EconomyGlobals.getItemCost', (['uid'], {}), '(uid)\n', (21086, 21091), False, 'from pirates.economy import EconomyGlobals\n'), ((21258, 21291), 'pirates.uberdog.UberDogGlobals.InventoryId.isStackable', 'InventoryId.isStackable', (['self.uid'], {}), '(self.uid)\n', (21281, 21291), False, 'from pirates.uberdog.UberDogGlobals import InventoryId, InventoryType\n'), ((21761, 21801), 'pirates.piratesgui.InventoryItemGui.InventoryItemGui.getGeomParams', 'InventoryItemGui.getGeomParams', (['self.uid'], {}), '(self.uid)\n', (21791, 21801), False, 'from pirates.piratesgui.InventoryItemGui import InventoryItemGui\n'), ((23006, 23049), 'pirates.battle.WeaponGlobals.getSkillIdForAmmoSkillId', 'WeaponGlobals.getSkillIdForAmmoSkillId', (['uid'], {}), '(uid)\n', (23044, 23049), False, 'from pirates.battle import WeaponGlobals\n'), ((2307, 2347), 'pirates.economy.EconomyGlobals.getItemQuantity', 'EconomyGlobals.getItemQuantity', (['self.uid'], {}), '(self.uid)\n', (2337, 2347), False, 'from pirates.economy import EconomyGlobals\n'), ((9144, 9180), 'pirates.inventory.ItemGlobals.getMaleModelId', 
'ItemGlobals.getMaleModelId', (['self.uid'], {}), '(self.uid)\n', (9170, 9180), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((9213, 9251), 'pirates.inventory.ItemGlobals.getFemaleModelId', 'ItemGlobals.getFemaleModelId', (['self.uid'], {}), '(self.uid)\n', (9241, 9251), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((14385, 14420), 'pirates.inventory.ItemGlobals.getItemTattooImage', 'ItemGlobals.getItemTattooImage', (['uid'], {}), '(uid)\n', (14415, 14420), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((15534, 15570), 'pirates.inventory.ItemGlobals.getMaleModelId', 'ItemGlobals.getMaleModelId', (['self.uid'], {}), '(self.uid)\n', (15560, 15570), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((15818, 15856), 'pirates.inventory.ItemGlobals.getFemaleModelId', 'ItemGlobals.getFemaleModelId', (['self.uid'], {}), '(self.uid)\n', (15846, 15856), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((16407, 16431), 'math.sin', 'sin', (['(rotate * pi / 180.0)'], {}), '(rotate * pi / 180.0)\n', (16410, 16431), False, 'from math import sin\n'), ((16433, 16457), 'math.cos', 'cos', (['(rotate * pi / 180.0)'], {}), '(rotate * pi / 180.0)\n', (16436, 16457), False, 'from math import cos\n'), ((21142, 21170), 'pirates.inventory.ItemGlobals.getGoldCost', 'ItemGlobals.getGoldCost', (['uid'], {}), '(uid)\n', (21165, 21170), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((23090, 23125), 'pirates.battle.WeaponGlobals.getSkillIcon', 'WeaponGlobals.getSkillIcon', (['skillId'], {}), '(skillId)\n', (23116, 23125), False, 'from pirates.battle import WeaponGlobals\n'), ((6962, 6982), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (6976, 6982), False, 'import random\n'), ((15650, 15691), 'pirates.inventory.ItemGlobals.getMaleOrientation2', 'ItemGlobals.getMaleOrientation2', (['self.uid'], {}), '(self.uid)\n', (15681, 15691), False, 'from 
pirates.inventory import ItemGlobals, DropGlobals\n'), ((15740, 15780), 'pirates.inventory.ItemGlobals.getMaleOrientation', 'ItemGlobals.getMaleOrientation', (['self.uid'], {}), '(self.uid)\n', (15770, 15780), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((15936, 15979), 'pirates.inventory.ItemGlobals.getFemaleOrientation2', 'ItemGlobals.getFemaleOrientation2', (['self.uid'], {}), '(self.uid)\n', (15969, 15979), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((16028, 16070), 'pirates.inventory.ItemGlobals.getFemaleOrientation', 'ItemGlobals.getFemaleOrientation', (['self.uid'], {}), '(self.uid)\n', (16060, 16070), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((21410, 21443), 'pirates.uberdog.UberDogGlobals.InventoryId.getCategory', 'InventoryId.getCategory', (['self.uid'], {}), '(self.uid)\n', (21433, 21443), False, 'from pirates.uberdog.UberDogGlobals import InventoryId, InventoryType\n'), ((24556, 24588), 'pirates.economy.EconomyGlobals.getItemIcons', 'EconomyGlobals.getItemIcons', (['uid'], {}), '(uid)\n', (24583, 24588), False, 'from pirates.economy import EconomyGlobals\n'), ((24741, 24796), 'pirates.piratesbase.PLocalizer.InventoryItemClassNames.get', 'PLocalizer.InventoryItemClassNames.get', (['ItemType.PISTOL'], {}), '(ItemType.PISTOL)\n', (24779, 24796), False, 'from pirates.piratesbase import PLocalizer\n'), ((3351, 3375), 'pirates.inventory.ItemGlobals.getIcon', 'ItemGlobals.getIcon', (['uid'], {}), '(uid)\n', (3370, 3375), False, 'from pirates.inventory import ItemGlobals, DropGlobals\n'), ((24954, 25009), 'pirates.piratesbase.PLocalizer.InventoryItemClassNames.get', 'PLocalizer.InventoryItemClassNames.get', (['ItemType.DAGGER'], {}), '(ItemType.DAGGER)\n', (24992, 25009), False, 'from pirates.piratesbase import PLocalizer\n'), ((25140, 25196), 'pirates.piratesbase.PLocalizer.makeHeadingString', 'PLocalizer.makeHeadingString', (['PLocalizer.GrenadeShort', '(1)'], {}), 
'(PLocalizer.GrenadeShort, 1)\n', (25168, 25196), False, 'from pirates.piratesbase import PLocalizer\n'), ((25321, 25380), 'pirates.piratesbase.PLocalizer.makeHeadingString', 'PLocalizer.makeHeadingString', (['PLocalizer.ShipCannonShort', '(1)'], {}), '(PLocalizer.ShipCannonShort, 1)\n', (25349, 25380), False, 'from pirates.piratesbase import PLocalizer\n'), ((25424, 25470), 'pirates.piratesbase.PLocalizer.makeHeadingString', 'PLocalizer.makeHeadingString', (['self.itemType', '(1)'], {}), '(self.itemType, 1)\n', (25452, 25470), False, 'from pirates.piratesbase import PLocalizer\n')]
|
from django.contrib import admin
from .models import Holiday, Vendor, Category, Ticket
admin.site.register(Holiday)
admin.site.register(Vendor)
admin.site.register(Category)
admin.site.register(Ticket)
|
[
"django.contrib.admin.site.register"
] |
[((89, 117), 'django.contrib.admin.site.register', 'admin.site.register', (['Holiday'], {}), '(Holiday)\n', (108, 117), False, 'from django.contrib import admin\n'), ((118, 145), 'django.contrib.admin.site.register', 'admin.site.register', (['Vendor'], {}), '(Vendor)\n', (137, 145), False, 'from django.contrib import admin\n'), ((146, 175), 'django.contrib.admin.site.register', 'admin.site.register', (['Category'], {}), '(Category)\n', (165, 175), False, 'from django.contrib import admin\n'), ((176, 203), 'django.contrib.admin.site.register', 'admin.site.register', (['Ticket'], {}), '(Ticket)\n', (195, 203), False, 'from django.contrib import admin\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import subprocess
import sys
import datetime
import os
from itertools import repeat
import multiprocessing
from multiprocessing import Pool, Lock, Value, Manager
multiprocessing.set_start_method('spawn', True)
### compile_test.py
### For each filename in file listed in @p, runs 'make name=@file graph
### and writes a browsable markdown-formatted test report containing stderr,
### stdout and return code output for each test.
### Files are compiled in a concurrent manner
# ------------------------------------------------------------------------------
# ----------------------------- Utility functions ------------------------------
# ------------------------------------------------------------------------------
def run(cmd):
proc = subprocess.Popen(cmd,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
)
stdout, stderr = proc.communicate()
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
TESTFILE = "filelist.lst"
OUTFILE = "compile_test_report.md"
# courtesy of https://stackoverflow.com/a/34325723
def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1,
length = 60, fill = '█'):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print('\r%s |%s| %s%% %s' % (prefix, bar, percent, suffix), end='\r', flush=True)
# Print New Line on Complete
if iteration == total:
print()
def printHeader():
print("=" * 80)
def init_proc(lock_):
global lock
lock = lock_
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
def executeDHLSMake(file, nfiles, testResults, iteration):
rc, stdout, stderr = run(["make", "name="+file, "graph"])
with lock:
iteration.value += 1
testResults[file] = {"RC": rc, "stdout" : stdout, "stderr" : stderr}
printProgressBar(iteration.value, nfiles, prefix="Progress", suffix=file.ljust(20))
if __name__ == "__main__":
# Setup shared memory variables
manager = Manager()
testResults = manager.dict()
iteration = manager.Value('i', 0)
DIR = os.path.dirname(os.path.abspath(__file__))
timestamp = str(datetime.datetime.now())
# Ensure that we are in the examples directory
os.chdir(DIR)
printHeader()
print("============================= DHLS Compilation test ============================")
printHeader()
with open(TESTFILE) as filelist:
# Compile each example
files = filelist.read().splitlines()
nFiles = len(files)
print("Compiling " + str(nFiles) + " files listed in:\n\t'"+ DIR + "/" + TESTFILE + "'\n")
printProgressBar(iteration.value, nFiles, prefix="Progress", suffix="")
lock = Lock()
# run executeDHLSMake on each file listed in the input filelist using
# a threadpool of size n_cpu's
with Pool(os.cpu_count(), initializer=init_proc, initargs=(lock,)) as pool:
for file in files:
pool.apply_async(executeDHLSMake, args=(file, nFiles, testResults, iteration))
pool.close()
pool.join()
print("\nCompilation complete!")
# Format a report
report = "# DHLS Compile test report\n"
report += "## Executed at:" + timestamp + "\n"
TOC = "## Table of contents\n"
contents = ""
summary = "## Summary\n"
failingTests = []
## Format content of report
for name, res in testResults.items():
fail = res["RC"] != 0
reportName = "File: '" + name + "'\tStatus: " + \
("Success" if fail == 0 else "Fail") + \
"\t(return code: " + str(res["RC"]) + ")"
# Header anchor link must be simplified a bit to adhere to markdown syntax
reportNameLink = reportName.replace('\t', '-').replace('\'', '').replace(':', '').replace(' ', '-').lower()
reportNameAnchor = "[" + reportName + "]" + \
"(#" + reportNameLink + ")"
TOC += " - " + reportNameAnchor + "\n"
contents += "## " + reportNameAnchor.replace('#', '') + "\n"
# Add collapsible subsections containing stderr and stdout output
contents += "<details>\n"
contents += "<summary>`stdout` output:</summary>\n\n"
contents += "```\n" + res["stdout"] + "```\n"
contents += "</details>\n"
contents += "<details>\n"
contents += "<summary>`stderr` output:</summary>\n\n"
contents += "```\n" +res["stderr"] + "```\n"
contents += "</details>\n"
contents += "\n"
if fail:
failingTests.append(reportNameAnchor)
## Format summary
if len(failingTests) == 0:
print("Status: SUCCESS - all tests compiled succesfully\n")
summary += "All tests compiled successfully\n"
else:
print("Status: FAIL - some tests compiled with non-zero exit codes\n")
summary += "The following tests returned a non-zero exit code:\n"
for failingTest in failingTests:
summary += " - " + failingTest + "\n"
report += summary + "\n" + TOC + "\n" + contents + "\n"
with open(OUTFILE, 'w+') as reportFile:
reportFile.write(report)
print("Report written to file: \n\t" + DIR + "/" + OUTFILE)
printHeader()
|
[
"subprocess.Popen",
"os.path.abspath",
"multiprocessing.Lock",
"multiprocessing.Manager",
"multiprocessing.set_start_method",
"os.cpu_count",
"datetime.datetime.now",
"os.chdir"
] |
[((210, 257), 'multiprocessing.set_start_method', 'multiprocessing.set_start_method', (['"""spawn"""', '(True)'], {}), "('spawn', True)\n", (242, 257), False, 'import multiprocessing\n'), ((792, 861), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (808, 861), False, 'import subprocess\n'), ((2784, 2793), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (2791, 2793), False, 'from multiprocessing import Pool, Lock, Value, Manager\n'), ((3020, 3033), 'os.chdir', 'os.chdir', (['DIR'], {}), '(DIR)\n', (3028, 3033), False, 'import os\n'), ((2892, 2917), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2907, 2917), False, 'import os\n'), ((2939, 2962), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2960, 2962), False, 'import datetime\n'), ((3499, 3505), 'multiprocessing.Lock', 'Lock', ([], {}), '()\n', (3503, 3505), False, 'from multiprocessing import Pool, Lock, Value, Manager\n'), ((3642, 3656), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (3654, 3656), False, 'import os\n')]
|
#!/usr/bin/python3
from test_framework.test_framework import BethelTestFramework
from test_framework.staticr_util import *
import logging
'''
Checks that there is no interaction between immature balance and staking weight
node0 has both confirmed and immature balance, it sends away its confirmed balance to node1 so that only immature balance remains
node0 checks that immature balance does not affect stake weight
'''
SENDING_FEE= 0.003393
BLOCK_REWARD = 50
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO, stream=sys.stdout)
class StakeImmatureBalance(BethelTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 2
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
self.is_network_split=False
self.sync_all()
def run_test(self):
addr = self.nodes[1].getnewaddress()
activate_staticr(self.nodes[0])
self.nodes[0].sendtoaddress(addr, satoshi_round(float(self.nodes[0].getbalance()) - SENDING_FEE))
slow_gen(self.nodes[0], 1)
logging.info('Checking stake weight')
assert(self.nodes[0].getbalance() - BLOCK_REWARD < 1), 'Wallet balance not sent'
assert(self.nodes[0].getwalletinfo()['immature_balance'] > 0), 'No immature balance'
assert(self.nodes[0].getstakinginfo()['weight'] == 0), 'Immature balance affecting staking weight'
if __name__ == '__main__':
StakeImmatureBalance().main()
|
[
"logging.info",
"logging.basicConfig"
] |
[((464, 563), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s: %(message)s"""', 'level': 'logging.INFO', 'stream': 'sys.stdout'}), "(format='%(levelname)s: %(message)s', level=logging.INFO,\n stream=sys.stdout)\n", (483, 563), False, 'import logging\n'), ((1161, 1198), 'logging.info', 'logging.info', (['"""Checking stake weight"""'], {}), "('Checking stake weight')\n", (1173, 1198), False, 'import logging\n')]
|
"""Common code between all entities"""
from dataclasses import dataclass, field
from urllib.parse import urljoin
from functools import wraps
from aiohttp import ClientResponse, ClientSession
from async_lru import alru_cache
HOST = "https://api.flair.co"
SCOPE = "thermostats.view+structures.view+structures.edit"
class Util:
"""Common utilities to reduce boiler"""
async def create_url(self, path):
"""Create a valid URL for the API"""
url = urljoin(HOST, path)
return url
async def entity_url(self, entity_type, entity_id):
"""Create a valid entity URL"""
url = await self.create_url(f"/api/{entity_type}/{entity_id}")
return url
class Auth:
"""Authenticate with API"""
def __init__(self, ident, access_token, scope):
self.ident = ident
self.access_token = access_token
self.scope = scope
self.opts = {
"Accept": "application/vnd.api+json",
"Content-Type": "application/json",
}
async def oauth_token(self):
"""Retrieve OAuth2 token from API"""
u = Util()
url = await u.create_url("/oauth/token")
credentials = {
"client_id": self.ident,
"client_secret": self.access_token,
"grant_type": 'client_credentials',
}
async with ClientSession() as session:
async with session.post(url, params=credentials, headers=self.opts) as resp:
json = await resp.json()
token = json["access_token"]
return token
class Entity:
"""Template for entity types"""
def __init__(self, token, entity_type):
self.entity_type = entity_type
self.entity_list: list[EntityStore] = []
self.opts = {
"Accept": "application/vnd.api+json",
"Content-Type": "application/json",
"Authorization": f"Bearer {token}",
}
async def get_list(self):
"""Get a list of entities of a certain type"""
u = Util()
url = await u.create_url(f"/api/{self.entity_type}")
async with ClientSession() as session:
async with session.get(url, headers=self.opts) as resp:
json = await resp.json()
self.entity = json
return resp.status
async def get(self, entity_id):
"""Get information on an entity in the API"""
u = Util()
url = await u.entity_url(self.entity_type, entity_id)
async with ClientSession() as session:
async with session.get(url, headers=self.opts) as resp:
json = await resp.json()
self.entity_response = json
return resp.status
# FIXME: Async wrappers are weird
def update_entity(func): # noqa
"""Wrapper to update list of entities"""
async def update(self):
status = await self.get_list()
await func(self.entity)
return status
return update
async def control(self, entity_id, body):
"""POST request to API to change entity properties"""
u = Util()
url = await u.entity_url(self.entity_type, entity_id)
__body = {"data": {"type": self.entity_type, "attributes": body}}
async with ClientSession() as session:
async with session.patch(url, data=__body, headers=self.opts) as resp:
return resp.status
@alru_cache
async def id_from_name(self, name):
"""Get entity ID from its name"""
await self.get_list()
entity_num = next(
(
i
for i, item in enumerate(self.entity["data"])
if item["attributes"]["name"] is name
),
None,
)
print(self.entity)
# entity_id = self.entity[entity_num]["id"]
# return entity_id
# TODO: EntityStore dataclass for all entity types
# TODO: have one instance EntityStore contain all entities within their respective stores, or something else?
# FIXME: Rename *Stores to something more appropriate, and by extension the other classes
@dataclass
class EntityStore:
"""Store all entities in a dataclass"""
name: str
entity_id: str = field(repr=False)
|
[
"dataclasses.field",
"urllib.parse.urljoin",
"aiohttp.ClientSession"
] |
[((4278, 4295), 'dataclasses.field', 'field', ([], {'repr': '(False)'}), '(repr=False)\n', (4283, 4295), False, 'from dataclasses import dataclass, field\n'), ((471, 490), 'urllib.parse.urljoin', 'urljoin', (['HOST', 'path'], {}), '(HOST, path)\n', (478, 490), False, 'from urllib.parse import urljoin\n'), ((1355, 1370), 'aiohttp.ClientSession', 'ClientSession', ([], {}), '()\n', (1368, 1370), False, 'from aiohttp import ClientResponse, ClientSession\n'), ((2135, 2150), 'aiohttp.ClientSession', 'ClientSession', ([], {}), '()\n', (2148, 2150), False, 'from aiohttp import ClientResponse, ClientSession\n'), ((2533, 2548), 'aiohttp.ClientSession', 'ClientSession', ([], {}), '()\n', (2546, 2548), False, 'from aiohttp import ClientResponse, ClientSession\n'), ((3316, 3331), 'aiohttp.ClientSession', 'ClientSession', ([], {}), '()\n', (3329, 3331), False, 'from aiohttp import ClientResponse, ClientSession\n')]
|
import ssl
from unittest import TestCase
import urllib.error
import urllib.request
from seleniumwire.proxy.client import AdminClient
class AdminClientIntegrationTest(TestCase):
def test_create_proxy(self):
html = self._make_request('http://python.org')
self.assertIn(b'Welcome to Python.org', html)
def test_destroy_proxy(self):
self.client.destroy_proxy()
with self.assertRaises(urllib.error.URLError):
self._make_request('http://github.com')
def test_get_requests_single(self):
self._make_request('https://www.python.org/')
requests = self.client.get_requests()
self.assertEqual(len(requests), 1)
request = requests[0]
self.assertEqual(request['method'], 'GET')
self.assertEqual(request['path'], 'https://www.python.org/')
self.assertEqual(request['headers']['Accept-Encoding'], 'identity')
self.assertEqual(request['response']['status_code'], 200)
self.assertEqual(request['response']['headers']['Content-Type'], 'text/html; charset=utf-8')
def test_get_requests_multiple(self):
self._make_request('https://github.com/')
self._make_request('https://www.wikipedia.org/')
requests = self.client.get_requests()
self.assertEqual(len(requests), 2)
def test_get_last_request(self):
self._make_request('https://python.org')
self._make_request('https://www.bbc.co.uk/')
last_request = self.client.get_last_request()
self.assertEqual(last_request['path'], 'https://www.bbc.co.uk/')
def test_get_last_request_none(self):
last_request = self.client.get_last_request()
self.assertIsNone(last_request)
def test_clear_requests(self):
self._make_request('https://python.org')
self._make_request('https://www.wikipedia.org')
self.client.clear_requests()
self.assertEqual(self.client.get_requests(), [])
def test_find(self):
self._make_request('https://stackoverflow.com/questions/tagged/django?page=2&sort=newest&pagesize=15')
self._make_request('https://docs.python.org/3.4/library/http.client.html')
self._make_request('https://www.google.com')
self.assertEqual(
self.client.find('/questions/tagged/django')['path'],
'https://stackoverflow.com/questions/tagged/django?page=2&sort=newest&pagesize=15'
)
self.assertEqual(
self.client.find('/3.4/library/http.client.html')['path'],
'https://docs.python.org/3.4/library/http.client.html'
)
self.assertEqual(
self.client.find('https://www.google.com')['path'],
'https://www.google.com/'
)
def test_get_request_body_empty(self):
self._make_request('https://www.amazon.com')
last_request = self.client.get_last_request()
body = self.client.get_request_body(last_request['id'])
self.assertIsNone(body)
def test_get_response_body(self):
self._make_request('https://www.wikipedia.org')
last_request = self.client.get_last_request()
body = self.client.get_response_body(last_request['id'])
self.assertIsInstance(body, bytes)
self.assertIn(b'html', body)
def test_get_response_body_json(self):
self._make_request('https://radiopaedia.org/api/v1/countries/current')
last_request = self.client.get_last_request()
body = self.client.get_response_body(last_request['id'])
self.assertIsInstance(body, bytes)
def test_get_response_body_binary(self):
self._make_request('https://www.python.org/static/img/python-logo@2x.png')
last_request = self.client.get_last_request()
body = self.client.get_response_body(last_request['id'])
self.assertIsInstance(body, bytes)
def test_get_response_body_empty(self):
self._make_request('http://www.python.org') # Redirects to https with empty body
redirect_request = self.client.get_requests()[0]
body = self.client.get_response_body(redirect_request['id'])
self.assertIsNone(body)
def test_set_header_overrides(self):
self.client.set_header_overrides({
'User-Agent': 'Test_User_Agent_String'
})
self._make_request('https://www.github.com')
last_request = self.client.get_last_request()
self.assertEqual(last_request['headers']['User-Agent'], 'Test_User_Agent_String')
def test_set_header_overrides_case_insensitive(self):
self.client.set_header_overrides({
'user-agent': 'Test_User_Agent_String' # Lowercase header name
})
self._make_request('https://www.bbc.co.uk')
last_request = self.client.get_last_request()
self.assertEqual(last_request['headers']['User-Agent'], 'Test_User_Agent_String')
def test_set_header_overrides_filters_out_header(self):
self.client.set_header_overrides({
'User-Agent': None
})
self._make_request('https://www.wikipedia.org')
last_request = self.client.get_last_request()
self.assertNotIn('User-Agent', last_request['headers'])
def test_clear_header_overrides(self):
self.client.set_header_overrides({
'User-Agent': 'Test_User_Agent_String'
})
self.client.clear_header_overrides()
self._make_request('https://www.stackoverflow.com')
last_request = self.client.get_last_request()
self.assertNotEqual(last_request['headers']['User-Agent'], 'Test_User_Agent_String')
def test_get_header_overrides(self):
self.client.set_header_overrides({
'User-Agent': 'Test_User_Agent_String'
})
self.assertEqual(self.client.get_header_overrides(), {
'User-Agent': 'Test_User_Agent_String'
})
def test_set_rewrite_rules(self):
self.client.set_rewrite_rules([
(r'http://stackoverflow.com(.*)', r'https://github.com\1'),
])
self._make_request('http://stackoverflow.com')
last_request = self.client.get_last_request()
self.assertEqual(last_request['path'], 'https://github.com/')
self.assertEqual(last_request['headers']['Host'], 'github.com')
def test_clear_rewrite_rules(self):
self.client.set_rewrite_rules([
(r'https://stackoverflow.com(.*)', r'https://www.github.com\1'),
])
self.client.clear_rewrite_rules()
self._make_request('https://www.stackoverflow.com/')
last_request = self.client.get_last_request()
self.assertEqual(last_request['path'], 'https://stackoverflow.com/')
self.assertEqual(last_request['headers']['Host'], 'stackoverflow.com')
def test_get_rewrite_rules(self):
self.client.set_rewrite_rules([
(r'http://www.stackoverflow.com(.*)', r'https://www.github.com\1'),
])
self.assertEqual(self.client.get_rewrite_rules(), [
[r'http://www.stackoverflow.com(.*)', r'https://www.github.com\1'],
])
def test_disable_encoding(self):
# Explicitly set the accept-encoding to gzip
self.client.set_header_overrides({
'Accept-Encoding': 'gzip'
})
self._make_request('https://www.google.com/')
requests = self.client.get_requests()
# No Content-Encoding header implies 'identity'
self.assertEqual(requests[0]['response']['headers'].get('Content-Encoding', 'identity'), 'identity')
def setUp(self):
options = {}
if self._testMethodName == 'test_disable_encoding': # yuck
options['disable_encoding'] = True
self.client = AdminClient()
host, port = self.client.create_proxy(options=options)
self._configure_proxy(host, port)
def tearDown(self):
self.client.destroy_proxy()
def _configure_proxy(self, host, port):
context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
https_handler = urllib.request.HTTPSHandler(context=context)
proxy_handler = urllib.request.ProxyHandler({
'http': 'http://{}:{}'.format(host, port),
'https': 'http://{}:{}'.format(host, port),
})
opener = urllib.request.build_opener(https_handler, proxy_handler)
urllib.request.install_opener(opener)
def _make_request(self, url):
request = urllib.request.Request(url)
request.add_header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 '
'(KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36')
with urllib.request.urlopen(request, timeout=5) as response:
html = response.read()
return html
|
[
"ssl.create_default_context",
"seleniumwire.proxy.client.AdminClient"
] |
[((7754, 7767), 'seleniumwire.proxy.client.AdminClient', 'AdminClient', ([], {}), '()\n', (7765, 7767), False, 'from seleniumwire.proxy.client import AdminClient\n'), ((7997, 8025), 'ssl.create_default_context', 'ssl.create_default_context', ([], {}), '()\n', (8023, 8025), False, 'import ssl\n')]
|
from tkinter import *
from tkinter import ttk
from tkinter import font
import threading
import time as time
import datetime
import calendar
import requests
from PIL import Image, ImageTk
def timer():
tmp = time.thread_time()
currentTime = time.localtime()
print(time.asctime(currentTime))
print(type(currentTime))
return
timer_t = threading.Thread(target=timer()).start()
# from kivy.app import App
# from kivy.uix.label import Label
# from kivy.uix.button import Button
# import gi
# gi.require_version('Gtk', '4.0')
# from gi.repository import Gtk
HEIGHT = 1080
WIDTH = 960
weather = Tk()
weather.title("Trippet's Weather")
Grid.rowconfigure(weather, 0, weight=1)
Grid.columnconfigure(weather, 0, weight=1)
# Formats current forecast to make it more readable
def format_response(weather_json):
try:
city = weather_json["name"]
conditions = weather_json["weather"][0]["description"]
temp = weather_json["main"]["temp"]
final_str = "City: %s \nConditions: %s \nTemperature (°C): %s" % (city, conditions, temp)
except:
final_str = "There was a problem retrieving that information"
print(weather_json)
return final_str
# Formats the future forecast to make it more readable
def format_forecast(weather_json):
final_str = "Weather forecast for: " + weather_json["city"]["name"] + " is\n"
for i in weather_json["list"]:
try:
hour = i["dt_txt"]
conditions = i["weather"][0]["description"] # .get("description")
temp = i["main"]["temp"] # .get("temp")
final_str += "Hour: %s \nConditions: %s \nTemperature (°C): %s \n\n" % (hour, conditions, temp)
except:
final_str = "There was a problem retrieving that information"
print(weather_json)
return final_str
# api.openweathermap.org/data/2.5/weather?q={city name}&appid={API key}
def get_weather(city):
# Shows lower frame and makes top frame smaller
lower_frame.place(relx=0.5, rely=0.25, relwidth=0.75, relheight=0.6, anchor="n")
frame.place_forget()
frame.place(relx=0.5, rely=0.1, relwidth=0.75, relheight=0.1, anchor="n")
# API CALL
try:
weather_key = "<KEY>"
url = "https://api.openweathermap.org/data/2.5/weather"
params = {"APPID": weather_key, "q": city, "units": "metric"}
response = requests.get(url, params=params)
weather_json = response.json()
label['text'] = format_response(response.json())
icon_name = weather_json["weather"][0]["icon"]
open_image(icon_name)
except:
return
def get_forecast(city):
# Shows lower frame and makes top frame smaller
lower_frame.place(relx=0.5, rely=0.25, relwidth=0.75, relheight=0.6, anchor="n")
frame.place_forget()
frame.place(relx=0.5, rely=0.1, relwidth=0.75, relheight=0.1, anchor="n")
# API CALL
try:
weather_key = "<KEY>"
url = "https://api.openweathermap.org/data/2.5/forecast"
params = {"APPID": weather_key, "q": city, "units": "metric"}
response = requests.get(url, params=params)
weather_json = response.json()
label['text'] = format_forecast(response.json())
except:
return
def open_image(icon):
size = int(lower_frame.winfo_height() * 0.25)
img = ImageTk.PhotoImage(Image.open('./img/' + icon + '.png').resize((size, size)))
weather_icon.delete("all")
weather_icon.create_image(0, 0, anchor='nw', image=img)
weather_icon.image = img
def quick_alarm():
# Hide label to show gridframe
label.place_forget()
gridframe = Frame(lower_frame)
gridframe.pack(fill="both", expand=True)
for i in range(2):
Grid.rowconfigure(gridframe, i, weight=3)
for j in range(3):
Grid.columnconfigure(gridframe, j, weight=2)
# Top left
if i == 0 and j == 0:
Button(gridframe, text="60 Min", font=("Courier", 10)).grid(row=i, column=j, sticky=NSEW)
# Top middle
if i == 0 and j == 1:
Button(gridframe, text="30 Min", font=("Courier", 10)).grid(row=i, column=j, sticky=NSEW)
# Top right
if i == 0 and j == 2:
Button(gridframe, text="15 Min", font=("Courier", 10)).grid(row=i, column=j, sticky=NSEW)
# Bottom left
if i == 1 and j == 0:
Button(gridframe, text="10 Min", font=("Courier", 10)).grid(row=i, column=j, sticky=NSEW)
# Bottom middle
if i == 1 and j == 1:
Button(gridframe, text="5 Min", font=("Courier", 10)).grid(row=i, column=j, sticky=NSEW)
# Bottom right
if i == 1 and j == 2:
Button(gridframe, text="1 Min", font=("Courier", 10)).grid(row=i, column=j, sticky=NSEW)
def onFrameConfigure(canvas):
"""Reset the scroll region to encompass the inner frame"""
canvas.configure(scrollregion=canvas.bbox("all"))
# Just because
bg_color = "white"
# The whole intended screen
canvas = Canvas(weather, height=HEIGHT, width=WIDTH)
canvas.pack()
# The top frame
frame = Frame(weather, bg="#80c1ff", bd=5)
frame.place(relx=0.5, rely=0.1, relwidth=0.74, relheight=0.15, anchor="n")
# Where you shall input city
textbox = Entry(frame, font=("Courier", 12))
textbox.bind("<FocusIn>", lambda args: textbox.delete(0, "end"))
textbox.bind("<<FocusOut>>", lambda args: textbox.delete(0, "end"), textbox.insert(0, "City Name"))
textbox.place(relwidth=1, relheight=.5)
# Button to get current forecast
button1 = Button(frame, text="Current Forecast", font=("Courier", 10), command=lambda: get_weather(textbox.get()))
button1.place(relx=0, rely=.5, relwidth=0.5, relheight=0.5, anchor="nw")
# Button to get the future forecasts 3 hours at a time
button1 = Button(frame, text="Future Forecast", font=("Courier", 10), command=lambda: get_forecast(textbox.get()))
button1.place(relx=0.5, rely=.5, relwidth=0.5, relheight=0.5, anchor="nw")
# The big lower frame initially hidden
lower_frame = Frame(weather, bg="#80c1ff", bd=10)
lower_frame.place_forget()
# # Scroll area
# scrollarea = Canvas(weather, width=WIDTH, height=HEIGHT / 1.5, scrollregion=canvas.bbox("all"))
# scrollframe = Frame(scrollarea, bg="#80c1ff")
#
# # Scrollbar for when text is too long to fit in one screen
# vbar = Scrollbar(lower_frame, orient="vertical", command=scrollarea.yview)
# vbar.pack(side=RIGHT, fill="y")
#
# # Link scrollbar to canvas
# scrollarea.config(width=WIDTH, height=HEIGHT / 1.5)
# scrollarea.config(yscrollcommand=vbar.set)
# scrollarea.pack(side=BOTTOM, expand=True, fill="both")
# scrollarea.create_window((4, 4), window=scrollframe, anchor="n")
#
# scrollframe.bind("<Configure>", lambda event, scrollarea=scrollarea: onFrameConfigure(scrollarea))
#
# scrollframe.pack(expand=True, fill="both")
# Where the weather will be shown
label = Label(lower_frame, font=("Courier", 12), anchor="nw", justify="left", bd=4, bg=bg_color)
label.place(relwidth=1, relheight=1)
# top right of lower_frame
weather_icon = Canvas(label, bg=bg_color, bd=0, highlightthickness=0)
weather_icon.place(relx=.75, rely=0, relwidth=1, relheight=0.5)
# The frame in the bottom left of lower_frame
tiny_frame = Frame(label, bg=bg_color, bd=2)
tiny_frame.place(relx=.74, rely=0.74, relwidth=.25, relheight=.25)
# Button to show the grid of buttons for quick alarm
button2 = Button(tiny_frame, text="Quick Alarm", command=lambda: quick_alarm())
button2.place(relwidth=1, relheight=1)
weather.mainloop()
|
[
"time.asctime",
"PIL.Image.open",
"time.thread_time",
"requests.get",
"time.localtime"
] |
[((212, 230), 'time.thread_time', 'time.thread_time', ([], {}), '()\n', (228, 230), True, 'import time as time\n'), ((250, 266), 'time.localtime', 'time.localtime', ([], {}), '()\n', (264, 266), True, 'import time as time\n'), ((277, 302), 'time.asctime', 'time.asctime', (['currentTime'], {}), '(currentTime)\n', (289, 302), True, 'import time as time\n'), ((2391, 2423), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (2403, 2423), False, 'import requests\n'), ((3108, 3140), 'requests.get', 'requests.get', (['url'], {'params': 'params'}), '(url, params=params)\n', (3120, 3140), False, 'import requests\n'), ((3368, 3404), 'PIL.Image.open', 'Image.open', (["('./img/' + icon + '.png')"], {}), "('./img/' + icon + '.png')\n", (3378, 3404), False, 'from PIL import Image, ImageTk\n')]
|
import torch.nn.functional as F
import torch
from .factorgnn import FactorGNN, FactorGNNPool
from .factorgnn_zinc import FactorGNNZinc
from .factorgnn_pattern import FactorGNNSBMs
from .mlp import MLP, MLPPool
from .mlp_zinc import MLPZinc
from .gat import GAT, GATPool
from .gat_zinc import GATZinc
from .gat_pattern import GATSBMs
from .gcn import GCNPool
from .gcn_zinc import GCNZinc
from .disengcn import DisenGCN, DisenGCNPool, DisenGCNZinc, DisenGCNSBMs
import numpy as np
def get_model(dataset, args, mode = "multiclass"):
if mode == "multiclass":
model = get_model_multiclass(dataset, args)
elif mode == "multilabel":
model = get_model_multilabel(dataset, args)
elif mode == "zinc":
model = get_zinc_model(dataset, args)
elif mode == 'sbms':
model = get_sbms_model(dataset, args)
else:
raise ValueError(f"Unknown mode: {mode}")
return model
def get_zinc_model(dataset, args):
g, features, labels, train_mask, val_mask, test_mask, factor_graphs = dataset
if args.model_name == 'GAT':
heads = ([args.num_heads] * args.num_layers) + [args.num_out_heads]
model = GATZinc(g, args.num_layers, args.in_dim, args.num_hidden,
heads, F.elu, args.in_drop, args.attn_drop, args.negative_slope,
args.residual, num_atom_type = 28, num_bond_type = 4)
elif args.model_name == 'FactorGNN':
model = FactorGNNZinc(g, args.num_layers, args.in_dim, args.num_hidden,
args.num_latent, args.in_drop, args.residual,
num_atom_type = 28, num_bond_type = 4)
elif args.model_name == "MLP":
model = MLPZinc(g, args.in_dim, args.num_layers, args.num_hidden,
num_atom_type = 28, num_bond_type = 4)
elif args.model_name == "GCN":
model = GCNZinc(g, args.in_dim, args.num_layers, args.num_hidden,
num_atom_type = 28, num_bond_type = 4)
elif args.model_name == "DisenGCN":
model = DisenGCNZinc(args.in_dim, 1,
args, split_mlp=False,
num_atom_type = 28, num_bond_type = 4)
else:
raise NameError(f'unknow format of model name: {args.model_name}')
return model
def get_model_multilabel(dataset, args):
g, features, labels, train_mask, val_mask, test_mask, factor_graphs = dataset
num_feats = features.shape[1]
n_classes = labels.shape[1]
pooling = True if features.shape[0] != labels.shape[0] else False
if args.model_name == "FactorGNN":
if pooling:
model = FactorGNNPool(g, args.num_layers, num_feats, args.num_hidden,
n_classes, args.num_latent, args.in_drop, args.residual)
else:
model = FactorGNN(g, args.num_layers, num_feats, args.num_hidden,
n_classes, args.num_latent, args.in_drop, args.residual)
elif args.model_name == "GAT":
# create model
heads = ([args.num_heads] * args.num_layers) + [args.num_out_heads]
if pooling:
model = GATPool(g, args.num_layers, num_feats, args.num_hidden,
n_classes, heads, F.elu, args.in_drop, args.attn_drop,
args.negative_slope, args.residual)
else:
model = GAT(g, args.num_layers, num_feats, args.num_hidden,
n_classes, heads, F.elu, args.in_drop,
args.attn_drop, args.negative_slope, args.residual)
elif args.model_name == 'MLP':
if pooling:
model = MLPPool(g, num_feats, args.num_layers,
args.num_hidden, n_classes)
else:
model = MLP(g, num_feats, args.num_layers,
args.num_hidden, n_classes)
elif args.model_name == "GCN":
if pooling:
model = GCNPool(g, num_feats, args.num_layers,
args.num_hidden, n_classes)
elif args.model_name == "DisenGCN":
if pooling:
model = DisenGCNPool(num_feats, n_classes,
args, split_mlp=False)
else:
raise NameError(f'unknow format of model name: {args.model_name}')
return model
def get_model_multiclass(dataset, args):
g, features, labels, train_mask, val_mask, test_mask, factor_graph = dataset
num_feats = features.shape[1]
n_classes = torch.max(labels).item() + 1
if args.model_name == "FactorGNN":
model = FactorGNN(g,
args.num_layers,
num_feats,
args.num_hidden,
n_classes,
args.num_latent,
args.in_drop,
args.residual)
elif args.model_name == "GAT":
# create model
heads = ([args.num_heads] * args.num_layers) + [args.num_out_heads]
model = GAT(g,
args.num_layers,
num_feats,
args.num_hidden,
n_classes,
heads,
F.elu,
args.in_drop,
args.attn_drop,
args.negative_slope,
args.residual)
else:
raise ValueError(f"unknow model name: {args.model_name}")
return model
def get_sbms_model(dataset, args):
g, features, labels, train_mask, val_mask, test_mask, factor_graphs = dataset
n_classes = 2
if args.model_name == 'FactorGNN':
model = FactorGNNSBMs(g, args.num_layers, args.in_dim, args.num_hidden,
args.num_latent, args.in_drop, args.residual, n_classes)
elif args.model_name == 'GAT':
heads = ([args.num_heads] * args.num_layers) + [args.num_out_heads]
model = GATSBMs(g, args.num_layers, args.in_dim, args.num_hidden,
heads, F.elu, args.in_drop, args.attn_drop, args.negative_slope,
args.residual)
elif args.model_name == 'DisenGCN':
model = DisenGCNSBMs(args.in_dim, 1,
args, split_mlp=False)
else:
raise NameError(f'unknow format of model name: {args.model_name}')
return model
|
[
"torch.max"
] |
[((4491, 4508), 'torch.max', 'torch.max', (['labels'], {}), '(labels)\n', (4500, 4508), False, 'import torch\n')]
|
from kivy.properties import NumericProperty, ReferenceListProperty
from kivy.uix.widget import Widget
from kivy.vector import Vector
# Define o elemento "bola"
class Bola(Widget):
"""
Define a bola do jogo e mantém sua velocidade, a qual é um Vector
contendo suas componentes de velocidade X e Y.
"""
# Velocidade da bola
velocidade_x = NumericProperty(0)
velocidade_y = NumericProperty(0)
# Velocidade
velocidade = ReferenceListProperty(velocidade_x, velocidade_y)
# Define a função de movimento da nossa bolinha
def movimenta(self):
self.pos = Vector(*self.velocidade) + self.pos
|
[
"kivy.properties.NumericProperty",
"kivy.properties.ReferenceListProperty",
"kivy.vector.Vector"
] |
[((364, 382), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0)'], {}), '(0)\n', (379, 382), False, 'from kivy.properties import NumericProperty, ReferenceListProperty\n'), ((402, 420), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0)'], {}), '(0)\n', (417, 420), False, 'from kivy.properties import NumericProperty, ReferenceListProperty\n'), ((456, 505), 'kivy.properties.ReferenceListProperty', 'ReferenceListProperty', (['velocidade_x', 'velocidade_y'], {}), '(velocidade_x, velocidade_y)\n', (477, 505), False, 'from kivy.properties import NumericProperty, ReferenceListProperty\n'), ((603, 627), 'kivy.vector.Vector', 'Vector', (['*self.velocidade'], {}), '(*self.velocidade)\n', (609, 627), False, 'from kivy.vector import Vector\n')]
|
from equity import app
from flask_script import Manager
manager = Manager(app)
if __name__ == '__main__':
manager.run()
|
[
"flask_script.Manager"
] |
[((67, 79), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (74, 79), False, 'from flask_script import Manager\n')]
|
#!/usr/bin/env python3
import io
import unittest
import unittest.mock
from src import output
class TestOutput(unittest.TestCase):
HEADER_ROW = "filepath,function_or_class_name,variable_name,is_local\n"
def test_corner_cases(self):
mock_file_handle = io.StringIO()
output.writeCSV(set(), mock_file_handle)
got = mock_file_handle.getvalue()
self.assertEqual(TestOutput.HEADER_ROW, got)
def test_output(self):
mock_file_handle = io.StringIO()
mock_function_or_class = unittest.mock.Mock()
mock_function_or_class.name = "function_name"
output.writeCSV(
{
("source.py", mock_function_or_class, "global_variable", False),
("source.py", mock_function_or_class, "local_variable", True),
},
mock_file_handle,
)
want = TestOutput.HEADER_ROW
want += "source.py,function_name,global_variable,False\n"
want += "source.py,function_name,local_variable,True\n"
got = mock_file_handle.getvalue()
self.assertEqual(sorted(want), sorted(got))
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"io.StringIO",
"unittest.mock.Mock",
"src.output.writeCSV"
] |
[((1157, 1172), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1170, 1172), False, 'import unittest\n'), ((271, 284), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (282, 284), False, 'import io\n'), ((485, 498), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (496, 498), False, 'import io\n'), ((532, 552), 'unittest.mock.Mock', 'unittest.mock.Mock', ([], {}), '()\n', (550, 552), False, 'import unittest\n'), ((616, 788), 'src.output.writeCSV', 'output.writeCSV', (["{('source.py', mock_function_or_class, 'global_variable', False), (\n 'source.py', mock_function_or_class, 'local_variable', True)}", 'mock_file_handle'], {}), "({('source.py', mock_function_or_class, 'global_variable', \n False), ('source.py', mock_function_or_class, 'local_variable', True)},\n mock_file_handle)\n", (631, 788), False, 'from src import output\n')]
|
"""
MIT License
Copyright (c) 2020 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from copy import copy
from ctypes import get_errno
from datetime import timedelta
from errno import ENOENT
from os import strerror
from typing import Iterator, List, Optional, Union
from .. import libgpiod
# pylint: disable=too-many-lines
class chip:
# pylint: disable=too-few-public-methods
OPEN_LOOKUP = 1
OPEN_BY_PATH = 2
OPEN_BY_NAME = 3
OPEN_BY_LABEL = 4
OPEN_BY_NUMBER = 5
class line:
# pylint: disable=too-few-public-methods
pass
class line_bulk:
# pylint: disable=too-few-public-methods
pass
class line_event:
# pylint: disable=too-few-public-methods
pass
class line_request:
# pylint: disable=too-few-public-methods
pass
open_funcs = {
chip.OPEN_LOOKUP: libgpiod.gpiod_chip_open_lookup,
chip.OPEN_BY_PATH: libgpiod.gpiod_chip_open,
chip.OPEN_BY_NAME: libgpiod.gpiod_chip_open_by_name,
chip.OPEN_BY_LABEL: libgpiod.gpiod_chip_open_by_label,
chip.OPEN_BY_NUMBER: libgpiod.gpiod_chip_open_by_number,
}
def chip_deleter(chip_struct: libgpiod.gpiod_chip):
# pylint: disable=missing-function-docstring
libgpiod.gpiod_chip_close(chip_struct)
class shared_chip:
# pylint: disable=missing-function-docstring
def __init__(self, chip_struct: Optional[libgpiod.gpiod_chip] = None):
self._chip_struct = chip_struct
def get(self) -> Optional[libgpiod.gpiod_chip]:
return self._chip_struct
def __del__(self) -> None:
if self._chip_struct is not None:
chip_deleter(self._chip_struct)
def __bool__(self) -> bool:
return self._chip_struct is not None
class chip:
# pylint: disable=function-redefined
def __init__(
self,
device: Optional[Union[int, str]] = None,
how: int = chip.OPEN_LOOKUP,
chip_shared: Optional[shared_chip] = None,
) -> None:
"""
@brief Constructor. Creates an empty GPIO chip object or opens the chip
using chip.open.
@param device: String describing the GPIO chip.
@param how: Indicates how the chip should be opened.
Usage:
c = chip()
c = chip("gpiochip0")
c = chip("/dev/gpiochip0", chip.OPEN_BY_PATH)
"""
if chip_shared is not None and bool(chip_shared):
self._m_chip = chip_shared
return
self._m_chip = shared_chip()
if device is not None:
self.open(device, how)
def __del__(self):
"""
@brief Destructor
Usage:
del chip
"""
def open(
self, device: Union[int, str], how: int = chip.OPEN_LOOKUP
) -> None:
"""
@brief Open a GPIO chip.
@param device: String or int describing the GPIO chip.
@param how: Indicates how the chip should be opened.
If the object already holds a reference to an open chip, it will be
closed and the reference reset.
Usage:
chip.open("/dev/gpiochip0")
chip.open(0, chip.OPEN_BY_NUMBER)
"""
device = str(device)
func = open_funcs[how]
chip_struct = func(device)
if chip_struct is None:
errno = get_errno()
raise OSError(
errno,
strerror(errno),
"cannot open GPIO device {}".format(device),
)
self._m_chip = shared_chip(chip_struct)
def reset(self):
"""
@brief Reset the internal smart pointer owned by this object.
Usage:
chip.reset()
"""
# Act like shared_ptr::reset()
self._m_chip = shared_chip()
@property
def name(self) -> str:
"""
@brief Return the name of the chip held by this object.
@return Name of the GPIO chip.
Usage:
print(chip.name)
"""
return self._throw_if_noref_and_get_m_chip().name
@property
def label(self) -> str:
"""
@brief Return the label of the chip held by this object.
@return Label of the GPIO chip.
Usage:
print(chip.label)
"""
return self._throw_if_noref_and_get_m_chip().label
@property
def num_lines(self) -> int:
"""
@brief Return the number of lines exposed by this chip.
@return Number of lines.
Usage:
print(chip.num_lines)
"""
return self._throw_if_noref_and_get_m_chip().num_lines
def get_line(self, offset: int) -> line:
"""
@brief Get the line exposed by this chip at given offset.
@param offset: Offset of the line.
@return Line object
Usage:
l = chip.get_line(0)
"""
if offset >= self.num_lines or offset < 0:
raise IndexError("line offset out of range")
line_struct = libgpiod.gpiod_chip_get_line(
self._throw_if_noref_and_get_m_chip(), offset
)
if line_struct is None:
errno = get_errno()
raise OSError(
errno, strerror(errno), "error getting GPIO line from chip"
)
return line(line_struct, copy(self))
def find_line(self, name: str) -> line:
"""
@brief Get the line exposed by this chip by name.
@param name: Line name.
@return Line object.
Usage:
l = chip.find_line("PIN_0")
"""
line_struct = libgpiod.gpiod_chip_find_line(
self._throw_if_noref_and_get_m_chip(), name
)
errno = get_errno()
if line_struct is None and errno != ENOENT:
raise OSError(
errno, strerror(errno), "error looking up GPIO line by name"
)
return line(line_struct, copy(self)) if bool(line_struct) else line()
def get_lines(self, offsets: List[int]) -> line_bulk:
"""
@brief Get a set of lines exposed by this chip at given offsets.
@param offsets: List of line offsets.
@return Set of lines held by a line_bulk object.
Usage:
lb = chip.get_lines([0, 1, 2])
"""
lines = line_bulk()
for it in offsets:
lines.append(self.get_line(it))
return lines
def get_all_lines(self) -> line_bulk:
"""
@brief Get all lines exposed by this chip.
@return All lines exposed by this chip held by a line_bulk object.
Usage:
lb = chip.get_all_lines()
"""
lines = line_bulk()
for i in range(self.num_lines):
lines.append(self.get_line(i))
return lines
def find_lines(self, names: List[str]) -> line_bulk:
"""
@brief Get a set of lines exposed by this chip by their names.
@param names: List of line names.
@return Set of lines held by a line_bulk object.
Usage:
lb = chip.find_lines(["PIN_0", "PIN_1", "PIN_2"])
"""
lines = line_bulk()
for it in names:
a_line = self.find_line(it)
if not a_line:
lines.clear()
return lines
lines.append(a_line)
return lines
def __eq__(self, rhs: chip) -> bool:
"""
@brief Equality operator.
@param rhs: Right-hand side of the equation.
@return True if rhs references the same chip. False otherwise.
Usage:
print(chip1 == chip2)
"""
return self._m_chip.get() == rhs._m_chip.get()
def __ne__(self, rhs: chip) -> bool:
"""
@brief Inequality operator.
@param rhs: Right-hand side of the equation.
@return False if rhs references the same chip. True otherwise.
Usage:
print(chip1 != chip2)
"""
return self._m_chip.get() != rhs._m_chip.get()
def __bool__(self) -> bool:
"""
@brief Check if this object holds a reference to a GPIO chip.
@return True if this object references a GPIO chip, false otherwise.
Usage:
print(bool(chip))
print(not chip)
"""
return self._m_chip.get() is not None
OPEN_LOOKUP = 1
OPEN_BY_PATH = 2
OPEN_BY_NAME = 3
OPEN_BY_LABEL = 4
OPEN_BY_NUMBER = 5
def _throw_if_noref_and_get_m_chip(self) -> libgpiod.gpiod_chip:
_m_chip_get = self._m_chip.get()
if _m_chip_get is None or not bool(_m_chip_get):
raise RuntimeError("object not associated with an open GPIO chip")
return _m_chip_get
class line_request:
# pylint: disable=function-redefined
# pylint: disable=too-few-public-methods
DIRECTION_AS_IS = 1
DIRECTION_INPUT = 2
DIRECTION_OUTPUT = 3
EVENT_FALLING_EDGE = 4
EVENT_RISING_EDGE = 5
EVENT_BOTH_EDGES = 6
FLAG_ACTIVE_LOW = 0b001
FLAG_OPEN_SOURCE = 0b010
FLAG_OPEN_DRAIN = 0b100
def __init__(self):
self.consumer = ""
self.request_type = 0
self.flags = 0
reqtype_mapping = {
# pylint: disable=line-too-long
line_request.DIRECTION_AS_IS: libgpiod.GPIOD_LINE_REQUEST_DIRECTION_AS_IS,
line_request.DIRECTION_INPUT: libgpiod.GPIOD_LINE_REQUEST_DIRECTION_INPUT,
line_request.DIRECTION_OUTPUT: libgpiod.GPIOD_LINE_REQUEST_DIRECTION_OUTPUT,
line_request.EVENT_FALLING_EDGE: libgpiod.GPIOD_LINE_REQUEST_EVENT_FALLING_EDGE,
line_request.EVENT_RISING_EDGE: libgpiod.GPIOD_LINE_REQUEST_EVENT_RISING_EDGE,
line_request.EVENT_BOTH_EDGES: libgpiod.GPIOD_LINE_REQUEST_EVENT_BOTH_EDGES,
}
reqflag_mapping = {
line_request.FLAG_ACTIVE_LOW: libgpiod.GPIOD_LINE_REQUEST_FLAG_ACTIVE_LOW,
line_request.FLAG_OPEN_DRAIN: libgpiod.GPIOD_LINE_REQUEST_FLAG_OPEN_DRAIN,
line_request.FLAG_OPEN_SOURCE: libgpiod.GPIOD_LINE_REQUEST_FLAG_OPEN_SOURCE,
}
class line:
# pylint: disable=function-redefined
def __init__(
self,
line_struct: Optional[libgpiod.gpiod_line] = None,
owner: chip = chip(),
) -> None:
"""
@brief Constructor. Creates an empty line object.
Usage:
l = line()
"""
self._m_line = line_struct
self._m_chip = owner
def __del__(self):
"""
@brief Destructor
Usage:
del line
"""
@property
def offset(self) -> int:
"""
@brief Get the offset of this line.
@return Offet of this line.
Usage:
print(line.offset)
"""
return self._throw_if_null_and_get_m_line().offset
@property
def name(self) -> str:
"""
@brief Get the name of this line (if any).
@return Name of this line or an empty string if it is unnamed.
Usage:
print(line.name)
"""
return self._throw_if_null_and_get_m_line().name
@property
def consumer(self) -> str:
"""
@brief Get the consumer of this line (if any).
@return Name of the consumer of this line or an empty string if it
is unused.
Usage:
print(line.consumer)
"""
return self._throw_if_null_and_get_m_line().consumer
@property
def direction(self) -> int:
"""
@brief Get current direction of this line.
@return Current direction setting.
Usage:
print(line.direction == line.DIRECTION_INPUT)
"""
return (
self.DIRECTION_INPUT
if self._throw_if_null_and_get_m_line().direction
== libgpiod.GPIOD_LINE_DIRECTION_INPUT
else self.DIRECTION_OUTPUT
)
@property
def active_state(self) -> int:
"""
@brief Get current active state of this line.
@return Current active state setting.
Usage:
print(line.active_state == line.ACTIVE_HIGH)
"""
return (
self.ACTIVE_HIGH
if self._throw_if_null_and_get_m_line().active_state
== libgpiod.GPIOD_LINE_ACTIVE_STATE_HIGH
else self.ACTIVE_LOW
)
@property
def is_used(self) -> bool:
"""
@brief Check if this line is used by the kernel or other user space
process.
@return True if this line is in use, false otherwise.
Usage:
print(line.is_used)
"""
return self._throw_if_null_and_get_m_line().used
@property
def is_open_drain(self) -> bool:
"""
@brief Check if this line represents an open-drain GPIO.
@return True if the line is an open-drain GPIO, false otherwise.
Usage:
print(line.is_open_drain)
"""
return self._throw_if_null_and_get_m_line().open_drain
@property
def is_open_source(self) -> bool:
"""
@brief Check if this line represents an open-source GPIO.
@return True if the line is an open-source GPIO, false otherwise.
Usage:
print(line.is_open_source)
"""
return self._throw_if_null_and_get_m_line().open_source
def request(self, config: line_request, default_val: int = 0):
"""
@brief Request this line.
@param config: Request config (see gpiod.line_request).
@param default_val: Default value - only matters for OUTPUT direction.
Usage:
config = line_request()
config.consumer = "Application"
config.request_type = line_request.DIRECTION_OUTPUT
# line.request(config)
line.request(config, 1)
"""
_m_line = self._throw_if_null_and_get_m_line()
conf = libgpiod.gpiod_line_request_config()
conf.consumer = config.consumer
conf.request_type = reqtype_mapping[config.request_type]
conf.flags = 0
rv = libgpiod.gpiod_line_request(_m_line, conf, default_val)
if rv:
errno = get_errno()
raise OSError(errno, strerror(errno), "error requesting GPIO line")
def release(self):
"""
@brief Release the line if it was previously requested.
Usage:
line.release()
"""
libgpiod.gpiod_line_release(self._throw_if_null_and_get_m_line())
@property
def is_requested(self) -> bool:
"""
@brief Check if this user has ownership of this line.
@return True if the user has ownership of this line, false otherwise.
Usage:
print(line.is_requested)
"""
return libgpiod.gpiod_line_is_requested(
self._throw_if_null_and_get_m_line()
)
def get_value(self) -> int:
"""
@brief Read the line value.
@return Current value (0 or 1).
Usage:
val = line.get_value()
"""
rv = libgpiod.gpiod_line_get_value(self._throw_if_null_and_get_m_line())
if rv == -1:
errno = get_errno()
raise OSError(
errno, strerror(errno), "error reading GPIO line value"
)
return rv
def set_value(self, val: int):
"""
@brief Set the value of this line.
@param val: New value (0 or 1).
Usage:
line.set_value(1)
"""
rv = libgpiod.gpiod_line_set_value(
self._throw_if_null_and_get_m_line(), val
)
if rv:
errno = get_errno()
raise OSError(
errno, strerror(errno), "error setting GPIO line value"
)
def event_wait(self, timeout: timedelta) -> bool:
"""
@brief Wait for an event on this line.
@param timeout: Time to wait before returning if no event occurred.
@return True if an event occurred and can be read, false if the wait
timed out.
Usage:
if line.event_wait(timedelta(seconds=10)):
print("An event occurred")
else:
print("Timeout")
"""
rv = libgpiod.gpiod_line_event_wait(
self._throw_if_null_and_get_m_line(), timeout
)
if rv < 0:
errno = get_errno()
raise OSError(errno, strerror(errno), "error polling for events")
return bool(rv)
def event_read(self) -> line_event:
"""
@brief Read a line event.
@return Line event object.
Usage:
if line.event_wait(timedelta(seconds=10)):
event = line.event_read()
print(event.event_type == line_event.RISING_EDGE)
print(event.timestamp)
else:
print("Timeout")
"""
_m_line = self._throw_if_null_and_get_m_line()
event_buf = libgpiod.gpiod_line_event()
event = line_event()
rv = libgpiod.gpiod_line_event_read(_m_line, event_buf)
if rv < 0:
errno = get_errno()
raise OSError(errno, strerror(errno), "error reading line event")
if event_buf.event_type == libgpiod.GPIOD_LINE_EVENT_RISING_EDGE:
event.event_type = line_event.RISING_EDGE
elif event_buf.event_type == libgpiod.GPIOD_LINE_EVENT_FALLING_EDGE:
event.event_type = line_event.FALLING_EDGE
event.timestamp = event_buf.ts
event.source = self
return event
def event_get_fd(self) -> int:
"""
@brief Get the event file descriptor associated with this line.
@return File descriptor number
Usage:
fd = line.event_get_fd()
"""
ret = libgpiod.gpiod_line_event_get_fd(
self._throw_if_null_and_get_m_line()
)
if ret < 0:
errno = get_errno()
raise OSError(
errno,
strerror(errno),
"unable to get the line event file descriptor",
)
return ret
def get_chip(self) -> chip:
"""
@brief Get the reference to the parent chip.
@return Reference to the parent chip object.
Usage:
c = line.get_chip()
"""
return self._m_chip
def reset(self):
"""
@brief Reset the state of this object.
This is useful when the user needs to e.g. keep the line_event object
but wants to drop the reference to the GPIO chip indirectly held by
the line being the source of the event.
Usage:
line.reset()
"""
self._m_line = None
self._m_chip.reset()
def __eq__(self, rhs: line) -> bool:
"""
@brief Check if two line objects reference the same GPIO line.
@param rhs: Right-hand side of the equation.
@return True if both objects reference the same line, fale otherwise.
Usage:
print(line1 == line2)
"""
return self._m_line == rhs._m_line
def __ne__(self, rhs: line) -> bool:
"""
@brief Check if two line objects reference different GPIO lines.
@param rhs: Right-hand side of the equation.
@return False if both objects reference the same line, true otherwise.
Usage:
print(line1 != line2)
"""
return self._m_line != rhs._m_line
def __bool__(self) -> bool:
"""
@brief Check if this object holds a reference to any GPIO line.
@return True if this object references a GPIO line, false otherwise.
Usage:
print(bool(line))
print(not line)
"""
return self._m_line is not None
DIRECTION_INPUT = 1
DIRECTION_OUTPUT = 2
ACTIVE_LOW = 1
ACTIVE_HIGH = 2
def _throw_if_null_and_get_m_line(self) -> libgpiod.gpiod_line:
if self._m_line is None:
raise RuntimeError("object not holding a GPIO line handle")
return self._m_line
class line_event:
# pylint: disable=function-redefined
# pylint: disable=too-few-public-methods
RISING_EDGE = 1
FALLING_EDGE = 2
def __init__(self):
self.timestamp = None
self.event_type = 0
self.source = line()
class line_bulk:
    # pylint: disable=function-redefined
    # pylint: disable=missing-function-docstring
    def __init__(self, lines: Optional[List[line]] = None) -> None:
        """
        @brief Constructor. Creates an empty line_bulk or from a list of lines.
        @param lines: List of gpiod::line objects.
        @note All lines must be owned by the same GPIO chip.
        Usage:
            bulk = line_bulk()
            bulk = line_bulk([line1, line2])
        """
        self._m_bulk = lines if lines is not None else []
    def __del__(self) -> None:
        """
        @brief Destructor
        Usage:
            del bulk
        """
    def append(self, new_line: line) -> None:
        """
        @brief Add a line to this line_bulk object.
        @param new_line: Line to add.
        @note The new line must be owned by the same chip as all the other
        lines already held by this line_bulk object.
        Usage:
            bulk.append(line1)
        """
        if not new_line:
            raise ValueError("line_bulk cannot hold empty line objects")
        if len(self._m_bulk) >= self.MAX_LINES:
            raise IndexError("maximum number of lines reached")
        # Every line must come from the same chip as the first line held.
        if (
            len(self._m_bulk) >= 1
            and self._m_bulk[0].get_chip() != new_line.get_chip()
        ):
            raise ValueError(
                "line_bulk cannot hold GPIO lines from different chips"
            )
        self._m_bulk.append(new_line)
    def get(self, offset: int) -> line:
        """
        @brief Get the line at given offset.
        @param offset: Offset of the line to get.
        @return Reference to the line object.
        Usage:
            line1 = bulk.get(1)
        """
        return self._m_bulk[offset]
    def __getitem__(self, offset: int) -> line:
        """
        @brief Get the line at given offset.
        @param offset: Offset of the line to get.
        @return Reference to the line object.
        Usage:
            line1 = bulk[1]
        """
        return self._m_bulk[offset]
    @property
    def size(self) -> int:
        """
        @brief Get the number of lines currently held by this object.
        @return Number of elements in this line_bulk.
        Usage:
            print(bulk.size)
        """
        return len(self._m_bulk)
    def __len__(self) -> int:
        """
        @brief Get the number of lines currently held by this object.
        @return Number of elements in this line_bulk.
        Usage:
            print(len(bulk))
        """
        return len(self._m_bulk)
    @property
    def empty(self) -> bool:
        """
        @brief Check if this line_bulk doesn't hold any lines.
        @return True if this object is empty, false otherwise.
        Usage:
            print(bulk.empty)
        """
        return len(self._m_bulk) == 0
    def clear(self):
        """
        @brief Remove all lines from this object.
        Usage:
            bulk.clear()
        """
        self._m_bulk.clear()
    def request(
        self, config: line_request, default_vals: Optional[List[int]] = None
    ) -> None:
        """
        @brief Request all lines held by this object.
        @param config: Request config (see gpiod::line_request).
        @param default_vals: List of default values. Only relevant for output
        direction requests.
        Usage:
            config = line_request()
            config.consumer = "Application"
            config.request_type = line_request.DIRECTION_OUTPUT
            # bulk.request(config)
            bulk.request(config, [1] * bulk.size)
        """
        self._throw_if_empty()
        if default_vals is None:
            default_vals = [0] * self.size
        if self.size != len(default_vals):
            raise ValueError(
                "the number of default values must correspond "
                "with the number of lines"
            )
        # Request lines one at a time; on failure release whatever was
        # already granted so no line stays requested.
        try:
            for i in range(self.size):
                self._m_bulk[i].request(config, default_vals[i])
        except OSError as error:
            self.release()
            raise error
    def release(self) -> None:
        """
        @brief Release all lines held by this object.
        Usage:
            bulk.release()
        """
        self._throw_if_empty()
        for it in self._m_bulk:
            it.release()
    def get_values(self) -> List[int]:
        """
        @brief Read values from all lines held by this object.
        @return List containing line values the order of which corresponds
        with the order of lines in the internal array.
        Usage:
            ret = bulk.get_values()
        """
        self._throw_if_empty()
        values = []
        for it in self._m_bulk:
            values.append(it.get_value())
        return values
    def set_values(self, values: List[int]) -> None:
        """
        @brief Set values of all lines held by this object.
        @param values: List of values to set. Must be the same size as the
        number of lines held by this line_bulk.
        Usage:
            bulk.set_values([1] * bulk.size)
        """
        self._throw_if_empty()
        if self.size != len(values):
            raise ValueError(
                "the size of values array must correspond with "
                "the number of lines"
            )
        for i in range(self.size):
            self._m_bulk[i].set_value(values[i])
    def event_wait(self, timeout: timedelta) -> line_bulk:
        """
        @brief Poll the set of lines for line events.
        @param timeout: timedelta to wait before returning an empty line_bulk.
        @return Returns a line_bulk object containing lines on which events
        occurred.
        Usage:
            ebulk = bulk.event_wait(timedelta(microseconds=20000))
        """
        self._throw_if_empty()
        bulk = libgpiod.gpiod_line_bulk()
        event_bulk = libgpiod.gpiod_line_bulk()
        ret = line_bulk()
        self._to_line_bulk(bulk)
        rv = libgpiod.gpiod_line_event_wait_bulk(bulk, timeout, event_bulk)
        if rv < 0:
            errno = get_errno()
            raise OSError(errno, strerror(errno), "error polling for events")
        if rv > 0:
            # Wrap every raw line that saw an event in a gpiod.line bound
            # to the parent chip of the held lines.
            for i in range(event_bulk.num_lines):
                ret.append(line(event_bulk[i], self._m_bulk[i].get_chip()))
        return ret
    def __bool__(self) -> bool:
        """
        @brief Check if this object holds any lines.
        @return True if this line_bulk holds at least one line, false otherwise.
        Usage:
            print(bool(bulk))
            print(not bulk)
        """
        return not self.empty
    @property
    def MAX_LINES(self) -> int:
        """
        @brief Max number of lines that this object can hold.
        """
        return libgpiod.GPIOD_LINE_BULK_MAX_LINES
    def __iter__(self) -> Iterator[line]:
        """
        @brief Iterator for iterating over lines held by line_bulk.
        Usage:
            for l in bulk:
                print(l.name)
        """
        return self._m_bulk.__iter__()
    def _throw_if_empty(self):
        # Guard used by every operation that needs at least one held line.
        if self.empty:
            raise RuntimeError("line_bulk not holding any GPIO lines")
    def _to_line_bulk(self, bulk: libgpiod.gpiod_line_bulk) -> None:
        # Copy the raw libgpiod line handles into the C-level bulk object.
        for it in self._m_bulk:
            # pylint: disable=protected-access
            bulk.add(it._m_line)
class chip_iter:
    """
    @brief Allows to iterate over all GPIO chips present on the system.
    Usage:
        for c in chip_iter():
            print(c.name)
    """
    def __init__(self):
        # The raw libgpiod iterator is created lazily in __iter__.
        self._iter = None
    def __iter__(self):
        self._iter = libgpiod.gpiod_chip_iter().__iter__()
        if self._iter is None:
            err = get_errno()
            raise OSError(
                err, strerror(err), "error creating GPIO chip iterator"
            )
        return self
    def __next__(self) -> chip:
        # next_noclose keeps the previous chip open while advancing.
        raw_chip = self._iter.next_noclose()
        return chip(chip_shared=shared_chip(raw_chip))
class line_iter:
    """
    @brief Allows to iterate over all lines owned by a GPIO chip.
    @param owner: Chip owning the GPIO lines over which we want to iterate.
    Usage:
        for l in line_iter(chip):
            print("{}: {}".format(l.offset, l.name))
    """
    def __init__(self, owner: chip) -> None:
        # Chip whose lines are enumerated; raw iterator is built in __iter__.
        self._chip = owner
        self._iter = None
    def __iter__(self) -> Iterator[libgpiod.gpiod_line]:
        self._iter = iter(libgpiod.gpiod_line_iter(self._chip._m_chip.get()))
        if self._iter is None:
            err = get_errno()
            raise OSError(
                err, strerror(err), "error creating GPIO line iterator"
            )
        return self
    def __next__(self) -> line:
        if self._iter is None:
            raise StopIteration
        # Wrap each raw libgpiod line in a gpiod.line bound to the owner chip.
        return line(next(self._iter), self._chip)
|
[
"ctypes.get_errno",
"copy.copy",
"os.strerror"
] |
[((6681, 6692), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (6690, 6692), False, 'from ctypes import get_errno\n'), ((4301, 4312), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (4310, 4312), False, 'from ctypes import get_errno\n'), ((6125, 6136), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (6134, 6136), False, 'from ctypes import get_errno\n'), ((6288, 6298), 'copy.copy', 'copy', (['self'], {}), '(self)\n', (6292, 6298), False, 'from copy import copy\n'), ((15098, 15109), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (15107, 15109), False, 'from ctypes import get_errno\n'), ((16103, 16114), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (16112, 16114), False, 'from ctypes import get_errno\n'), ((16580, 16591), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (16589, 16591), False, 'from ctypes import get_errno\n'), ((17330, 17341), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (17339, 17341), False, 'from ctypes import get_errno\n'), ((18086, 18097), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (18095, 18097), False, 'from ctypes import get_errno\n'), ((18901, 18912), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (18910, 18912), False, 'from ctypes import get_errno\n'), ((27569, 27580), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (27578, 27580), False, 'from ctypes import get_errno\n'), ((29220, 29231), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (29229, 29231), False, 'from ctypes import get_errno\n'), ((30061, 30072), 'ctypes.get_errno', 'get_errno', ([], {}), '()\n', (30070, 30072), False, 'from ctypes import get_errno\n'), ((4379, 4394), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (4387, 4394), False, 'from os import strerror\n'), ((6187, 6202), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (6195, 6202), False, 'from os import strerror\n'), ((6795, 6810), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (6803, 6810), False, 'from os import strerror\n'), ((6897, 6907), 
'copy.copy', 'copy', (['self'], {}), '(self)\n', (6901, 6907), False, 'from copy import copy\n'), ((15143, 15158), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (15151, 15158), False, 'from os import strerror\n'), ((16165, 16180), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (16173, 16180), False, 'from os import strerror\n'), ((16642, 16657), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (16650, 16657), False, 'from os import strerror\n'), ((17375, 17390), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (17383, 17390), False, 'from os import strerror\n'), ((18131, 18146), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (18139, 18146), False, 'from os import strerror\n'), ((18979, 18994), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (18987, 18994), False, 'from os import strerror\n'), ((27614, 27629), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (27622, 27629), False, 'from os import strerror\n'), ((29282, 29297), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (29290, 29297), False, 'from os import strerror\n'), ((30123, 30138), 'os.strerror', 'strerror', (['errno'], {}), '(errno)\n', (30131, 30138), False, 'from os import strerror\n')]
|
from datetime import datetime
from os.path import dirname, join
import pytest
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from city_scrapers.spiders.chi_police_retirement import ChiPoliceRetirementSpider
# Cached copy of the board's meetings page used as the spider input.
test_response = file_response(
    join(dirname(__file__), "files", "chi_police_retirement.html"),
    url="http://www.chipabf.org/ChicagoPolicePension/MonthlyMeetings.html",
)
spider = ChiPoliceRetirementSpider()
# Freeze "now" so status calculations (e.g. passed vs upcoming) are stable.
freezer = freeze_time("2019-05-05")
freezer.start()
parsed_items = [item for item in spider.parse(test_response)]
freezer.stop()
def test_title():
    """First parsed meeting carries the board's title."""
    first_item = parsed_items[0]
    assert first_item["title"] == "Retirement Board"
def test_description():
    """No description is scraped for these meetings."""
    first_item = parsed_items[0]
    assert first_item["description"] == ""
def test_start():
    """Start time is parsed from the page (9:00 AM, Jan 31 2019)."""
    expected_start = datetime(2019, 1, 31, 9, 0)
    assert parsed_items[0]["start"] == expected_start
def test_id():
    """The id combines spider name, start timestamp and slug."""
    expected_id = "chi_police_retirement/201901310900/x/retirement_board"
    assert parsed_items[0]["id"] == expected_id
def test_status():
    """With time frozen at 2019-05-05, the January meeting has passed."""
    first_item = parsed_items[0]
    assert first_item["status"] == "passed"
def test_location():
    """Meetings take place at the fund's LaSalle Street office."""
    expected_location = {
        "name": "Policemen's Annuity and Benefit Fund",
        "address": (
            "221 North LaSalle Street, Suite 1626, Chicago, "
            "Illinois 60601-1203"
        ),
    }
    assert parsed_items[0]["location"] == expected_location
def test_source():
    """Source points back to the scraped meetings page."""
    page_url = "http://www.chipabf.org/ChicagoPolicePension/MonthlyMeetings.html"
    assert parsed_items[0]["source"] == page_url
def test_links():
    """Agenda and minutes PDFs are linked for the meeting."""
    pdf_base = "http://www.chipabf.org/ChicagoPolicePension/PDF"
    assert parsed_items[0]["links"] == [
        {
            "href": pdf_base + "/Agenda/2019/2019AGENDA01.pdf",
            "title": "Agenda",
        },
        {
            "href": pdf_base + "/Minutes/2019/2019MINUTES01.pdf",
            "title": "Minutes",
        },
    ]
def test_classification():
    """Retirement board meetings are classified as Board meetings."""
    first_item = parsed_items[0]
    assert first_item["classification"] == "Board"
@pytest.mark.parametrize("item", parsed_items)
def test_all_day(item):
    """No meeting is an all-day event."""
    assert item["all_day"] is False
|
[
"os.path.dirname",
"datetime.datetime",
"pytest.mark.parametrize",
"city_scrapers.spiders.chi_police_retirement.ChiPoliceRetirementSpider",
"freezegun.freeze_time"
] |
[((434, 461), 'city_scrapers.spiders.chi_police_retirement.ChiPoliceRetirementSpider', 'ChiPoliceRetirementSpider', ([], {}), '()\n', (459, 461), False, 'from city_scrapers.spiders.chi_police_retirement import ChiPoliceRetirementSpider\n'), ((473, 498), 'freezegun.freeze_time', 'freeze_time', (['"""2019-05-05"""'], {}), "('2019-05-05')\n", (484, 498), False, 'from freezegun import freeze_time\n'), ((1899, 1944), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""item"""', 'parsed_items'], {}), "('item', parsed_items)\n", (1922, 1944), False, 'import pytest\n'), ((288, 305), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (295, 305), False, 'from os.path import dirname, join\n'), ((805, 832), 'datetime.datetime', 'datetime', (['(2019)', '(1)', '(31)', '(9)', '(0)'], {}), '(2019, 1, 31, 9, 0)\n', (813, 832), False, 'from datetime import datetime\n')]
|
"""Categorical LSTM Model.
A model represented by a Categorical distribution
which is parameterized by a Long short-term memory (LSTM).
"""
import tensorflow as tf
import tensorflow_probability as tfp
from garage.tf.models.lstm_model import LSTMModel
class CategoricalLSTMModel(LSTMModel):
    """Categorical LSTM Model.
    A model represented by a Categorical distribution
    which is parameterized by a Long short-term memory (LSTM).
    Args:
        output_dim (int): Dimension of the network output.
        hidden_dim (int): Hidden dimension for LSTM cell.
        name (str): Policy name, also the variable scope.
        hidden_nonlinearity (callable): Activation function for intermediate
            dense layer(s). It should return a tf.Tensor. Set it to
            None to maintain a linear activation.
        hidden_w_init (callable): Initializer function for the weight
            of intermediate dense layer(s). The function should return a
            tf.Tensor.
        hidden_b_init (callable): Initializer function for the bias
            of intermediate dense layer(s). The function should return a
            tf.Tensor.
        recurrent_nonlinearity (callable): Activation function for recurrent
            layers. It should return a tf.Tensor. Set it to None to
            maintain a linear activation.
        recurrent_w_init (callable): Initializer function for the weight
            of recurrent layer(s). The function should return a
            tf.Tensor.
        output_nonlinearity (callable): Activation function for output dense
            layer. It should return a tf.Tensor. Set it to None to
            maintain a linear activation.
        output_w_init (callable): Initializer function for the weight
            of output dense layer(s). The function should return a
            tf.Tensor.
        output_b_init (callable): Initializer function for the bias
            of output dense layer(s). The function should return a
            tf.Tensor.
        hidden_state_init (callable): Initializer function for the
            initial hidden state. The function should return a tf.Tensor.
        hidden_state_init_trainable (bool): Bool for whether the initial
            hidden state is trainable.
        cell_state_init (callable): Initializer function for the
            initial cell state. The function should return a tf.Tensor.
        cell_state_init_trainable (bool): Bool for whether the initial
            cell state is trainable.
        forget_bias (bool): If True, add 1 to the bias of the forget gate at
            initialization. It's used to reduce the scale of forgetting at the
            beginning of the training.
        layer_normalization (bool): Bool for using layer normalization or not.
    """
    def __init__(self,
                 output_dim,
                 hidden_dim,
                 name=None,
                 hidden_nonlinearity=tf.nn.tanh,
                 hidden_w_init=tf.initializers.glorot_uniform(),
                 hidden_b_init=tf.zeros_initializer(),
                 recurrent_nonlinearity=tf.nn.sigmoid,
                 recurrent_w_init=tf.initializers.glorot_uniform(),
                 output_nonlinearity=None,
                 output_w_init=tf.initializers.glorot_uniform(),
                 output_b_init=tf.zeros_initializer(),
                 hidden_state_init=tf.zeros_initializer(),
                 hidden_state_init_trainable=False,
                 cell_state_init=tf.zeros_initializer(),
                 cell_state_init_trainable=False,
                 forget_bias=True,
                 layer_normalization=False):
        # The underlying LSTMModel is always given tf.nn.softmax so its
        # outputs are valid Categorical probabilities; the user-supplied
        # output_nonlinearity is applied on top of them in _build().
        super().__init__(
            output_dim=output_dim,
            hidden_dim=hidden_dim,
            name=name,
            hidden_nonlinearity=hidden_nonlinearity,
            hidden_w_init=hidden_w_init,
            hidden_b_init=hidden_b_init,
            recurrent_nonlinearity=recurrent_nonlinearity,
            recurrent_w_init=recurrent_w_init,
            output_nonlinearity=tf.nn.softmax,
            output_w_init=output_w_init,
            output_b_init=output_b_init,
            hidden_state_init=hidden_state_init,
            hidden_state_init_trainable=hidden_state_init_trainable,
            cell_state_init=cell_state_init,
            cell_state_init_trainable=cell_state_init_trainable,
            forget_bias=forget_bias,
            layer_normalization=layer_normalization)
        self._output_normalization_fn = output_nonlinearity
    def network_output_spec(self):
        """Network output spec.
        Returns:
            list[str]: Name of the model outputs, in order.
        """
        return [
            'dist', 'step_output', 'step_hidden', 'step_cell', 'init_hidden',
            'init_cell'
        ]
    # pylint: disable=arguments-differ
    def _build(self,
               state_input,
               step_input,
               step_hidden,
               step_cell,
               name=None):
        """Build model.
        Args:
            state_input (tf.Tensor): Entire time-series observation input,
                with shape :math:`(N, T, S^*)`.
            step_input (tf.Tensor): Single timestep observation input,
                with shape :math:`(N, S^*)`.
            step_hidden (tf.Tensor): Hidden state for step, with shape
                :math:`(N, S^*)`.
            step_cell (tf.Tensor): Cell state for step, with shape
                :math:`(N, S^*)`.
            name (str): Inner model name, also the variable scope of the
                inner model, if exist. One example is
                garage.tf.models.Sequential.
        Returns:
            tfp.distributions.OneHotCategorical: Policy distribution.
            tf.Tensor: Step output, with shape :math:`(N, S^*)`
            tf.Tensor: Step hidden state, with shape :math:`(N, S^*)`
            tf.Tensor: Step cell state, with shape :math:`(N, S^*)`
            tf.Tensor: Initial hidden state, used to reset the hidden state
                when policy resets. Shape: :math:`(S^*)`
            tf.Tensor: Initial cell state, used to reset the cell state
                when policy resets. Shape: :math:`(S^*)`
        """
        (outputs, step_output, step_hidden, step_cell, init_hidden,
         init_cell) = super()._build(state_input,
                                     step_input,
                                     step_hidden,
                                     step_cell,
                                     name=name)
        # Optionally post-process the softmax probabilities produced by the
        # base model before parameterizing the distribution.
        if self._output_normalization_fn:
            outputs = self._output_normalization_fn(outputs)
        dist = tfp.distributions.OneHotCategorical(probs=outputs)
        return (dist, step_output, step_hidden, step_cell, init_hidden,
                init_cell)
|
[
"tensorflow.zeros_initializer",
"tensorflow.initializers.glorot_uniform",
"tensorflow_probability.distributions.OneHotCategorical"
] |
[((3041, 3073), 'tensorflow.initializers.glorot_uniform', 'tf.initializers.glorot_uniform', ([], {}), '()\n', (3071, 3073), True, 'import tensorflow as tf\n'), ((3107, 3129), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ([], {}), '()\n', (3127, 3129), True, 'import tensorflow as tf\n'), ((3222, 3254), 'tensorflow.initializers.glorot_uniform', 'tf.initializers.glorot_uniform', ([], {}), '()\n', (3252, 3254), True, 'import tensorflow as tf\n'), ((3332, 3364), 'tensorflow.initializers.glorot_uniform', 'tf.initializers.glorot_uniform', ([], {}), '()\n', (3362, 3364), True, 'import tensorflow as tf\n'), ((3398, 3420), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ([], {}), '()\n', (3418, 3420), True, 'import tensorflow as tf\n'), ((3458, 3480), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ([], {}), '()\n', (3478, 3480), True, 'import tensorflow as tf\n'), ((3569, 3591), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ([], {}), '()\n', (3589, 3591), True, 'import tensorflow as tf\n'), ((6804, 6854), 'tensorflow_probability.distributions.OneHotCategorical', 'tfp.distributions.OneHotCategorical', ([], {'probs': 'outputs'}), '(probs=outputs)\n', (6839, 6854), True, 'import tensorflow_probability as tfp\n')]
|
import logging
import os
import random
import sys
from enum import Enum
from functools import wraps
from logging.handlers import TimedRotatingFileHandler
class DynamicTimedRotatingFileHandler(TimedRotatingFileHandler):
    """TimedRotatingFileHandler that prefixes the log file name with the
    running script's name and creates the target directory on demand."""
    # noinspection PyPep8Naming
    def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False):
        directory = os.path.dirname(filename)
        base_name = os.path.basename(filename)
        # Name of the entry-point script without its extension.
        script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        target = os.path.join(directory, script_name + base_name)
        if directory:
            # Create the directory tree; tolerate it already existing
            # (possibly created concurrently by another process).
            try:
                os.makedirs(directory)
            except OSError:
                if not os.path.isdir(directory):
                    raise
        super(DynamicTimedRotatingFileHandler, self).__init__(
            filename=target, when=when, interval=interval,
            backupCount=backupCount, encoding=encoding, delay=delay, utc=utc
        )
class LineBreakFormatter(logging.Formatter):
    """Formatter that terminates every formatted record with a blank line."""
    def format(self, record):
        formatted = super(LineBreakFormatter, self).format(record)
        return formatted + "\n"
def random_choice(sequence):
    """ Same as :meth:`random.choice`, but also supports :class:`set` type to be passed as sequence. """
    # random.choice needs an indexable sequence, so convert sets first.
    if isinstance(sequence, set):
        sequence = tuple(sequence)
    return random.choice(sequence)
def log_call(func):
    """Decorator that logs entry, result and exit of *func* at DEBUG level."""
    logger = logging.getLogger(func.__module__)
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        qualname = func.__qualname__
        logger.debug("Entering function: '{}'.".format(qualname))
        result = func(self, *args, **kwargs)
        logger.debug("Result is: '{}'.".format(result))
        logger.debug("Exiting function: '{}'.".format(qualname))
        return result
    return wrapper
class AutoNumberEnum(Enum):
    """Enum base whose members are numbered automatically, starting at 0,
    in definition order."""
    def __new__(cls):
        obj = object.__new__(cls)
        # The number of members created so far is the next ordinal.
        obj._value_ = len(cls.__members__)
        return obj
def mean(data):
    """Return the arithmetic mean of *data*, or 0.0 for an empty sequence."""
    count = len(data)
    # Dividing by at least 1 makes the empty case yield 0.0 instead of
    # raising ZeroDivisionError.
    return float(sum(data)) / (count if count else 1)
|
[
"os.makedirs",
"os.path.basename",
"os.path.isdir",
"os.path.dirname",
"functools.wraps",
"os.path.join",
"logging.getLogger"
] |
[((1388, 1422), 'logging.getLogger', 'logging.getLogger', (['func.__module__'], {}), '(func.__module__)\n', (1405, 1422), False, 'import logging\n'), ((1429, 1440), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (1434, 1440), False, 'from functools import wraps\n'), ((382, 407), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (397, 407), False, 'import os\n'), ((428, 454), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (444, 454), False, 'import os\n'), ((547, 594), 'os.path.join', 'os.path.join', (['dir_name', '(script_name + file_name)'], {}), '(dir_name, script_name + file_name)\n', (559, 594), False, 'import os\n'), ((494, 523), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (510, 523), False, 'import os\n'), ((650, 671), 'os.makedirs', 'os.makedirs', (['dir_name'], {}), '(dir_name)\n', (661, 671), False, 'import os\n'), ((723, 746), 'os.path.isdir', 'os.path.isdir', (['dir_name'], {}), '(dir_name)\n', (736, 746), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 - 2022 -- <NAME>
# All rights reserved.
#
# License: BSD License
#
"""\
Tests against issue 39
<https://github.com/heuer/segno/issues/39>
"""
from __future__ import absolute_import, unicode_literals
import os
import io
import tempfile
import pytest
import segno
from segno import cli
def test_output():
    """--output via the CLI must produce the same PNG as the library call.

    Fix: the temporary file was leaked if cli.main or the read raised;
    cleanup now happens in a finally block and the file handle is managed
    with a context manager.
    """
    out = io.BytesIO()
    segno.make_qr('Good Times', error='M').save(out, kind='png', scale=10,
                                                dark='red')
    f = tempfile.NamedTemporaryFile('w', suffix='.png', delete=False)
    f.close()
    try:
        cli.main(['-e=M', '--scale=10', '--dark=red',
                  '--output={0}'.format(f.name), 'Good Times'])
        with open(f.name, 'rb') as fh:
            content = fh.read()
    finally:
        # Always remove the temp file, even if the CLI call fails.
        os.unlink(f.name)
    assert out.getvalue() == content
def test_output2():
    """Same as test_output but with the content split into two CLI args.

    Fix: the temporary file was leaked if cli.main or the read raised;
    cleanup now happens in a finally block and the file handle is managed
    with a context manager.
    """
    out = io.BytesIO()
    segno.make_qr('Good Times', error='M').save(out, kind='png', scale=10,
                                                dark='red')
    f = tempfile.NamedTemporaryFile('w', suffix='.png', delete=False)
    f.close()
    try:
        cli.main(['-e=M', '--scale=10', '--dark=red',
                  '--output={0}'.format(f.name), 'Good', 'Times'])
        with open(f.name, 'rb') as fh:
            content = fh.read()
    finally:
        # Always remove the temp file, even if the CLI call fails.
        os.unlink(f.name)
    assert out.getvalue() == content
# Allow running this test module directly (outside a pytest invocation).
if __name__ == '__main__':
    pytest.main([__file__])
|
[
"tempfile.NamedTemporaryFile",
"io.BytesIO",
"os.unlink",
"segno.make_qr",
"pytest.main"
] |
[((362, 374), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (372, 374), False, 'import io\n'), ((518, 579), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""w"""'], {'suffix': '""".png"""', 'delete': '(False)'}), "('w', suffix='.png', delete=False)\n", (545, 579), False, 'import tempfile\n'), ((772, 789), 'os.unlink', 'os.unlink', (['f.name'], {}), '(f.name)\n', (781, 789), False, 'import os\n'), ((859, 871), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (869, 871), False, 'import io\n'), ((1015, 1076), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""w"""'], {'suffix': '""".png"""', 'delete': '(False)'}), "('w', suffix='.png', delete=False)\n", (1042, 1076), False, 'import tempfile\n'), ((1272, 1289), 'os.unlink', 'os.unlink', (['f.name'], {}), '(f.name)\n', (1281, 1289), False, 'import os\n'), ((1360, 1383), 'pytest.main', 'pytest.main', (['[__file__]'], {}), '([__file__])\n', (1371, 1383), False, 'import pytest\n'), ((379, 417), 'segno.make_qr', 'segno.make_qr', (['"""Good Times"""'], {'error': '"""M"""'}), "('Good Times', error='M')\n", (392, 417), False, 'import segno\n'), ((876, 914), 'segno.make_qr', 'segno.make_qr', (['"""Good Times"""'], {'error': '"""M"""'}), "('Good Times', error='M')\n", (889, 914), False, 'import segno\n')]
|
import hashlib
import hmac
import logging
import os
import tornado.escape
import tornado.httpserver
import tornado.gen
import tornado.ioloop
import tornado.log
import tornado.web
from . import update_pr
class MainHandler(tornado.web.RequestHandler):
    """Fallback handler for the root path; this service only serves /webhook."""
    def get(self):
        # Nothing is served at '/': respond with 404.
        self.set_status(404)
        self.write_error(404)
class WebhookHandler(tornado.web.RequestHandler):
    """Validates GitHub webhook signatures and dispatches PR events.

    Fixes:
    * On an HMAC mismatch the handler previously set a 403 status but then
      fell through and processed the event anyway -- it now returns early.
    * The digest comparison now uses hmac.compare_digest to avoid leaking
      timing information about the expected signature.
    """
    @tornado.gen.coroutine
    def post(self):
        headers = self.request.headers
        event = headers.get('X-GitHub-Event', None)
        hmac_digest = headers.get('X-Hub-Signature', None)
        webhook_secret = os.environ['WEBHOOK_SECRET'].encode()
        # Compute the payload's hmac digest.
        expected_hmac = hmac.new(
            webhook_secret, self.request.body, hashlib.sha1).hexdigest()
        expected_digest = 'sha1={}'.format(expected_hmac)
        # Constant-time comparison; `or ''` guards against a missing header.
        if not hmac.compare_digest(hmac_digest or '', expected_digest):
            logging.warning('HMAC FAIL: expected: {}; got: {};'
                            ''.format(expected_digest, hmac_digest))
            self.set_status(403)
            # Stop here -- never process an unauthenticated delivery.
            return
        if event == 'ping':
            self.write('pong')
        elif event == 'pull_request':
            body = tornado.escape.json_decode(self.request.body)
            repo_name = body['repository']['name']
            owner = body['repository']['owner']['login']
            pr_id = int(body['pull_request']['number'])
            is_open = body['pull_request']['state'] == 'open'
            # Only act on open PRs in the SciTools organisations.
            if is_open and owner.lower() in ['scitools', 'scitools-incubator']:
                yield update_pr.check_pr('{}/{}'.format(owner, repo_name),
                                          pr_id)
        else:
            self.write('Unhandled event "{}".'.format(event))
            self.set_status(404)
def main():
    """Configure logging and run the webhook HTTP server forever."""
    tornado.log.enable_pretty_logging()
    routes = [
        (r"/", MainHandler),
        (r"/webhook", WebhookHandler),
    ]
    application = tornado.web.Application(routes)
    http_server = tornado.httpserver.HTTPServer(application)
    # Port can be overridden by the environment (e.g. on PaaS platforms).
    port = os.environ.get('PORT', 8080)
    http_server.listen(port)
    tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main()
|
[
"os.environ.get",
"hmac.new"
] |
[((2055, 2083), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(8080)'], {}), "('PORT', 8080)\n", (2069, 2083), False, 'import os\n'), ((714, 771), 'hmac.new', 'hmac.new', (['webhook_secret', 'self.request.body', 'hashlib.sha1'], {}), '(webhook_secret, self.request.body, hashlib.sha1)\n', (722, 771), False, 'import hmac\n')]
|
import logging
import os
from galaxy.util.dictifiable import Dictifiable
from galaxy.util.bunch import Bunch
from galaxy.util import asbool
from tool_shed.util import common_util
from urlparse import urljoin
# Module-level logger.
log = logging.getLogger( __name__ )
class ToolShedRepository( object ):
dict_collection_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
dict_element_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
'tool_shed_status', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
installation_status = Bunch( NEW='New',
CLONING='Cloning',
SETTING_TOOL_VERSIONS='Setting tool versions',
INSTALLING_REPOSITORY_DEPENDENCIES='Installing repository dependencies',
INSTALLING_TOOL_DEPENDENCIES='Installing tool dependencies',
LOADING_PROPRIETARY_DATATYPES='Loading proprietary datatypes',
INSTALLED='Installed',
DEACTIVATED='Deactivated',
ERROR='Error',
UNINSTALLED='Uninstalled' )
states = Bunch( INSTALLING='running',
OK='ok',
WARNING='queued',
ERROR='error',
UNINSTALLED='deleted_new' )
    def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
                  changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, tool_shed_status=None, deleted=False,
                  uninstalled=False, dist_to_shed=False, status=None, error_message=None ):
        """Plain attribute container describing an installed tool shed repository.

        ``id`` intentionally shadows the builtin (it is the database id of
        the record); ``status`` should be one of ``installation_status``.
        """
        self.id = id
        self.create_time = create_time
        self.tool_shed = tool_shed
        self.name = name
        self.description = description
        self.owner = owner
        self.installed_changeset_revision = installed_changeset_revision
        self.changeset_revision = changeset_revision
        self.ctx_rev = ctx_rev
        self.metadata = metadata
        self.includes_datatypes = includes_datatypes
        self.tool_shed_status = tool_shed_status
        self.deleted = deleted
        self.uninstalled = uninstalled
        self.dist_to_shed = dist_to_shed
        self.status = status
        self.error_message = error_message
def as_dict( self, value_mapper=None ):
return self.to_dict( view='element', value_mapper=value_mapper )
@property
def can_install( self ):
return self.status == self.installation_status.NEW
@property
def can_reset_metadata( self ):
return self.status == self.installation_status.INSTALLED
@property
def can_uninstall( self ):
return self.status != self.installation_status.UNINSTALLED
@property
def can_deactivate( self ):
return self.status not in [ self.installation_status.DEACTIVATED,
self.installation_status.ERROR,
self.installation_status.UNINSTALLED ]
@property
def can_reinstall_or_activate( self ):
return self.deleted
def get_sharable_url( self, app ):
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, self.tool_shed )
if tool_shed_url:
# Append a slash to the tool shed URL, because urlparse.urljoin will eliminate
# the last part of a URL if it does not end with a forward slash.
tool_shed_url = '%s/' % tool_shed_url
return urljoin( tool_shed_url, 'view/%s/%s' % ( self.owner, self.name ) )
return tool_shed_url
def get_shed_config_filename( self ):
shed_config_filename = None
if self.metadata:
shed_config_filename = self.metadata.get( 'shed_config_filename', shed_config_filename )
return shed_config_filename
    def get_shed_config_dict( self, app, default=None ):
        """
        Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
        in the shed_tool_conf_dict.
        """
        def _is_valid_shed_config_filename( filename ):
            # A filename is valid only if one of the currently loaded
            # dynamic tool config files matches it exactly.
            for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
                if filename == shed_tool_conf_dict[ 'config_filename' ]:
                    return True
            return False
        # Re-guess the config when the recorded filename is missing or stale;
        # guess_shed_config() sets self.shed_config_filename as a side effect.
        if not self.shed_config_filename or not _is_valid_shed_config_filename( self.shed_config_filename ):
            self.guess_shed_config( app, default=default )
        if self.shed_config_filename:
            for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
                if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]:
                    return shed_tool_conf_dict
        return default
    def get_tool_relative_path( self, app ):
        """Return ( tool_path, relative_path ) for this repository's tools,
        or ( None, None ) when no shed config is associated."""
        shed_conf_dict = self.get_shed_config_dict( app )
        tool_path = None
        relative_path = None
        if shed_conf_dict:
            tool_path = shed_conf_dict[ 'tool_path' ]
            # NOTE(review): tool_shed_path_name is not defined in this chunk --
            # presumably a property defined elsewhere on this class; confirm.
            relative_path = os.path.join( self.tool_shed_path_name, 'repos', self.owner, self.name, self.installed_changeset_revision )
        return tool_path, relative_path
    def guess_shed_config( self, app, default=None ):
        """Find the shed tool config that contains this repository's tools
        (or datatypes), record its filename on self and return its dict;
        return ``default`` when nothing matches."""
        tool_ids = []
        metadata = self.metadata or {}
        for tool in metadata.get( 'tools', [] ):
            tool_ids.append( tool.get( 'guid' ) )
        # Scan every dynamic config for a <tool> element (possibly inside a
        # <section>) whose id matches one of this repository's tool guids.
        for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
            name = shed_tool_conf_dict[ 'config_filename' ]
            for elem in shed_tool_conf_dict[ 'config_elems' ]:
                if elem.tag == 'tool':
                    for sub_elem in elem.findall( 'id' ):
                        tool_id = sub_elem.text.strip()
                        if tool_id in tool_ids:
                            self.shed_config_filename = name
                            return shed_tool_conf_dict
                elif elem.tag == "section":
                    for tool_elem in elem.findall( 'tool' ):
                        for sub_elem in tool_elem.findall( 'id' ):
                            tool_id = sub_elem.text.strip()
                            if tool_id in tool_ids:
                                self.shed_config_filename = name
                                return shed_tool_conf_dict
        if self.includes_datatypes:
            # We need to search by file paths here, which is less desirable.
            tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed )
            for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
                tool_path = shed_tool_conf_dict[ 'tool_path' ]
                relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
                if os.path.exists( relative_path ):
                    self.shed_config_filename = shed_tool_conf_dict[ 'config_filename' ]
                    return shed_tool_conf_dict
        return default
@property
def has_readme_files( self ):
if self.metadata:
return 'readme_files' in self.metadata
return False
@property
def has_repository_dependencies( self ):
if self.metadata:
repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
# [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
for rd_tup in repository_dependencies:
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( rd_tup )
if not asbool( only_if_compiling_contained_td ):
return True
return False
@property
def has_repository_dependencies_only_if_compiling_contained_td( self ):
if self.metadata:
repository_dependencies_dict = self.metadata.get( 'repository_dependencies', {} )
repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
# [["http://localhost:9009", "package_libgtextutils_0_6", "test", "e2003cbf18cd", "True", "True"]]
for rd_tup in repository_dependencies:
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( rd_tup )
if not asbool( only_if_compiling_contained_td ):
return False
return True
return False
@property
def in_error_state( self ):
return self.status == self.installation_status.ERROR
@property
def includes_data_managers( self ):
if self.metadata:
return bool( len( self.metadata.get( 'data_manager', {} ).get( 'data_managers', {} ) ) )
return False
@property
def includes_tools( self ):
if self.metadata:
return 'tools' in self.metadata
return False
@property
def includes_tools_for_display_in_tool_panel( self ):
if self.includes_tools:
tool_dicts = self.metadata[ 'tools' ]
for tool_dict in tool_dicts:
if tool_dict.get( 'add_to_tool_panel', True ):
return True
return False
@property
def includes_tool_dependencies( self ):
if self.metadata:
return 'tool_dependencies' in self.metadata
return False
@property
def includes_workflows( self ):
if self.metadata:
return 'workflows' in self.metadata
return False
@property
def installed_repository_dependencies( self ):
"""Return the repository's repository dependencies that are currently installed."""
installed_required_repositories = []
for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.INSTALLED:
installed_required_repositories.append( required_repository )
return installed_required_repositories
@property
def installed_tool_dependencies( self ):
"""Return the repository's tool dependencies that are currently installed, but possibly in an error state."""
installed_dependencies = []
for tool_dependency in self.tool_dependencies:
if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED ]:
installed_dependencies.append( tool_dependency )
return installed_dependencies
@property
def is_deprecated_in_tool_shed( self ):
if self.tool_shed_status:
return asbool( self.tool_shed_status.get( 'repository_deprecated', False ) )
return False
@property
def is_deactivated_or_installed( self ):
return self.status in [ self.installation_status.DEACTIVATED,
self.installation_status.INSTALLED ]
@property
def is_installed( self ):
return self.status == self.installation_status.INSTALLED
@property
def is_latest_installable_revision( self ):
if self.tool_shed_status:
return asbool( self.tool_shed_status.get( 'latest_installable_revision', False ) )
return False
@property
def is_new( self ):
return self.status == self.installation_status.NEW
@property
def missing_repository_dependencies( self ):
"""Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
missing_required_repositories = []
for required_repository in self.repository_dependencies:
if required_repository.status not in [ self.installation_status.INSTALLED ]:
missing_required_repositories.append( required_repository )
return missing_required_repositories
@property
def missing_tool_dependencies( self ):
"""Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
missing_dependencies = []
for tool_dependency in self.tool_dependencies:
if tool_dependency.status not in [ ToolDependency.installation_status.INSTALLED ]:
missing_dependencies.append( tool_dependency )
return missing_dependencies
def repo_files_directory( self, app ):
repo_path = self.repo_path( app )
if repo_path:
return os.path.join( repo_path, self.name )
return None
def repo_path( self, app ):
tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( self.tool_shed )
for shed_tool_conf_dict in app.toolbox.dynamic_confs( include_migrated_tool_conf=True ):
tool_path = shed_tool_conf_dict[ 'tool_path' ]
relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
if os.path.exists( relative_path ):
return relative_path
return None
@property
def repository_dependencies( self ):
"""
Return all of this repository's repository dependencies, ignoring their attributes like prior_installation_required and
only_if_compiling_contained_td.
"""
required_repositories = []
for rrda in self.required_repositories:
repository_dependency = rrda.repository_dependency
required_repository = repository_dependency.repository
if required_repository:
required_repositories.append( required_repository )
return required_repositories
@property
def repository_dependencies_being_installed( self ):
"""Return the repository's repository dependencies that are currently being installed."""
required_repositories_being_installed = []
for required_repository in self.repository_dependencies:
if required_repository.status in [ self.installation_status.CLONING,
self.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
self.installation_status.INSTALLING_TOOL_DEPENDENCIES,
self.installation_status.LOADING_PROPRIETARY_DATATYPES,
self.installation_status.SETTING_TOOL_VERSIONS ]:
required_repositories_being_installed.append( required_repository )
return required_repositories_being_installed
@property
def repository_dependencies_missing_or_being_installed( self ):
"""Return the repository's repository dependencies that are either missing or currently being installed."""
required_repositories_missing_or_being_installed = []
for required_repository in self.repository_dependencies:
if required_repository.status in [ self.installation_status.ERROR,
self.installation_status.INSTALLING,
self.installation_status.NEVER_INSTALLED,
self.installation_status.UNINSTALLED ]:
required_repositories_missing_or_being_installed.append( required_repository )
return required_repositories_missing_or_being_installed
@property
def repository_dependencies_with_installation_errors( self ):
"""Return the repository's repository dependencies that have installation errors."""
required_repositories_with_installation_errors = []
for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.ERROR:
required_repositories_with_installation_errors.append( required_repository )
return required_repositories_with_installation_errors
    @property
    def requires_prior_installation_of( self ):
        """
        Return a list of repository dependency tuples like (tool_shed, name, owner, changeset_revision, prior_installation_required) for this
        repository's repository dependencies where prior_installation_required is True. By definition, repository dependencies are required to
        be installed in order for this repository to function correctly. However, those repository dependencies that are defined for this
        repository with prior_installation_required set to True place them in a special category in that the required repositories must be
        installed before this repository is installed. Among other things, this enables these "special" repository dependencies to include
        information that enables the successful installation of this repository. This method is not used during the initial installation of
        this repository, but only after it has been installed (metadata must be set for this repository in order for this method to be useful).
        """
        required_rd_tups_that_must_be_installed = []
        if self.has_repository_dependencies:
            rd_tups = self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
            for rd_tup in rd_tups:
                if len( rd_tup ) == 5:
                    # Legacy 5-element tuple: it carries no only_if_compiling_contained_td flag.
                    tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
                        common_util.parse_repository_dependency_tuple( rd_tup, contains_error=False )
                    if asbool( prior_installation_required ):
                        required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
                elif len( rd_tup ) == 6:
                    tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
                        common_util.parse_repository_dependency_tuple( rd_tup, contains_error=False )
                    # The repository dependency will only be required to be previously installed if it does not fall into the category of
                    # a repository that must be installed only so that its contained tool dependency can be used for compiling the tool
                    # dependency of the dependent repository.
                    if not asbool( only_if_compiling_contained_td ):
                        if asbool( prior_installation_required ):
                            required_rd_tups_that_must_be_installed.append( ( tool_shed, name, owner, changeset_revision, 'True', 'False' ) )
        return required_rd_tups_that_must_be_installed
@property
def revision_update_available( self ):
# This method should be named update_available, but since it is no longer possible to drop a table column using migration scripts
# with the sqlite database (see ~/galaxy/model/migrate/versions/0016_drop_update_available_col_add_tool_shed_status_col.py), we
# have to name it in such a way that it will not conflict with the eliminated tool_shed_repository.update_available column (which
# cannot be eliminated if using the sqlite database).
if self.tool_shed_status:
return asbool( self.tool_shed_status.get( 'revision_update', False ) )
return False
    def set_shed_config_filename( self, value ):
        # Setter half of the shed_config_filename property: records the value
        # in this repository's metadata dict.
        self.metadata[ 'shed_config_filename' ] = value
    # Bind the getter (get_shed_config_filename, defined earlier in this class)
    # and the setter above into a read/write property.
    shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )
def to_dict( self, view='collection', value_mapper=None ):
if value_mapper is None:
value_mapper = {}
rval = {}
try:
visible_keys = self.__getattribute__( 'dict_' + view + '_visible_keys' )
except AttributeError:
raise Exception( 'Unknown API view: %s' % view )
for key in visible_keys:
try:
rval[ key ] = self.__getattribute__( key )
if key in value_mapper:
rval[ key ] = value_mapper.get( key, rval[ key ] )
except AttributeError:
rval[ key ] = None
return rval
@property
def tool_dependencies_being_installed( self ):
dependencies_being_installed = []
for tool_dependency in self.tool_dependencies:
if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
dependencies_being_installed.append( tool_dependency )
return dependencies_being_installed
@property
def tool_dependencies_installed_or_in_error( self ):
"""Return the repository's tool dependencies that are currently installed, but possibly in an error state."""
installed_dependencies = []
for tool_dependency in self.tool_dependencies:
if tool_dependency.status in [ ToolDependency.installation_status.INSTALLED,
ToolDependency.installation_status.ERROR ]:
installed_dependencies.append( tool_dependency )
return installed_dependencies
@property
def tool_dependencies_missing_or_being_installed( self ):
dependencies_missing_or_being_installed = []
for tool_dependency in self.tool_dependencies:
if tool_dependency.status in [ ToolDependency.installation_status.ERROR,
ToolDependency.installation_status.INSTALLING,
ToolDependency.installation_status.NEVER_INSTALLED,
ToolDependency.installation_status.UNINSTALLED ]:
dependencies_missing_or_being_installed.append( tool_dependency )
return dependencies_missing_or_being_installed
@property
def tool_dependencies_with_installation_errors( self ):
dependencies_with_installation_errors = []
for tool_dependency in self.tool_dependencies:
if tool_dependency.status == ToolDependency.installation_status.ERROR:
dependencies_with_installation_errors.append( tool_dependency )
return dependencies_with_installation_errors
@property
def tool_shed_path_name( self ):
tool_shed_url = self.tool_shed
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
return tool_shed_url.rstrip( '/' )
@property
def tuples_of_repository_dependencies_needed_for_compiling_td( self ):
"""
Return tuples defining this repository's repository dependencies that are necessary only for compiling this repository's tool
dependencies.
"""
rd_tups_of_repositories_needed_for_compiling_td = []
if self.metadata:
repository_dependencies = self.metadata.get( 'repository_dependencies', None )
rd_tups = repository_dependencies[ 'repository_dependencies' ]
for rd_tup in rd_tups:
if len( rd_tup ) == 6:
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
if asbool( only_if_compiling_contained_td ):
rd_tups_of_repositories_needed_for_compiling_td.append( ( tool_shed, name, owner, changeset_revision, 'False', 'True' ) )
return rd_tups_of_repositories_needed_for_compiling_td
@property
def uninstalled_repository_dependencies( self ):
"""Return the repository's repository dependencies that have been uninstalled."""
uninstalled_required_repositories = []
for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.UNINSTALLED:
uninstalled_required_repositories.append( required_repository )
return uninstalled_required_repositories
@property
def uninstalled_tool_dependencies( self ):
"""Return the repository's tool dependencies that have been uninstalled."""
uninstalled_tool_dependencies = []
for tool_dependency in self.tool_dependencies:
if tool_dependency.status == ToolDependency.installation_status.UNINSTALLED:
uninstalled_tool_dependencies.append( tool_dependency )
return uninstalled_tool_dependencies
@property
def upgrade_available( self ):
if self.tool_shed_status:
if self.is_deprecated_in_tool_shed:
# Only allow revision upgrades if the repository is not deprecated in the tool shed.
return False
return asbool( self.tool_shed_status.get( 'revision_upgrade', False ) )
return False
class RepositoryRepositoryDependencyAssociation(object):
    """Associates an installed tool shed repository with one of its repository
    dependency records by their ids."""
    def __init__(self, tool_shed_repository_id=None, repository_dependency_id=None):
        self.repository_dependency_id = repository_dependency_id
        self.tool_shed_repository_id = tool_shed_repository_id
class RepositoryDependency(object):
    """Simple record holding the id of the tool shed repository that satisfies
    a repository dependency."""
    def __init__(self, tool_shed_repository_id=None):
        self.tool_shed_repository_id = tool_shed_repository_id
class ToolDependency(object):
    """An installed repository's tool dependency (a 'package' or a
    'set_environment' entry) together with its installation state."""
    installation_status = Bunch( NEVER_INSTALLED='Never installed',
                                 INSTALLING='Installing',
                                 INSTALLED='Installed',
                                 ERROR='Error',
                                 UNINSTALLED='Uninstalled' )
    states = Bunch( INSTALLING='running',
                    OK='ok',
                    WARNING='queued',
                    ERROR='error',
                    UNINSTALLED='deleted_new' )
    def __init__(self, tool_shed_repository_id=None, name=None, version=None, type=None, status=None, error_message=None):
        self.error_message = error_message
        self.status = status
        self.type = type
        self.version = version
        self.name = name
        self.tool_shed_repository_id = tool_shed_repository_id
    @property
    def can_install(self):
        """Installation may start only from a never-installed or uninstalled state."""
        startable = (self.installation_status.NEVER_INSTALLED,
                     self.installation_status.UNINSTALLED)
        return self.status in startable
    @property
    def can_uninstall(self):
        """Uninstallation is allowed from the error or installed states."""
        removable = (self.installation_status.ERROR,
                     self.installation_status.INSTALLED)
        return self.status in removable
    @property
    def can_update(self):
        """Updating is allowed from every state except INSTALLING."""
        updatable = (self.installation_status.NEVER_INSTALLED,
                     self.installation_status.INSTALLED,
                     self.installation_status.ERROR,
                     self.installation_status.UNINSTALLED)
        return self.status in updatable
    def get_env_shell_file_path(self, app):
        """Return the path of this dependency's env.sh file, or None when the
        file does not exist."""
        candidate = os.path.join(self.installation_directory(app), 'env.sh')
        if os.path.exists(candidate):
            return candidate
        return None
    @property
    def in_error_state(self):
        """True when installation of this dependency ended in the ERROR state."""
        return self.installation_status.ERROR == self.status
    def installation_directory(self, app):
        """Return this dependency's directory under app.config.tool_dependency_dir.

        Returns None (implicitly) for unknown dependency types.
        """
        repository = self.tool_shed_repository
        if self.type == 'package':
            return os.path.join(app.config.tool_dependency_dir,
                                self.name,
                                self.version,
                                repository.owner,
                                repository.name,
                                repository.installed_changeset_revision)
        if self.type == 'set_environment':
            return os.path.join(app.config.tool_dependency_dir,
                                'environment_settings',
                                self.name,
                                repository.owner,
                                repository.name,
                                repository.installed_changeset_revision)
    @property
    def is_installed(self):
        """True when installation completed successfully."""
        return self.installation_status.INSTALLED == self.status
class ToolVersion( object, Dictifiable ):
    """A version of an installed tool, chained to its predecessor and successor
    versions through ToolVersionAssociation rows (tool_id -> parent_id)."""
    # Keys exposed by the 'element' view in to_dict() below.
    dict_element_visible_keys = ( 'id', 'tool_shed_repository' )
    def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
        self.id = id
        self.create_time = create_time
        self.tool_id = tool_id
        self.tool_shed_repository = tool_shed_repository
    def get_previous_version( self, app ):
        """Return the ToolVersion that is this version's parent, or None."""
        context = app.install_model.context
        tva = context.query( app.install_model.ToolVersionAssociation ) \
                     .filter( app.install_model.ToolVersionAssociation.table.c.tool_id == self.id ) \
                     .first()
        if tva:
            return context.query( app.install_model.ToolVersion ) \
                          .filter( app.install_model.ToolVersion.table.c.id == tva.parent_id ) \
                          .first()
        return None
    def get_next_version( self, app ):
        """Return the ToolVersion whose parent is this version, or None."""
        context = app.install_model.context
        tva = context.query( app.install_model.ToolVersionAssociation ) \
                     .filter( app.install_model.ToolVersionAssociation.table.c.parent_id == self.id ) \
                     .first()
        if tva:
            return context.query( app.install_model.ToolVersion ) \
                          .filter( app.install_model.ToolVersion.table.c.id == tva.tool_id ) \
                          .first()
        return None
    def get_versions( self, app ):
        """Return the full chain of versions containing this one, ordered from
        oldest ancestor to newest descendant."""
        tool_versions = []
        # Prepend ancestors.
        def __ancestors( app, tool_version ):
            # Should we handle multiple parents at each level?
            previous_version = tool_version.get_previous_version( app )
            if previous_version:
                if previous_version not in tool_versions:
                    tool_versions.insert( 0, previous_version )
                    __ancestors( app, previous_version )
        # Append descendants.
        def __descendants( app, tool_version ):
            # Should we handle multiple child siblings at each level?
            next_version = tool_version.get_next_version( app )
            if next_version:
                if next_version not in tool_versions:
                    tool_versions.append( next_version )
                    __descendants( app, next_version )
        __ancestors( app, self )
        if self not in tool_versions:
            tool_versions.append( self )
        __descendants( app, self )
        return tool_versions
    def get_version_ids( self, app, reverse=False ):
        """Return the tool ids of the whole version chain; newest first when
        reverse is True."""
        version_ids = [ tool_version.tool_id for tool_version in self.get_versions( app ) ]
        if reverse:
            version_ids.reverse()
        return version_ids
    def to_dict( self, view='element' ):
        """Dictify this version, adding tool_name plus parent/child tool ids."""
        rval = super( ToolVersion, self ).to_dict( view=view )
        rval[ 'tool_name' ] = self.tool_id
        for a in self.parent_tool_association:
            rval[ 'parent_tool_id' ] = a.parent_id
        for a in self.child_tool_association:
            rval[ 'child_tool_id' ] = a.tool_id
        return rval
class ToolVersionAssociation(object):
    """Record linking a tool version (tool_id) to its parent version (parent_id)."""
    def __init__(self, id=None, tool_id=None, parent_id=None):
        self.parent_id = parent_id
        self.tool_id = tool_id
        self.id = id
class MigrateTools(object):
    """Record describing a tool-migration step: repository id, its path and
    the migration version."""
    def __init__(self, repository_id=None, repository_path=None, version=None):
        self.version = version
        self.repository_path = repository_path
        self.repository_id = repository_id
|
[
"tool_shed.util.common_util.remove_protocol_and_port_from_tool_shed_url",
"tool_shed.util.common_util.parse_repository_dependency_tuple",
"galaxy.util.bunch.Bunch",
"os.path.exists",
"galaxy.util.asbool",
"urlparse.urljoin",
"tool_shed.util.common_util.get_tool_shed_url_from_tool_shed_registry",
"os.path.join",
"logging.getLogger"
] |
[((215, 242), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (232, 242), False, 'import logging\n'), ((877, 1273), 'galaxy.util.bunch.Bunch', 'Bunch', ([], {'NEW': '"""New"""', 'CLONING': '"""Cloning"""', 'SETTING_TOOL_VERSIONS': '"""Setting tool versions"""', 'INSTALLING_REPOSITORY_DEPENDENCIES': '"""Installing repository dependencies"""', 'INSTALLING_TOOL_DEPENDENCIES': '"""Installing tool dependencies"""', 'LOADING_PROPRIETARY_DATATYPES': '"""Loading proprietary datatypes"""', 'INSTALLED': '"""Installed"""', 'DEACTIVATED': '"""Deactivated"""', 'ERROR': '"""Error"""', 'UNINSTALLED': '"""Uninstalled"""'}), "(NEW='New', CLONING='Cloning', SETTING_TOOL_VERSIONS=\n 'Setting tool versions', INSTALLING_REPOSITORY_DEPENDENCIES=\n 'Installing repository dependencies', INSTALLING_TOOL_DEPENDENCIES=\n 'Installing tool dependencies', LOADING_PROPRIETARY_DATATYPES=\n 'Loading proprietary datatypes', INSTALLED='Installed', DEACTIVATED=\n 'Deactivated', ERROR='Error', UNINSTALLED='Uninstalled')\n", (882, 1273), False, 'from galaxy.util.bunch import Bunch\n'), ((1561, 1661), 'galaxy.util.bunch.Bunch', 'Bunch', ([], {'INSTALLING': '"""running"""', 'OK': '"""ok"""', 'WARNING': '"""queued"""', 'ERROR': '"""error"""', 'UNINSTALLED': '"""deleted_new"""'}), "(INSTALLING='running', OK='ok', WARNING='queued', ERROR='error',\n UNINSTALLED='deleted_new')\n", (1566, 1661), False, 'from galaxy.util.bunch import Bunch\n'), ((26074, 26209), 'galaxy.util.bunch.Bunch', 'Bunch', ([], {'NEVER_INSTALLED': '"""Never installed"""', 'INSTALLING': '"""Installing"""', 'INSTALLED': '"""Installed"""', 'ERROR': '"""Error"""', 'UNINSTALLED': '"""Uninstalled"""'}), "(NEVER_INSTALLED='Never installed', INSTALLING='Installing', INSTALLED\n ='Installed', ERROR='Error', UNINSTALLED='Uninstalled')\n", (26079, 26209), False, 'from galaxy.util.bunch import Bunch\n'), ((26353, 26453), 'galaxy.util.bunch.Bunch', 'Bunch', ([], {'INSTALLING': '"""running"""', 'OK': '"""ok"""', 
'WARNING': '"""queued"""', 'ERROR': '"""error"""', 'UNINSTALLED': '"""deleted_new"""'}), "(INSTALLING='running', OK='ok', WARNING='queued', ERROR='error',\n UNINSTALLED='deleted_new')\n", (26358, 26453), False, 'from galaxy.util.bunch import Bunch\n'), ((3637, 3711), 'tool_shed.util.common_util.get_tool_shed_url_from_tool_shed_registry', 'common_util.get_tool_shed_url_from_tool_shed_registry', (['app', 'self.tool_shed'], {}), '(app, self.tool_shed)\n', (3690, 3711), False, 'from tool_shed.util import common_util\n'), ((13376, 13447), 'tool_shed.util.common_util.remove_protocol_and_port_from_tool_shed_url', 'common_util.remove_protocol_and_port_from_tool_shed_url', (['self.tool_shed'], {}), '(self.tool_shed)\n', (13431, 13447), False, 'from tool_shed.util import common_util\n'), ((27633, 27679), 'os.path.join', 'os.path.join', (['installation_directory', '"""env.sh"""'], {}), "(installation_directory, 'env.sh')\n", (27645, 27679), False, 'import os\n'), ((27693, 27718), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (27707, 27718), False, 'import os\n'), ((3978, 4040), 'urlparse.urljoin', 'urljoin', (['tool_shed_url', "('view/%s/%s' % (self.owner, self.name))"], {}), "(tool_shed_url, 'view/%s/%s' % (self.owner, self.name))\n", (3985, 4040), False, 'from urlparse import urljoin\n'), ((5564, 5674), 'os.path.join', 'os.path.join', (['self.tool_shed_path_name', '"""repos"""', 'self.owner', 'self.name', 'self.installed_changeset_revision'], {}), "(self.tool_shed_path_name, 'repos', self.owner, self.name, self\n .installed_changeset_revision)\n", (5576, 5674), False, 'import os\n'), ((7009, 7080), 'tool_shed.util.common_util.remove_protocol_and_port_from_tool_shed_url', 'common_util.remove_protocol_and_port_from_tool_shed_url', (['self.tool_shed'], {}), '(self.tool_shed)\n', (7064, 7080), False, 'from tool_shed.util import common_util\n'), ((13266, 13300), 'os.path.join', 'os.path.join', (['repo_path', 'self.name'], {}), '(repo_path, 
self.name)\n', (13278, 13300), False, 'import os\n'), ((13634, 13740), 'os.path.join', 'os.path.join', (['tool_path', 'tool_shed', '"""repos"""', 'self.owner', 'self.name', 'self.installed_changeset_revision'], {}), "(tool_path, tool_shed, 'repos', self.owner, self.name, self.\n installed_changeset_revision)\n", (13646, 13740), False, 'import os\n'), ((13753, 13782), 'os.path.exists', 'os.path.exists', (['relative_path'], {}), '(relative_path)\n', (13767, 13782), False, 'import os\n'), ((27979, 28179), 'os.path.join', 'os.path.join', (['app.config.tool_dependency_dir', 'self.name', 'self.version', 'self.tool_shed_repository.owner', 'self.tool_shed_repository.name', 'self.tool_shed_repository.installed_changeset_revision'], {}), '(app.config.tool_dependency_dir, self.name, self.version, self.\n tool_shed_repository.owner, self.tool_shed_repository.name, self.\n tool_shed_repository.installed_changeset_revision)\n', (27991, 28179), False, 'import os\n'), ((28399, 28608), 'os.path.join', 'os.path.join', (['app.config.tool_dependency_dir', '"""environment_settings"""', 'self.name', 'self.tool_shed_repository.owner', 'self.tool_shed_repository.name', 'self.tool_shed_repository.installed_changeset_revision'], {}), "(app.config.tool_dependency_dir, 'environment_settings', self.\n name, self.tool_shed_repository.owner, self.tool_shed_repository.name,\n self.tool_shed_repository.installed_changeset_revision)\n", (28411, 28608), False, 'import os\n'), ((7279, 7385), 'os.path.join', 'os.path.join', (['tool_path', 'tool_shed', '"""repos"""', 'self.owner', 'self.name', 'self.installed_changeset_revision'], {}), "(tool_path, tool_shed, 'repos', self.owner, self.name, self.\n installed_changeset_revision)\n", (7291, 7385), False, 'import os\n'), ((7402, 7431), 'os.path.exists', 'os.path.exists', (['relative_path'], {}), '(relative_path)\n', (7416, 7431), False, 'import os\n'), ((8331, 8384), 'tool_shed.util.common_util.parse_repository_dependency_tuple', 
'common_util.parse_repository_dependency_tuple', (['rd_tup'], {}), '(rd_tup)\n', (8376, 8384), False, 'from tool_shed.util import common_util\n'), ((9126, 9179), 'tool_shed.util.common_util.parse_repository_dependency_tuple', 'common_util.parse_repository_dependency_tuple', (['rd_tup'], {}), '(rd_tup)\n', (9171, 9179), False, 'from tool_shed.util import common_util\n'), ((8410, 8448), 'galaxy.util.asbool', 'asbool', (['only_if_compiling_contained_td'], {}), '(only_if_compiling_contained_td)\n', (8416, 8448), False, 'from galaxy.util import asbool\n'), ((9205, 9243), 'galaxy.util.asbool', 'asbool', (['only_if_compiling_contained_td'], {}), '(only_if_compiling_contained_td)\n', (9211, 9243), False, 'from galaxy.util import asbool\n'), ((18208, 18283), 'tool_shed.util.common_util.parse_repository_dependency_tuple', 'common_util.parse_repository_dependency_tuple', (['rd_tup'], {'contains_error': '(False)'}), '(rd_tup, contains_error=False)\n', (18253, 18283), False, 'from tool_shed.util import common_util\n'), ((18309, 18344), 'galaxy.util.asbool', 'asbool', (['prior_installation_required'], {}), '(prior_installation_required)\n', (18315, 18344), False, 'from galaxy.util import asbool\n'), ((24027, 24065), 'galaxy.util.asbool', 'asbool', (['only_if_compiling_contained_td'], {}), '(only_if_compiling_contained_td)\n', (24033, 24065), False, 'from galaxy.util import asbool\n'), ((18679, 18754), 'tool_shed.util.common_util.parse_repository_dependency_tuple', 'common_util.parse_repository_dependency_tuple', (['rd_tup'], {'contains_error': '(False)'}), '(rd_tup, contains_error=False)\n', (18724, 18754), False, 'from tool_shed.util import common_util\n'), ((19120, 19158), 'galaxy.util.asbool', 'asbool', (['only_if_compiling_contained_td'], {}), '(only_if_compiling_contained_td)\n', (19126, 19158), False, 'from galaxy.util import asbool\n'), ((19189, 19224), 'galaxy.util.asbool', 'asbool', (['prior_installation_required'], {}), '(prior_installation_required)\n', (19195, 
19224), False, 'from galaxy.util import asbool\n')]
|
import unittest
from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError
class TestDoublyLinkedList(unittest.TestCase):
def test_create_new_linked_list(self):
dl_list = DoublyLinkedList()
self.assertIsInstance(dl_list, DoublyLinkedList)
def test_create_new_linked_list_from_list(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertIsInstance(dl_list, DoublyLinkedList)
self.assertEqual(dl_list.firstnode().value, 1)
self.assertEqual(dl_list.lastnode().value, 3)
def test_create_new_linked_list_one_elem(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(dl_list.firstnode().value, 1)
self.assertEqual(dl_list.lastnode().value, 1)
def test_sentinel_nodes_should_not_be_listed(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(dl_list.firstnode().next(), None)
self.assertEqual(dl_list.firstnode().prev(), None)
def test_firstnode_on_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(dl_list.firstnode(), None)
def test_lastnode_on_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(dl_list.lastnode(), None)
def test_items_iterator(self):
dl_list = DoublyLinkedList([1, 2, 3])
count = 0
for i in dl_list.items():
self.assertIsInstance(i, DoublyLinkedListNode)
self.assertGreater(i.value, 0)
count += 1
self.assertEqual(count, 3)
def test_insert_before(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertBefore(4, dl_list.lastnode().prev())
self.assertEqual(dl_list.lastnode().prev().prev().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_before_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertBefore(4, dl_list.firstnode())
self.assertEqual(dl_list.firstnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_after(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertAfter(4, dl_list.firstnode())
self.assertEqual(dl_list.firstnode().next().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_after_last_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertAfter(4, dl_list.lastnode())
self.assertEqual(dl_list.lastnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_remove_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeNode(dl_list.lastnode().prev())
self.assertEqual(dl_list.lastnode().prev().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_node_unique_item(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(len([i for i in dl_list.items()]), 1)
dl_list.removeNode(dl_list.firstnode())
self.assertEqual(dl_list.firstnode(), None)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_node_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeNode(dl_list.firstnode())
self.assertEqual(dl_list.firstnode().value, 2)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_node_last_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeNode(dl_list.lastnode())
self.assertEqual(dl_list.lastnode().value, 2)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_before(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeBeforeNode(dl_list.lastnode())
self.assertEqual(dl_list.lastnode().prev().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_before_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
with self.assertRaises(DoublyLinkedListError):
dl_list.removeBeforeNode(dl_list.firstnode())
def test_remove_after(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.removeAfterNode(dl_list.firstnode())
self.assertEqual(dl_list.firstnode().next().value, 3)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_after_first_item(self):
dl_list = DoublyLinkedList([1, 2, 3])
with self.assertRaises(DoublyLinkedListError):
dl_list.removeAfterNode(dl_list.lastnode())
def test_remove_all_in_between(self):
dl_list = DoublyLinkedList([1, 2, 3, 4, 5, 6])
self.assertEqual(len([i for i in dl_list.items()]), 6)
new_list = dl_list.removeAllInBetween(\
dl_list.firstnode().next(), dl_list.lastnode().prev())
self.assertEqual(len([i for i in dl_list.items()]), 4)
self.assertEqual(len([i for i in new_list.items()]), 2)
self.assertEqual(new_list.firstnode().value, 3)
def test_remove_all_in_between_single_item(self):
dl_list = DoublyLinkedList([1, 2, 3, 4, 5, 6])
self.assertEqual(len([i for i in dl_list.items()]), 6)
new_list = dl_list.removeAllInBetween(\
dl_list.firstnode(), dl_list.firstnode().next().next())
self.assertEqual(len([i for i in dl_list.items()]), 5)
self.assertEqual(len([i for i in new_list.items()]), 1)
self.assertEqual(new_list.firstnode().value, 2)
def test_remove_all_in_between_empty(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
new_list = dl_list.removeAllInBetween(\
dl_list.firstnode(), dl_list.firstnode().next())
self.assertEqual(len([i for i in dl_list.items()]), 3)
self.assertEqual(len([i for i in new_list.items()]), 0)
self.assertEqual(new_list.firstnode(), None)
def test_insert_first_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertFirstNode(4)
self.assertEqual(dl_list.firstnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_first_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
dl_list.insertFirstNode(1)
self.assertEqual(dl_list.firstnode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 1)
def test_insert_last_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
dl_list.insertLastNode(4)
self.assertEqual(dl_list.lastnode().value, 4)
self.assertEqual(len([i for i in dl_list.items()]), 4)
def test_insert_last_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
dl_list.insertLastNode(1)
self.assertEqual(dl_list.lastnode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 1)
def test_remove_first_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
self.assertEqual(dl_list.removeFirstNode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_first_node_single_item(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(len([i for i in dl_list.items()]), 1)
self.assertEqual(dl_list.removeFirstNode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_first_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
self.assertEqual(dl_list.removeFirstNode(), None)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_last_node(self):
dl_list = DoublyLinkedList([1, 2, 3])
self.assertEqual(len([i for i in dl_list.items()]), 3)
self.assertEqual(dl_list.removeLastNode().value, 3)
self.assertEqual(len([i for i in dl_list.items()]), 2)
def test_remove_last_node_single_item(self):
dl_list = DoublyLinkedList([1])
self.assertEqual(len([i for i in dl_list.items()]), 1)
self.assertEqual(dl_list.removeLastNode().value, 1)
self.assertEqual(len([i for i in dl_list.items()]), 0)
def test_remove_last_node_empty_list(self):
dl_list = DoublyLinkedList()
self.assertEqual(len([i for i in dl_list.items()]), 0)
self.assertEqual(dl_list.removeLastNode(), None)
self.assertEqual(len([i for i in dl_list.items()]), 0)
|
[
"doublylinkedlist.DoublyLinkedList"
] |
[((217, 235), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (233, 235), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((365, 392), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (381, 392), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((630, 651), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1]'], {}), '([1])\n', (646, 651), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((836, 857), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1]'], {}), '([1])\n', (852, 857), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((1039, 1057), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (1055, 1057), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((1172, 1190), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (1188, 1190), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((1296, 1323), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1312, 1323), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((1589, 1616), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1605, 1616), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((1934, 1961), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1950, 1961), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((2248, 2275), 
'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (2264, 2275), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((2578, 2605), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (2594, 2605), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((2888, 2915), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (2904, 2915), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((3220, 3241), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1]'], {}), '([1])\n', (3236, 3241), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((3530, 3557), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (3546, 3557), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((3848, 3875), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (3864, 3875), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((4156, 4183), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (4172, 4183), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((4488, 4515), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (4504, 4515), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((4681, 4708), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (4697, 4708), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, 
DoublyLinkedListError\n'), ((5013, 5040), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (5029, 5040), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((5213, 5249), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3, 4, 5, 6]'], {}), '([1, 2, 3, 4, 5, 6])\n', (5229, 5249), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((5684, 5720), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3, 4, 5, 6]'], {}), '([1, 2, 3, 4, 5, 6])\n', (5700, 5720), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((6150, 6177), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (6166, 6177), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((6587, 6614), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (6603, 6614), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((6899, 6917), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (6915, 6917), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((7190, 7217), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (7206, 7217), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((7499, 7517), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (7515, 7517), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((7789, 7816), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (7805, 7816), False, 'from 
doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((8073, 8094), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1]'], {}), '([1])\n', (8089, 8094), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((8350, 8368), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (8366, 8368), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((8609, 8636), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (8625, 8636), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((8891, 8912), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', (['[1]'], {}), '([1])\n', (8907, 8912), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n'), ((9166, 9184), 'doublylinkedlist.DoublyLinkedList', 'DoublyLinkedList', ([], {}), '()\n', (9182, 9184), False, 'from doublylinkedlist import DoublyLinkedList, DoublyLinkedListNode, DoublyLinkedListError\n')]
|
#! /usr/bin/env python3
"""Tokenize a program"""
import re
from tokenize import TokenInfo
from tokenize import ISTERMINAL as is_terminal
from tokenize import ISNONTERMINAL as is_nonterminal
from simplecompiler.compiler.Symbol import *
__all__ = [ "is_terminal", "is_nonterminal", "tokenize", "print_tokens" ]
def group(*args): return '({})'.format('|'.join(args))
def make_string():
return '[{}-{}]'.format(bytes([32, 33, 35]).decode(), bytes([126]).decode())
String = r'"{}*"'.format(make_string())
Char = r"'[+\-*/_a-zA-Z0-9]'"
Name = r'[_a-zA-Z][_a-zA-Z0-9]*'
Number = r'\d+'
Operators = group(r'[+\-*/]', r'[=!]?=', r'[<>]=?')
Bracket = '[][(){}]'
Special = group(r'[;:,]', r'\r?\n')
Whitespace = r'[ \f\t]*'
PseudoToken = re.compile(
Whitespace + group(String, Number, Char, Name, Operators, Bracket, Special))
Blank = re.compile(r'[ \t\f]*(?:[\r\n]|$)')
def _tokenize(readline):
"""Tokenize lines from ``readline()``, which should be the ``__next__``
attribute of an opened file in text mode.
>>> with open(file) as f:
... return list(tokenize(f.__next__))
"""
numchars = '0123456789'
lnum = 0
readline = iter(readline, '')
while True:
try:
line = next(readline)
lnum += 1
except StopIteration:
break
pos, max = 0, len(line)
if Blank.match(line):
continue
while pos < max:
psmat = PseudoToken.match(line, pos)
if psmat:
start, end = psmat.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
if start == end:
continue
token, initial = line[start: end], line[start]
if initial in numchars:
yield TokenInfo(NUMBER, token, spos, epos, line)
elif initial in '\r\n':
continue
elif initial in ("\"", "'"):
yield TokenInfo(CHAR if initial == "'" else STRING,
token, spos, epos, line)
elif initial.isidentifier():
yield TokenInfo(NAME, token.lower(), spos, epos, line)
else:
yield TokenInfo(OP, token, spos, epos, line)
else:
yield TokenInfo(ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos += 1
yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
def tokenize(input):
return list(_tokenize(input.__next__))
def print_tokens(tokens, output):
for token in tokens:
token_range = "%d,%d-%d,%d:" % (token.start + token.end)
print("%-20s%-15s%-15r" %
(token_range, tok_name[token.type], token.string), file=output)
|
[
"tokenize.TokenInfo",
"re.compile"
] |
[((840, 878), 're.compile', 're.compile', (['"""[ \\\\t\\\\f]*(?:[\\\\r\\\\n]|$)"""'], {}), "('[ \\\\t\\\\f]*(?:[\\\\r\\\\n]|$)')\n", (850, 878), False, 'import re\n'), ((2470, 2520), 'tokenize.TokenInfo', 'TokenInfo', (['ENDMARKER', '""""""', '(lnum, 0)', '(lnum, 0)', '""""""'], {}), "(ENDMARKER, '', (lnum, 0), (lnum, 0), '')\n", (2479, 2520), False, 'from tokenize import TokenInfo\n'), ((2336, 2404), 'tokenize.TokenInfo', 'TokenInfo', (['ERRORTOKEN', 'line[pos]', '(lnum, pos)', '(lnum, pos + 1)', 'line'], {}), '(ERRORTOKEN, line[pos], (lnum, pos), (lnum, pos + 1), line)\n', (2345, 2404), False, 'from tokenize import TokenInfo\n'), ((1799, 1841), 'tokenize.TokenInfo', 'TokenInfo', (['NUMBER', 'token', 'spos', 'epos', 'line'], {}), '(NUMBER, token, spos, epos, line)\n', (1808, 1841), False, 'from tokenize import TokenInfo\n'), ((1982, 2052), 'tokenize.TokenInfo', 'TokenInfo', (['(CHAR if initial == "\'" else STRING)', 'token', 'spos', 'epos', 'line'], {}), '(CHAR if initial == "\'" else STRING, token, spos, epos, line)\n', (1991, 2052), False, 'from tokenize import TokenInfo\n'), ((2257, 2295), 'tokenize.TokenInfo', 'TokenInfo', (['OP', 'token', 'spos', 'epos', 'line'], {}), '(OP, token, spos, epos, line)\n', (2266, 2295), False, 'from tokenize import TokenInfo\n')]
|
from __future__ import division, with_statement, absolute_import
import hashlib
import logging
import sys
from ldclient.version import VERSION
log = logging.getLogger(sys.modules[__name__].__name__)
# noinspection PyBroadException
try:
import queue
except:
# noinspection PyUnresolvedReferences,PyPep8Naming
import Queue as queue
__LONG_SCALE__ = float(0xFFFFFFFFFFFFFFF)
__BUILTINS__ = ["key", "ip", "country", "email", "firstName", "lastName", "avatar", "name", "anonymous"]
try:
# noinspection PyUnresolvedReferences
unicode
except NameError:
__BASE_TYPES__ = (str, float, int, bool)
else:
# noinspection PyUnresolvedReferences
__BASE_TYPES__ = (str, float, int, bool, unicode)
def _headers(api_key):
return {'Authorization': 'api_key ' + api_key, 'User-Agent': 'PythonClient/' + VERSION,
'Content-Type': "application/json"}
def _stream_headers(api_key, client="PythonClient"):
return {'Authorization': 'api_key ' + api_key,
'User-Agent': '{}/{}'.format(client, VERSION),
'Cache-Control': 'no-cache',
'Accept': "text/event-stream"}
def _param_for_user(feature, user):
if 'key' in user and user['key']:
id_hash = user['key']
else:
log.exception('User does not have a valid key set. Returning default value for flag.')
return None
if 'secondary' in user:
id_hash += "." + user['secondary']
hash_key = '%s.%s.%s' % (feature['key'], feature['salt'], id_hash)
hash_val = int(hashlib.sha1(hash_key.encode('utf-8')).hexdigest()[:15], 16)
result = hash_val / __LONG_SCALE__
return result
def _match_target(target, user):
attr = target['attribute']
if attr in __BUILTINS__:
if attr in user:
u_value = user[attr]
return u_value in target['values']
else:
return False
else: # custom attribute
if 'custom' not in user:
return False
if attr not in user['custom']:
return False
u_value = user['custom'][attr]
if isinstance(u_value, __BASE_TYPES__):
return u_value in target['values']
elif isinstance(u_value, (list, tuple)):
return len(set(u_value).intersection(target['values'])) > 0
return False
def _match_user(variation, user):
if 'userTarget' in variation:
return _match_target(variation['userTarget'], user)
return False
def _match_variation(variation, user):
for target in variation['targets']:
if 'userTarget' in variation and target['attribute'] == 'key':
continue
if _match_target(target, user):
return True
return False
def check_uwsgi():
if 'uwsgi' in sys.modules:
# noinspection PyPackageRequirements,PyUnresolvedReferences
import uwsgi
if not uwsgi.opt.get('enable-threads'):
log.warning('The LaunchDarkly client requires the enable-threads option '
'be passed to uWSGI. If enable-threads is not provided, no '
'threads will run and event data will not be sent to LaunchDarkly. '
'To learn more, see '
'http://docs.launchdarkly.com/v1.0/docs/python-sdk-reference#configuring-uwsgi')
def _evaluate(feature, user):
if feature is None:
return None
if not feature['on']:
return None
param = _param_for_user(feature, user)
if param is None:
return None
for variation in feature['variations']:
if _match_user(variation, user):
return variation['value']
for variation in feature['variations']:
if _match_variation(variation, user):
return variation['value']
total = 0.0
for variation in feature['variations']:
total += float(variation['weight']) / 100.0
if param < total:
return variation['value']
return None
class Event(object):
def __init__(self, data='', event='message', event_id=None, retry=None):
self.data = data
self.event = event
self.id = event_id
self.retry = retry
def __str__(self, *args, **kwargs):
return self.data
|
[
"uwsgi.opt.get",
"logging.getLogger"
] |
[((151, 200), 'logging.getLogger', 'logging.getLogger', (['sys.modules[__name__].__name__'], {}), '(sys.modules[__name__].__name__)\n', (168, 200), False, 'import logging\n'), ((2870, 2901), 'uwsgi.opt.get', 'uwsgi.opt.get', (['"""enable-threads"""'], {}), "('enable-threads')\n", (2883, 2901), False, 'import uwsgi\n')]
|
from sepa.definitions.general import code_or_proprietary, party
from sepa.definitions.mandate import mandate_group_header, original_message, mandate
# PAIN.010.001.05 - Mandate Amendment Request v5
standard = 'pain.010.001.05'
name = 'mandate_amendment_request'
definition = {
'_namespaces': {
None: 'urn:iso:std:iso:20022:tech:xsd:pain.010.001.05',
'xs': 'http://www.w3.org/2001/XMLSchema'
},
'_self': 'MndtAmdmntReq',
'_sorting': ['GrpHdr', 'UndrlygAmdmntDtls', 'SplmtryData'],
'group_header': mandate_group_header('GrpHdr'),
'amendment': [{
'_self': 'UndrlygAmdmntDtls',
'original_message': original_message('OrgnlMsgInf'),
'reason': {
'_self': 'AmdmntRsn',
'originator': party('Orgtr'),
'reason': code_or_proprietary('Rsn'),
'additional_information': ['AddtInf']
},
'mandate': mandate('Mndt'),
'original_mandate': {
'_self': 'OrgnlMsgInf',
'id': 'OrgnlMndtId',
'mandate': mandate(' OrgnMndt')
},
'supplementary_data': ['SplmtryData']
}],
'supplementary_data': ['SplmtryData']
}
|
[
"sepa.definitions.mandate.mandate_group_header",
"sepa.definitions.general.code_or_proprietary",
"sepa.definitions.general.party",
"sepa.definitions.mandate.original_message",
"sepa.definitions.mandate.mandate"
] |
[((533, 563), 'sepa.definitions.mandate.mandate_group_header', 'mandate_group_header', (['"""GrpHdr"""'], {}), "('GrpHdr')\n", (553, 563), False, 'from sepa.definitions.mandate import mandate_group_header, original_message, mandate\n'), ((651, 682), 'sepa.definitions.mandate.original_message', 'original_message', (['"""OrgnlMsgInf"""'], {}), "('OrgnlMsgInf')\n", (667, 682), False, 'from sepa.definitions.mandate import mandate_group_header, original_message, mandate\n'), ((910, 925), 'sepa.definitions.mandate.mandate', 'mandate', (['"""Mndt"""'], {}), "('Mndt')\n", (917, 925), False, 'from sepa.definitions.mandate import mandate_group_header, original_message, mandate\n'), ((764, 778), 'sepa.definitions.general.party', 'party', (['"""Orgtr"""'], {}), "('Orgtr')\n", (769, 778), False, 'from sepa.definitions.general import code_or_proprietary, party\n'), ((802, 828), 'sepa.definitions.general.code_or_proprietary', 'code_or_proprietary', (['"""Rsn"""'], {}), "('Rsn')\n", (821, 828), False, 'from sepa.definitions.general import code_or_proprietary, party\n'), ((1049, 1069), 'sepa.definitions.mandate.mandate', 'mandate', (['""" OrgnMndt"""'], {}), "(' OrgnMndt')\n", (1056, 1069), False, 'from sepa.definitions.mandate import mandate_group_header, original_message, mandate\n')]
|
from distutils.core import setup
setup(name='uwsgitop',
version='0.8',
description='uWSGI top-like interface',
scripts=['uwsgitop'],
install_requires = ['simplejson']
)
|
[
"distutils.core.setup"
] |
[((34, 176), 'distutils.core.setup', 'setup', ([], {'name': '"""uwsgitop"""', 'version': '"""0.8"""', 'description': '"""uWSGI top-like interface"""', 'scripts': "['uwsgitop']", 'install_requires': "['simplejson']"}), "(name='uwsgitop', version='0.8', description=\n 'uWSGI top-like interface', scripts=['uwsgitop'], install_requires=[\n 'simplejson'])\n", (39, 176), False, 'from distutils.core import setup\n')]
|
import numpy as np
import yaml, pickle, os, librosa, argparse
from concurrent.futures import ThreadPoolExecutor as PE
from collections import deque
from threading import Thread
from tqdm import tqdm
from Audio import Audio_Prep, Mel_Generate
from yin import pitch_calc
with open('Hyper_Parameters.yaml') as f:
hp_Dict = yaml.load(f, Loader=yaml.Loader)
using_Extension = [x.upper() for x in ['.wav', '.m4a', '.flac']]
def Pitch_Generate(audio):
pitch = pitch_calc(
sig= audio,
sr= hp_Dict['Sound']['Sample_Rate'],
w_len= hp_Dict['Sound']['Frame_Length'],
w_step= hp_Dict['Sound']['Frame_Shift'],
confidence_threshold= hp_Dict['Sound']['Confidence_Threshold'],
gaussian_smoothing_sigma = hp_Dict['Sound']['Gaussian_Smoothing_Sigma']
)
return (pitch - np.min(pitch)) / (np.max(pitch) - np.min(pitch) + 1e-7)
def Pattern_Generate(audio= None, path= None, keyword_Index_Dict= None, top_db= 60, reverse= False, invert= False):
audio = audio if not audio is None else Audio_Prep(path, hp_Dict['Sound']['Sample_Rate'], top_db)
if reverse:
audio = audio[::-1]
if invert:
audio = -audio
mel = Mel_Generate(
audio= audio,
sample_rate= hp_Dict['Sound']['Sample_Rate'],
num_frequency= hp_Dict['Sound']['Spectrogram_Dim'],
num_mel= hp_Dict['Sound']['Mel_Dim'],
window_length= hp_Dict['Sound']['Frame_Length'],
hop_length= hp_Dict['Sound']['Frame_Shift'],
mel_fmin= hp_Dict['Sound']['Mel_F_Min'],
mel_fmax= hp_Dict['Sound']['Mel_F_Max'],
max_abs_value= hp_Dict['Sound']['Max_Abs_Mel']
)
pitch = Pitch_Generate(audio)
singer_ID = None
if not keyword_Index_Dict is None:
for keyword, index in keyword_Index_Dict.items():
if keyword in path:
singer_ID = index
break
if singer_ID is None:
raise ValueError('No keyword in keyword_Index_Dict.')
return audio, mel, pitch, singer_ID
def Pattern_File_Generate(path, keyword_Index_Dict, dataset, file_Prefix='', top_db= 60):
for reverse in [False, True]:
for invert in [False, True]:
sig, mel, pitch, singer_ID = Pattern_Generate(
path= path,
keyword_Index_Dict= keyword_Index_Dict,
top_db= top_db,
reverse= reverse,
invert= invert
)
new_Pattern_Dict = {
'Signal': sig.astype(np.float32),
'Mel': mel.astype(np.float32),
'Pitch': pitch.astype(np.float32),
'Singer_ID': singer_ID,
'Dataset': dataset,
}
pickle_File_Name = '{}.{}{}{}{}.PICKLE'.format(
dataset,
file_Prefix,
os.path.splitext(os.path.basename(path))[0],
'.REV' if reverse else '',
'.INV' if invert else '',
).upper()
with open(os.path.join(hp_Dict['Train']['Train_Pattern']['Path'], pickle_File_Name).replace("\\", "/"), 'wb') as f:
pickle.dump(new_Pattern_Dict, f, protocol=4)
def NUS48E_Info_Load(nus48e_Path, sex_Type):
wav_Path_List = []
singer_Dict = {}
sex_Dict = {
'ADIZ': 'F',
'JLEE': 'M',
'JTAN': 'M',
'KENN': 'M',
'MCUR': 'F',
'MPOL': 'F',
'MPUR': 'F',
'NJAT': 'F',
'PMAR': 'F',
'SAMF': 'M',
'VKOW': 'M',
'ZHIY': 'M',
}
sex_Type = sex_Type.upper()
for root, _, files in os.walk(nus48e_Path):
root = root.replace('\\', '/')
for file in files:
if root.strip().split('/')[-1].upper() != 'sing'.upper():
continue
elif not os.path.splitext(file)[1].upper() in using_Extension:
continue
path = os.path.join(root, file).replace('\\', '/')
singer = root.strip().split('/')[-2]
if sex_Type != 'B' and sex_Dict[singer] != sex_Type:
continue
wav_Path_List.append(path)
singer_Dict[path] = singer
print('NUS-48E info generated: {}'.format(len(wav_Path_List)))
return wav_Path_List, singer_Dict, list(sorted(list(set(singer_Dict.values()))))
def Metadata_Generate(keyword_Index_Dict):
new_Metadata_Dict = {
'Sample_Rate': hp_Dict['Sound']['Sample_Rate'],
'Confidence_Threshold': hp_Dict['Sound']['Confidence_Threshold'],
'Gaussian_Smoothing_Sigma': hp_Dict['Sound']['Gaussian_Smoothing_Sigma'],
'Keyword_Index_Dict': keyword_Index_Dict,
'File_List': [],
'Sig_Length_Dict': {},
'Pitch_Length_Dict': {},
'Singer_Index_Dict': {},
'Dataset_Dict': {},
}
files_TQDM = tqdm(
total= sum([len(files) for root, _, files in os.walk(hp_Dict['Train']['Train_Pattern']['Path'])]),
desc= 'Metadata'
)
for root, _, files in os.walk(hp_Dict['Train']['Train_Pattern']['Path']):
for file in files:
with open(os.path.join(root, file).replace("\\", "/"), "rb") as f:
pattern_Dict = pickle.load(f)
try:
new_Metadata_Dict['Sig_Length_Dict'][file] = pattern_Dict['Signal'].shape[0]
new_Metadata_Dict['Pitch_Length_Dict'][file] = pattern_Dict['Pitch'].shape[0]
new_Metadata_Dict['Singer_Index_Dict'][file] = pattern_Dict['Singer_ID']
new_Metadata_Dict['Dataset_Dict'][file] = pattern_Dict['Dataset']
new_Metadata_Dict['File_List'].append(file)
except:
print('File \'{}\' is not correct pattern file. This file is ignored.'.format(file))
files_TQDM.update(1)
with open(os.path.join(hp_Dict['Train']['Train_Pattern']['Path'], hp_Dict['Train']['Train_Pattern']['Metadata_File'].upper()).replace("\\", "/"), 'wb') as f:
pickle.dump(new_Metadata_Dict, f, protocol=2)
print('Metadata generate done.')
if __name__ == "__main__":
argParser = argparse.ArgumentParser()
argParser.add_argument('-nus48e', '--nus48e_path', required=False)
argParser.add_argument('-sex', '--sex_type', required= False, default= 'B')
args = argParser.parse_args()
if not args.sex_type in ['M', 'F', 'B']:
raise ValueError('Unsupported sex type. Only M, F, or B is supported')
total_Pattern_Count = 0
keyword_Index_Dict = {}
if not args.nus48e_path is None:
nus48e_File_Path_List, nus48e_Singer_Dict, nus48e_Keyword_List = NUS48E_Info_Load(
nus48e_Path= args.nus48e_path,
sex_Type= args.sex_type
)
total_Pattern_Count += len(nus48e_File_Path_List)
for index, keyword in enumerate(nus48e_Keyword_List, len(keyword_Index_Dict)):
if keyword in keyword_Index_Dict.keys():
raise ValueError('There is an overlapped keyword: \'{}\'.'.format(keyword))
keyword_Index_Dict[keyword] = index
if total_Pattern_Count == 0:
raise ValueError('Total pattern count is zero.')
os.makedirs(hp_Dict['Train']['Train_Pattern']['Path'], exist_ok= True)
if not args.nus48e_path is None:
for index, file_Path in tqdm(
enumerate(nus48e_File_Path_List),
desc= 'Pattern',
total= len(nus48e_File_Path_List)
):
Pattern_File_Generate(
file_Path,
keyword_Index_Dict,
'NUS48E',
nus48e_Singer_Dict[file_Path],
20
)
Metadata_Generate(keyword_Index_Dict)
|
[
"yaml.load",
"pickle.dump",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.join",
"os.path.basename",
"os.walk",
"Audio.Audio_Prep",
"Audio.Mel_Generate",
"numpy.min",
"numpy.max",
"pickle.load",
"os.path.splitext",
"yin.pitch_calc"
] |
[((326, 358), 'yaml.load', 'yaml.load', (['f'], {'Loader': 'yaml.Loader'}), '(f, Loader=yaml.Loader)\n', (335, 358), False, 'import yaml, pickle, os, librosa, argparse\n'), ((465, 749), 'yin.pitch_calc', 'pitch_calc', ([], {'sig': 'audio', 'sr': "hp_Dict['Sound']['Sample_Rate']", 'w_len': "hp_Dict['Sound']['Frame_Length']", 'w_step': "hp_Dict['Sound']['Frame_Shift']", 'confidence_threshold': "hp_Dict['Sound']['Confidence_Threshold']", 'gaussian_smoothing_sigma': "hp_Dict['Sound']['Gaussian_Smoothing_Sigma']"}), "(sig=audio, sr=hp_Dict['Sound']['Sample_Rate'], w_len=hp_Dict[\n 'Sound']['Frame_Length'], w_step=hp_Dict['Sound']['Frame_Shift'],\n confidence_threshold=hp_Dict['Sound']['Confidence_Threshold'],\n gaussian_smoothing_sigma=hp_Dict['Sound']['Gaussian_Smoothing_Sigma'])\n", (475, 749), False, 'from yin import pitch_calc\n'), ((1190, 1590), 'Audio.Mel_Generate', 'Mel_Generate', ([], {'audio': 'audio', 'sample_rate': "hp_Dict['Sound']['Sample_Rate']", 'num_frequency': "hp_Dict['Sound']['Spectrogram_Dim']", 'num_mel': "hp_Dict['Sound']['Mel_Dim']", 'window_length': "hp_Dict['Sound']['Frame_Length']", 'hop_length': "hp_Dict['Sound']['Frame_Shift']", 'mel_fmin': "hp_Dict['Sound']['Mel_F_Min']", 'mel_fmax': "hp_Dict['Sound']['Mel_F_Max']", 'max_abs_value': "hp_Dict['Sound']['Max_Abs_Mel']"}), "(audio=audio, sample_rate=hp_Dict['Sound']['Sample_Rate'],\n num_frequency=hp_Dict['Sound']['Spectrogram_Dim'], num_mel=hp_Dict[\n 'Sound']['Mel_Dim'], window_length=hp_Dict['Sound']['Frame_Length'],\n hop_length=hp_Dict['Sound']['Frame_Shift'], mel_fmin=hp_Dict['Sound'][\n 'Mel_F_Min'], mel_fmax=hp_Dict['Sound']['Mel_F_Max'], max_abs_value=\n hp_Dict['Sound']['Max_Abs_Mel'])\n", (1202, 1590), False, 'from Audio import Audio_Prep, Mel_Generate\n'), ((3652, 3672), 'os.walk', 'os.walk', (['nus48e_Path'], {}), '(nus48e_Path)\n', (3659, 3672), False, 'import yaml, pickle, os, librosa, argparse\n'), ((5056, 5106), 'os.walk', 'os.walk', 
(["hp_Dict['Train']['Train_Pattern']['Path']"], {}), "(hp_Dict['Train']['Train_Pattern']['Path'])\n", (5063, 5106), False, 'import yaml, pickle, os, librosa, argparse\n'), ((6201, 6226), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6224, 6226), False, 'import yaml, pickle, os, librosa, argparse\n'), ((7263, 7332), 'os.makedirs', 'os.makedirs', (["hp_Dict['Train']['Train_Pattern']['Path']"], {'exist_ok': '(True)'}), "(hp_Dict['Train']['Train_Pattern']['Path'], exist_ok=True)\n", (7274, 7332), False, 'import yaml, pickle, os, librosa, argparse\n'), ((1039, 1096), 'Audio.Audio_Prep', 'Audio_Prep', (['path', "hp_Dict['Sound']['Sample_Rate']", 'top_db'], {}), "(path, hp_Dict['Sound']['Sample_Rate'], top_db)\n", (1049, 1096), False, 'from Audio import Audio_Prep, Mel_Generate\n'), ((6072, 6117), 'pickle.dump', 'pickle.dump', (['new_Metadata_Dict', 'f'], {'protocol': '(2)'}), '(new_Metadata_Dict, f, protocol=2)\n', (6083, 6117), False, 'import yaml, pickle, os, librosa, argparse\n'), ((822, 835), 'numpy.min', 'np.min', (['pitch'], {}), '(pitch)\n', (828, 835), True, 'import numpy as np\n'), ((840, 853), 'numpy.max', 'np.max', (['pitch'], {}), '(pitch)\n', (846, 853), True, 'import numpy as np\n'), ((856, 869), 'numpy.min', 'np.min', (['pitch'], {}), '(pitch)\n', (862, 869), True, 'import numpy as np\n'), ((3177, 3221), 'pickle.dump', 'pickle.dump', (['new_Pattern_Dict', 'f'], {'protocol': '(4)'}), '(new_Pattern_Dict, f, protocol=4)\n', (3188, 3221), False, 'import yaml, pickle, os, librosa, argparse\n'), ((5245, 5259), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (5256, 5259), False, 'import yaml, pickle, os, librosa, argparse\n'), ((3954, 3978), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (3966, 3978), False, 'import yaml, pickle, os, librosa, argparse\n'), ((4940, 4990), 'os.walk', 'os.walk', (["hp_Dict['Train']['Train_Pattern']['Path']"], {}), "(hp_Dict['Train']['Train_Pattern']['Path'])\n", (4947, 4990), 
False, 'import yaml, pickle, os, librosa, argparse\n'), ((3055, 3128), 'os.path.join', 'os.path.join', (["hp_Dict['Train']['Train_Pattern']['Path']", 'pickle_File_Name'], {}), "(hp_Dict['Train']['Train_Pattern']['Path'], pickle_File_Name)\n", (3067, 3128), False, 'import yaml, pickle, os, librosa, argparse\n'), ((5157, 5181), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (5169, 5181), False, 'import yaml, pickle, os, librosa, argparse\n'), ((2893, 2915), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2909, 2915), False, 'import yaml, pickle, os, librosa, argparse\n'), ((3856, 3878), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (3872, 3878), False, 'import yaml, pickle, os, librosa, argparse\n')]
|
from rpython.jit.metainterp.test.support import LLJitMixin, noConst
from rpython.rlib import jit
class CallTest(object):
    """Tests for calls as seen by the JIT: indirect calls,
    jit.conditional_call(cond, f, *args), and the elidable variant
    jit.conditional_call_elidable(value, f, *args), including how the
    optimizer constant-folds or merges the latter.  The interp_operations /
    meta_interp / check_resops helpers come from the mixin subclass
    (see TestCall below, which mixes in LLJitMixin)."""
    def test_indirect_call(self):
        """Calling through a function object chosen at runtime yields the
        same result as plain interpretation (f1/f2/choice are opaque to
        the JIT via @jit.dont_look_inside)."""
        @jit.dont_look_inside
        def f1(x):
            return x + 1
        @jit.dont_look_inside
        def f2(x):
            return x + 2
        @jit.dont_look_inside
        def choice(i):
            if i:
                return f1
            return f2
        def f(i):
            func = choice(i)
            return func(i)
        res = self.interp_operations(f, [3])
        assert res == f(3)
    def test_cond_call(self):
        """jit.conditional_call(cond, f, *args) invokes f(*args) only when
        cond is true: the list gets one element for n == 10, none for n == 5."""
        def f(l, n):
            l.append(n)
        def main(n):
            l = []
            jit.conditional_call(n == 10, f, l, n)
            return len(l)
        assert self.interp_operations(main, [10]) == 1
        assert self.interp_operations(main, [5]) == 0
    def test_cond_call_disappears(self):
        """A conditional_call with a constant-false condition disappears
        entirely: no guard_no_exception is left in the residual operations."""
        driver = jit.JitDriver(greens = [], reds = ['n'])
        def f(n):
            raise ValueError
        def main(n):
            while n > 0:
                driver.jit_merge_point(n=n)
                jit.conditional_call(False, f, 10)
                n -= 1
            return 42
        assert self.meta_interp(main, [10]) == 42
        self.check_resops(guard_no_exception=0)
    def test_cond_call_i(self):
        """conditional_call_elidable with an int 'value': a zero value
        triggers the call (f(10) == 2000); a non-zero value is returned
        unchanged (15)."""
        def f(n):
            return n * 200
        def main(n, m):
            return jit.conditional_call_elidable(n, f, m)
        assert self.interp_operations(main, [0, 10]) == 2000
        assert self.interp_operations(main, [15, 42]) == 15
    def test_cond_call_r(self):
        """Same as test_cond_call_i but with a reference 'value': None
        triggers the call, a non-None object (even an empty list) is
        returned as-is."""
        def f(n):
            return [n]
        def main(n):
            if n == 10:
                l = []
            else:
                l = None
            l = jit.conditional_call_elidable(l, f, n)
            return len(l)
        assert main(10) == 0
        assert main(5) == 1
        assert self.interp_operations(main, [10]) == 0
        assert self.interp_operations(main, [5]) == 1
    def test_cond_call_constant_in_pyjitpl(self):
        """With all-constant arguments the elidable call is folded away
        already during tracing (pyjitpl), leaving an empty history."""
        def f(a, b):
            return a + b
        def main(n):
            # this is completely constant-folded because the arguments
            # to f() are constants.
            return jit.conditional_call_elidable(n, f, 40, 2)
        assert main(12) == 12
        assert main(0) == 42
        assert self.interp_operations(main, [12]) == 12
        self.check_operations_history({'finish': 1}) # empty history
    def test_cond_call_constant_in_optimizer(self):
        """With p constant (0) across iterations, optimizeopt folds the
        COND_CALL_VALUE away: only loop arithmetic remains in the trace."""
        myjitdriver = jit.JitDriver(greens = ['m'], reds = ['n', 'p'])
        def externfn(x):
            return x - 3
        class V:  # note: defined but not used by this test
            def __init__(self, value):
                self.value = value
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                m1 = noConst(m)
                n -= jit.conditional_call_elidable(p, externfn, m1)
            return n
        res = self.meta_interp(f, [21, 5, 0])
        assert res == -1
        # the COND_CALL_VALUE is constant-folded away by optimizeopt.py
        self.check_resops({'int_sub': 2, 'int_gt': 2, 'guard_true': 2,
                           'jump': 1})
    def test_cond_call_constant_in_optimizer_1(self):
        # same as test_cond_call_constant_in_optimizer, but the 'value'
        # argument changes
        myjitdriver = jit.JitDriver(greens = ['m'], reds = ['n', 'p'])
        def externfn(x):
            return x - 3
        class V:  # note: defined but not used by this test
            def __init__(self, value):
                self.value = value
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                m1 = noConst(m)
                n -= jit.conditional_call_elidable(p, externfn, m1)
            return n
        assert f(21, 5, 0) == -1
        res = self.meta_interp(f, [21, 5, 0])
        assert res == -1
        # the COND_CALL_VALUE is constant-folded away by optimizeopt.py
        self.check_resops({'int_sub': 2, 'int_gt': 2, 'guard_true': 2,
                           'jump': 1})
    def test_cond_call_constant_in_optimizer_2(self):
        """The elidable call's argument (n) changes every iteration, so the
        call cannot be removed: it becomes a plain CALL in the trace."""
        myjitdriver = jit.JitDriver(greens = ['m'], reds = ['n', 'p'])
        def externfn(x):
            return 2
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                assert p > -1
                assert p < 1
                n -= jit.conditional_call_elidable(p, externfn, n)
            return n
        res = self.meta_interp(f, [21, 5, 0])
        assert res == -1
        # optimizer: the COND_CALL_VALUE is turned into a regular
        # CALL_PURE, which itself becomes a CALL
        self.check_resops(call_pure_i=0, cond_call_value_i=0, call_i=2,
                          int_sub=2)
    def test_cond_call_constant_in_optimizer_3(self):
        """Two elidable calls with identical arguments (n0) in the same
        iteration are merged into a single residual CALL."""
        myjitdriver = jit.JitDriver(greens = ['m'], reds = ['n', 'p'])
        def externfn(x):
            return 1
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                assert p > -1
                assert p < 1
                n0 = n
                n -= jit.conditional_call_elidable(p, externfn, n0)
                n -= jit.conditional_call_elidable(p, externfn, n0)
            return n
        res = self.meta_interp(f, [21, 5, 0])
        assert res == -1
        # same as test_cond_call_constant_in_optimizer_2, but the two
        # intermediate CALL_PUREs are replaced with only one, because
        # they are called with the same arguments
        self.check_resops(call_pure_i=0, cond_call_value_i=0, call_i=2,
                          int_sub=4)
    def test_cond_call_constant_in_optimizer_4(self):
        """The memoized-attribute pattern: get_triple() computes its result
        only while self.triple is still 0.  After the first call the
        optimizer knows the cached value and drops the later calls."""
        class X:
            def __init__(self, value):
                self.value = value
                self.triple = 0
            def _compute_triple(self):
                self.triple = self.value * 3
                return self.triple
            def get_triple(self):
                return jit.conditional_call_elidable(self.triple,
                                        X._compute_triple, self)
        myjitdriver = jit.JitDriver(greens = [], reds = 'auto')
        def main(n):
            total = 0
            while n > 1:
                myjitdriver.jit_merge_point()
                x = X(n)
                total += x.get_triple() + x.get_triple() + x.get_triple()
                n -= 10
            return total
        res = self.meta_interp(main, [100])
        assert res == main(100)
        # remaining: only the first call to get_triple(), as a call_i
        # because we know that x.triple == 0 here. The remaining calls
        # are removed because equal to the first one.
        self.check_resops(call_i=2, cond_call_value_i=0,
                          new_with_vtable=2) # escapes: _compute_triple(self)
    def test_cond_call_constant_in_optimizer_5(self):
        """Like _4, but the compute function takes self.value instead of
        self, so the instance never escapes and stays fully virtual."""
        def _compute_triple(value):
            return value * 3
        class X:
            def __init__(self, value):
                self.value = value
                self.triple = 0
            def get_triple(self):
                res = jit.conditional_call_elidable(self.triple,
                                        _compute_triple, self.value)
                self.triple = res
                return res
        myjitdriver = jit.JitDriver(greens = [], reds = 'auto')
        def main(n):
            total = 0
            while n > 1:
                myjitdriver.jit_merge_point()
                x = X(n)
                total += x.get_triple() + x.get_triple() + x.get_triple()
                n -= 10
            return total
        res = self.meta_interp(main, [100])
        assert res == main(100)
        # remaining: only the first call to get_triple(), as a call_i
        # because we know that x.triple == 0 here. The remaining calls
        # are removed because equal to the first one.
        self.check_resops(call_i=2, cond_call_value_i=0,
                          new_with_vtable=0) # all virtual
    def test_cond_call_multiple_in_optimizer_1(self):
        # test called several times with the same arguments, but
        # the condition is not available to the short preamble.
        # This means that the second cond_call_value after unrolling
        # can't be removed.
        myjitdriver = jit.JitDriver(greens = [], reds = ['n', 'p', 'm'])
        def externfn(x):
            return 2000 # never actually called
        @jit.dont_look_inside
        def randomish(p):
            return p + 1
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                n -= jit.conditional_call_elidable(randomish(p), externfn, m)
            return n
        assert f(21, 5, 1) == -1
        res = self.meta_interp(f, [21, 5, 1])
        assert res == -1
        self.check_resops(call_pure_i=0, cond_call_value_i=2,
                          call_i=2, # randomish()
                          int_sub=2)
    def test_cond_call_multiple_in_optimizer_2(self):
        # test called several times with the same arguments. Ideally
        # we would like them to be consolidated into one call even if
        # the 'value' are different but available from the short
        # preamble. We don't do it so far---it's a mess, because the
        # short preamble is supposed to depend only on loop-invariant
        # things, and 'value' is (most of the time) not loop-invariant.
        myjitdriver = jit.JitDriver(greens = [], reds = ['n', 'p', 'm'])
        def externfn(x):
            return 2 # called only the first time
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                p = jit.conditional_call_elidable(p, externfn, m)
                n -= p
            return n
        assert f(21, 5, 0) == -1
        res = self.meta_interp(f, [21, 5, 0])
        assert res == -1
        self.check_resops(call_pure_i=0,
                          cond_call_value_i=2, # ideally 1, but see above
                          int_sub=2)
    def test_cond_call_in_blackhole(self):
        """conditional_call_elidable also works when executed by the
        blackhole interpreter; the we_are_jitted() branch manually inlines
        what the JIT would emit (_jit_conditional_call_value)."""
        myjitdriver = jit.JitDriver(greens = [], reds = ['n', 'p', 'm'])
        def externfn(x):
            return 2
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                if n > 6: # will fail and finish in the blackhole
                    pass
                if jit.we_are_jitted(): # manually inline here
                    p = jit._jit_conditional_call_value(p, externfn, m)
                else:
                    p = jit.conditional_call_elidable(p, externfn, m)
                n -= p
            return n
        assert f(21, 5, 0) == -1
        res = self.meta_interp(f, [21, 5, 0])
        assert res == -1
    def test_cond_call_raises(self):
        """An exception raised inside the elidable call propagates normally
        to the caller's except clause (here, for m == 1 and m == 1008)."""
        myjitdriver = jit.JitDriver(greens = [], reds = ['n', 'p', 'm'])
        def externfn(x, m):
            if m == 1 or m == 1008:
                raise ValueError
            return x + m
        def f(n, m, p):
            while n > 0:
                myjitdriver.can_enter_jit(n=n, p=p, m=m)
                myjitdriver.jit_merge_point(n=n, p=p, m=m)
                try:
                    p = jit.conditional_call_elidable(p, externfn, n, m)
                    p -= (n + m) # => zero again
                except ValueError:
                    m += 1000
                m += 1
                n -= 2
            return n * m
        assert f(21, 0, 0) == -2011
        res = self.meta_interp(f, [21, 0, 0])
        assert res == -2011
class TestCall(LLJitMixin, CallTest):
    # Concrete test class: runs the whole CallTest suite with the
    # LLJitMixin support mixin supplying interp_operations/meta_interp.
    pass
|
[
"rpython.jit.metainterp.test.support.noConst",
"rpython.rlib.jit.we_are_jitted",
"rpython.rlib.jit.JitDriver",
"rpython.rlib.jit.conditional_call",
"rpython.rlib.jit.conditional_call_elidable",
"rpython.rlib.jit._jit_conditional_call_value"
] |
[((937, 973), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': '[]', 'reds': "['n']"}), "(greens=[], reds=['n'])\n", (950, 973), False, 'from rpython.rlib import jit\n'), ((2704, 2748), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': "['m']", 'reds': "['n', 'p']"}), "(greens=['m'], reds=['n', 'p'])\n", (2717, 2748), False, 'from rpython.rlib import jit\n'), ((3609, 3653), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': "['m']", 'reds': "['n', 'p']"}), "(greens=['m'], reds=['n', 'p'])\n", (3622, 3653), False, 'from rpython.rlib import jit\n'), ((4448, 4492), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': "['m']", 'reds': "['n', 'p']"}), "(greens=['m'], reds=['n', 'p'])\n", (4461, 4492), False, 'from rpython.rlib import jit\n'), ((5227, 5271), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': "['m']", 'reds': "['n', 'p']"}), "(greens=['m'], reds=['n', 'p'])\n", (5240, 5271), False, 'from rpython.rlib import jit\n'), ((6591, 6628), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': '[]', 'reds': '"""auto"""'}), "(greens=[], reds='auto')\n", (6604, 6628), False, 'from rpython.rlib import jit\n'), ((7811, 7848), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': '[]', 'reds': '"""auto"""'}), "(greens=[], reds='auto')\n", (7824, 7848), False, 'from rpython.rlib import jit\n'), ((8808, 8854), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': '[]', 'reds': "['n', 'p', 'm']"}), "(greens=[], reds=['n', 'p', 'm'])\n", (8821, 8854), False, 'from rpython.rlib import jit\n'), ((10032, 10078), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': '[]', 'reds': "['n', 'p', 'm']"}), "(greens=[], reds=['n', 'p', 'm'])\n", (10045, 10078), False, 'from rpython.rlib import jit\n'), ((10762, 10808), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': '[]', 'reds': "['n', 'p', 'm']"}), "(greens=[], reds=['n', 'p', 'm'])\n", (10775, 10808), False, 'from 
rpython.rlib import jit\n'), ((11555, 11601), 'rpython.rlib.jit.JitDriver', 'jit.JitDriver', ([], {'greens': '[]', 'reds': "['n', 'p', 'm']"}), "(greens=[], reds=['n', 'p', 'm'])\n", (11568, 11601), False, 'from rpython.rlib import jit\n'), ((703, 741), 'rpython.rlib.jit.conditional_call', 'jit.conditional_call', (['(n == 10)', 'f', 'l', 'n'], {}), '(n == 10, f, l, n)\n', (723, 741), False, 'from rpython.rlib import jit\n'), ((1434, 1472), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['n', 'f', 'm'], {}), '(n, f, m)\n', (1463, 1472), False, 'from rpython.rlib import jit\n'), ((1797, 1835), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['l', 'f', 'n'], {}), '(l, f, n)\n', (1826, 1835), False, 'from rpython.rlib import jit\n'), ((2273, 2315), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['n', 'f', '(40)', '(2)'], {}), '(n, f, 40, 2)\n', (2302, 2315), False, 'from rpython.rlib import jit\n'), ((1133, 1167), 'rpython.rlib.jit.conditional_call', 'jit.conditional_call', (['(False)', 'f', '(10)'], {}), '(False, f, 10)\n', (1153, 1167), False, 'from rpython.rlib import jit\n'), ((3080, 3090), 'rpython.jit.metainterp.test.support.noConst', 'noConst', (['m'], {}), '(m)\n', (3087, 3090), False, 'from rpython.jit.metainterp.test.support import LLJitMixin, noConst\n'), ((3112, 3158), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'm1'], {}), '(p, externfn, m1)\n', (3141, 3158), False, 'from rpython.rlib import jit\n'), ((3985, 3995), 'rpython.jit.metainterp.test.support.noConst', 'noConst', (['m'], {}), '(m)\n', (3992, 3995), False, 'from rpython.jit.metainterp.test.support import LLJitMixin, noConst\n'), ((4017, 4063), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'm1'], {}), '(p, externfn, m1)\n', (4046, 4063), False, 'from rpython.rlib import jit\n'), ((4788, 4833), 
'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'n'], {}), '(p, externfn, n)\n', (4817, 4833), False, 'from rpython.rlib import jit\n'), ((5590, 5636), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'n0'], {}), '(p, externfn, n0)\n', (5619, 5636), False, 'from rpython.rlib import jit\n'), ((5658, 5704), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'n0'], {}), '(p, externfn, n0)\n', (5687, 5704), False, 'from rpython.rlib import jit\n'), ((6450, 6517), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['self.triple', 'X._compute_triple', 'self'], {}), '(self.triple, X._compute_triple, self)\n', (6479, 6517), False, 'from rpython.rlib import jit\n'), ((7603, 7674), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['self.triple', '_compute_triple', 'self.value'], {}), '(self.triple, _compute_triple, self.value)\n', (7632, 7674), False, 'from rpython.rlib import jit\n'), ((10348, 10393), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'm'], {}), '(p, externfn, m)\n', (10377, 10393), False, 'from rpython.rlib import jit\n'), ((11137, 11156), 'rpython.rlib.jit.we_are_jitted', 'jit.we_are_jitted', ([], {}), '()\n', (11154, 11156), False, 'from rpython.rlib import jit\n'), ((11207, 11254), 'rpython.rlib.jit._jit_conditional_call_value', 'jit._jit_conditional_call_value', (['p', 'externfn', 'm'], {}), '(p, externfn, m)\n', (11238, 11254), False, 'from rpython.rlib import jit\n'), ((11301, 11346), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'm'], {}), '(p, externfn, m)\n', (11330, 11346), False, 'from rpython.rlib import jit\n'), ((11938, 11986), 'rpython.rlib.jit.conditional_call_elidable', 'jit.conditional_call_elidable', (['p', 'externfn', 'n', 'm'], {}), 
'(p, externfn, n, m)\n', (11967, 11986), False, 'from rpython.rlib import jit\n')]
|