code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
from artificial_idiot.networks import networks
from artificial_idiot.search.RL import (
ParametrisedRL, simple_grid_extractor, full_grid_extractor
)
import glob
import os
from artificial_idiot.game.state import State
from artificial_idiot.game.game import Game
from artificial_idiot.player import Player
from artificial_idiot.game.node import *
# Lookup table of network architectures defined in artificial_idiot.networks.
architectures = networks.architectures

# Toggle: resume a previously trained agent from disk instead of starting fresh.
loading = False
if loading:
    # Timestamp identifying the training run to resume (directory name on disk).
    time_stamp = 20190518192015
    path = f"/Users/Dovermore/Documents/2019t1/COMP30024-AritificialIntelligence/ArtificialIdiotProject/artificial_idiot/artificial_idiot/machine_learning/{time_stamp}"
    checkpoint_path = f"{path}/checkpoints"
    checkpoint_files = glob.glob(f'{checkpoint_path}/*')
    # Pick the most recently created checkpoint file.
    latest_checkpoint = max(checkpoint_files, key=os.path.getctime)
    final_file = f"{path}/network"  # NOTE(review): unused — the latest checkpoint is loaded instead
    model = latest_checkpoint
    agent = ParametrisedRL.from_file(model, full_grid_extractor)
else:
    # Alternative architectures kept for experimentation:
    # agent = ParametrisedRL(*architectures["simple_two_sig"])
    # agent = ParametrisedRL(*architectures["full_four_lkrl"])
    # agent = ParametrisedRL(*architectures["full_four_sig"])
    # agent = ParametrisedRL(*architectures["full_three_layer_sigmoid_network"])
    # agent = ParametrisedRL(*architectures["full_linear"])
    agent = ParametrisedRL(*architectures["full_two_sig"])
    pass

# Node class used by the search; alternatives kept for experimentation.
# node_type = WinningRLNode
# node_type = InitialRLNode
# node_type = SimpleRLNode
node_type = SimpleRLNode2

# Action-selection policy forwarded to td_train.
policy = "greedy"
# policy = "choice"

# Debug setting forwarded to td_train (0 disables; small floats were tried).
# debug = 0.001
# debug = 0.1
debug = 0

# Exploration rate forwarded to td_train.
# explore = 0
# explore = 0.1
# explore = 0.2
# explore = 0.5
explore = 1

# theta forwarded to td_train — step-size-like hyperparameter (TODO confirm
# exact semantics in ParametrisedRL.td_train).
# theta = 0.05
# theta = 0.01
theta = 0.005
# theta = 0.001
# theta = 0.0005

# Discount factor (1 = undiscounted).
gamma = 1
# gamma = 0.99

# Build the initial game (red to move) and run TD training.
initial_state = State(Player.start_config, "red")
game = Game("red", initial_state)
agent.td_train(game, initial_state, debug=debug,
               node_type=node_type, policy=policy,
               explore=explore, theta=theta, gamma=gamma)
|
[
"artificial_idiot.game.game.Game",
"artificial_idiot.search.RL.ParametrisedRL",
"artificial_idiot.game.state.State",
"glob.glob",
"artificial_idiot.search.RL.ParametrisedRL.from_file"
] |
[((1710, 1743), 'artificial_idiot.game.state.State', 'State', (['Player.start_config', '"""red"""'], {}), "(Player.start_config, 'red')\n", (1715, 1743), False, 'from artificial_idiot.game.state import State\n'), ((1751, 1777), 'artificial_idiot.game.game.Game', 'Game', (['"""red"""', 'initial_state'], {}), "('red', initial_state)\n", (1755, 1777), False, 'from artificial_idiot.game.game import Game\n'), ((688, 721), 'glob.glob', 'glob.glob', (['f"""{checkpoint_path}/*"""'], {}), "(f'{checkpoint_path}/*')\n", (697, 721), False, 'import glob\n'), ((867, 919), 'artificial_idiot.search.RL.ParametrisedRL.from_file', 'ParametrisedRL.from_file', (['model', 'full_grid_extractor'], {}), '(model, full_grid_extractor)\n', (891, 919), False, 'from artificial_idiot.search.RL import ParametrisedRL, simple_grid_extractor, full_grid_extractor\n'), ((1267, 1313), 'artificial_idiot.search.RL.ParametrisedRL', 'ParametrisedRL', (["*architectures['full_two_sig']"], {}), "(*architectures['full_two_sig'])\n", (1281, 1313), False, 'from artificial_idiot.search.RL import ParametrisedRL, simple_grid_extractor, full_grid_extractor\n')]
|
#! /usr/bin/env python
"""Pre-processing image datasets for model training"""
import os
import json
import tensorflow as tf
import csv
import xml.etree.ElementTree as ET
from tqdm import tqdm
from PIL import Image
from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list
__author__ = "<NAME>"
__email__ = "<EMAIL>"

# Label sets accumulated while processing each source dataset; each processor
# sorts its set and feeds it to update_label_map when done.
OPEN_IMAGES_OBJECTS_SET = set()
IMAGENET_OBJECTS_SET = set()
YOUCOLL_OBJECTS_SET = set()

# Per-dataset root directories and the short names used for output files.
IMAGENET_DATA_DIR = "data/IMGNET"
IMAGENET_DATASET_NAME = "imgnet"
OPENIMAGES_DATA_DIR = "data/OID"
OPENIMAGES_DATASET_NAME = "oid"
MERGED_DATA_DIR = "data/MERGED"
MERGED_DATASET_NAME = "merged"
RND_DATA_DIR = "data/RND"
RND_DATASET_NAME = "rnd"
YOUCOLL_DATA_DIR = "data/YOUCOLL"
YOUCOLL_DATASET_NAME = "youcoll"
# String-to-index label map shared by all dataset processors.
LABEL_STOI = dict()


def save_label_map(data_dir, dataset_name="data"):
    """
    Persist the class names of the global label map to disk.

    Writes one label per line to ``<data_dir>/<dataset_name>.names``.

    :param data_dir: Path to save map
    :type data_dir: String
    :param dataset_name: dataset name to save file
    :type dataset_name: String
    """
    names_path = os.path.join(data_dir, dataset_name + ".names")
    with open(names_path, 'w') as names_file:
        names_file.write("\n".join(LABEL_STOI.keys()))
def process_youcoll(data_dir, class_filter=None, skip_frames=None):
    """
    Process YouCoLL-annotated data into the common dataset dict format.

    Reads ``<data_dir>/<split>_frames.csv`` for the 'train' and 'test' splits;
    each row is (image path, box literal, class name).

    :param data_dir: dataset root containing 'annotations', 'imgs' and the
        per-split CSV files
    :type data_dir: String
    :param class_filter: if given, only boxes whose class name is in it are kept
    :type class_filter: iterable or None
    :param skip_frames: NOTE(review): currently unused — dead parameter
    :return: {split: {'images': {name: path}, 'boxes': {name: [[cls, x0, y0, x1, y1], ...]}}}
    :rtype: dict
    """
    import ast  # stdlib; used for safe literal parsing below
    global YOUCOLL_OBJECTS_SET
    object_track_dir = os.path.join(data_dir, "annotations")
    video_frames_dir = os.path.join(data_dir, "imgs")
    # NOTE(review): these two results are never used below — kept in case
    # get_file_list/get_dir_list have side effects; confirm and remove.
    obj_file_list = get_file_list(object_track_dir, format='.txt')
    video_frames_dir_list = get_dir_list(video_frames_dir, only_top=True)
    dataset = dict()
    for split in ['train', 'test']:
        print("Current split:", split)
        dataset[split] = {'images': dict(), 'boxes': dict()}
        with open(os.path.join(data_dir, split + '_frames.csv')) as csv_file:
            data_rows = list(csv.reader(csv_file, delimiter=','))
        for row in tqdm(data_rows):
            img_path = row[0]
            # was eval(row[1]): literal_eval only accepts Python literals,
            # so a malicious CSV cannot execute arbitrary code
            box = ast.literal_eval(row[1])
            cls_name = row[2]
            img_name = os.path.basename(img_path).split('.')[0]
            if class_filter and cls_name not in class_filter:
                continue
            # Store in label set
            YOUCOLL_OBJECTS_SET.add(cls_name)
            dataset[split]['images'][img_name] = img_path
            # Store annotation in the common [cls, x0, y0, x1, y1] box format
            if img_name not in dataset[split]['boxes']:
                dataset[split]['boxes'][img_name] = list()
            annotation = [cls_name, box[0], box[1], box[2], box[3]]
            dataset[split]['boxes'][img_name].append(annotation)
    YOUCOLL_OBJECTS_SET = sorted(YOUCOLL_OBJECTS_SET)
    update_label_map(YOUCOLL_OBJECTS_SET)
    return dataset
def process_openimages(data_dir, class_filter=None):
    """
    Process an Open Images download into the common dataset dict format.

    Expects ``<data_dir>/<split>/<object>/`` folders of .jpg images, each with
    a ``Label`` subfolder of .txt annotation files.

    :param data_dir: dataset root containing train/test/validation folders
    :type data_dir: String
    :param class_filter: if given, only objects whose folder name is in it are kept
    :type class_filter: iterable or None
    :return: {split: {'images': {name: path}, 'boxes': {name: [tokens, ...]}}}
    :rtype: dict
    """
    global OPEN_IMAGES_OBJECTS_SET
    dataset = dict()
    splits = [os.path.join(data_dir, split) for split in ['train', 'test', 'validation']]
    for split in splits:
        split_dir = os.path.basename(split)
        print("Current split:", split_dir)
        dataset[split_dir] = {'images': dict(), 'boxes': dict()}
        obj_list = get_immediate_subdirectories(split)
        for obj in tqdm(obj_list):
            obj_name = os.path.basename(obj).lower()
            if class_filter:
                if obj_name not in class_filter:
                    continue
            img_file_list = get_file_list(obj, format=".jpg")
            # Only record the label when the object folder actually has images.
            if len(img_file_list) > 0:
                OPEN_IMAGES_OBJECTS_SET.add(obj_name)
            label_dir = os.path.join(obj, 'Label')
            label_list = get_file_list(label_dir, format=".txt")
            for img in img_file_list:
                img_name, _ = os.path.splitext(os.path.basename(img))
                dataset[split_dir]['images'][img_name] = img
            for label in label_list:
                label_name, _ = os.path.splitext(os.path.basename(label))
                if label_name not in dataset[split_dir]['boxes']:
                    dataset[split_dir]['boxes'][label_name] = list()
                with open(label, 'r') as label_file:
                    annotations = label_file.readlines()
                # One whitespace-split, lowercased token list per annotation line.
                for annotation in annotations:
                    dataset[split_dir]['boxes'][label_name].append(annotation.lower().split())
    OPEN_IMAGES_OBJECTS_SET = sorted(OPEN_IMAGES_OBJECTS_SET)
    update_label_map(OPEN_IMAGES_OBJECTS_SET)
    return dataset
def update_label_map(obj_list):
    """
    Add new labels to the global LABEL_STOI string-to-index map.

    Labels already present keep their index; new labels get consecutive
    integer ids appended after the existing ones.

    :param obj_list: iterable of label names
    :type obj_list: iterable of str
    :return: None
    :rtype: None
    """
    global LABEL_STOI
    cur_len = len(LABEL_STOI)
    # the enumerate() index in the original was never used; iterate directly
    for label in obj_list:
        if label not in LABEL_STOI:
            LABEL_STOI[label] = cur_len
            cur_len += 1
def process_imagenet(data_dir, class_filter=None):
    """
    Process an ImageNet subset into the common dataset dict format.

    :param data_dir: Directory with images folder, annotations folder,
        data_split.json and wnid_map.json
    :type data_dir: String
    :param class_filter: if given, only images whose class name is in it are kept
    :type class_filter: iterable or None
    :return: {split: {'images': {name: path}, 'boxes': {name: [[cls, x0, y0, x1, y1], ...]}}}
    :rtype: dict
    """
    global IMAGENET_OBJECTS_SET
    images_dir = os.path.join(data_dir, 'images')
    anno_dir = os.path.join(data_dir, 'annotations')
    data_split_filename = os.path.join(data_dir, 'data_split.json')
    wnid_map_filename = os.path.join(data_dir, 'wnid_map.json')
    with open(data_split_filename, 'r') as data_split_file:
        data_split = json.load(data_split_file)
    with open(wnid_map_filename, 'r') as wnid_map_file:
        object_wnid_map = json.load(wnid_map_file)
    # Invert object->wnid so a wnid prefix can be mapped back to a class name.
    wnid_object_map = {v: k for k, v in object_wnid_map.items()}
    dataset = dict()
    splits = ['train', 'test', 'validation']
    for split in splits:
        print("Current split:", split)
        dataset[split] = {'images': dict(), 'boxes': dict()}
        for img_ind in tqdm(data_split[split]):
            img_name = data_split['data'][img_ind]
            cls_name = img_name.split('_')[0]
            cls_name = wnid_object_map[cls_name]
            if class_filter:
                if cls_name not in class_filter:
                    continue
            # Store in label set
            IMAGENET_OBJECTS_SET.add(cls_name)
            # Store image path; try .jpg first, fall back to .JPEG
            img = os.path.join(images_dir, img_name + ".jpg")
            if not os.path.exists(img):
                img = os.path.join(images_dir, img_name + ".JPEG")
            try:
                width, height = Image.open(img).size
            except Exception:  # was bare except: — don't swallow KeyboardInterrupt
                print('Error', img)
                continue
            dataset[split]['images'][img_name] = img
            # Store annotation from xml to box format, rescaled from the
            # annotated size to the actual image size
            if img_name not in dataset[split]['boxes']:
                dataset[split]['boxes'][img_name] = list()
            anno_filename = os.path.join(anno_dir, img_name + ".xml")
            root = ET.parse(anno_filename).getroot()
            for i in range(get_bb_count(root)):
                annotation = []
                annotation.append(cls_name)
                annotation.append(get_int('xmin', root, i) * (width / get_int('width', root)))
                annotation.append(get_int('ymin', root, i) * (height / get_int('height', root)))
                annotation.append(get_int('xmax', root, i) * (width / get_int('width', root)))
                annotation.append(get_int('ymax', root, i) * (height / get_int('height', root)))
                dataset[split]['boxes'][img_name].append(annotation)
    IMAGENET_OBJECTS_SET = sorted(IMAGENET_OBJECTS_SET)
    update_label_map(IMAGENET_OBJECTS_SET)
    return dataset
def write_tf_records(datasets, record_save_dir):
    """
    Serialize every image/box group in `datasets` to one TFRecord per split.

    Each split is written to ``<record_save_dir>/<split>/data.tfrecord``.

    :param datasets: {split: {'images': {...}, 'boxes': {...}}} dataset object
    :type datasets: dict
    :param record_save_dir: output directory (created if missing)
    :type record_save_dir: String
    :return: None
    :rtype: None
    """
    if not os.path.exists(record_save_dir):
        os.mkdir(record_save_dir)
    for split in tqdm(datasets, desc="Splits completed"):
        record_save_path = os.path.join(record_save_dir, split)
        if not os.path.exists(record_save_path):
            os.mkdir(record_save_path)
        record_save_file = os.path.join(record_save_path, 'data.tfrecord')
        writer = tf.python_io.TFRecordWriter(record_save_file)
        # try/finally: close (and flush) the writer even if a record fails
        # to serialize — the original leaked the writer on exceptions
        try:
            for img_name in tqdm(datasets[split]['images'], desc="Writing to file"):
                record = group_to_tf_record(datasets[split]['boxes'][img_name], datasets[split]['images'][img_name], LABEL_STOI)
                if record:
                    serialized = record.SerializeToString()
                    writer.write(serialized)
        finally:
            writer.close()
def merge_datasets(dataset_list):
    """
    Merge dataset objects in the list into a single dataset.

    Entries from later datasets overwrite earlier ones that share both a
    split and an image name.

    :param dataset_list: datasets in list
    :type dataset_list: List[Dict{}]
    :return: Merged dataset
    :rtype: Dict{}
    """
    merged = {}
    for ds in dataset_list:
        for split, split_data in ds.items():
            target = merged.setdefault(split, {'images': {}, 'boxes': {}})
            for name in split_data['images']:
                target['boxes'][name] = split_data['boxes'][name]
                target['images'][name] = split_data['images'][name]
    return merged
def save_dataset(dataset_obj, data_dir, data_filename):
    """
    Json dump the dataset object to ``<data_dir>/<data_filename>.json``.

    :param dataset_obj: Parsed dataset object
    :type dataset_obj: Dictionary
    :param data_dir: Directory to save file
    :type data_dir: String
    :param data_filename: Filename for dump file (without extension)
    :type data_filename: String
    :return: None
    :rtype: None
    """
    dump_path = os.path.join(data_dir, data_filename + ".json")
    with open(dump_path, 'w') as data_file:
        json.dump(dataset_obj, data_file)
def process_dataset(dataset_dir, dataset_names, class_filter=None):
    """
    Process dataset main method: parse, merge, and persist one or more datasets.

    :param dataset_dir: dataset root directories, one per entry in dataset_names
    :type dataset_dir: List[str]
    :param dataset_names: dataset name constants (oid / imgnet / youcoll)
    :type dataset_names: List[str]
    :param class_filter: optional collection of class names to keep
    :type class_filter: iterable or None
    :return: None
    :rtype: None
    """
    datasets = []
    for i, dataset_name in enumerate(dataset_names):
        if dataset_name == OPENIMAGES_DATASET_NAME:
            dataset = process_openimages(dataset_dir[i], class_filter)
        elif dataset_name == IMAGENET_DATASET_NAME:
            dataset = process_imagenet(dataset_dir[i], class_filter)
        elif dataset_name == YOUCOLL_DATASET_NAME:
            dataset = process_youcoll(dataset_dir[i], class_filter)
        else:
            # the original fell through here, either raising NameError or
            # silently appending the previous iteration's dataset — fail loudly
            raise ValueError("unknown dataset name: {}".format(dataset_name))
        datasets.append(dataset)
    if len(datasets) == 1:
        dataset = datasets[0]
        dataset_dir = dataset_dir[0]
        dataset_name = dataset_names[0]
    else:
        dataset = merge_datasets(datasets)
        dataset_dir = RND_DATA_DIR  # MERGED_DATA_DIR
        dataset_name = RND_DATASET_NAME  # MERGED_DATASET_NAME
    save_label_map(dataset_dir, dataset_name)
    save_dataset(dataset, dataset_dir, dataset_name)
    write_tf_records(dataset, os.path.join(dataset_dir, 'tfrecords'))
    print("Processed", dataset_name, "dataset..")
if __name__ == "__main__":
    main_data_dir = "data"  # NOTE(review): unused — kept from earlier experiments
    # Earlier invocations kept for reference:
    # process_dataset([OPENIMAGES_DATA_DIR], [OPENIMAGES_DATASET_NAME])
    # process_dataset([IMAGENET_DATA_DIR], [IMAGENET_DATASET_NAME])
    # process_dataset([OPENIMAGES_DATA_DIR, IMAGENET_DATA_DIR], [OPENIMAGES_DATASET_NAME, IMAGENET_DATASET_NAME],
    #                 class_filter=['bowl'])
    process_dataset([YOUCOLL_DATA_DIR], [YOUCOLL_DATASET_NAME])
|
[
"os.mkdir",
"tqdm.tqdm",
"utils.get_dir_list",
"json.load",
"tensorflow.python_io.TFRecordWriter",
"json.dump",
"os.path.basename",
"csv.reader",
"utils.get_file_list",
"xml.etree.ElementTree.parse",
"os.path.exists",
"utils.get_bb_count",
"PIL.Image.open",
"utils.get_int",
"utils.group_to_tf_record",
"os.path.join",
"utils.get_immediate_subdirectories"
] |
[((1066, 1113), 'os.path.join', 'os.path.join', (['data_dir', "(dataset_name + '.names')"], {}), "(data_dir, dataset_name + '.names')\n", (1078, 1113), False, 'import os\n'), ((1517, 1554), 'os.path.join', 'os.path.join', (['data_dir', '"""annotations"""'], {}), "(data_dir, 'annotations')\n", (1529, 1554), False, 'import os\n'), ((1578, 1608), 'os.path.join', 'os.path.join', (['data_dir', '"""imgs"""'], {}), "(data_dir, 'imgs')\n", (1590, 1608), False, 'import os\n'), ((1630, 1676), 'utils.get_file_list', 'get_file_list', (['object_track_dir'], {'format': '""".txt"""'}), "(object_track_dir, format='.txt')\n", (1643, 1676), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((1705, 1750), 'utils.get_dir_list', 'get_dir_list', (['video_frames_dir'], {'only_top': '(True)'}), '(video_frames_dir, only_top=True)\n', (1717, 1750), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((5641, 5673), 'os.path.join', 'os.path.join', (['data_dir', '"""images"""'], {}), "(data_dir, 'images')\n", (5653, 5673), False, 'import os\n'), ((5689, 5726), 'os.path.join', 'os.path.join', (['data_dir', '"""annotations"""'], {}), "(data_dir, 'annotations')\n", (5701, 5726), False, 'import os\n'), ((5753, 5794), 'os.path.join', 'os.path.join', (['data_dir', '"""data_split.json"""'], {}), "(data_dir, 'data_split.json')\n", (5765, 5794), False, 'import os\n'), ((5819, 5858), 'os.path.join', 'os.path.join', (['data_dir', '"""wnid_map.json"""'], {}), "(data_dir, 'wnid_map.json')\n", (5831, 5858), False, 'import os\n'), ((8431, 8470), 'tqdm.tqdm', 'tqdm', (['datasets'], {'desc': '"""Splits completed"""'}), "(datasets, desc='Splits completed')\n", (8435, 8470), False, 'from tqdm import tqdm\n'), ((2193, 2208), 'tqdm.tqdm', 'tqdm', (['data_rows'], {}), '(data_rows)\n', (2197, 2208), False, 'from tqdm import tqdm\n'), ((3456, 3485), 
'os.path.join', 'os.path.join', (['data_dir', 'split'], {}), '(data_dir, split)\n', (3468, 3485), False, 'import os\n'), ((3577, 3600), 'os.path.basename', 'os.path.basename', (['split'], {}), '(split)\n', (3593, 3600), False, 'import os\n'), ((3728, 3763), 'utils.get_immediate_subdirectories', 'get_immediate_subdirectories', (['split'], {}), '(split)\n', (3756, 3763), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((3783, 3797), 'tqdm.tqdm', 'tqdm', (['obj_list'], {}), '(obj_list)\n', (3787, 3797), False, 'from tqdm import tqdm\n'), ((5941, 5967), 'json.load', 'json.load', (['data_split_file'], {}), '(data_split_file)\n', (5950, 5967), False, 'import json\n'), ((6051, 6075), 'json.load', 'json.load', (['wnid_map_file'], {}), '(wnid_map_file)\n', (6060, 6075), False, 'import json\n'), ((6359, 6382), 'tqdm.tqdm', 'tqdm', (['data_split[split]'], {}), '(data_split[split])\n', (6363, 6382), False, 'from tqdm import tqdm\n'), ((8346, 8377), 'os.path.exists', 'os.path.exists', (['record_save_dir'], {}), '(record_save_dir)\n', (8360, 8377), False, 'import os\n'), ((8387, 8412), 'os.mkdir', 'os.mkdir', (['record_save_dir'], {}), '(record_save_dir)\n', (8395, 8412), False, 'import os\n'), ((8499, 8535), 'os.path.join', 'os.path.join', (['record_save_dir', 'split'], {}), '(record_save_dir, split)\n', (8511, 8535), False, 'import os\n'), ((8653, 8700), 'os.path.join', 'os.path.join', (['record_save_path', '"""data.tfrecord"""'], {}), "(record_save_path, 'data.tfrecord')\n", (8665, 8700), False, 'import os\n'), ((8719, 8764), 'tensorflow.python_io.TFRecordWriter', 'tf.python_io.TFRecordWriter', (['record_save_file'], {}), '(record_save_file)\n', (8746, 8764), True, 'import tensorflow as tf\n'), ((8789, 8844), 'tqdm.tqdm', 'tqdm', (["datasets[split]['images']"], {'desc': '"""Writing to file"""'}), "(datasets[split]['images'], desc='Writing to file')\n", (8793, 8844), False, 'from tqdm import 
tqdm\n'), ((10256, 10289), 'json.dump', 'json.dump', (['dataset_obj', 'data_file'], {}), '(dataset_obj, data_file)\n', (10265, 10289), False, 'import json\n'), ((11433, 11471), 'os.path.join', 'os.path.join', (['dataset_dir', '"""tfrecords"""'], {}), "(dataset_dir, 'tfrecords')\n", (11445, 11471), False, 'import os\n'), ((2037, 2072), 'csv.reader', 'csv.reader', (['csv_file'], {'delimiter': '""","""'}), "(csv_file, delimiter=',')\n", (2047, 2072), False, 'import csv\n'), ((3989, 4022), 'utils.get_file_list', 'get_file_list', (['obj'], {'format': '""".jpg"""'}), "(obj, format='.jpg')\n", (4002, 4022), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((4142, 4168), 'os.path.join', 'os.path.join', (['obj', '"""Label"""'], {}), "(obj, 'Label')\n", (4154, 4168), False, 'import os\n'), ((4194, 4233), 'utils.get_file_list', 'get_file_list', (['label_dir'], {'format': '""".txt"""'}), "(label_dir, format='.txt')\n", (4207, 4233), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((6769, 6812), 'os.path.join', 'os.path.join', (['images_dir', "(img_name + '.jpg')"], {}), "(images_dir, img_name + '.jpg')\n", (6781, 6812), False, 'import os\n'), ((7324, 7365), 'os.path.join', 'os.path.join', (['anno_dir', "(img_name + '.xml')"], {}), "(anno_dir, img_name + '.xml')\n", (7336, 7365), False, 'import os\n'), ((8552, 8584), 'os.path.exists', 'os.path.exists', (['record_save_path'], {}), '(record_save_path)\n', (8566, 8584), False, 'import os\n'), ((8598, 8624), 'os.mkdir', 'os.mkdir', (['record_save_path'], {}), '(record_save_path)\n', (8606, 8624), False, 'import os\n'), ((8867, 8975), 'utils.group_to_tf_record', 'group_to_tf_record', (["datasets[split]['boxes'][img_name]", "datasets[split]['images'][img_name]", 'LABEL_STOI'], {}), "(datasets[split]['boxes'][img_name], datasets[split][\n 'images'][img_name], 
LABEL_STOI)\n", (8885, 8975), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((10180, 10227), 'os.path.join', 'os.path.join', (['data_dir', "(data_filename + '.json')"], {}), "(data_dir, data_filename + '.json')\n", (10192, 10227), False, 'import os\n'), ((1952, 1997), 'os.path.join', 'os.path.join', (['data_dir', "(split + '_frames.csv')"], {}), "(data_dir, split + '_frames.csv')\n", (1964, 1997), False, 'import os\n'), ((6830, 6849), 'os.path.exists', 'os.path.exists', (['img'], {}), '(img)\n', (6844, 6849), False, 'import os\n'), ((6873, 6917), 'os.path.join', 'os.path.join', (['images_dir', "(img_name + '.JPEG')"], {}), "(images_dir, img_name + '.JPEG')\n", (6885, 6917), False, 'import os\n'), ((7448, 7466), 'utils.get_bb_count', 'get_bb_count', (['root'], {}), '(root)\n', (7460, 7466), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((3822, 3843), 'os.path.basename', 'os.path.basename', (['obj'], {}), '(obj)\n', (3838, 3843), False, 'import os\n'), ((4319, 4340), 'os.path.basename', 'os.path.basename', (['img'], {}), '(img)\n', (4335, 4340), False, 'import os\n'), ((4489, 4512), 'os.path.basename', 'os.path.basename', (['label'], {}), '(label)\n', (4505, 4512), False, 'import os\n'), ((6968, 6983), 'PIL.Image.open', 'Image.open', (['img'], {}), '(img)\n', (6978, 6983), False, 'from PIL import Image\n'), ((7386, 7409), 'xml.etree.ElementTree.parse', 'ET.parse', (['anno_filename'], {}), '(anno_filename)\n', (7394, 7409), True, 'import xml.etree.ElementTree as ET\n'), ((2325, 2351), 'os.path.basename', 'os.path.basename', (['img_path'], {}), '(img_path)\n', (2341, 2351), False, 'import os\n'), ((7579, 7603), 'utils.get_int', 'get_int', (['"""xmin"""', 'root', 'i'], {}), "('xmin', root, i)\n", (7586, 7603), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, 
get_int, group_to_tf_record, get_dir_list\n'), ((7674, 7698), 'utils.get_int', 'get_int', (['"""ymin"""', 'root', 'i'], {}), "('ymin', root, i)\n", (7681, 7698), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((7771, 7795), 'utils.get_int', 'get_int', (['"""xmax"""', 'root', 'i'], {}), "('xmax', root, i)\n", (7778, 7795), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((7866, 7890), 'utils.get_int', 'get_int', (['"""ymax"""', 'root', 'i'], {}), "('ymax', root, i)\n", (7873, 7890), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((7615, 7637), 'utils.get_int', 'get_int', (['"""width"""', 'root'], {}), "('width', root)\n", (7622, 7637), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((7711, 7734), 'utils.get_int', 'get_int', (['"""height"""', 'root'], {}), "('height', root)\n", (7718, 7734), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((7807, 7829), 'utils.get_int', 'get_int', (['"""width"""', 'root'], {}), "('width', root)\n", (7814, 7829), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n'), ((7903, 7926), 'utils.get_int', 'get_int', (['"""height"""', 'root'], {}), "('height', root)\n", (7910, 7926), False, 'from utils import get_immediate_subdirectories, get_file_list, get_bb_count, get_int, group_to_tf_record, get_dir_list\n')]
|
import os
# Environment variable that can override the default AAD authority URI.
AUTH_ENV_VAR_NAME = "AadAuthorityUri"
# Well-known Kusto client application id.
KUSTO_CLIENT_APP_ID = "db662dc1-0cfe-4e1c-a843-19a68e65be58"
# Default public-cloud AAD login endpoint, used when the env var is unset.
PUBLIC_LOGIN_URL = "https://login.microsoftonline.com"
class CloudInfo:
    """ Holds the authentication endpoint data for one cloud instance. """

    def __init__(self, auth_endpoint: str, kusto_client_app_id: str, redirect_uri: str):
        # redirect URI is only used for interactive login flows
        self.login_redirect_uri = redirect_uri
        self.aad_authority_uri = auth_endpoint
        self.kusto_client_app_id = kusto_client_app_id
class CloudSettings:
    """ Holds data for all cloud instances and resolves the instance to use. """

    # shared, lazily-initialised CloudInfo singleton
    _cloud_info = None

    @classmethod
    def _init_once(cls):
        # idempotent: only the first call performs initialisation
        if cls._cloud_info is None:
            # todo: replace this with a call to the auth metadata endpoint
            authority = os.environ.get(AUTH_ENV_VAR_NAME, PUBLIC_LOGIN_URL)
            cls._cloud_info = CloudInfo(authority, KUSTO_CLIENT_APP_ID, None)

    @classmethod
    def get_cloud_info(cls) -> CloudInfo:
        """ Get the details of a cloud according to the DNS suffix of the provided connection string """
        cls._init_once()
        return cls._cloud_info
|
[
"os.environ.get"
] |
[((941, 992), 'os.environ.get', 'os.environ.get', (['AUTH_ENV_VAR_NAME', 'PUBLIC_LOGIN_URL'], {}), '(AUTH_ENV_VAR_NAME, PUBLIC_LOGIN_URL)\n', (955, 992), False, 'import os\n')]
|
from datetime import timedelta
from airflow import DAG
from airflow.providers.docker.operators.docker import DockerOperator
from airflow.utils.dates import days_ago
# Shared task defaults applied to every task in the DAG.
default_args = {
    "owner": "airflow",
    "depends_on_past": False,
    # NOTE(review): start_date is also passed to the DAG below (days_ago(5));
    # confirm which one is intended and keep only one.
    "start_date": days_ago(2),
    "retries": 1,
    "retry_delay": timedelta(minutes=5),
}

# Daily DAG that runs the data generator container once per schedule interval.
with DAG(
    "docker_operator_data_gen",
    default_args=default_args,
    schedule_interval="@daily",
    start_date=days_ago(5),
) as dag:
    # Runs the data-gen image with the execution date templated into the
    # output path; the host data directory is bind-mounted at /data.
    data_gen = DockerOperator(
        image="data-gen-docker-operator",
        command="/data/raw/{{ ds }}",
        task_id="docker-airflow-data-gen",
        do_xcom_push=False,
        volumes=["/Users/Lily/PycharmProjects/airflow_examples/airflow_examples/data:/data"]
    )
    # Single-task DAG: this bare reference sets no dependencies.
    data_gen
|
[
"airflow.utils.dates.days_ago",
"airflow.providers.docker.operators.docker.DockerOperator",
"datetime.timedelta"
] |
[((265, 276), 'airflow.utils.dates.days_ago', 'days_ago', (['(2)'], {}), '(2)\n', (273, 276), False, 'from airflow.utils.dates import days_ago\n'), ((317, 337), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (326, 337), False, 'from datetime import timedelta\n'), ((525, 764), 'airflow.providers.docker.operators.docker.DockerOperator', 'DockerOperator', ([], {'image': '"""data-gen-docker-operator"""', 'command': '"""/data/raw/{{ ds }}"""', 'task_id': '"""docker-airflow-data-gen"""', 'do_xcom_push': '(False)', 'volumes': "['/Users/Lily/PycharmProjects/airflow_examples/airflow_examples/data:/data']"}), "(image='data-gen-docker-operator', command=\n '/data/raw/{{ ds }}', task_id='docker-airflow-data-gen', do_xcom_push=\n False, volumes=[\n '/Users/Lily/PycharmProjects/airflow_examples/airflow_examples/data:/data']\n )\n", (539, 764), False, 'from airflow.providers.docker.operators.docker import DockerOperator\n'), ((485, 496), 'airflow.utils.dates.days_ago', 'days_ago', (['(5)'], {}), '(5)\n', (493, 496), False, 'from airflow.utils.dates import days_ago\n')]
|
import logging
import argparse
import sys
##################
####  logger   ###
##################
# Set up the shared application logger: INFO-level messages go to stdout via
# basicConfig, and (below) to elevator_server.log via a FileHandler.
formatter = logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s')
logger = logging.getLogger('ev_logger')
logging.basicConfig(stream=sys.stdout, level=logging.INFO)

# Set up command-line args
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('-v', '--verbose', action='store_true', dest='verbose',
                    help='Enable debug info on console')  # fixed typo: was "consolo"
args = parser.parse_args()

'''
# Print out logging message on console
h_console = logging.StreamHandler()
h_console.setFormatter(formatter)
if args.verbose:
    h_console.setLevel(logging.DEBUG)
else:
    h_console.setLevel(logging.INFO)
logger.addHandler(h_console)
'''

# Record logging messages in the logging file
h_file = logging.FileHandler("elevator_server.log")
h_file.setFormatter(formatter)
h_file.setLevel(logging.INFO)
logger.addHandler(h_file)
|
[
"argparse.ArgumentParser",
"logging.basicConfig",
"logging.FileHandler",
"logging.Formatter",
"logging.getLogger"
] |
[((130, 191), 'logging.Formatter', 'logging.Formatter', (['"""[%(asctime)s] %(levelname)s: %(message)s"""'], {}), "('[%(asctime)s] %(levelname)s: %(message)s')\n", (147, 191), False, 'import logging\n'), ((201, 231), 'logging.getLogger', 'logging.getLogger', (['"""ev_logger"""'], {}), "('ev_logger')\n", (218, 231), False, 'import logging\n'), ((232, 290), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'level': 'logging.INFO'}), '(stream=sys.stdout, level=logging.INFO)\n', (251, 290), False, 'import logging\n'), ((314, 375), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process some integers."""'}), "(description='Process some integers.')\n", (337, 375), False, 'import argparse\n'), ((830, 872), 'logging.FileHandler', 'logging.FileHandler', (['"""elevator_server.log"""'], {}), "('elevator_server.log')\n", (849, 872), False, 'import logging\n')]
|
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
from __future__ import print_function
import getpass
import os
import sys
from sawtooth_signing import create_context
from sawtooth_cli.exceptions import CliException
def add_keygen_parser(subparsers, parent_parser):
    """Register the 'keygen' subcommand and its arguments on `subparsers`."""
    keygen_parser = subparsers.add_parser(
        'keygen',
        help='Creates user signing keys',
        description='Generates keys with which the user can sign '
        'transactions and batches.',
        epilog='The private and public key files are stored in '
        '<key-dir>/<key-name>.priv and <key-dir>/<key-name>.pub. '
        '<key-dir> defaults to ~/.sawtooth and <key-name> defaults to $USER.',
        parents=[parent_parser])
    # optional positional: defaults to the current user when omitted
    keygen_parser.add_argument(
        'key_name', nargs='?',
        help='specify the name of the key to create')
    keygen_parser.add_argument(
        '--key-dir',
        help="specify the directory for the key files")
    keygen_parser.add_argument(
        '--force', action='store_true',
        help="overwrite files if they exist")
    keygen_parser.add_argument(
        '-q', '--quiet', action='store_true',
        help="do not display output")
def do_keygen(args):
    """
    Generate a secp256k1 key pair and write it to
    <key_dir>/<key_name>.priv and <key_dir>/<key_name>.pub.

    :param args: parsed CLI args with key_name, key_dir, force, quiet
    :raises CliException: when the key dir is missing/uncreatable, when the
        files already exist without --force, or on any IOError while writing
    """
    # Default key name is the current OS user.
    if args.key_name is not None:
        key_name = args.key_name
    else:
        key_name = getpass.getuser()
    # An explicit --key-dir must already exist; the default dir is created.
    if args.key_dir is not None:
        key_dir = args.key_dir
        if not os.path.exists(key_dir):
            raise CliException('no such directory: {}'.format(key_dir))
    else:
        key_dir = os.path.join(os.path.expanduser('~'), '.sawtooth', 'keys')
        if not os.path.exists(key_dir):
            if not args.quiet:
                print('creating key directory: {}'.format(key_dir))
            try:
                os.makedirs(key_dir, 0o755)
            except IOError as e:
                raise CliException('IOError: {}'.format(str(e))) from e
    priv_filename = os.path.join(key_dir, key_name + '.priv')
    pub_filename = os.path.join(key_dir, key_name + '.pub')
    # Without --force, refuse to overwrite either existing file.
    if not args.force:
        file_exists = False
        for filename in [priv_filename, pub_filename]:
            if os.path.exists(filename):
                file_exists = True
                print('file exists: {}'.format(filename), file=sys.stderr)
        if file_exists:
            raise CliException(
                'files exist, rerun with --force to overwrite existing files')
    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    public_key = context.get_public_key(private_key)
    try:
        priv_exists = os.path.exists(priv_filename)
        with open(priv_filename, 'w') as priv_fd:
            if not args.quiet:
                if priv_exists:
                    print('overwriting file: {}'.format(priv_filename))
                else:
                    print('writing file: {}'.format(priv_filename))
            priv_fd.write(private_key.as_hex())
            priv_fd.write('\n')
        # Set the private key u+rw g+r
        os.chmod(priv_filename, 0o640)
        pub_exists = os.path.exists(pub_filename)
        with open(pub_filename, 'w') as pub_fd:
            if not args.quiet:
                if pub_exists:
                    print('overwriting file: {}'.format(pub_filename))
                else:
                    print('writing file: {}'.format(pub_filename))
            pub_fd.write(public_key.as_hex())
            pub_fd.write('\n')
        # Set the public key u+rw g+r o+r
        os.chmod(pub_filename, 0o644)
    except IOError as ioe:
        raise CliException('IOError: {}'.format(str(ioe))) from ioe
|
[
"sawtooth_signing.create_context",
"os.path.expanduser",
"os.chmod",
"getpass.getuser",
"os.makedirs",
"os.path.exists",
"sawtooth_cli.exceptions.CliException",
"os.path.join"
] |
[((2524, 2565), 'os.path.join', 'os.path.join', (['key_dir', "(key_name + '.priv')"], {}), "(key_dir, key_name + '.priv')\n", (2536, 2565), False, 'import os\n'), ((2585, 2625), 'os.path.join', 'os.path.join', (['key_dir', "(key_name + '.pub')"], {}), "(key_dir, key_name + '.pub')\n", (2597, 2625), False, 'import os\n'), ((3034, 3061), 'sawtooth_signing.create_context', 'create_context', (['"""secp256k1"""'], {}), "('secp256k1')\n", (3048, 3061), False, 'from sawtooth_signing import create_context\n'), ((1916, 1933), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (1931, 1933), False, 'import getpass\n'), ((3198, 3227), 'os.path.exists', 'os.path.exists', (['priv_filename'], {}), '(priv_filename)\n', (3212, 3227), False, 'import os\n'), ((3691, 3719), 'os.path.exists', 'os.path.exists', (['pub_filename'], {}), '(pub_filename)\n', (3705, 3719), False, 'import os\n'), ((2014, 2037), 'os.path.exists', 'os.path.exists', (['key_dir'], {}), '(key_dir)\n', (2028, 2037), False, 'import os\n'), ((2152, 2175), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (2170, 2175), False, 'import os\n'), ((2213, 2236), 'os.path.exists', 'os.path.exists', (['key_dir'], {}), '(key_dir)\n', (2227, 2236), False, 'import os\n'), ((2748, 2772), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (2762, 2772), False, 'import os\n'), ((2926, 3001), 'sawtooth_cli.exceptions.CliException', 'CliException', (['"""files exist, rerun with --force to overwrite existing files"""'], {}), "('files exist, rerun with --force to overwrite existing files')\n", (2938, 3001), False, 'from sawtooth_cli.exceptions import CliException\n'), ((3638, 3666), 'os.chmod', 'os.chmod', (['priv_filename', '(416)'], {}), '(priv_filename, 416)\n', (3646, 3666), False, 'import os\n'), ((4125, 4152), 'os.chmod', 'os.chmod', (['pub_filename', '(420)'], {}), '(pub_filename, 420)\n', (4133, 4152), False, 'import os\n'), ((2370, 2395), 'os.makedirs', 'os.makedirs', 
(['key_dir', '(493)'], {}), '(key_dir, 493)\n', (2381, 2395), False, 'import os\n')]
|
#! /usr/bin/env python3
import socket
import time
from dateutil import parser, tz
from datetime import datetime
import time
#configuration
LOG_MODE=2 #1:ERROR, 2:TRACE
CONSOLE_LOG=True
SLEEP_TIMER=0.25
#parameters
TIMEZONE="Europe/Zurich"
CHECKUNTIL_TIME="Dec 20, 2021 0:30 AM"
REMOTE_SERVER_DNS = "one.one.one.one"
REMOTE_SERVER_IP = "1.1.1.1"
REMOTE_SERVER_PORT = "80" #HTTP
REMOTE_SERVER_RETRYTIME = 1
FILE_PATH= "/home/admin/Desktop/netcheck.log"
def logging_error(msg):
if LOG_MODE >= 1:
mydate=datetime.now(tz=tz.tzlocal())
FILE.write('[ERROR] '+ mydate.strftime("%Y-%m-%d %H:%M:%S-%f") + ', ' + msg + '\n')
if CONSOLE_LOG == True:
print('[ERROR] '+ mydate.strftime("%Y-%m-%d %H:%M:%S-%f") + ', ' + msg + '\n')
def logging_trace(msg):
if LOG_MODE >= 2:
mydate=datetime.now(tz=tz.tzlocal())
FILE.write('[TRACE] ' + mydate.strftime("%Y-%m-%d %H:%M:%S-%f") + ', ' + msg + '\n')
if CONSOLE_LOG == True:
print('[TRACE] ' + mydate.strftime("%Y-%m-%d %H:%M:%S-%f") + ', ' + msg + '\n')
def is_connected(hostip, port, retryTime):
try:
s = socket.create_connection((hostip, port), retryTime)
s.close()
logging_trace("socket opened, connection established and socket closed")
except socket.error as exc:
logging_error("OSError: {0}".format(exc))
return False
return True
def isExitTimeReached(mydatetime):
PYCON_DATE = parser.parse(mydatetime)
PYCON_DATE = PYCON_DATE.replace(tzinfo=tz.gettz(TIMEZONE))
now = datetime.now(tz=tz.tzlocal())
countdown = PYCON_DATE - now
if(countdown.total_seconds() <= 0):
logging_trace("end time reached, exit ...")
return True
return False
def checkUntil(endTime, checkIp, checkPort, checkRetryTime):
logging_trace('check remote ip: ' + checkIp)
while isExitTimeReached(endTime) == False:
is_connected(checkIp, checkPort, checkRetryTime)
time.sleep(SLEEP_TIMER)
#Program entrypoint
FILE = open(FILE_PATH, 'a', buffering=1)
checkUntil(CHECKUNTIL_TIME, REMOTE_SERVER_IP, REMOTE_SERVER_PORT, REMOTE_SERVER_RETRYTIME)
FILE.close()
|
[
"dateutil.parser.parse",
"socket.create_connection",
"dateutil.tz.gettz",
"dateutil.tz.tzlocal",
"time.sleep"
] |
[((1394, 1418), 'dateutil.parser.parse', 'parser.parse', (['mydatetime'], {}), '(mydatetime)\n', (1406, 1418), False, 'from dateutil import parser, tz\n'), ((1093, 1144), 'socket.create_connection', 'socket.create_connection', (['(hostip, port)', 'retryTime'], {}), '((hostip, port), retryTime)\n', (1117, 1144), False, 'import socket\n'), ((1882, 1905), 'time.sleep', 'time.sleep', (['SLEEP_TIMER'], {}), '(SLEEP_TIMER)\n', (1892, 1905), False, 'import time\n'), ((1460, 1478), 'dateutil.tz.gettz', 'tz.gettz', (['TIMEZONE'], {}), '(TIMEZONE)\n', (1468, 1478), False, 'from dateutil import parser, tz\n'), ((1504, 1516), 'dateutil.tz.tzlocal', 'tz.tzlocal', ([], {}), '()\n', (1514, 1516), False, 'from dateutil import parser, tz\n'), ((526, 538), 'dateutil.tz.tzlocal', 'tz.tzlocal', ([], {}), '()\n', (536, 538), False, 'from dateutil import parser, tz\n'), ((817, 829), 'dateutil.tz.tzlocal', 'tz.tzlocal', ([], {}), '()\n', (827, 829), False, 'from dateutil import parser, tz\n')]
|
"""
tests for time conversions relevant to MSISE00
"""
from __future__ import annotations
import datetime
import typing
import numpy as np
from pytest import approx
import sciencedates as sd
T: list[typing.Any] = [datetime.datetime(2013, 7, 2, 12, 0, 0)]
T.append(T[0].date())
T.append(np.datetime64(T[0]))
T.append(str(T[0]))
def test_str():
t = T[3]
assert isinstance(t, str)
iyd, utsec, stl = sd.datetime2gtd(t, glon=42)
assert iyd == 183
assert utsec == 43200
assert stl == approx(14.8)
def test_dt64():
t = T[2]
assert isinstance(t, np.datetime64)
iyd, utsec, stl = sd.datetime2gtd(t, glon=42)
assert iyd == 183
assert utsec == 43200
assert stl == approx(14.8)
def test_date():
t = T[1]
assert isinstance(t, datetime.date)
iyd, utsec, stl = sd.datetime2gtd(t, glon=42)
assert iyd == 183
assert utsec == 0
assert stl == approx(2.8)
def test_datetime():
t = T[0]
assert isinstance(t, datetime.datetime)
iyd, utsec, stl = sd.datetime2gtd(t, glon=42)
assert iyd == 183
assert utsec == 43200
assert stl == approx(14.8)
def test_list():
iyd, utsec, stl = sd.datetime2gtd(T, glon=42)
assert (iyd == 183).all()
assert utsec == approx((43200, 0, 43200, 43200))
assert stl == approx((14.8, 2.8, 14.8, 14.8))
def test_glon():
glon = range(-180, 180 + 45, 45)
iyd, utsec, stl = sd.datetime2gtd(T, glon)
Estl = np.array(
[
np.arange(0, 24 + 3, 3),
np.arange(-12, 12 + 3, 3),
np.arange(0, 24 + 3, 3),
np.arange(0, 24 + 3, 3),
]
)
assert utsec == approx((43200, 0, 43200, 43200))
assert stl == approx(Estl)
|
[
"numpy.datetime64",
"sciencedates.datetime2gtd",
"datetime.datetime",
"numpy.arange",
"pytest.approx"
] |
[((218, 257), 'datetime.datetime', 'datetime.datetime', (['(2013)', '(7)', '(2)', '(12)', '(0)', '(0)'], {}), '(2013, 7, 2, 12, 0, 0)\n', (235, 257), False, 'import datetime\n'), ((290, 309), 'numpy.datetime64', 'np.datetime64', (['T[0]'], {}), '(T[0])\n', (303, 309), True, 'import numpy as np\n'), ((415, 442), 'sciencedates.datetime2gtd', 'sd.datetime2gtd', (['t'], {'glon': '(42)'}), '(t, glon=42)\n', (430, 442), True, 'import sciencedates as sd\n'), ((618, 645), 'sciencedates.datetime2gtd', 'sd.datetime2gtd', (['t'], {'glon': '(42)'}), '(t, glon=42)\n', (633, 645), True, 'import sciencedates as sd\n'), ((821, 848), 'sciencedates.datetime2gtd', 'sd.datetime2gtd', (['t'], {'glon': '(42)'}), '(t, glon=42)\n', (836, 848), True, 'import sciencedates as sd\n'), ((1027, 1054), 'sciencedates.datetime2gtd', 'sd.datetime2gtd', (['t'], {'glon': '(42)'}), '(t, glon=42)\n', (1042, 1054), True, 'import sciencedates as sd\n'), ((1177, 1204), 'sciencedates.datetime2gtd', 'sd.datetime2gtd', (['T'], {'glon': '(42)'}), '(T, glon=42)\n', (1192, 1204), True, 'import sciencedates as sd\n'), ((1418, 1442), 'sciencedates.datetime2gtd', 'sd.datetime2gtd', (['T', 'glon'], {}), '(T, glon)\n', (1433, 1442), True, 'import sciencedates as sd\n'), ((510, 522), 'pytest.approx', 'approx', (['(14.8)'], {}), '(14.8)\n', (516, 522), False, 'from pytest import approx\n'), ((713, 725), 'pytest.approx', 'approx', (['(14.8)'], {}), '(14.8)\n', (719, 725), False, 'from pytest import approx\n'), ((912, 923), 'pytest.approx', 'approx', (['(2.8)'], {}), '(2.8)\n', (918, 923), False, 'from pytest import approx\n'), ((1122, 1134), 'pytest.approx', 'approx', (['(14.8)'], {}), '(14.8)\n', (1128, 1134), False, 'from pytest import approx\n'), ((1256, 1288), 'pytest.approx', 'approx', (['(43200, 0, 43200, 43200)'], {}), '((43200, 0, 43200, 43200))\n', (1262, 1288), False, 'from pytest import approx\n'), ((1307, 1338), 'pytest.approx', 'approx', (['(14.8, 2.8, 14.8, 14.8)'], {}), '((14.8, 2.8, 14.8, 14.8))\n', 
(1313, 1338), False, 'from pytest import approx\n'), ((1662, 1694), 'pytest.approx', 'approx', (['(43200, 0, 43200, 43200)'], {}), '((43200, 0, 43200, 43200))\n', (1668, 1694), False, 'from pytest import approx\n'), ((1713, 1725), 'pytest.approx', 'approx', (['Estl'], {}), '(Estl)\n', (1719, 1725), False, 'from pytest import approx\n'), ((1487, 1510), 'numpy.arange', 'np.arange', (['(0)', '(24 + 3)', '(3)'], {}), '(0, 24 + 3, 3)\n', (1496, 1510), True, 'import numpy as np\n'), ((1524, 1549), 'numpy.arange', 'np.arange', (['(-12)', '(12 + 3)', '(3)'], {}), '(-12, 12 + 3, 3)\n', (1533, 1549), True, 'import numpy as np\n'), ((1563, 1586), 'numpy.arange', 'np.arange', (['(0)', '(24 + 3)', '(3)'], {}), '(0, 24 + 3, 3)\n', (1572, 1586), True, 'import numpy as np\n'), ((1600, 1623), 'numpy.arange', 'np.arange', (['(0)', '(24 + 3)', '(3)'], {}), '(0, 24 + 3, 3)\n', (1609, 1623), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import ovito as ov
import glob
import numpy as np
import matplotlib.pyplot as plt
import os
from scipy import optimize
import pickle
from itertools import product
from multiprocessing import get_context
def func(x, phi_l):
a = (4/3)*np.pi*-2
b = 2*1.919
q = (a*(x**3)) + (b*x) - phi_l
return q
def main(file):
# hgs = np.zeros(0)
# time = np.zeros(0)
# vols = np.zeros(0)
try:
pipeline = ov.io.import_file(file)
#select W particles in the system.
pipeline.modifiers.append(ov.modifiers.SelectTypeModifier(property = 'Particle Type',
types = {'W'}))
solid_vols = np.zeros(0)
cell_vols = np.zeros(0)
sa = np.zeros(0)
calc_area = np.zeros(0)
mod1 = ov.modifiers.ConstructSurfaceModifier(only_selected = True,
radius = 1,
smoothing_level = 8,
identify_regions = True)
pipeline.modifiers.append(mod1)
loops = np.linspace(10,16,15)
for i in loops:
mod1.radius = np.round(i, decimals = 1)
data = pipeline.compute()
area = data.attributes['ConstructSurfaceMesh.surface_area']
solid_volume = data.attributes['ConstructSurfaceMesh.filled_volume']
cell_volume = data.attributes['ConstructSurfaceMesh.cell_volume']
fraction = solid_volume/cell_volume
tprop = data.particles['Particle Type']
#get the c5a id
c5a_id = tprop.type_by_name('C5A').id
try:
c2_id = tprop.type_by_name('C2').id
n_lip = np.count_nonzero(tprop == c5a_id) + np.count_nonzero(tprop == c2_id)
except KeyError:
n_lip = np.count_nonzero(tprop == c5a_id)
pass
#count the number of terminal carbons
phi_l = 1-fraction
#need a in nm not Angstroms
a = data.cell.matrix[0,0]
sigma = 1.919
chi = -2
root = optimize.fsolve(func, x0 = [0], args = phi_l)
l = root[0]*a
A_L = (sigma*(a**2))+((2*np.pi*chi)*(l**2))
a_0 = ((2*A_L)/(n_lip))
calc_area = np.append(calc_area, a_0)
solid_vols = np.append(solid_vols, solid_volume)
cell_vols = np.append(cell_vols, cell_volume)
sa = np.append(sa, area/n_lip)
# print('MAKING PLOT NOW')
# fig, (ax0,ax1) = plt.subplots(2,1,sharex = True)
# ax0.scatter(loops, v)
# ax1.scatter(loops, sa, label = 'measured')
# ax1.scatter(loops, calc_area, label = 'calculated')
# ax1.legend()
# ax0.set_ylabel('Surface Volume\nFraction in Unit Cell')
# ax1.set_ylabel('Surface Area\nper molecule (Å$^2$)')
# ax0.axhline(v.mean())
# ax1.axhline(sa.mean())
# ax1.axhline(calc_area.mean())
# ax0.text(loops[1],v.mean(),'Mean = '+str(v.mean())[:4])
# ax1.text(loops[1],sa.mean(),'Mean = '+str(sa.mean())[:4] +' Å')
# ax1.text(loops[1],calc_area.mean(),'Mean = '+str(calc_area.mean())[:4] +' Å')
# ax1.set_xlabel('Probe Sphere Radius')
# fig.subplots_adjust(hspace=0.1)
# name = files[f].split('.pdb')[0] + ' headgroup analysis.png'
# fig.savefig(name, dpi =200)
d = {'Solid Volume': solid_vols,
'Cell Volume': cell_vols,
'Surface Area per Lipid': sa,
'Calculated Area per Lipid': calc_area,
'Number of Lipids': n_lip}
dname = file.split('.pdb')[0] + '_headgroup_analysis.p'
pickle.dump(d, open(dname, 'wb'))
# t = file.split('md')[1].split('-')[0]
# hgs = np.append(hgs, sa.mean())
# vols = np.append(vols, v.mean())
# time = np.append(time, int(t))
except RuntimeError:
print('error!', file)
pass
# fig1, ax2 = plt.subplots(1,1)
# ax2.scatter(time/100, hgs)
# ax2.set_xlabel('Simulation Time ($\mu$s)')
# ax2.set_ylabel('Mean Head Group Area (Å$^{2}$)')
# ax2.axhline(hgs.mean())
# ax2.set_xlim(0,time.max()/100+0.1)
# ax2.text(0,hgs.mean(), 'mean = %.3f, std = %.3f' %(hgs.mean(), hgs.std()))
# fig1.savefig(folder+'/head group areas.png', dpi =200)
# fig2, ax3 = plt.subplots(1,1)
# ax3.scatter(time/100, vols)
# ax3.set_xlabel('Simulation Time ($\mu$s)')
# ax3.set_ylabel('Fractional Volume of Surface')
# ax3.axhline(vols.mean())
# ax3.set_xlim(0,time.max()/100+0.1)
# ax3.text(0,vols.mean(), 'mean = %.3f, std = %.3f' %(vols.mean(), vols.std()))
# fig2.savefig(folder+'/volumes.png', dpi =200)
if __name__ == '__main__':
folder = os.getcwd()
files = glob.glob(folder+'/*.pdb')
paramlist = list(product(files))
k = len(paramlist)/14
if k < 1:
csize = 1
else:
csize = int(k)
print(paramlist, csize)
with get_context("spawn").Pool(processes = 14) as pool:
pool.starmap(main, paramlist, chunksize = csize)
|
[
"ovito.io.import_file",
"numpy.count_nonzero",
"os.getcwd",
"ovito.modifiers.ConstructSurfaceModifier",
"numpy.zeros",
"scipy.optimize.fsolve",
"multiprocessing.get_context",
"numpy.append",
"numpy.linspace",
"glob.glob",
"itertools.product",
"numpy.round",
"ovito.modifiers.SelectTypeModifier"
] |
[((5286, 5297), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5295, 5297), False, 'import os\n'), ((5315, 5343), 'glob.glob', 'glob.glob', (["(folder + '/*.pdb')"], {}), "(folder + '/*.pdb')\n", (5324, 5343), False, 'import glob\n'), ((507, 530), 'ovito.io.import_file', 'ov.io.import_file', (['file'], {}), '(file)\n', (524, 530), True, 'import ovito as ov\n'), ((802, 813), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (810, 813), True, 'import numpy as np\n'), ((838, 849), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (846, 849), True, 'import numpy as np\n'), ((867, 878), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (875, 878), True, 'import numpy as np\n'), ((899, 910), 'numpy.zeros', 'np.zeros', (['(0)'], {}), '(0)\n', (907, 910), True, 'import numpy as np\n'), ((935, 1048), 'ovito.modifiers.ConstructSurfaceModifier', 'ov.modifiers.ConstructSurfaceModifier', ([], {'only_selected': '(True)', 'radius': '(1)', 'smoothing_level': '(8)', 'identify_regions': '(True)'}), '(only_selected=True, radius=1,\n smoothing_level=8, identify_regions=True)\n', (972, 1048), True, 'import ovito as ov\n'), ((1277, 1300), 'numpy.linspace', 'np.linspace', (['(10)', '(16)', '(15)'], {}), '(10, 16, 15)\n', (1288, 1300), True, 'import numpy as np\n'), ((5364, 5378), 'itertools.product', 'product', (['files'], {}), '(files)\n', (5371, 5378), False, 'from itertools import product\n'), ((621, 691), 'ovito.modifiers.SelectTypeModifier', 'ov.modifiers.SelectTypeModifier', ([], {'property': '"""Particle Type"""', 'types': "{'W'}"}), "(property='Particle Type', types={'W'})\n", (652, 691), True, 'import ovito as ov\n'), ((1350, 1373), 'numpy.round', 'np.round', (['i'], {'decimals': '(1)'}), '(i, decimals=1)\n', (1358, 1373), True, 'import numpy as np\n'), ((2450, 2491), 'scipy.optimize.fsolve', 'optimize.fsolve', (['func'], {'x0': '[0]', 'args': 'phi_l'}), '(func, x0=[0], args=phi_l)\n', (2465, 2491), False, 'from scipy import optimize\n'), ((2673, 2698), 'numpy.append', 
'np.append', (['calc_area', 'a_0'], {}), '(calc_area, a_0)\n', (2682, 2698), True, 'import numpy as np\n'), ((2724, 2759), 'numpy.append', 'np.append', (['solid_vols', 'solid_volume'], {}), '(solid_vols, solid_volume)\n', (2733, 2759), True, 'import numpy as np\n'), ((2784, 2817), 'numpy.append', 'np.append', (['cell_vols', 'cell_volume'], {}), '(cell_vols, cell_volume)\n', (2793, 2817), True, 'import numpy as np\n'), ((2835, 2862), 'numpy.append', 'np.append', (['sa', '(area / n_lip)'], {}), '(sa, area / n_lip)\n', (2844, 2862), True, 'import numpy as np\n'), ((5534, 5554), 'multiprocessing.get_context', 'get_context', (['"""spawn"""'], {}), "('spawn')\n", (5545, 5554), False, 'from multiprocessing import get_context\n'), ((1982, 2015), 'numpy.count_nonzero', 'np.count_nonzero', (['(tprop == c5a_id)'], {}), '(tprop == c5a_id)\n', (1998, 2015), True, 'import numpy as np\n'), ((2018, 2050), 'numpy.count_nonzero', 'np.count_nonzero', (['(tprop == c2_id)'], {}), '(tprop == c2_id)\n', (2034, 2050), True, 'import numpy as np\n'), ((2105, 2138), 'numpy.count_nonzero', 'np.count_nonzero', (['(tprop == c5a_id)'], {}), '(tprop == c5a_id)\n', (2121, 2138), True, 'import numpy as np\n')]
|
from flask import render_template
from flask import Flask, render_template, request, current_app, url_for
from flask import request, jsonify,make_response,Markup,send_file
from flask_cors import *
import json
import requests
from flask import make_response
from flask import Response
import os
server = Flask(__name__)
# server.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://123456:123456@localhost:3306/token1' # 这里登陆的是root用户,要填上自己的密码,MySQL的默认端口是3306,填上之前创建的数据库名text1
#需要实现只对少数端口开放。
@server.route('/',methods=['get','post'])
def test():
response = make_response(render_template('test.html'))
return response
@server.route('/get_user_id',methods=['get','post'])
def get_user_id():
return '123'
@server.route('/index',methods=['get','post'])
def index():
response = make_response(render_template('index.html'))
# response.mimetype = 'application/wasm'
return response
@server.route('/decode',methods=['get','post'])
def decode():
if request.method == 'POST':
url=request.form.get("ip")
sq=request.form.get("sq")
cypher =request.form.get("cypher")
# user_id = request.form.get("user_id")
# token = request.form.get("token")
else:
url=request.args.get("ip")
sq=request.args.get("sq")
cypher =request.args.get("cypher")
# user_id = request.args.get("user_id")
# token = request.args.get("token")
data = {
'ip': url,
'sq':sq,
'cypher': cypher,
# 'user_id':user_id,
# 'token':token,
}
print(data)
url='http://www.hyluz.cn:5000/server_decode?ip='+url+'&sq='+sq+'&cypher='+cypher
result = requests.get(url=url)
result = json.loads(result.text)
# print(type(result))
result['result']=str(result['result'])
print(result)
return result
@server.route('/get_server_res',methods=['get','post'])
def get_server_res():
if request.method == 'POST':
url=request.form.get("ip")
sq=request.form.get("sq")
cypher =request.form.get("cypher")
user_id = request.form.get("user_id")
token = request.form.get("token")
else:
url=request.args.get("ip")
sq=request.args.get("sq")
cypher =request.args.get("cypher")
user_id = request.args.get("user_id")
token = request.args.get("token")
data = {
'ip': url,
'sq':sq,
'cypher': cypher,
# 'user_id':user_id,
# 'token':token,
}
print(data)
url1='http://www.hyluz.cn:5000/check_token?ip='+url+'&user_id='+user_id+'&token='+token
result0=requests.get(url=url1)
result0 = json.loads(result0.text)
print(result0)
url2='http://www.hyluz.cn:5000/server_decode?ip='+url+'&sq='+sq+'&cypher='+cypher
result = requests.get(url=url2)
result = json.loads(result.text)
# print(type(result))
result['result']=str(result['result'])
print(result)
# print(result['code'])
# return result
if result0['code']==200 and result['result']=="b'I am front'":
print(1)
return {'code':200,'result':'I am server'}
else:
print(2)
return {'code':10000,'result':'wrong'}
# @server.route('/get_wasm',methods=['get','post'])
# def get_wasm():
#
# response =''
#
# response = make_response(server.send_static_file('encode.wasm'))
# response.mimetype = 'application/wasm'
# return response
# @server.route('/get_wasm1')
# def wasm_file():
# return server.send_file('/static/encode.wasm', mimetype = 'application/wasm');
# @server.route('/get_wasm')
# def get_wasm(path):
# base_dir = os.path.dirname(__file__)
# resp = make_response(open(os.path.join(base_dir,path)).read())
#
# resp.headers["Content-type"]='application/wasm'
# return resp
@server.route('/index1',methods=['get','post'])
def index1():
response = make_response(render_template('encode.html'))
return response
# @server.route('/get_msg',methods=['get','post'])
# def get_msg():
# if request.method == 'POST':
# mm=request.form.get("mm")
# token=request.form.get("token")
# else:
# mm=request.args.get("mm")
# token = request.args.get("token")
#
#
# data = {
# 'ip': ip,
# 'user_id': user_id,
# 'token': token
# }
#
# result = requests.post(url="http://127.0.0.1:5000/check_token", data=data)
# result = json.loads(result.text)
# if mm!='I am front':
# return 'not a true password!'
# else:
# return 'I am server!'
# return response
if __name__=='__main__':
# init_ip()
# result = requests.post("http://127.0.0.1:5000/test")
server.run(debug='true' , port=8886, host='0.0.0.0') # 指定端口、host,0.0.0.0代表不管几个网卡,任何ip都可以访问
|
[
"json.loads",
"flask.request.args.get",
"flask.request.form.get",
"flask.Flask",
"flask.render_template",
"requests.get"
] |
[((303, 318), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (308, 318), False, 'from flask import Flask, render_template, request, current_app, url_for\n'), ((1657, 1678), 'requests.get', 'requests.get', ([], {'url': 'url'}), '(url=url)\n', (1669, 1678), False, 'import requests\n'), ((1693, 1716), 'json.loads', 'json.loads', (['result.text'], {}), '(result.text)\n', (1703, 1716), False, 'import json\n'), ((2601, 2623), 'requests.get', 'requests.get', ([], {'url': 'url1'}), '(url=url1)\n', (2613, 2623), False, 'import requests\n'), ((2638, 2662), 'json.loads', 'json.loads', (['result0.text'], {}), '(result0.text)\n', (2648, 2662), False, 'import json\n'), ((2781, 2803), 'requests.get', 'requests.get', ([], {'url': 'url2'}), '(url=url2)\n', (2793, 2803), False, 'import requests\n'), ((2818, 2841), 'json.loads', 'json.loads', (['result.text'], {}), '(result.text)\n', (2828, 2841), False, 'import json\n'), ((567, 595), 'flask.render_template', 'render_template', (['"""test.html"""'], {}), "('test.html')\n", (582, 595), False, 'from flask import Flask, render_template, request, current_app, url_for\n'), ((797, 826), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (812, 826), False, 'from flask import Flask, render_template, request, current_app, url_for\n'), ((1001, 1023), 'flask.request.form.get', 'request.form.get', (['"""ip"""'], {}), "('ip')\n", (1017, 1023), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((1035, 1057), 'flask.request.form.get', 'request.form.get', (['"""sq"""'], {}), "('sq')\n", (1051, 1057), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((1074, 1100), 'flask.request.form.get', 'request.form.get', (['"""cypher"""'], {}), "('cypher')\n", (1090, 1100), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((1215, 1237), 'flask.request.args.get', 'request.args.get', (['"""ip"""'], {}), "('ip')\n", 
(1231, 1237), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((1249, 1271), 'flask.request.args.get', 'request.args.get', (['"""sq"""'], {}), "('sq')\n", (1265, 1271), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((1288, 1314), 'flask.request.args.get', 'request.args.get', (['"""cypher"""'], {}), "('cypher')\n", (1304, 1314), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((1946, 1968), 'flask.request.form.get', 'request.form.get', (['"""ip"""'], {}), "('ip')\n", (1962, 1968), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((1980, 2002), 'flask.request.form.get', 'request.form.get', (['"""sq"""'], {}), "('sq')\n", (1996, 2002), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((2019, 2045), 'flask.request.form.get', 'request.form.get', (['"""cypher"""'], {}), "('cypher')\n", (2035, 2045), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((2064, 2091), 'flask.request.form.get', 'request.form.get', (['"""user_id"""'], {}), "('user_id')\n", (2080, 2091), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((2108, 2133), 'flask.request.form.get', 'request.form.get', (['"""token"""'], {}), "('token')\n", (2124, 2133), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((2156, 2178), 'flask.request.args.get', 'request.args.get', (['"""ip"""'], {}), "('ip')\n", (2172, 2178), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((2190, 2212), 'flask.request.args.get', 'request.args.get', (['"""sq"""'], {}), "('sq')\n", (2206, 2212), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((2229, 2255), 'flask.request.args.get', 'request.args.get', (['"""cypher"""'], {}), "('cypher')\n", (2245, 2255), False, 'from flask import request, jsonify, 
make_response, Markup, send_file\n'), ((2274, 2301), 'flask.request.args.get', 'request.args.get', (['"""user_id"""'], {}), "('user_id')\n", (2290, 2301), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((2318, 2343), 'flask.request.args.get', 'request.args.get', (['"""token"""'], {}), "('token')\n", (2334, 2343), False, 'from flask import request, jsonify, make_response, Markup, send_file\n'), ((3882, 3912), 'flask.render_template', 'render_template', (['"""encode.html"""'], {}), "('encode.html')\n", (3897, 3912), False, 'from flask import Flask, render_template, request, current_app, url_for\n')]
|
import cv2
import torch
import numpy as np
from torch import nn
from collections import OrderedDict
from torch.nn.functional import one_hot
from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode
from utils.box.ext.rotate_overlap_diff.oriented_iou_loss import cal_iou, cal_diou, cal_giou
from utils.box.rbbox import rbbox_batched_nms as nms
from utils.utils import soft_weight
def iou_obb_diff(gts, preds, type='diou'):
gt_bboxes = angle_switch(gts)
pred_bboxes = angle_switch(preds)
if type == 'riou':
iou, *_ = cal_iou(gt_bboxes.unsqueeze(0), pred_bboxes.unsqueeze(0))
linear = False
if linear:
iou_loss = 1 - iou
else:
iou_loss = - iou.clamp(min=1e-6).log()
elif type in ['giou', 'diou']:
riou_func = cal_giou if type == 'giou' else cal_diou
iou_loss, iou = riou_func(gt_bboxes.unsqueeze(0), pred_bboxes.unsqueeze(0))
else:
raise NotImplementedError
return iou, iou_loss
def match(bboxes_xyxy, anchors_xyxy, bboxes, anchors, iou_thresh, process=None, batch=32):
# Reduce GPU memory usage
ious = torch.cat([bbox_iou(bboxes_xyxy[i: i + batch], anchors_xyxy) for i in range(0, bboxes_xyxy.size(0), batch)])
max_ious, bbox_indexes = torch.max(ious, dim=0)
mask_neg = max_ious < iou_thresh[0]
mask_pos = max_ious > iou_thresh[1]
max_gt, argmax_gt = torch.max(ious, dim=1)
if (max_gt <= iou_thresh[1]).any():
mask_pos[argmax_gt[max_gt <= iou_thresh[1]]] = True
mask_neg[argmax_gt[max_gt <= iou_thresh[1]]] = False
pnms_thres = soft_weight(process)
r_anchors = torch.cat([anchors, torch.zeros_like(anchors[:,0]).unsqueeze(1)], -1)
scores = iou_obb_diff(bboxes[bbox_indexes[mask_pos]], r_anchors[mask_pos], type='riou')[0].squeeze(0)
labels = torch.zeros_like(scores)
keeps = nms(r_anchors[mask_pos], scores, labels, pnms_thres)[:500]
mask_keep = mask_pos.nonzero()[keeps]
mask_pos = torch.zeros_like(mask_pos)
mask_pos[mask_keep] = True
iou_balance = True
num_pos = mask_pos.sum().item()
if not iou_balance:
ratio = 1 # neg2pos
num_neg = ratio * num_pos
neg_indices = mask_neg.nonzero().squeeze()
sampled_neg_indices = np.random.choice(neg_indices.cpu(), size=num_neg)
mask_neg.fill_(False)[sampled_neg_indices] = True
else:
ratio_hard = 2 # hard2pos
ratio_bg = 100 # bg2pos
num_hard = ratio_hard * num_pos
num_bg = ratio_bg * num_pos
hard_indices = ((max_ious > 0.1) & (max_ious < iou_thresh[0])).nonzero().squeeze()
bg_indices = (max_ious < 1e-2).nonzero().squeeze()
sampled_hard_indices = np.random.choice(hard_indices.cpu(), size=num_hard)
sampled_bg_indices = np.random.choice(bg_indices.cpu(), size=num_bg)
sampled_neg_indices = np.concatenate([sampled_bg_indices, sampled_hard_indices])
mask_neg.fill_(False)[sampled_neg_indices] = True
return mask_pos, mask_neg, bbox_indexes
def calc_loss(pred_cls, pred_loc, targets, anchors, iou_thresh, variance, balance, process=None):
device = pred_cls.device
num_classes = pred_cls.size(-1)
weight_pos, weight_neg = 2 * balance, 2 * (1 - balance)
anchors_xyxy = bbox_switch(anchors, 'xywh', 'xyxy')
criterion_cls = nn.BCEWithLogitsLoss(reduction='none')
criterion_loc = nn.SmoothL1Loss(reduction='sum')
loss_cls, loss_loc = torch.zeros([2], dtype=torch.float, device=device, requires_grad=True)
num_pos = 0
for i, target in enumerate(targets):
if target:
bboxes = target['bboxes'].to(device)
labels = target['labels'].to(device)
bboxes_xyxy = bbox_switch(bboxes[:, :4], 'xywh', 'xyxy')
pred_box = decode(pred_loc[i], anchors, variance)
mask_pos, mask_neg, bbox_indexes = match(bboxes_xyxy, anchors_xyxy, bboxes, anchors, iou_thresh, process=process)
labels = labels[bbox_indexes]
indexes_pos = bbox_indexes[mask_pos]
bboxes_matched = bboxes[indexes_pos]
anchors_matched = anchors[mask_pos]
bboxes_pred = pred_loc[i][mask_pos] # offsets
gt_bboxes, det_bboxes = encode(bboxes_matched, bboxes_pred, anchors_matched, variance)
labels = one_hot(labels, num_classes=num_classes).float()
labels[mask_neg] = 0
loss_cls_ = criterion_cls(pred_cls[i], labels)
loss_cls = loss_cls + loss_cls_[mask_pos].sum() * weight_pos + loss_cls_[mask_neg].sum() * weight_neg
use_iou = False
if use_iou:
rious, riou_loss = iou_obb_diff(bboxes_matched, pred_box[mask_pos])
loss_loc = loss_loc + riou_loss.sum()
else:
loss_loc = loss_loc + criterion_loc(gt_bboxes, det_bboxes)
num_pos += mask_pos.sum().item()
else:
loss_cls = loss_cls + criterion_cls(pred_cls[i], torch.zeros_like(pred_cls[i])).sum()
num_pos = max(num_pos, 1)
return OrderedDict([('loss_cls', loss_cls / num_pos), ('loss_loc', loss_loc / num_pos)])
|
[
"torch.nn.BCEWithLogitsLoss",
"torch.zeros_like",
"utils.box.bbox.encode",
"utils.box.bbox.angle_switch",
"torch.nn.functional.one_hot",
"utils.utils.soft_weight",
"utils.box.rbbox.rbbox_batched_nms",
"utils.box.bbox.decode",
"torch.max",
"torch.zeros",
"utils.box.bbox.bbox_switch",
"torch.nn.SmoothL1Loss",
"collections.OrderedDict",
"numpy.concatenate",
"utils.box.bbox.bbox_iou"
] |
[((461, 478), 'utils.box.bbox.angle_switch', 'angle_switch', (['gts'], {}), '(gts)\n', (473, 478), False, 'from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode\n'), ((497, 516), 'utils.box.bbox.angle_switch', 'angle_switch', (['preds'], {}), '(preds)\n', (509, 516), False, 'from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode\n'), ((1276, 1298), 'torch.max', 'torch.max', (['ious'], {'dim': '(0)'}), '(ious, dim=0)\n', (1285, 1298), False, 'import torch\n'), ((1403, 1425), 'torch.max', 'torch.max', (['ious'], {'dim': '(1)'}), '(ious, dim=1)\n', (1412, 1425), False, 'import torch\n'), ((1610, 1630), 'utils.utils.soft_weight', 'soft_weight', (['process'], {}), '(process)\n', (1621, 1630), False, 'from utils.utils import soft_weight\n'), ((1836, 1860), 'torch.zeros_like', 'torch.zeros_like', (['scores'], {}), '(scores)\n', (1852, 1860), False, 'import torch\n'), ((1989, 2015), 'torch.zeros_like', 'torch.zeros_like', (['mask_pos'], {}), '(mask_pos)\n', (2005, 2015), False, 'import torch\n'), ((3288, 3324), 'utils.box.bbox.bbox_switch', 'bbox_switch', (['anchors', '"""xywh"""', '"""xyxy"""'], {}), "(anchors, 'xywh', 'xyxy')\n", (3299, 3324), False, 'from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode\n'), ((3346, 3384), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {'reduction': '"""none"""'}), "(reduction='none')\n", (3366, 3384), False, 'from torch import nn\n'), ((3405, 3437), 'torch.nn.SmoothL1Loss', 'nn.SmoothL1Loss', ([], {'reduction': '"""sum"""'}), "(reduction='sum')\n", (3420, 3437), False, 'from torch import nn\n'), ((3463, 3533), 'torch.zeros', 'torch.zeros', (['[2]'], {'dtype': 'torch.float', 'device': 'device', 'requires_grad': '(True)'}), '([2], dtype=torch.float, device=device, requires_grad=True)\n', (3474, 3533), False, 'import torch\n'), ((5070, 5155), 'collections.OrderedDict', 'OrderedDict', (["[('loss_cls', loss_cls / num_pos), ('loss_loc', loss_loc / 
num_pos)]"], {}), "([('loss_cls', loss_cls / num_pos), ('loss_loc', loss_loc /\n num_pos)])\n", (5081, 5155), False, 'from collections import OrderedDict\n'), ((1873, 1925), 'utils.box.rbbox.rbbox_batched_nms', 'nms', (['r_anchors[mask_pos]', 'scores', 'labels', 'pnms_thres'], {}), '(r_anchors[mask_pos], scores, labels, pnms_thres)\n', (1876, 1925), True, 'from utils.box.rbbox import rbbox_batched_nms as nms\n'), ((2879, 2937), 'numpy.concatenate', 'np.concatenate', (['[sampled_bg_indices, sampled_hard_indices]'], {}), '([sampled_bg_indices, sampled_hard_indices])\n', (2893, 2937), True, 'import numpy as np\n'), ((1149, 1197), 'utils.box.bbox.bbox_iou', 'bbox_iou', (['bboxes_xyxy[i:i + batch]', 'anchors_xyxy'], {}), '(bboxes_xyxy[i:i + batch], anchors_xyxy)\n', (1157, 1197), False, 'from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode\n'), ((3734, 3776), 'utils.box.bbox.bbox_switch', 'bbox_switch', (['bboxes[:, :4]', '"""xywh"""', '"""xyxy"""'], {}), "(bboxes[:, :4], 'xywh', 'xyxy')\n", (3745, 3776), False, 'from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode\n'), ((3800, 3838), 'utils.box.bbox.decode', 'decode', (['pred_loc[i]', 'anchors', 'variance'], {}), '(pred_loc[i], anchors, variance)\n', (3806, 3838), False, 'from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode\n'), ((4249, 4311), 'utils.box.bbox.encode', 'encode', (['bboxes_matched', 'bboxes_pred', 'anchors_matched', 'variance'], {}), '(bboxes_matched, bboxes_pred, anchors_matched, variance)\n', (4255, 4311), False, 'from utils.box.bbox import bbox_switch, angle_switch, bbox_iou, encode, decode\n'), ((1667, 1698), 'torch.zeros_like', 'torch.zeros_like', (['anchors[:, 0]'], {}), '(anchors[:, 0])\n', (1683, 1698), False, 'import torch\n'), ((4334, 4374), 'torch.nn.functional.one_hot', 'one_hot', (['labels'], {'num_classes': 'num_classes'}), '(labels, num_classes=num_classes)\n', (4341, 4374), False, 'from torch.nn.functional 
import one_hot\n'), ((4992, 5021), 'torch.zeros_like', 'torch.zeros_like', (['pred_cls[i]'], {}), '(pred_cls[i])\n', (5008, 5021), False, 'import torch\n')]
|
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Workarounds for sympy issues"""
from typing import Any
import sympy
import numpy as np
def proper_repr(value: Any) -> str:
    """Return a repr string that actually parses for sympy/numpy values.

    Plain ``repr`` output for sympy expressions and numpy arrays is not
    valid Python source; this produces an eval-able form instead.
    """
    if isinstance(value, sympy.Basic):
        text = sympy.srepr(value)
        # HACK: work around https://github.com/sympy/sympy/issues/16074
        # by qualifying a handful of known sympy names (only handles a
        # few cases).
        for token in ('Symbol', 'pi', 'Mul', 'Add', 'Mod', 'Integer',
                      'Float', 'Rational'):
            text = text.replace(token, 'sympy.' + token)
        return text
    if isinstance(value, np.ndarray):
        return 'np.array({!r})'.format(value.tolist())
    return repr(value)
|
[
"sympy.srepr"
] |
[((848, 866), 'sympy.srepr', 'sympy.srepr', (['value'], {}), '(value)\n', (859, 866), False, 'import sympy\n')]
|
import os
import numpy as np
import string
import re
dataDir = '/u/cs401/A3/data/'
# dataDir = './subdata/'
def Levenshtein(r, h):
    """
    Calculation of WER with Levenshtein distance.
    Works only for iterables up to 254 elements (uint8).
    O(nm) time and space complexity.
    Parameters
    ----------
    r : list of strings (reference)
    h : list of strings (hypothesis)
    Returns
    -------
    (WER, nS, nI, nD): (float, int, int, int) WER, number of substitutions, insertions, and deletions respectively
    An empty reference yields inf (or 0.0 when both are empty).
    Examples
    --------
    >>> wer("who is there".split(), "is there".split())
    0.333 0 0 1
    >>> wer("who is there".split(), "".split())
    1.0 0 0 3
    >>> wer("".split(), "who is there".split())
    Inf 0 3 0
    """
    n = len(r)
    m = len(h)
    # R[i, j] = edit distance between r[:i] and h[:j]
    R = np.zeros((n + 1, m + 1))  # matrix of distances
    B = np.zeros((n + 1, m + 1))  # backtracing matrix
    # initialize R: transforming to/from the empty prefix costs its length
    R[:, 0] = np.arange(n + 1)
    R[0, :] = np.arange(m + 1)
    # initialize backtrace, first row can only go left, first column can only go up
    B[1:, 0] = 1
    B[0, 1:] = 2
    # fill the DP tables
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            dele = R[i - 1, j] + 1
            sub = R[i - 1, j - 1] if r[i - 1] == h[j - 1] else R[i - 1, j - 1] + 1
            ins = R[i, j - 1] + 1
            R[i, j] = min(dele, sub, ins)
            if(R[i, j] == dele):
                B[i, j] = 1  # up
            elif(R[i, j] == ins):
                B[i, j] = 2  # left
            else:
                B[i, j] = 3  # up-left
    # get wer; guard n == 0 explicitly so an empty reference yields inf
    # (or 0.0 when both sequences are empty) instead of triggering a numpy
    # divide-by-zero RuntimeWarning (and nan for the 0/0 case).  Also cast
    # to a plain Python float rather than returning a numpy scalar.
    if n > 0:
        wer = float(R[n, m]) / n
    else:
        wer = float("inf") if m > 0 else 0.0
    # backtrace to get nS, nI, nD
    nS, nI, nD = 0, 0, 0
    i, j = n, m
    while i != 0 or j != 0:
        if(B[i, j] == 1):  # up, delete
            nD += 1
            i -= 1
        elif(B[i, j] == 2):  # left, insert
            nI += 1
            j -= 1
        else:
            # up-left: substitution (cost 1) or match (cost 0); only the
            # true substitutions are counted
            if(R[i, j] == R[i - 1, j - 1] + 1):
                nS += 1
            i -= 1
            j -= 1
    return wer, nS, nI, nD
def preprocess(sent):
    """Normalize one transcript line into a list of lowercase tokens.

    Drops the first two fields of the line, removes <...> tags and
    [...] markers, strips all remaining punctuation (square brackets
    excluded), and returns the surviving words.
    """
    # every punctuation character except the square brackets, which are
    # needed by the [...] marker pattern below
    drop_chars = ''.join(c for c in string.punctuation if c not in '[]')
    tokens = sent.strip().lower().split()
    # skip the leading "[i] [label]" fields; keep the transcript proper
    text = ' '.join(tokens[2:])
    # remove <...> and [...] contents from the transcript
    text = re.sub(r"<\w+>", '', text)
    text = re.sub(r"\[\w+\]", '', text)
    # remove the remaining punctuation characters in one pass
    text = text.translate(str.maketrans('', '', drop_chars))
    return text.split()
if __name__ == "__main__":
    # Compare the reference transcripts against the Google and Kaldi ASR
    # hypotheses for every speaker under dataDir, writing per-line WER
    # results and a final mean/std summary to asrDiscussion.txt.
    google_wer = []
    kaldi_wer = []
    # discussion file
    with open("asrDiscussion.txt", "w+") as f:
        for subdir, dirs, files in os.walk(dataDir):
            for speaker in dirs:
                # paths of the reference and the two hypothesis files
                trans_path = os.path.join(dataDir, speaker, 'transcripts.txt')
                google_path = os.path.join(dataDir, speaker, 'transcripts.Google.txt')
                kaldi_path = os.path.join(dataDir, speaker, 'transcripts.Kaldi.txt')
                # read each file under a context manager so the handle is
                # closed promptly (previously three handles leaked per
                # speaker)
                with open(trans_path, 'r') as trans_file:
                    trans = trans_file.readlines()
                with open(google_path, 'r') as google_file:
                    google = google_file.readlines()
                with open(kaldi_path, 'r') as kaldi_file:
                    kaldi = kaldi_file.readlines()
                # only process when transcript is nonempty and reference exist
                valid = len(trans) != 0 and (len(google) != 0 or len(kaldi) != 0)
                if(valid):
                    # NOTE(review): if exactly one of google/kaldi is empty,
                    # this min() makes lines == 0 and nothing is scored even
                    # though 'valid' allowed that case — confirm intent.
                    lines = min(len(trans), len(google), len(kaldi))
                    # for each paired lines, we find its wer
                    for i in range(lines):
                        curr_trans = preprocess(trans[i])
                        # calculate result for google
                        if(len(google) != 0):
                            curr_google = preprocess(google[i])
                            g_wer, g_sub, g_ins, g_del = Levenshtein(curr_trans, curr_google)
                            google_wer.append(g_wer)
                            g_res = speaker + " Google " + str(i) + " " + str(g_wer) + " S: " + str(g_sub) + " I: " + str(g_ins) + " D: " + str(g_del)
                            f.write(g_res)
                            f.write('\n')
                            print(g_res)
                        # calculate result for kaldi
                        if(len(kaldi) != 0):
                            curr_kaldi = preprocess(kaldi[i])
                            k_wer, k_sub, k_ins, k_del = Levenshtein(curr_trans, curr_kaldi)
                            kaldi_wer.append(k_wer)
                            k_res = speaker + " Kaldi " + str(i) + " " + str(k_wer) + " S: " + str(k_sub) + " I: " + str(k_ins) + " D: " + str(k_del)
                            f.write(k_res)
                            f.write('\n')
                            print(k_res)
                    f.write('\n')
        f.write('\n')
        # report summary of result
        g_mean, g_std = np.mean(google_wer), np.std(google_wer)
        k_mean, k_std = np.mean(kaldi_wer), np.std(kaldi_wer)
        g_sum = "Google: mean is " + str(g_mean) + ", std is " + str(g_std)
        k_sum = "Kaldi: mean is " + str(k_mean) + ", std is " + str(k_std)
        f.write(g_sum)
        f.write('\n')
        f.write(k_sum)
        print(g_sum)
        print(k_sum)
        # no explicit close: the with-block closes the file on exit
|
[
"os.path.join",
"numpy.std",
"os.walk",
"numpy.zeros",
"numpy.mean",
"numpy.arange",
"re.sub",
"re.compile"
] |
[((2123, 2147), 'numpy.zeros', 'np.zeros', (['(n + 1, m + 1)'], {}), '((n + 1, m + 1))\n', (2131, 2147), True, 'import numpy as np\n'), ((2178, 2202), 'numpy.zeros', 'np.zeros', (['(n + 1, m + 1)'], {}), '((n + 1, m + 1))\n', (2186, 2202), True, 'import numpy as np\n'), ((2258, 2274), 'numpy.arange', 'np.arange', (['(n + 1)'], {}), '(n + 1)\n', (2267, 2274), True, 'import numpy as np\n'), ((2289, 2305), 'numpy.arange', 'np.arange', (['(m + 1)'], {}), '(m + 1)\n', (2298, 2305), True, 'import numpy as np\n'), ((3691, 3711), 're.compile', 're.compile', (['"""<\\\\w+>"""'], {}), "('<\\\\w+>')\n", (3701, 3711), False, 'import re\n'), ((3724, 3750), 're.sub', 're.sub', (['pattern', '""""""', 'trans'], {}), "(pattern, '', trans)\n", (3730, 3750), False, 'import re\n'), ((3765, 3789), 're.compile', 're.compile', (['"""\\\\[\\\\w+\\\\]"""'], {}), "('\\\\[\\\\w+\\\\]')\n", (3775, 3789), False, 'import re\n'), ((3800, 3826), 're.sub', 're.sub', (['pattern', '""""""', 'trans'], {}), "(pattern, '', trans)\n", (3806, 3826), False, 'import re\n'), ((4121, 4137), 'os.walk', 'os.walk', (['dataDir'], {}), '(dataDir)\n', (4128, 4137), False, 'import os\n'), ((6386, 6405), 'numpy.mean', 'np.mean', (['google_wer'], {}), '(google_wer)\n', (6393, 6405), True, 'import numpy as np\n'), ((6407, 6425), 'numpy.std', 'np.std', (['google_wer'], {}), '(google_wer)\n', (6413, 6425), True, 'import numpy as np\n'), ((6450, 6468), 'numpy.mean', 'np.mean', (['kaldi_wer'], {}), '(kaldi_wer)\n', (6457, 6468), True, 'import numpy as np\n'), ((6470, 6487), 'numpy.std', 'np.std', (['kaldi_wer'], {}), '(kaldi_wer)\n', (6476, 6487), True, 'import numpy as np\n'), ((4261, 4310), 'os.path.join', 'os.path.join', (['dataDir', 'speaker', '"""transcripts.txt"""'], {}), "(dataDir, speaker, 'transcripts.txt')\n", (4273, 4310), False, 'import os\n'), ((4341, 4397), 'os.path.join', 'os.path.join', (['dataDir', 'speaker', '"""transcripts.Google.txt"""'], {}), "(dataDir, speaker, 'transcripts.Google.txt')\n", (4353, 
4397), False, 'import os\n'), ((4427, 4482), 'os.path.join', 'os.path.join', (['dataDir', 'speaker', '"""transcripts.Kaldi.txt"""'], {}), "(dataDir, speaker, 'transcripts.Kaldi.txt')\n", (4439, 4482), False, 'import os\n')]
|
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import os
import re
import sys
import ddt
import mock
from cinder import context
from cinder import exception
from cinder.image import image_utils
from cinder.objects import fields
from cinder import test
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder import utils
from cinder.volume.drivers import remotefs
from cinder.volume import utils as volume_utils
@ddt.ddt
class RemoteFsSnapDriverTestCase(test.TestCase):
_FAKE_MNT_POINT = '/mnt/fake_hash'
    def setUp(self):
        """Build a RemoteFSSnapDriver with mocked I/O plus fake fixtures."""
        super(RemoteFsSnapDriverTestCase, self).setUp()
        self._driver = remotefs.RemoteFSSnapDriver()
        # stub out everything that would touch the filesystem or shell
        self._driver._remotefsclient = mock.Mock()
        self._driver._execute = mock.Mock()
        self._driver._delete = mock.Mock()
        self.context = context.get_admin_context()
        self._fake_volume = fake_volume.fake_volume_obj(
            self.context, provider_location='fake_share')
        self._fake_volume_path = os.path.join(self._FAKE_MNT_POINT,
                                              self._fake_volume.name)
        self._fake_snapshot = fake_snapshot.fake_snapshot_obj(self.context)
        # snapshot files are named "<volume path>.<snapshot id>"
        self._fake_snapshot_path = (self._fake_volume_path + '.' +
                                    self._fake_snapshot.id)
        self._fake_snapshot.volume = self._fake_volume
    @ddt.data({'current_state': 'in-use',
               'acceptable_states': ['available', 'in-use']},
              {'current_state': 'in-use',
               'acceptable_states': ['available'],
               'expected_exception': exception.InvalidVolume})
    @ddt.unpack
    def test_validate_state(self, current_state, acceptable_states,
                            expected_exception=None):
        """_validate_state passes for accepted states, raises otherwise."""
        if expected_exception:
            self.assertRaises(expected_exception,
                              self._driver._validate_state,
                              current_state,
                              acceptable_states)
        else:
            self._driver._validate_state(current_state, acceptable_states)
    def _test_delete_snapshot(self, volume_in_use=False,
                              stale_snapshot=False,
                              is_active_image=True,
                              is_tmp_snap=False):
        """Common driver for the _delete_snapshot scenarios.

        Covers: attached volume (online delete / stale cleanup), snapshot
        being the active image (commit into base), and snapshot with a
        newer snapshot on top (commit + rebase of the upper file).
        """
        # If the snapshot is not the active image, it is guaranteed that
        # another snapshot exists having it as backing file.
        fake_snapshot_name = os.path.basename(self._fake_snapshot_path)
        fake_info = {'active': fake_snapshot_name,
                     self._fake_snapshot.id: fake_snapshot_name}
        fake_snap_img_info = mock.Mock()
        fake_base_img_info = mock.Mock()
        if stale_snapshot:
            fake_snap_img_info.backing_file = None
        else:
            fake_snap_img_info.backing_file = self._fake_volume.name
        fake_snap_img_info.file_format = 'qcow2'
        fake_base_img_info.backing_file = None
        fake_base_img_info.file_format = 'raw'
        # stub every helper that _delete_snapshot touches
        self._driver._local_path_volume_info = mock.Mock(
            return_value=mock.sentinel.fake_info_path)
        self._driver._qemu_img_info = mock.Mock(
            side_effect=[fake_snap_img_info, fake_base_img_info])
        self._driver._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)
        self._driver._validate_state = mock.Mock()
        self._driver._read_info_file = mock.Mock()
        self._driver._write_info_file = mock.Mock()
        self._driver._img_commit = mock.Mock()
        self._driver._rebase_img = mock.Mock()
        self._driver._delete_stale_snapshot = mock.Mock()
        self._driver._delete_snapshot_online = mock.Mock()
        expected_info = {
            'active': fake_snapshot_name,
            self._fake_snapshot.id: fake_snapshot_name
        }
        exp_acceptable_states = ['available', 'in-use', 'backing-up',
                                 'deleting', 'downloading']
        if volume_in_use:
            # attached volume: deletion must go through the online path
            self._fake_snapshot.volume.status = 'backing-up'
            self._fake_snapshot.volume.attach_status = 'attached'
            self._driver._read_info_file.return_value = fake_info
            self._driver._delete_snapshot(self._fake_snapshot)
            self._driver._validate_state.assert_called_once_with(
                self._fake_snapshot.volume.status,
                exp_acceptable_states)
            if stale_snapshot:
                self._driver._delete_stale_snapshot.assert_called_once_with(
                    self._fake_snapshot)
            else:
                expected_online_delete_info = {
                    'active_file': fake_snapshot_name,
                    'snapshot_file': fake_snapshot_name,
                    'base_file': self._fake_volume.name,
                    'base_id': None,
                    'new_base_file': None
                }
                self._driver._delete_snapshot_online.assert_called_once_with(
                    self.context, self._fake_snapshot,
                    expected_online_delete_info)
        elif is_active_image:
            # active image: commit the snapshot file into its base and
            # drop the entry from the info file
            self._driver._read_info_file.return_value = fake_info
            self._driver._delete_snapshot(self._fake_snapshot)
            self._driver._img_commit.assert_called_once_with(
                self._fake_snapshot_path)
            self.assertNotIn(self._fake_snapshot.id, fake_info)
            self._driver._write_info_file.assert_called_once_with(
                mock.sentinel.fake_info_path, fake_info)
        else:
            # a newer snapshot sits on top: after committing, the upper
            # file must be rebased onto the base image
            fake_upper_snap_id = 'fake_upper_snap_id'
            fake_upper_snap_path = (
                self._fake_volume_path + '-snapshot' + fake_upper_snap_id)
            fake_upper_snap_name = os.path.basename(fake_upper_snap_path)
            fake_backing_chain = [
                {'filename': fake_upper_snap_name,
                 'backing-filename': fake_snapshot_name},
                {'filename': fake_snapshot_name,
                 'backing-filename': self._fake_volume.name},
                {'filename': self._fake_volume.name,
                 'backing-filename': None}]
            fake_info[fake_upper_snap_id] = fake_upper_snap_name
            fake_info[self._fake_snapshot.id] = fake_snapshot_name
            fake_info['active'] = fake_upper_snap_name
            expected_info = copy.deepcopy(fake_info)
            del expected_info[self._fake_snapshot.id]
            self._driver._read_info_file.return_value = fake_info
            self._driver._get_backing_chain_for_path = mock.Mock(
                return_value=fake_backing_chain)
            self._driver._delete_snapshot(self._fake_snapshot)
            self._driver._img_commit.assert_called_once_with(
                self._fake_snapshot_path)
            self._driver._rebase_img.assert_called_once_with(
                fake_upper_snap_path, self._fake_volume.name,
                fake_base_img_info.file_format)
            self._driver._write_info_file.assert_called_once_with(
                mock.sentinel.fake_info_path, expected_info)
    def test_delete_snapshot_when_active_file(self):
        """Deleting the active-image snapshot commits it into its base."""
        self._test_delete_snapshot()
    def test_delete_snapshot_in_use(self):
        """Deleting a snapshot of an attached volume uses the online path."""
        self._test_delete_snapshot(volume_in_use=True)
    def test_delete_snapshot_in_use_stale_snapshot(self):
        """A stale (no backing file) snapshot of an attached volume is
        cleaned up via _delete_stale_snapshot."""
        self._test_delete_snapshot(volume_in_use=True,
                                   stale_snapshot=True)
    def test_delete_snapshot_with_one_upper_file(self):
        """Deleting a mid-chain snapshot rebases the file above it."""
        self._test_delete_snapshot(is_active_image=False)
    def test_delete_stale_snapshot(self):
        """_delete_stale_snapshot removes the file and its info entry."""
        fake_snapshot_name = os.path.basename(self._fake_snapshot_path)
        fake_snap_info = {
            'active': self._fake_volume.name,
            self._fake_snapshot.id: fake_snapshot_name
        }
        # after cleanup only the 'active' entry should remain
        expected_info = {'active': self._fake_volume.name}
        self._driver._local_path_volume_info = mock.Mock(
            return_value=mock.sentinel.fake_info_path)
        self._driver._read_info_file = mock.Mock(
            return_value=fake_snap_info)
        self._driver._local_volume_dir = mock.Mock(
            return_value=self._FAKE_MNT_POINT)
        self._driver._write_info_file = mock.Mock()
        self._driver._delete_stale_snapshot(self._fake_snapshot)
        self._driver._delete.assert_called_once_with(self._fake_snapshot_path)
        self._driver._write_info_file.assert_called_once_with(
            mock.sentinel.fake_info_path, expected_info)
    @mock.patch.object(remotefs.RemoteFSDriver,
                       'secure_file_operations_enabled',
                       return_value=True)
    @mock.patch.object(os, 'stat')
    def test_do_create_snapshot(self, _mock_stat, _mock_sec_enabled):
        """_do_create_snapshot runs qemu-img create, rebase and chown in
        that order."""
        self._driver._local_volume_dir = mock.Mock(
            return_value=self._fake_volume_path)
        fake_backing_path = os.path.join(
            self._driver._local_volume_dir(),
            self._fake_volume.name)
        self._driver._execute = mock.Mock()
        self._driver._set_rw_permissions = mock.Mock()
        self._driver._qemu_img_info = mock.Mock(
            return_value=mock.Mock(file_format=mock.sentinel.backing_fmt))
        self._driver._do_create_snapshot(self._fake_snapshot,
                                         self._fake_volume.name,
                                         self._fake_snapshot_path)
        # expected shell invocations, in order
        command1 = ['qemu-img', 'create', '-f', 'qcow2', '-o',
                    'backing_file=%s,backing_fmt=%s' %
                    (fake_backing_path,
                     mock.sentinel.backing_fmt),
                    self._fake_snapshot_path,
                    "%dG" % self._fake_volume.size]
        command2 = ['qemu-img', 'rebase', '-u',
                    '-b', self._fake_volume.name,
                    '-F', mock.sentinel.backing_fmt,
                    self._fake_snapshot_path]
        command3 = ['chown', '--reference=%s' % fake_backing_path,
                    self._fake_snapshot_path]
        calls = [mock.call(*command1, run_as_root=True),
                 mock.call(*command2, run_as_root=True),
                 mock.call(*command3, run_as_root=True)]
        self._driver._execute.assert_has_calls(calls)
    def _test_create_snapshot(self, volume_in_use=False, tmp_snap=False):
        """Common driver for _create_snapshot scenarios.

        Verifies state validation, dispatch to the online vs. offline
        creation path, and the info-file update.
        """
        fake_snapshot_info = {}
        fake_snapshot_file_name = os.path.basename(self._fake_snapshot_path)
        # stub every helper that _create_snapshot touches
        self._driver._local_path_volume_info = mock.Mock(
            return_value=mock.sentinel.fake_info_path)
        self._driver._read_info_file = mock.Mock(
            return_value=fake_snapshot_info)
        self._driver._do_create_snapshot = mock.Mock()
        self._driver._create_snapshot_online = mock.Mock()
        self._driver._write_info_file = mock.Mock()
        self._driver.get_active_image_from_info = mock.Mock(
            return_value=self._fake_volume.name)
        self._driver._get_new_snap_path = mock.Mock(
            return_value=self._fake_snapshot_path)
        self._driver._validate_state = mock.Mock()
        expected_snapshot_info = {
            'active': fake_snapshot_file_name,
            self._fake_snapshot.id: fake_snapshot_file_name
        }
        exp_acceptable_states = ['available', 'in-use', 'backing-up']
        if tmp_snap:
            # temporary snapshots are also allowed while downloading
            exp_acceptable_states.append('downloading')
            self._fake_snapshot.id = 'tmp-snap-%s' % self._fake_snapshot.id
        if volume_in_use:
            self._fake_snapshot.volume.status = 'backing-up'
            self._fake_snapshot.volume.attach_status = 'attached'
            expected_method_called = '_create_snapshot_online'
        else:
            self._fake_snapshot.volume.status = 'available'
            expected_method_called = '_do_create_snapshot'
        self._driver._create_snapshot(self._fake_snapshot)
        self._driver._validate_state.assert_called_once_with(
            self._fake_snapshot.volume.status,
            exp_acceptable_states)
        fake_method = getattr(self._driver, expected_method_called)
        fake_method.assert_called_with(
            self._fake_snapshot, self._fake_volume.name,
            self._fake_snapshot_path)
        self._driver._write_info_file.assert_called_with(
            mock.sentinel.fake_info_path,
            expected_snapshot_info)
    def test_create_snapshot_volume_available(self):
        """An available volume uses the offline snapshot path."""
        self._test_create_snapshot()
    def test_create_snapshot_volume_in_use(self):
        """An attached volume uses the online snapshot path."""
        self._test_create_snapshot(volume_in_use=True)
    def test_create_snapshot_invalid_volume(self):
        """Snapshotting a volume in 'error' state raises InvalidVolume."""
        self._fake_snapshot.volume.status = 'error'
        self.assertRaises(exception.InvalidVolume,
                          self._driver._create_snapshot,
                          self._fake_snapshot)
    @mock.patch('cinder.db.snapshot_get')
    @mock.patch('time.sleep')
    def test_create_snapshot_online_with_concurrent_delete(
            self, mock_sleep, mock_snapshot_get):
        """A concurrent delete during online snapshot creation raises
        RemoteFSConcurrentRequest after polling."""
        self._driver._nova = mock.Mock()
        # Test what happens when progress is so slow that someone
        # decides to delete the snapshot while the last known status is
        # "creating".
        mock_snapshot_get.side_effect = [
            {'status': 'creating', 'progress': '42%'},
            {'status': 'creating', 'progress': '45%'},
            {'status': 'deleting'},
        ]
        fake_snapshot = self._fake_snapshot
        fake_snapshot.context = self.context
        with mock.patch.object(self._driver, '_do_create_snapshot') as \
                mock_do_create_snapshot:
            self.assertRaises(exception.RemoteFSConcurrentRequest,
                              self._driver._create_snapshot_online,
                              fake_snapshot,
                              self._fake_volume.name,
                              self._fake_snapshot_path)
        mock_do_create_snapshot.assert_called_once_with(
            fake_snapshot, self._fake_volume.name,
            self._fake_snapshot_path)
        # one sleep per "creating" poll before the final "deleting" poll
        self.assertEqual([mock.call(1), mock.call(1)],
                         mock_sleep.call_args_list)
        self.assertEqual(3, mock_snapshot_get.call_count)
        mock_snapshot_get.assert_called_with(self._fake_snapshot._context,
                                             self._fake_snapshot.id)
    @mock.patch.object(utils, 'synchronized')
    def _locked_volume_operation_test_helper(self, mock_synchronized, func,
                                             expected_exception=False,
                                             *args, **kwargs):
        """Check that *func* acquires the per-volume lock (or raises)."""
        # pass-through replacement for utils.synchronized so the wrapped
        # function runs undecorated while the lock name is still recorded
        def mock_decorator(*args, **kwargs):
            def mock_inner(f):
                return f
            return mock_inner
        mock_synchronized.side_effect = mock_decorator
        expected_lock = '%s-%s' % (self._driver.driver_prefix,
                                   self._fake_volume.id)
        if expected_exception:
            self.assertRaises(expected_exception, func,
                              self._driver,
                              *args, **kwargs)
        else:
            ret_val = func(self._driver, *args, **kwargs)
            mock_synchronized.assert_called_with(expected_lock,
                                                 external=False)
            self.assertEqual(mock.sentinel.ret_val, ret_val)
    def test_locked_volume_id_operation(self):
        """The decorator derives the lock id from a volume argument."""
        mock_volume = mock.Mock()
        mock_volume.id = self._fake_volume.id
        @remotefs.locked_volume_id_operation
        def synchronized_func(inst, volume):
            return mock.sentinel.ret_val
        self._locked_volume_operation_test_helper(func=synchronized_func,
                                                  volume=mock_volume)
    def test_locked_volume_id_snapshot_operation(self):
        """The decorator derives the lock id from a snapshot's volume."""
        mock_snapshot = mock.Mock()
        mock_snapshot.volume.id = self._fake_volume.id
        @remotefs.locked_volume_id_operation
        def synchronized_func(inst, snapshot):
            return mock.sentinel.ret_val
        self._locked_volume_operation_test_helper(func=synchronized_func,
                                                  snapshot=mock_snapshot)
    def test_locked_volume_id_operation_exception(self):
        """Without a volume or snapshot argument the decorator fails."""
        @remotefs.locked_volume_id_operation
        def synchronized_func(inst):
            return mock.sentinel.ret_val
        self._locked_volume_operation_test_helper(
            func=synchronized_func,
            expected_exception=exception.VolumeBackendAPIException)
    @mock.patch.object(image_utils, 'qemu_img_info')
    @mock.patch('os.path.basename')
    def _test_qemu_img_info(self, mock_basename,
                            mock_qemu_img_info, backing_file, basedir,
                            template=None, valid_backing_file=True):
        """Common driver for _qemu_img_info_base backing-file validation.

        Valid backing files are reduced to basenames; invalid ones make
        the call raise RemoteFSInvalidBackingFile.
        """
        fake_vol_name = 'fake_vol_name'
        mock_info = mock_qemu_img_info.return_value
        mock_info.image = mock.sentinel.image_path
        mock_info.backing_file = backing_file
        self._driver._VALID_IMAGE_EXTENSIONS = ['vhd', 'vhdx', 'raw', 'qcow2']
        # first basename() call is for the image, second for the backing file
        mock_basename.side_effect = [mock.sentinel.image_basename,
                                     mock.sentinel.backing_file_basename]
        if valid_backing_file:
            img_info = self._driver._qemu_img_info_base(
                mock.sentinel.image_path, fake_vol_name, basedir,
                ext_bf_template=template)
            self.assertEqual(mock_info, img_info)
            self.assertEqual(mock.sentinel.image_basename,
                             mock_info.image)
            expected_basename_calls = [mock.call(mock.sentinel.image_path)]
            if backing_file:
                self.assertEqual(mock.sentinel.backing_file_basename,
                                 mock_info.backing_file)
                expected_basename_calls.append(mock.call(backing_file))
            mock_basename.assert_has_calls(expected_basename_calls)
        else:
            self.assertRaises(exception.RemoteFSInvalidBackingFile,
                              self._driver._qemu_img_info_base,
                              mock.sentinel.image_path,
                              fake_vol_name, basedir)
        mock_qemu_img_info.assert_called_with(mock.sentinel.image_path,
                                              force_share=False,
                                              run_as_root=True)
    # each datum: [backing_file, basedir] — all accepted by the default
    # backing-file template
    @ddt.data([None, '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name', '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name.VHD', '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name.404f-404',
              '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name.tmp-snap-404f-404',
              '/fake_basedir'])
    @ddt.unpack
    def test_qemu_img_info_valid_backing_file(self, backing_file, basedir):
        """Well-formed backing files pass validation."""
        self._test_qemu_img_info(backing_file=backing_file,
                                 basedir=basedir)
    # each datum: [backing_file, basedir] — all rejected by the default
    # backing-file template
    @ddt.data(['/other_random_path', '/fake_basedir'],
              ['/other_basedir/cb2016/fake_vol_name', '/fake_basedir'],
              ['/fake_basedir/invalid_hash/fake_vol_name', '/fake_basedir'],
              ['/fake_basedir/cb2016/invalid_vol_name', '/fake_basedir'],
              ['/fake_basedir/cb2016/fake_vol_name.info', '/fake_basedir'],
              ['/fake_basedir/cb2016/fake_vol_name-random-suffix',
               '/fake_basedir'],
              ['/fake_basedir/cb2016/fake_vol_name.invalidext',
               '/fake_basedir'])
    @ddt.unpack
    def test_qemu_img_info_invalid_backing_file(self, backing_file, basedir):
        """Malformed backing files are rejected."""
        self._test_qemu_img_info(backing_file=backing_file,
                                 basedir=basedir,
                                 valid_backing_file=False)
    @ddt.data([None, '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name', '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name.VHD', '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name.404f-404',
              '/fake_basedir'],
             ['/fake_basedir/cb2016/fake_vol_name.tmp-snap-404f-404',
              '/fake_basedir'],
             ['/fake_basedir/cb2016/other_dir/404f-404',
              '/fake_basedir'],
             ['/fake_basedir/cb2016/other_dir/tmp-snap-404f-404',
              '/fake_basedir'],
             ['/fake_basedir/cb2016/other_dir/404f-404.mod1-404f-404',
              '/fake_basedir'],
             ['/fake_basedir/cb2016/other_dir/404f-404.mod2-404f-404',
              '/fake_basedir'])
    @ddt.unpack
    def test_qemu_img_info_extended_backing_file(self, backing_file, basedir):
        """Tests using a special backing file template
        The special backing file template used in here allows backing files
        in a subdirectory and with special extended names (.mod1-[], .mod2-[],
        ...).
        """
        # #basedir / #volname / #valid_ext are placeholders expanded by
        # BackingFileTemplate before the regex is applied
        ext_template = ("(#basedir/[0-9a-f]+/)?(#volname(.(tmp-snap-)"
                        "?[0-9a-f-]+)?#valid_ext|other_dir/(tmp-snap-)?"
                        "[0-9a-f-]+(.(mod1-|mod2-)[0-9a-f-]+)?)$")
        self._test_qemu_img_info(backing_file=backing_file,
                                 basedir=basedir,
                                 template=remotefs.BackingFileTemplate(
                                     ext_template),
                                 valid_backing_file=True)
    @ddt.data(['/other_random_path', '/fake_basedir'],
              ['/other_basedir/cb2016/fake_vol_name', '/fake_basedir'],
              ['/fake_basedir/invalid_hash/fake_vol_name', '/fake_basedir'],
              ['/fake_basedir/cb2016/invalid_vol_name', '/fake_basedir'],
              ['/fake_basedir/cb2016/fake_vol_name.info', '/fake_basedir'],
              ['/fake_basedir/cb2016/fake_vol_name-random-suffix',
               '/fake_basedir'],
              ['/fake_basedir/cb2016/fake_vol_name.invalidext',
               '/fake_basedir'],
              ['/fake_basedir/cb2016/invalid_dir/404f-404',
               '/fake_basedir'],
              ['/fake_basedir/cb2016/other_dir/invalid-prefix-404f-404',
               '/fake_basedir'],
              ['/fake_basedir/cb2016/other_dir/404f-404.mod3-404f-404',
               '/fake_basedir'],
              ['/fake_basedir/cb2016/other_dir/404f-404.mod2-404f-404.invalid',
               '/fake_basedir'])
    @ddt.unpack
    def test_qemu_img_info_extended_backing_file_invalid(self, backing_file,
                                                         basedir):
        """Tests using a special backing file template with invalid files
        The special backing file template used in here allows backing files
        in a subdirectory and with special extended names (.mod1-[], .mod2-[],
        ...).
        """
        # same template as the valid-case test; every datum above must fail
        ext_template = ("(#basedir/[0-9a-f]+/)?(#volname(.(tmp-snap-)"
                        "?[0-9a-f-]+)?#valid_ext|other_dir/(tmp-snap-)?"
                        "[0-9a-f-]+(.(mod1-|mod2-)[0-9a-f-]+)?)$")
        self._test_qemu_img_info(backing_file=backing_file,
                                 basedir=basedir,
                                 template=remotefs.BackingFileTemplate(
                                     ext_template),
                                 valid_backing_file=False)
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_local_volume_dir')
    @mock.patch.object(remotefs.RemoteFSSnapDriver,
                       'get_active_image_from_info')
    def test_local_path_active_image(self, mock_get_active_img,
                                     mock_local_vol_dir):
        """_local_path_active_image joins the volume dir and active image."""
        fake_vol_dir = 'fake_vol_dir'
        fake_active_img = 'fake_active_img_fname'
        mock_get_active_img.return_value = fake_active_img
        mock_local_vol_dir.return_value = fake_vol_dir
        active_img_path = self._driver._local_path_active_image(
            mock.sentinel.volume)
        exp_act_img_path = os.path.join(fake_vol_dir, fake_active_img)
        self.assertEqual(exp_act_img_path, active_img_path)
        mock_get_active_img.assert_called_once_with(mock.sentinel.volume)
        mock_local_vol_dir.assert_called_once_with(mock.sentinel.volume)
    # snapshots exist iff the active image differs from the base volume
    # file; an unprovisioned volume (no provider_location) short-circuits
    @ddt.data({},
              {'provider_location': None},
              {'active_fpath': 'last_snap_img',
               'expect_snaps': True})
    @ddt.unpack
    @mock.patch.object(remotefs.RemoteFSSnapDriver,
                       '_local_path_active_image')
    @mock.patch.object(remotefs.RemoteFSSnapDriver,
                       'local_path')
    def test_snapshots_exist(self, mock_local_path,
                             mock_local_path_active_img,
                             provider_location='fake_share',
                             active_fpath='base_img_path',
                             base_vol_path='base_img_path',
                             expect_snaps=False):
        """_snapshots_exist compares the active image against the base."""
        self._fake_volume.provider_location = provider_location
        mock_local_path.return_value = base_vol_path
        mock_local_path_active_img.return_value = active_fpath
        snaps_exist = self._driver._snapshots_exist(self._fake_volume)
        self.assertEqual(expect_snaps, snaps_exist)
        if provider_location:
            mock_local_path.assert_called_once_with(self._fake_volume)
            mock_local_path_active_img.assert_called_once_with(
                self._fake_volume)
        else:
            self.assertFalse(mock_local_path.called)
    # cloning uses a temporary snapshot when the source already has
    # snapshots or when the driver always requires it; otherwise the
    # image file is copied directly
    @ddt.data({},
              {'snapshots_exist': True},
              {'force_temp_snap': True})
    @ddt.unpack
    @mock.patch.object(sys.modules['cinder.objects'], "Snapshot")
    @mock.patch.object(remotefs.RemoteFSSnapDriver, 'local_path')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_snapshots_exist')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_copy_volume_image')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_extend_volume')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_validate_state')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_create_snapshot')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_delete_snapshot')
    @mock.patch.object(remotefs.RemoteFSSnapDriver,
                       '_copy_volume_from_snapshot')
    def test_create_cloned_volume(self, mock_copy_volume_from_snapshot,
                                  mock_delete_snapshot,
                                  mock_create_snapshot,
                                  mock_validate_state,
                                  mock_extend_volume,
                                  mock_copy_volume_image,
                                  mock_snapshots_exist,
                                  mock_local_path,
                                  mock_obj_snap,
                                  snapshots_exist=False,
                                  force_temp_snap=False):
        """create_cloned_volume clones via temp snapshot or direct copy."""
        drv = self._driver
        # prepare test
        volume = fake_volume.fake_volume_obj(self.context)
        src_vref_id = '375e32b2-804a-49f2-b282-85d1d5a5b9e1'
        src_vref = fake_volume.fake_volume_obj(
            self.context,
            id=src_vref_id,
            name='volume-%s' % src_vref_id)
        src_vref.context = self.context
        mock_snapshots_exist.return_value = snapshots_exist
        drv._always_use_temp_snap_when_cloning = force_temp_snap
        vol_attrs = ['provider_location', 'size', 'id', 'name', 'status',
                     'volume_type', 'metadata']
        Volume = collections.namedtuple('Volume', vol_attrs)
        volume_ref = Volume(id=volume.id,
                            metadata=volume.metadata,
                            name=volume.name,
                            provider_location=volume.provider_location,
                            status=volume.status,
                            size=volume.size,
                            volume_type=volume.volume_type,)
        snap_args_creation = {
            'volume_id': src_vref.id,
            'user_id': None,
            'project_id': None,
            'status': fields.SnapshotStatus.CREATING,
            'progress': '0%',
            'volume_size': src_vref.size,
            'display_name': 'tmp-snap-%s' % src_vref['id'],
            'display_description': None,
            'volume_type_id': src_vref.volume_type_id,
            'encryption_key_id': None,
        }
        snap_args_deletion = snap_args_creation.copy()
        snap_args_deletion["status"] = fields.SnapshotStatus.DELETED
        snap_args_deletion["deleted"] = True
        mock_obj_snap.return_value = mock.Mock()
        mock_obj_snap.return_value.create = mock.Mock()
        # end of prepare test
        # run test
        drv.create_cloned_volume(volume, src_vref)
        # evaluate test
        exp_acceptable_states = ['available', 'backing-up', 'downloading']
        mock_validate_state.assert_called_once_with(
            src_vref.status,
            exp_acceptable_states,
            obj_description='source volume')
        if snapshots_exist or force_temp_snap:
            mock_obj_snap.return_value.create.assert_called_once_with()
            mock_obj_snap.assert_called_once_with(
                context=self.context, **snap_args_creation)
            mock_create_snapshot.assert_called_once_with(
                mock_obj_snap.return_value)
            mock_copy_volume_from_snapshot.assert_called_once_with(
                mock_obj_snap.return_value, volume_ref, volume['size'])
            # NOTE(review): 'called_once_with' is not a real Mock assertion
            # (it silently creates a child mock and always "passes"); this
            # was probably meant to be 'assert_called_once_with' — confirm
            # the expected call arguments before changing it.
            mock_delete_snapshot.called_once_with(snap_args_deletion)
        else:
            self.assertFalse(mock_create_snapshot.called)
            mock_snapshots_exist.assert_called_once_with(src_vref)
            mock_copy_volume_image.assert_called_once_with(
                mock_local_path.return_value,
                mock_local_path.return_value)
            mock_local_path.assert_has_calls(
                [mock.call(src_vref), mock.call(volume_ref)])
            mock_extend_volume.assert_called_once_with(volume_ref, volume.size)
    @mock.patch('shutil.copyfile')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_set_rw_permissions')
    def test_copy_volume_image(self, mock_set_perm, mock_copyfile):
        """_copy_volume_image copies the file and fixes permissions."""
        self._driver._copy_volume_image(mock.sentinel.src, mock.sentinel.dest)
        mock_copyfile.assert_called_once_with(mock.sentinel.src,
                                              mock.sentinel.dest)
        mock_set_perm.assert_called_once_with(mock.sentinel.dest)
    def test_create_regular_file(self):
        """_create_regular_file zero-fills the file with dd as root."""
        self._driver._create_regular_file('/path', 1)
        self._driver._execute.assert_called_once_with('dd', 'if=/dev/zero',
                                                      'of=/path', 'bs=1M',
                                                      'count=1024',
                                                      run_as_root=True)
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_local_path_volume_info')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_read_info_file')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_local_volume_dir')
    @mock.patch.object(remotefs.RemoteFSSnapDriver, '_qemu_img_info')
    def test_get_snapshot_backing_file(
            self, mock_qemu_img_info, mock_local_vol_dir,
            mock_read_info_file, mock_local_path_vol_info):
        """_get_snapshot_backing_file returns the backing file's basename."""
        fake_snapshot_file_name = os.path.basename(self._fake_snapshot_path)
        fake_snapshot_info = {self._fake_snapshot.id: fake_snapshot_file_name}
        fake_snap_img_info = mock.Mock()
        fake_snap_img_info.backing_file = self._fake_volume.name
        mock_read_info_file.return_value = fake_snapshot_info
        mock_qemu_img_info.return_value = fake_snap_img_info
        mock_local_vol_dir.return_value = self._FAKE_MNT_POINT
        snap_backing_file = self._driver._get_snapshot_backing_file(
            self._fake_snapshot)
        self.assertEqual(os.path.basename(self._fake_volume_path),
                         snap_backing_file)
        # verify the info file and image metadata were consulted
        mock_local_path_vol_info.assert_called_once_with(self._fake_volume)
        mock_read_info_file.assert_called_once_with(
            mock_local_path_vol_info.return_value)
        mock_local_vol_dir.assert_called_once_with(self._fake_volume)
        mock_qemu_img_info.assert_called_once_with(self._fake_snapshot_path)
    @ddt.data({},
              {'info_file_exists': True},
              {'os_name': 'nt'})
    @ddt.unpack
    @mock.patch('json.dump')
    @mock.patch('cinder.volume.drivers.remotefs.open')
    @mock.patch('os.path.exists')
    def test_write_info_file(self,
                             mock_os_path_exists,
                             mock_open,
                             mock_json_dump,
                             info_file_exists=False,
                             os_name='posix'):
        """_write_info_file dumps the snapshot info as JSON and, for a newly
        created file on POSIX, truncates it and fixes its permissions.

        NOTE(review): the ddt-supplied ``os_name`` parameter is never used;
        the branch below checks the real ``os.name`` — confirm whether the
        'nt' scenario actually exercises a different code path.
        """
        mock_os_path_exists.return_value = info_file_exists
        fake_info_path = '/path/to/info'
        fake_snapshot_info = {'active': self._fake_snapshot_path}
        self._driver._execute = mock.Mock()
        self._driver._set_rw_permissions = mock.Mock()
        self._driver._write_info_file(fake_info_path, fake_snapshot_info)
        mock_open.assert_called_once_with(fake_info_path, 'w')
        mock_json_dump.assert_called_once_with(
            fake_snapshot_info, mock.ANY, indent=1, sort_keys=True)
        # truncate/chmod are only expected for a new file on POSIX.
        if info_file_exists or os.name == 'nt':
            self._driver._execute.assert_not_called()
            self._driver._set_rw_permissions.assert_not_called()
        else:
            self._driver._execute.assert_called_once_with(
                'truncate', "-s0", fake_info_path,
                run_as_root=self._driver._execute_as_root)
            self._driver._set_rw_permissions.assert_called_once_with(
                fake_info_path)
        # Info lacking the 'active' entry must be rejected.
        fake_snapshot_info.pop('active')
        self.assertRaises(exception.RemoteFSException,
                          self._driver._write_info_file,
                          fake_info_path,
                          fake_snapshot_info)
class RemoteFSPoolMixinTestCase(test.TestCase):
    """Tests for remotefs.RemoteFSPoolMixin (share <-> pool mapping)."""
    def setUp(self):
        super(RemoteFSPoolMixinTestCase, self).setUp()
        # We'll instantiate this directly for now.
        self._driver = remotefs.RemoteFSPoolMixin()
        self.context = context.get_admin_context()
    @mock.patch.object(remotefs.RemoteFSPoolMixin,
                       '_get_pool_name_from_volume')
    @mock.patch.object(remotefs.RemoteFSPoolMixin,
                       '_get_share_from_pool_name')
    def test_find_share(self, mock_get_share_from_pool,
                        mock_get_pool_from_volume):
        """_find_share resolves volume -> pool name -> backing share."""
        share = self._driver._find_share(mock.sentinel.volume)
        self.assertEqual(mock_get_share_from_pool.return_value, share)
        mock_get_pool_from_volume.assert_called_once_with(
            mock.sentinel.volume)
        mock_get_share_from_pool.assert_called_once_with(
            mock_get_pool_from_volume.return_value)
    def test_get_pool_name_from_volume(self):
        """The pool name is the part of the volume host after '#'."""
        fake_pool = 'fake_pool'
        fake_host = 'fake_host@fake_backend#%s' % fake_pool
        fake_vol = fake_volume.fake_volume_obj(
            self.context, provider_location='fake_share',
            host=fake_host)
        pool_name = self._driver._get_pool_name_from_volume(fake_vol)
        self.assertEqual(fake_pool, pool_name)
    def test_update_volume_stats(self):
        """_update_volume_stats reports one pool per mounted share and zeroes
        the driver-level capacity fields.
        """
        share_total_gb = 3
        share_free_gb = 2
        share_used_gb = 4  # provisioned space
        self._driver._mounted_shares = [mock.sentinel.share]
        self._driver.configuration = mock.Mock()
        self._driver.configuration.safe_get.return_value = (
            mock.sentinel.backend_name)
        self._driver.vendor_name = mock.sentinel.vendor_name
        self._driver.driver_volume_type = mock.sentinel.driver_volume_type
        self._driver._thin_provisioning_support = (
            mock.sentinel.thin_prov_support)
        self._driver._thick_provisioning_support = (
            mock.sentinel.thick_prov_support)
        self._driver.get_version = mock.Mock(
            return_value=mock.sentinel.driver_version)
        self._driver._ensure_shares_mounted = mock.Mock()
        # _get_capacity_info returns bytes; the stats are expected in GiB.
        self._driver._get_capacity_info = mock.Mock(
            return_value=(share_total_gb << 30,
                          share_free_gb << 30,
                          share_used_gb << 30))
        self._driver._get_pool_name_from_share = mock.Mock(
            return_value=mock.sentinel.pool_name)
        expected_pool = {
            'pool_name': mock.sentinel.pool_name,
            'total_capacity_gb': float(share_total_gb),
            'free_capacity_gb': float(share_free_gb),
            'provisioned_capacity_gb': float(share_used_gb),
            'reserved_percentage': (
                self._driver.configuration.reserved_percentage),
            'max_over_subscription_ratio': (
                self._driver.configuration.max_over_subscription_ratio),
            'thin_provisioning_support': (
                mock.sentinel.thin_prov_support),
            'thick_provisioning_support': (
                mock.sentinel.thick_prov_support),
            'QoS_support': False,
        }
        expected_stats = {
            'volume_backend_name': mock.sentinel.backend_name,
            'vendor_name': mock.sentinel.vendor_name,
            'driver_version': mock.sentinel.driver_version,
            'storage_protocol': mock.sentinel.driver_volume_type,
            'total_capacity_gb': 0,
            'free_capacity_gb': 0,
            'pools': [expected_pool],
        }
        self._driver._update_volume_stats()
        self.assertDictEqual(expected_stats, self._driver._stats)
        self._driver._get_capacity_info.assert_called_once_with(
            mock.sentinel.share)
        self._driver.configuration.safe_get.assert_called_once_with(
            'volume_backend_name')
@ddt.ddt
class RevertToSnapshotMixinTestCase(test.TestCase):
    """Tests for remotefs.RevertToSnapshotMixin."""

    _FAKE_MNT_POINT = '/mnt/fake_hash'

    def setUp(self):
        super(RevertToSnapshotMixinTestCase, self).setUp()
        self._driver = remotefs.RevertToSnapshotMixin()
        self._driver._remotefsclient = mock.Mock()
        self._driver._execute = mock.Mock()
        self._driver._delete = mock.Mock()
        self.context = context.get_admin_context()
        self._fake_volume = fake_volume.fake_volume_obj(
            self.context, provider_location='fake_share')
        self._fake_volume_path = os.path.join(self._FAKE_MNT_POINT,
                                              self._fake_volume.name)
        self._fake_snapshot = fake_snapshot.fake_snapshot_obj(self.context)
        self._fake_snapshot_path = (self._fake_volume_path + '.' +
                                    self._fake_snapshot.id)
        self._fake_snapshot_name = os.path.basename(
            self._fake_snapshot_path)
        self._fake_snapshot.volume = self._fake_volume

    @ddt.data(True, False)
    @mock.patch.object(remotefs.RevertToSnapshotMixin, '_validate_state',
                       create=True)
    @mock.patch.object(remotefs.RevertToSnapshotMixin, '_read_info_file',
                       create=True)
    @mock.patch.object(remotefs.RevertToSnapshotMixin,
                       '_local_path_volume_info', create=True)
    @mock.patch.object(remotefs.RevertToSnapshotMixin, '_qemu_img_info',
                       create=True)
    @mock.patch.object(remotefs.RevertToSnapshotMixin, '_do_create_snapshot',
                       create=True)
    @mock.patch.object(remotefs.RevertToSnapshotMixin, '_local_volume_dir',
                       create=True)
    def test_revert_to_snapshot(self,
                                is_latest_snapshot,
                                mock_local_vol_dir,
                                mock_do_create_snapshot,
                                mock_qemu_img_info,
                                mock_local_path_vol_info,
                                mock_read_info_file,
                                mock_validate_state):
        """Reverting works only for the latest ('active') snapshot.

        The latest snapshot is deleted and re-created on top of the volume
        file; reverting to an older snapshot raises InvalidSnapshot. The
        volume state and info file are validated in either case.
        """
        active_file = (self._fake_snapshot_name if is_latest_snapshot
                       else 'fake_latest_snap')
        fake_snapshot_info = {
            'active': active_file,
            self._fake_snapshot.id: self._fake_snapshot_name
        }
        mock_read_info_file.return_value = fake_snapshot_info
        fake_snap_img_info = mock.Mock()
        fake_snap_img_info.backing_file = self._fake_volume.name
        mock_qemu_img_info.return_value = fake_snap_img_info
        mock_local_vol_dir.return_value = self._FAKE_MNT_POINT
        if is_latest_snapshot:
            self._driver._revert_to_snapshot(self.context, self._fake_volume,
                                             self._fake_snapshot)
            # The snapshot file is dropped and re-created on the same
            # backing file, effectively discarding changes made after it.
            self._driver._delete.assert_called_once_with(
                self._fake_snapshot_path)
            mock_do_create_snapshot.assert_called_once_with(
                self._fake_snapshot,
                fake_snap_img_info.backing_file,
                self._fake_snapshot_path)
            mock_qemu_img_info.assert_called_once_with(
                self._fake_snapshot_path,
                self._fake_volume.name)
        else:
            self.assertRaises(exception.InvalidSnapshot,
                              self._driver._revert_to_snapshot,
                              self.context, self._fake_volume,
                              self._fake_snapshot)
            self._driver._delete.assert_not_called()
        # Common expectations for both scenarios.
        exp_acceptable_states = ['available', 'reverting']
        mock_validate_state.assert_called_once_with(
            self._fake_snapshot.volume.status,
            exp_acceptable_states)
        mock_local_path_vol_info.assert_called_once_with(
            self._fake_snapshot.volume)
        mock_read_info_file.assert_called_once_with(
            mock_local_path_vol_info.return_value)
@ddt.ddt
class RemoteFSManageableVolumesTestCase(test.TestCase):
    """Tests for remotefs.RemoteFSManageableVolumesMixin.

    Covers locating, validating, managing and listing images that already
    exist on the mounted shares.
    """

    def setUp(self):
        super(RemoteFSManageableVolumesTestCase, self).setUp()
        # We'll instantiate this directly for now.
        self._driver = remotefs.RemoteFSManageableVolumesMixin()

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_mount_point_for_share', create=True)
    @mock.patch.object(os.path, 'isfile')
    def test_get_manageable_vol_location_invalid(self, mock_is_file,
                                                 mock_get_mount_point):
        """Invalid references must raise ManageExistingInvalidReference."""
        # No 'source-name' key at all.
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self._driver._get_manageable_vol_location,
                          {})
        # No mounted share matches the reference.
        self._driver._mounted_shares = []
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self._driver._get_manageable_vol_location,
                          {'source-name': '//hots/share/img'})
        # Share matches but the referenced file does not exist.
        self._driver._mounted_shares = ['//host/share']
        mock_get_mount_point.return_value = '/fake_mountpoint'
        mock_is_file.return_value = False
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self._driver._get_manageable_vol_location,
                          {'source-name': '//host/share/subdir/img'})
        mock_is_file.assert_any_call(
            os.path.normpath('/fake_mountpoint/subdir/img'))

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_mount_point_for_share', create=True)
    @mock.patch.object(os.path, 'isfile')
    def test_get_manageable_vol_location(self, mock_is_file,
                                         mock_get_mount_point):
        """A valid reference is resolved to share/mountpoint/paths."""
        self._driver._mounted_shares = [
            '//host/share2/subdir',
            '//host/share/subdir',
            'host:/dir/subdir'
        ]
        mock_get_mount_point.return_value = '/fake_mountpoint'
        mock_is_file.return_value = True
        location_info = self._driver._get_manageable_vol_location(
            {'source-name': 'host:/dir/subdir/import/img'})
        exp_location_info = {
            'share': 'host:/dir/subdir',
            'mountpoint': mock_get_mount_point.return_value,
            'vol_local_path': '/fake_mountpoint/import/img',
            'vol_remote_path': 'host:/dir/subdir/import/img'
        }
        self.assertEqual(exp_location_info, location_info)

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_mount_point_for_share', create=True)
    @mock.patch.object(os.path, 'isfile')
    @mock.patch.object(os.path, 'normpath', lambda x: x.replace('/', '\\'))
    @mock.patch.object(os.path, 'normcase', lambda x: x.lower())
    @mock.patch.object(os.path, 'join', lambda *args: '\\'.join(args))
    @mock.patch.object(os.path, 'sep', '\\')
    def test_get_manageable_vol_location_win32(self, mock_is_file,
                                               mock_get_mount_point):
        """Matching is case-insensitive and uses '\\' separators on
        Windows (os.path is patched to emulate ntpath).
        """
        self._driver._mounted_shares = [
            '//host/share2/subdir',
            '//host/share/subdir',
            'host:/dir/subdir'
        ]
        mock_get_mount_point.return_value = r'c:\fake_mountpoint'
        mock_is_file.return_value = True
        location_info = self._driver._get_manageable_vol_location(
            {'source-name': '//Host/share/Subdir/import/img'})
        exp_location_info = {
            'share': '//host/share/subdir',
            'mountpoint': mock_get_mount_point.return_value,
            'vol_local_path': r'c:\fake_mountpoint\import\img',
            'vol_remote_path': r'\\host\share\subdir\import\img'
        }
        self.assertEqual(exp_location_info, location_info)

    def test_get_managed_vol_exp_path(self):
        """The expected managed path is mountpoint + generated vol name."""
        fake_vol = fake_volume.fake_volume_obj(mock.sentinel.context)
        vol_location = dict(mountpoint='fake-mountpoint')
        exp_path = os.path.join(vol_location['mountpoint'],
                                fake_vol.name)
        ret_val = self._driver._get_managed_vol_expected_path(
            fake_vol, vol_location)
        self.assertEqual(exp_path, ret_val)

    @ddt.data(
        {'already_managed': True},
        {'qemu_side_eff': exception.RemoteFSInvalidBackingFile},
        {'qemu_side_eff': Exception},
        {'qemu_side_eff': [mock.Mock(backing_file=None,
                                     file_format='fakefmt')]},
        {'qemu_side_eff': [mock.Mock(backing_file='backing_file',
                                     file_format='raw')]}
    )
    @ddt.unpack
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_qemu_img_info', create=True)
    def test_check_unmanageable_volume(self, mock_qemu_info,
                                       qemu_side_eff=None,
                                       already_managed=False):
        """Already-managed, unreadable, non-raw or backed images are all
        rejected as unmanageable.
        """
        mock_qemu_info.side_effect = qemu_side_eff
        manageable = self._driver._is_volume_manageable(
            mock.sentinel.volume_path,
            already_managed=already_managed)[0]
        self.assertFalse(manageable)

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_qemu_img_info', create=True)
    def test_check_manageable_volume(self, mock_qemu_info):
        """A raw image without a backing file is manageable."""
        mock_qemu_info.return_value = mock.Mock(
            backing_file=None,
            file_format='raw')
        manageable = self._driver._is_volume_manageable(
            mock.sentinel.volume_path)[0]
        self.assertTrue(manageable)

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_manageable_vol_location')
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_is_volume_manageable')
    def test_manage_existing_unmanageable(self, mock_check_manageable,
                                          mock_get_location):
        """manage_existing refuses images flagged as unmanageable."""
        fake_vol = fake_volume.fake_volume_obj(mock.sentinel.context)
        mock_get_location.return_value = dict(
            vol_local_path=mock.sentinel.local_path)
        mock_check_manageable.return_value = False, mock.sentinel.reason
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self._driver.manage_existing,
                          fake_vol,
                          mock.sentinel.existing_ref)
        mock_get_location.assert_called_once_with(mock.sentinel.existing_ref)
        mock_check_manageable.assert_called_once_with(
            mock.sentinel.local_path)

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_manageable_vol_location')
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_is_volume_manageable')
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_set_rw_permissions', create=True)
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_managed_vol_expected_path')
    @mock.patch.object(os, 'rename')
    def test_manage_existing_manageable(self, mock_rename,
                                        mock_get_exp_path,
                                        mock_set_perm,
                                        mock_check_manageable,
                                        mock_get_location):
        """Managing a valid image fixes permissions, renames the file to
        the expected path and returns the backing share as the provider
        location.
        """
        fake_vol = fake_volume.fake_volume_obj(mock.sentinel.context)
        mock_get_location.return_value = dict(
            vol_local_path=mock.sentinel.local_path,
            share=mock.sentinel.share)
        mock_check_manageable.return_value = True, None
        exp_ret_val = {'provider_location': mock.sentinel.share}
        ret_val = self._driver.manage_existing(fake_vol,
                                               mock.sentinel.existing_ref)
        self.assertEqual(exp_ret_val, ret_val)
        mock_get_exp_path.assert_called_once_with(
            fake_vol, mock_get_location.return_value)
        mock_set_perm.assert_called_once_with(mock.sentinel.local_path)
        mock_rename.assert_called_once_with(mock.sentinel.local_path,
                                            mock_get_exp_path.return_value)

    @mock.patch.object(image_utils, 'qemu_img_info')
    def test_get_rounded_manageable_image_size(self, mock_qemu_info):
        """Virtual sizes are rounded up to whole GiB.

        Renamed from ``_get_rounded_manageable_image_size`` (missing the
        ``test_`` prefix) so the test runner actually discovers and runs it.
        """
        # 1 GiB plus one byte must round up to 2 GiB. Note the original
        # expression ``1 << 30 + 1`` parsed as ``1 << 31`` since ``+``
        # binds tighter than ``<<``; parenthesized to match the intent.
        mock_qemu_info.return_value.virtual_size = (1 << 30) + 1
        exp_rounded_size_gb = 2
        size = self._driver._get_rounded_manageable_image_size(
            mock.sentinel.image_path)
        self.assertEqual(exp_rounded_size_gb, size)
        mock_qemu_info.assert_called_once_with(mock.sentinel.image_path)

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_manageable_vol_location')
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_rounded_manageable_image_size')
    def test_manage_existing_get_size(self, mock_get_size,
                                      mock_get_location):
        """manage_existing_get_size returns the rounded image size."""
        mock_get_location.return_value = dict(
            vol_local_path=mock.sentinel.image_path)
        size = self._driver.manage_existing_get_size(
            mock.sentinel.volume,
            mock.sentinel.existing_ref)
        self.assertEqual(mock_get_size.return_value, size)
        mock_get_location.assert_called_once_with(mock.sentinel.existing_ref)
        mock_get_size.assert_called_once_with(mock.sentinel.image_path)

    @ddt.data(
        {},
        {'managed_volume': mock.Mock(size=mock.sentinel.sz),
         'exp_size': mock.sentinel.sz,
         'manageable_check_ret_val': False,
         'exp_manageable': False},
        {'exp_size': None,
         'get_size_side_effect': Exception,
         'exp_manageable': False})
    @ddt.unpack
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_is_volume_manageable')
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_rounded_manageable_image_size')
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_mount_point_for_share', create=True)
    def test_get_manageable_volume(
            self, mock_get_mount_point,
            mock_get_size, mock_check_manageable,
            managed_volume=None,
            get_size_side_effect=(mock.sentinel.size_gb, ),
            manageable_check_ret_val=True,
            exp_size=mock.sentinel.size_gb,
            exp_manageable=True):
        """_get_manageable_volume builds the expected description dict for
        managed, unmanageable and unreadable images alike.
        """
        share = '//host/share'
        mountpoint = '/fake-mountpoint'
        volume_path = '/fake-mountpoint/subdir/vol'
        exp_ret_val = {
            'reference': {'source-name': '//host/share/subdir/vol'},
            'size': exp_size,
            'safe_to_manage': exp_manageable,
            'reason_not_safe': mock.ANY,
            'cinder_id': managed_volume.id if managed_volume else None,
            'extra_info': None,
        }
        mock_get_size.side_effect = get_size_side_effect
        mock_check_manageable.return_value = (manageable_check_ret_val,
                                              mock.sentinel.reason)
        mock_get_mount_point.return_value = mountpoint
        ret_val = self._driver._get_manageable_volume(
            share, volume_path, managed_volume)
        self.assertEqual(exp_ret_val, ret_val)
        mock_check_manageable.assert_called_once_with(
            volume_path, already_managed=managed_volume is not None)
        mock_get_mount_point.assert_called_once_with(share)
        # Size is taken from the managed volume when there is one; only
        # unmanaged images are probed on disk.
        if managed_volume:
            mock_get_size.assert_not_called()
        else:
            mock_get_size.assert_called_once_with(volume_path)

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_mount_point_for_share', create=True)
    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_manageable_volume')
    @mock.patch.object(os, 'walk')
    @mock.patch.object(os.path, 'join', lambda *args: '/'.join(args))
    def test_get_share_manageable_volumes(
            self, mock_walk, mock_get_manageable_volume,
            mock_get_mount_point):
        """The share is walked recursively; only files matching the
        manageable-image regex are reported, and per-image errors are
        swallowed.
        """
        mount_path = '/fake-mountpoint'
        mock_walk.return_value = [
            [mount_path, ['subdir'], ['volume-1.vhdx']],
            ['/fake-mountpoint/subdir', [], ['volume-0', 'volume-3.vhdx']]]
        # First image errors out and is skipped; second one is returned.
        mock_get_manageable_volume.side_effect = [
            Exception,
            mock.sentinel.managed_volume]
        self._driver._MANAGEABLE_IMAGE_RE = re.compile(r'.*\.(?:vhdx)$')
        managed_volumes = {'volume-1': mock.sentinel.vol1}
        exp_manageable = [mock.sentinel.managed_volume]
        manageable_volumes = self._driver._get_share_manageable_volumes(
            mock.sentinel.share,
            managed_volumes)
        self.assertEqual(exp_manageable, manageable_volumes)
        mock_get_manageable_volume.assert_has_calls(
            [mock.call(mock.sentinel.share,
                       '/fake-mountpoint/volume-1.vhdx',
                       mock.sentinel.vol1),
             mock.call(mock.sentinel.share,
                       '/fake-mountpoint/subdir/volume-3.vhdx',
                       None)])

    @mock.patch.object(remotefs.RemoteFSManageableVolumesMixin,
                       '_get_share_manageable_volumes')
    @mock.patch.object(volume_utils, 'paginate_entries_list')
    def test_get_manageable_volumes(self, mock_paginate, mock_get_share_vols):
        """All mounted shares are queried (per-share errors swallowed) and
        the aggregated list is paginated.
        """
        fake_vol = fake_volume.fake_volume_obj(mock.sentinel.context)
        self._driver._mounted_shares = [mock.sentinel.share0,
                                        mock.sentinel.share1]
        mock_get_share_vols.side_effect = [
            Exception, [mock.sentinel.manageable_vol]]
        pagination_args = [
            mock.sentinel.marker, mock.sentinel.limit,
            mock.sentinel.offset, mock.sentinel.sort_keys,
            mock.sentinel.sort_dirs]
        ret_val = self._driver.get_manageable_volumes(
            [fake_vol], *pagination_args)
        self.assertEqual(mock_paginate.return_value, ret_val)
        mock_paginate.assert_called_once_with(
            [mock.sentinel.manageable_vol], *pagination_args)
        exp_managed_vols_dict = {fake_vol.name: fake_vol}
        mock_get_share_vols.assert_has_calls(
            [mock.call(share, exp_managed_vols_dict)
             for share in self._driver._mounted_shares])
|
[
"cinder.volume.drivers.remotefs.RemoteFSSnapDriver",
"cinder.tests.unit.fake_snapshot.fake_snapshot_obj",
"os.path.join",
"cinder.volume.drivers.remotefs.RemoteFSPoolMixin",
"cinder.tests.unit.fake_volume.fake_volume_obj",
"os.path.normpath",
"mock.patch.object",
"copy.deepcopy",
"ddt.data",
"mock.call",
"os.path.basename",
"cinder.volume.drivers.remotefs.RevertToSnapshotMixin",
"mock.patch",
"cinder.volume.drivers.remotefs.BackingFileTemplate",
"re.compile",
"cinder.context.get_admin_context",
"cinder.volume.drivers.remotefs.RemoteFSManageableVolumesMixin",
"collections.namedtuple",
"mock.Mock"
] |
[((1996, 2200), 'ddt.data', 'ddt.data', (["{'current_state': 'in-use', 'acceptable_states': ['available', 'in-use']}", "{'current_state': 'in-use', 'acceptable_states': ['available'],\n 'expected_exception': exception.InvalidVolume}"], {}), "({'current_state': 'in-use', 'acceptable_states': ['available',\n 'in-use']}, {'current_state': 'in-use', 'acceptable_states': [\n 'available'], 'expected_exception': exception.InvalidVolume})\n", (2004, 2200), False, 'import ddt\n'), ((9120, 9219), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSDriver', '"""secure_file_operations_enabled"""'], {'return_value': '(True)'}), "(remotefs.RemoteFSDriver, 'secure_file_operations_enabled',\n return_value=True)\n", (9137, 9219), False, 'import mock\n'), ((9267, 9296), 'mock.patch.object', 'mock.patch.object', (['os', '"""stat"""'], {}), "(os, 'stat')\n", (9284, 9296), False, 'import mock\n'), ((13407, 13443), 'mock.patch', 'mock.patch', (['"""cinder.db.snapshot_get"""'], {}), "('cinder.db.snapshot_get')\n", (13417, 13443), False, 'import mock\n'), ((13449, 13473), 'mock.patch', 'mock.patch', (['"""time.sleep"""'], {}), "('time.sleep')\n", (13459, 13473), False, 'import mock\n'), ((14940, 14980), 'mock.patch.object', 'mock.patch.object', (['utils', '"""synchronized"""'], {}), "(utils, 'synchronized')\n", (14957, 14980), False, 'import mock\n'), ((17119, 17166), 'mock.patch.object', 'mock.patch.object', (['image_utils', '"""qemu_img_info"""'], {}), "(image_utils, 'qemu_img_info')\n", (17136, 17166), False, 'import mock\n'), ((17172, 17202), 'mock.patch', 'mock.patch', (['"""os.path.basename"""'], {}), "('os.path.basename')\n", (17182, 17202), False, 'import mock\n'), ((18991, 19300), 'ddt.data', 'ddt.data', (["[None, '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.VHD', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.404f-404', '/fake_basedir']", 
"['/fake_basedir/cb2016/fake_vol_name.tmp-snap-404f-404', '/fake_basedir']"], {}), "([None, '/fake_basedir'], ['/fake_basedir/cb2016/fake_vol_name',\n '/fake_basedir'], ['/fake_basedir/cb2016/fake_vol_name.VHD',\n '/fake_basedir'], ['/fake_basedir/cb2016/fake_vol_name.404f-404',\n '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name.tmp-snap-404f-404', '/fake_basedir'])\n", (18999, 19300), False, 'import ddt\n'), ((19578, 20039), 'ddt.data', 'ddt.data', (["['/other_random_path', '/fake_basedir']", "['/other_basedir/cb2016/fake_vol_name', '/fake_basedir']", "['/fake_basedir/invalid_hash/fake_vol_name', '/fake_basedir']", "['/fake_basedir/cb2016/invalid_vol_name', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.info', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name-random-suffix', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.invalidext', '/fake_basedir']"], {}), "(['/other_random_path', '/fake_basedir'], [\n '/other_basedir/cb2016/fake_vol_name', '/fake_basedir'], [\n '/fake_basedir/invalid_hash/fake_vol_name', '/fake_basedir'], [\n '/fake_basedir/cb2016/invalid_vol_name', '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name.info', '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name-random-suffix', '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name.invalidext', '/fake_basedir'])\n", (19586, 20039), False, 'import ddt\n'), ((20393, 21011), 'ddt.data', 'ddt.data', (["[None, '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.VHD', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.404f-404', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.tmp-snap-404f-404', '/fake_basedir']", "['/fake_basedir/cb2016/other_dir/404f-404', '/fake_basedir']", "['/fake_basedir/cb2016/other_dir/tmp-snap-404f-404', '/fake_basedir']", "['/fake_basedir/cb2016/other_dir/404f-404.mod1-404f-404', '/fake_basedir']", 
"['/fake_basedir/cb2016/other_dir/404f-404.mod2-404f-404', '/fake_basedir']"], {}), "([None, '/fake_basedir'], ['/fake_basedir/cb2016/fake_vol_name',\n '/fake_basedir'], ['/fake_basedir/cb2016/fake_vol_name.VHD',\n '/fake_basedir'], ['/fake_basedir/cb2016/fake_vol_name.404f-404',\n '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name.tmp-snap-404f-404', '/fake_basedir'\n ], ['/fake_basedir/cb2016/other_dir/404f-404', '/fake_basedir'], [\n '/fake_basedir/cb2016/other_dir/tmp-snap-404f-404', '/fake_basedir'], [\n '/fake_basedir/cb2016/other_dir/404f-404.mod1-404f-404',\n '/fake_basedir'], [\n '/fake_basedir/cb2016/other_dir/404f-404.mod2-404f-404', '/fake_basedir'])\n", (20401, 21011), False, 'import ddt\n'), ((22014, 22808), 'ddt.data', 'ddt.data', (["['/other_random_path', '/fake_basedir']", "['/other_basedir/cb2016/fake_vol_name', '/fake_basedir']", "['/fake_basedir/invalid_hash/fake_vol_name', '/fake_basedir']", "['/fake_basedir/cb2016/invalid_vol_name', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.info', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name-random-suffix', '/fake_basedir']", "['/fake_basedir/cb2016/fake_vol_name.invalidext', '/fake_basedir']", "['/fake_basedir/cb2016/invalid_dir/404f-404', '/fake_basedir']", "['/fake_basedir/cb2016/other_dir/invalid-prefix-404f-404', '/fake_basedir']", "['/fake_basedir/cb2016/other_dir/404f-404.mod3-404f-404', '/fake_basedir']", "['/fake_basedir/cb2016/other_dir/404f-404.mod2-404f-404.invalid',\n '/fake_basedir']"], {}), "(['/other_random_path', '/fake_basedir'], [\n '/other_basedir/cb2016/fake_vol_name', '/fake_basedir'], [\n '/fake_basedir/invalid_hash/fake_vol_name', '/fake_basedir'], [\n '/fake_basedir/cb2016/invalid_vol_name', '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name.info', '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name-random-suffix', '/fake_basedir'], [\n '/fake_basedir/cb2016/fake_vol_name.invalidext', '/fake_basedir'], [\n 
'/fake_basedir/cb2016/invalid_dir/404f-404', '/fake_basedir'], [\n '/fake_basedir/cb2016/other_dir/invalid-prefix-404f-404',\n '/fake_basedir'], [\n '/fake_basedir/cb2016/other_dir/404f-404.mod3-404f-404',\n '/fake_basedir'], [\n '/fake_basedir/cb2016/other_dir/404f-404.mod2-404f-404.invalid',\n '/fake_basedir'])\n", (22022, 22808), False, 'import ddt\n'), ((23903, 23970), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_local_volume_dir"""'], {}), "(remotefs.RemoteFSSnapDriver, '_local_volume_dir')\n", (23920, 23970), False, 'import mock\n'), ((23976, 24052), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""get_active_image_from_info"""'], {}), "(remotefs.RemoteFSSnapDriver, 'get_active_image_from_info')\n", (23993, 24052), False, 'import mock\n'), ((24786, 24888), 'ddt.data', 'ddt.data', (['{}', "{'provider_location': None}", "{'active_fpath': 'last_snap_img', 'expect_snaps': True}"], {}), "({}, {'provider_location': None}, {'active_fpath': 'last_snap_img',\n 'expect_snaps': True})\n", (24794, 24888), False, 'import ddt\n'), ((24949, 25023), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_local_path_active_image"""'], {}), "(remotefs.RemoteFSSnapDriver, '_local_path_active_image')\n", (24966, 25023), False, 'import mock\n'), ((25052, 25112), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""local_path"""'], {}), "(remotefs.RemoteFSSnapDriver, 'local_path')\n", (25069, 25112), False, 'import mock\n'), ((26055, 26121), 'ddt.data', 'ddt.data', (['{}', "{'snapshots_exist': True}", "{'force_temp_snap': True}"], {}), "({}, {'snapshots_exist': True}, {'force_temp_snap': True})\n", (26063, 26121), False, 'import ddt\n'), ((26171, 26231), 'mock.patch.object', 'mock.patch.object', (["sys.modules['cinder.objects']", '"""Snapshot"""'], {}), "(sys.modules['cinder.objects'], 'Snapshot')\n", (26188, 26231), False, 'import mock\n'), ((26237, 26297), 
'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""local_path"""'], {}), "(remotefs.RemoteFSSnapDriver, 'local_path')\n", (26254, 26297), False, 'import mock\n'), ((26303, 26369), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_snapshots_exist"""'], {}), "(remotefs.RemoteFSSnapDriver, '_snapshots_exist')\n", (26320, 26369), False, 'import mock\n'), ((26375, 26443), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_copy_volume_image"""'], {}), "(remotefs.RemoteFSSnapDriver, '_copy_volume_image')\n", (26392, 26443), False, 'import mock\n'), ((26449, 26513), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_extend_volume"""'], {}), "(remotefs.RemoteFSSnapDriver, '_extend_volume')\n", (26466, 26513), False, 'import mock\n'), ((26519, 26584), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_validate_state"""'], {}), "(remotefs.RemoteFSSnapDriver, '_validate_state')\n", (26536, 26584), False, 'import mock\n'), ((26590, 26656), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_create_snapshot"""'], {}), "(remotefs.RemoteFSSnapDriver, '_create_snapshot')\n", (26607, 26656), False, 'import mock\n'), ((26662, 26728), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_delete_snapshot"""'], {}), "(remotefs.RemoteFSSnapDriver, '_delete_snapshot')\n", (26679, 26728), False, 'import mock\n'), ((26734, 26810), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_copy_volume_from_snapshot"""'], {}), "(remotefs.RemoteFSSnapDriver, '_copy_volume_from_snapshot')\n", (26751, 26810), False, 'import mock\n'), ((30625, 30654), 'mock.patch', 'mock.patch', (['"""shutil.copyfile"""'], {}), "('shutil.copyfile')\n", (30635, 30654), False, 'import mock\n'), ((30660, 30729), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', 
'"""_set_rw_permissions"""'], {}), "(remotefs.RemoteFSSnapDriver, '_set_rw_permissions')\n", (30677, 30729), False, 'import mock\n'), ((31467, 31540), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_local_path_volume_info"""'], {}), "(remotefs.RemoteFSSnapDriver, '_local_path_volume_info')\n", (31484, 31540), False, 'import mock\n'), ((31546, 31611), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_read_info_file"""'], {}), "(remotefs.RemoteFSSnapDriver, '_read_info_file')\n", (31563, 31611), False, 'import mock\n'), ((31617, 31684), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_local_volume_dir"""'], {}), "(remotefs.RemoteFSSnapDriver, '_local_volume_dir')\n", (31634, 31684), False, 'import mock\n'), ((31690, 31754), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSSnapDriver', '"""_qemu_img_info"""'], {}), "(remotefs.RemoteFSSnapDriver, '_qemu_img_info')\n", (31707, 31754), False, 'import mock\n'), ((32912, 32971), 'ddt.data', 'ddt.data', (['{}', "{'info_file_exists': True}", "{'os_name': 'nt'}"], {}), "({}, {'info_file_exists': True}, {'os_name': 'nt'})\n", (32920, 32971), False, 'import ddt\n'), ((33021, 33044), 'mock.patch', 'mock.patch', (['"""json.dump"""'], {}), "('json.dump')\n", (33031, 33044), False, 'import mock\n'), ((33050, 33099), 'mock.patch', 'mock.patch', (['"""cinder.volume.drivers.remotefs.open"""'], {}), "('cinder.volume.drivers.remotefs.open')\n", (33060, 33099), False, 'import mock\n'), ((33105, 33133), 'mock.patch', 'mock.patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (33115, 33133), False, 'import mock\n'), ((34908, 34983), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSPoolMixin', '"""_get_pool_name_from_volume"""'], {}), "(remotefs.RemoteFSPoolMixin, '_get_pool_name_from_volume')\n", (34925, 34983), False, 'import mock\n'), ((35012, 35086), 'mock.patch.object', 'mock.patch.object', 
(['remotefs.RemoteFSPoolMixin', '"""_get_share_from_pool_name"""'], {}), "(remotefs.RemoteFSPoolMixin, '_get_share_from_pool_name')\n", (35029, 35086), False, 'import mock\n'), ((39543, 39564), 'ddt.data', 'ddt.data', (['(True)', '(False)'], {}), '(True, False)\n', (39551, 39564), False, 'import ddt\n'), ((39570, 39656), 'mock.patch.object', 'mock.patch.object', (['remotefs.RevertToSnapshotMixin', '"""_validate_state"""'], {'create': '(True)'}), "(remotefs.RevertToSnapshotMixin, '_validate_state', create\n =True)\n", (39587, 39656), False, 'import mock\n'), ((39680, 39766), 'mock.patch.object', 'mock.patch.object', (['remotefs.RevertToSnapshotMixin', '"""_read_info_file"""'], {'create': '(True)'}), "(remotefs.RevertToSnapshotMixin, '_read_info_file', create\n =True)\n", (39697, 39766), False, 'import mock\n'), ((39790, 39883), 'mock.patch.object', 'mock.patch.object', (['remotefs.RevertToSnapshotMixin', '"""_local_path_volume_info"""'], {'create': '(True)'}), "(remotefs.RevertToSnapshotMixin, '_local_path_volume_info',\n create=True)\n", (39807, 39883), False, 'import mock\n'), ((39908, 39993), 'mock.patch.object', 'mock.patch.object', (['remotefs.RevertToSnapshotMixin', '"""_qemu_img_info"""'], {'create': '(True)'}), "(remotefs.RevertToSnapshotMixin, '_qemu_img_info', create=True\n )\n", (39925, 39993), False, 'import mock\n'), ((40017, 40106), 'mock.patch.object', 'mock.patch.object', (['remotefs.RevertToSnapshotMixin', '"""_do_create_snapshot"""'], {'create': '(True)'}), "(remotefs.RevertToSnapshotMixin, '_do_create_snapshot',\n create=True)\n", (40034, 40106), False, 'import mock\n'), ((40131, 40218), 'mock.patch.object', 'mock.patch.object', (['remotefs.RevertToSnapshotMixin', '"""_local_volume_dir"""'], {'create': '(True)'}), "(remotefs.RevertToSnapshotMixin, '_local_volume_dir',\n create=True)\n", (40148, 40218), False, 'import mock\n'), ((42803, 42908), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', 
'"""_get_mount_point_for_share"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_mount_point_for_share', create=True)\n", (42820, 42908), False, 'import mock\n'), ((42933, 42969), 'mock.patch.object', 'mock.patch.object', (['os.path', '"""isfile"""'], {}), "(os.path, 'isfile')\n", (42950, 42969), False, 'import mock\n'), ((43996, 44101), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_mount_point_for_share"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_mount_point_for_share', create=True)\n", (44013, 44101), False, 'import mock\n'), ((44126, 44162), 'mock.patch.object', 'mock.patch.object', (['os.path', '"""isfile"""'], {}), "(os.path, 'isfile')\n", (44143, 44162), False, 'import mock\n'), ((45004, 45109), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_mount_point_for_share"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_mount_point_for_share', create=True)\n", (45021, 45109), False, 'import mock\n'), ((45134, 45170), 'mock.patch.object', 'mock.patch.object', (['os.path', '"""isfile"""'], {}), "(os.path, 'isfile')\n", (45151, 45170), False, 'import mock\n'), ((45388, 45427), 'mock.patch.object', 'mock.patch.object', (['os.path', '"""sep"""', '"""\\\\"""'], {}), "(os.path, 'sep', '\\\\')\n", (45405, 45427), False, 'import mock\n'), ((47140, 47233), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_qemu_img_info"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin, '_qemu_img_info',\n create=True)\n", (47157, 47233), False, 'import mock\n'), ((47675, 47768), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_qemu_img_info"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin, '_qemu_img_info',\n create=True)\n", (47692, 47768), False, 'import mock\n'), ((48218, 
48312), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_manageable_vol_location"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_manageable_vol_location')\n", (48235, 48312), False, 'import mock\n'), ((48337, 48424), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_is_volume_manageable"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_is_volume_manageable')\n", (48354, 48424), False, 'import mock\n'), ((49214, 49308), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_manageable_vol_location"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_manageable_vol_location')\n", (49231, 49308), False, 'import mock\n'), ((49333, 49420), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_is_volume_manageable"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_is_volume_manageable')\n", (49350, 49420), False, 'import mock\n'), ((49445, 49543), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_set_rw_permissions"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_set_rw_permissions', create=True)\n", (49462, 49543), False, 'import mock\n'), ((49568, 49664), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_managed_vol_expected_path"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_managed_vol_expected_path')\n", (49585, 49664), False, 'import mock\n'), ((49689, 49720), 'mock.patch.object', 'mock.patch.object', (['os', '"""rename"""'], {}), "(os, 'rename')\n", (49706, 49720), False, 'import mock\n'), ((50858, 50905), 'mock.patch.object', 'mock.patch.object', (['image_utils', '"""qemu_img_info"""'], {}), "(image_utils, 'qemu_img_info')\n", (50875, 50905), False, 'import mock\n'), ((51302, 51396), 'mock.patch.object', 'mock.patch.object', 
(['remotefs.RemoteFSManageableVolumesMixin', '"""_get_manageable_vol_location"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_manageable_vol_location')\n", (51319, 51396), False, 'import mock\n'), ((51421, 51521), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_rounded_manageable_image_size"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_rounded_manageable_image_size')\n", (51438, 51521), False, 'import mock\n'), ((52431, 52518), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_is_volume_manageable"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_is_volume_manageable')\n", (52448, 52518), False, 'import mock\n'), ((52543, 52643), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_rounded_manageable_image_size"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_rounded_manageable_image_size')\n", (52560, 52643), False, 'import mock\n'), ((52668, 52773), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_mount_point_for_share"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_mount_point_for_share', create=True)\n", (52685, 52773), False, 'import mock\n'), ((54326, 54431), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_mount_point_for_share"""'], {'create': '(True)'}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_mount_point_for_share', create=True)\n", (54343, 54431), False, 'import mock\n'), ((54456, 54544), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_manageable_volume"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_manageable_volume')\n", (54473, 54544), False, 'import mock\n'), ((54569, 54598), 'mock.patch.object', 'mock.patch.object', (['os', '"""walk"""'], {}), "(os, 'walk')\n", (54586, 
54598), False, 'import mock\n'), ((55861, 55956), 'mock.patch.object', 'mock.patch.object', (['remotefs.RemoteFSManageableVolumesMixin', '"""_get_share_manageable_volumes"""'], {}), "(remotefs.RemoteFSManageableVolumesMixin,\n '_get_share_manageable_volumes')\n", (55878, 55956), False, 'import mock\n'), ((55981, 56037), 'mock.patch.object', 'mock.patch.object', (['volume_utils', '"""paginate_entries_list"""'], {}), "(volume_utils, 'paginate_entries_list')\n", (55998, 56037), False, 'import mock\n'), ((1258, 1287), 'cinder.volume.drivers.remotefs.RemoteFSSnapDriver', 'remotefs.RemoteFSSnapDriver', ([], {}), '()\n', (1285, 1287), False, 'from cinder.volume.drivers import remotefs\n'), ((1327, 1338), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1336, 1338), False, 'import mock\n'), ((1371, 1382), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1380, 1382), False, 'import mock\n'), ((1414, 1425), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1423, 1425), False, 'import mock\n'), ((1450, 1477), 'cinder.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (1475, 1477), False, 'from cinder import context\n'), ((1507, 1580), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['self.context'], {'provider_location': '"""fake_share"""'}), "(self.context, provider_location='fake_share')\n", (1534, 1580), False, 'from cinder.tests.unit import fake_volume\n'), ((1627, 1685), 'os.path.join', 'os.path.join', (['self._FAKE_MNT_POINT', 'self._fake_volume.name'], {}), '(self._FAKE_MNT_POINT, self._fake_volume.name)\n', (1639, 1685), False, 'import os\n'), ((1762, 1807), 'cinder.tests.unit.fake_snapshot.fake_snapshot_obj', 'fake_snapshot.fake_snapshot_obj', (['self.context'], {}), '(self.context)\n', (1793, 1807), False, 'from cinder.tests.unit import fake_snapshot\n'), ((3089, 3131), 'os.path.basename', 'os.path.basename', (['self._fake_snapshot_path'], {}), '(self._fake_snapshot_path)\n', (3105, 3131), False, 'import os\n'), ((3277, 
3288), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3286, 3288), False, 'import mock\n'), ((3318, 3329), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3327, 3329), False, 'import mock\n'), ((3682, 3734), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'mock.sentinel.fake_info_path'}), '(return_value=mock.sentinel.fake_info_path)\n', (3691, 3734), False, 'import mock\n'), ((3786, 3849), 'mock.Mock', 'mock.Mock', ([], {'side_effect': '[fake_snap_img_info, fake_base_img_info]'}), '(side_effect=[fake_snap_img_info, fake_base_img_info])\n', (3795, 3849), False, 'import mock\n'), ((3904, 3948), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'self._FAKE_MNT_POINT'}), '(return_value=self._FAKE_MNT_POINT)\n', (3913, 3948), False, 'import mock\n'), ((4002, 4013), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4011, 4013), False, 'import mock\n'), ((4053, 4064), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4062, 4064), False, 'import mock\n'), ((4105, 4116), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4114, 4116), False, 'import mock\n'), ((4152, 4163), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4161, 4163), False, 'import mock\n'), ((4199, 4210), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4208, 4210), False, 'import mock\n'), ((4257, 4268), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4266, 4268), False, 'import mock\n'), ((4316, 4327), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4325, 4327), False, 'import mock\n'), ((8252, 8294), 'os.path.basename', 'os.path.basename', (['self._fake_snapshot_path'], {}), '(self._fake_snapshot_path)\n', (8268, 8294), False, 'import os\n'), ((8540, 8592), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'mock.sentinel.fake_info_path'}), '(return_value=mock.sentinel.fake_info_path)\n', (8549, 8592), False, 'import mock\n'), ((8645, 8683), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'fake_snap_info'}), '(return_value=fake_snap_info)\n', (8654, 8683), False, 'import mock\n'), ((8738, 8782), 'mock.Mock', 'mock.Mock', ([], {'return_value': 
'self._FAKE_MNT_POINT'}), '(return_value=self._FAKE_MNT_POINT)\n', (8747, 8782), False, 'import mock\n'), ((8836, 8847), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (8845, 8847), False, 'import mock\n'), ((9408, 9454), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'self._fake_volume_path'}), '(return_value=self._fake_volume_path)\n', (9417, 9454), False, 'import mock\n'), ((9625, 9636), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (9634, 9636), False, 'import mock\n'), ((9680, 9691), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (9689, 9691), False, 'import mock\n'), ((10992, 11034), 'os.path.basename', 'os.path.basename', (['self._fake_snapshot_path'], {}), '(self._fake_snapshot_path)\n', (11008, 11034), False, 'import os\n'), ((11083, 11135), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'mock.sentinel.fake_info_path'}), '(return_value=mock.sentinel.fake_info_path)\n', (11092, 11135), False, 'import mock\n'), ((11188, 11230), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'fake_snapshot_info'}), '(return_value=fake_snapshot_info)\n', (11197, 11230), False, 'import mock\n'), ((11287, 11298), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11296, 11298), False, 'import mock\n'), ((11346, 11357), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11355, 11357), False, 'import mock\n'), ((11398, 11409), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11407, 11409), False, 'import mock\n'), ((11460, 11506), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'self._fake_volume.name'}), '(return_value=self._fake_volume.name)\n', (11469, 11506), False, 'import mock\n'), ((11562, 11610), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'self._fake_snapshot_path'}), '(return_value=self._fake_snapshot_path)\n', (11571, 11610), False, 'import mock\n'), ((11663, 11674), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11672, 11674), False, 'import mock\n'), ((13613, 13624), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (13622, 13624), False, 'import mock\n'), ((16010, 16021), 'mock.Mock', 
'mock.Mock', ([], {}), '()\n', (16019, 16021), False, 'import mock\n'), ((16426, 16437), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (16435, 16437), False, 'import mock\n'), ((24528, 24571), 'os.path.join', 'os.path.join', (['fake_vol_dir', 'fake_active_img'], {}), '(fake_vol_dir, fake_active_img)\n', (24540, 24571), False, 'import os\n'), ((27524, 27565), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['self.context'], {}), '(self.context)\n', (27551, 27565), False, 'from cinder.tests.unit import fake_volume\n'), ((27646, 27739), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['self.context'], {'id': 'src_vref_id', 'name': "('volume-%s' % src_vref_id)"}), "(self.context, id=src_vref_id, name='volume-%s' %\n src_vref_id)\n", (27673, 27739), False, 'from cinder.tests.unit import fake_volume\n'), ((28079, 28122), 'collections.namedtuple', 'collections.namedtuple', (['"""Volume"""', 'vol_attrs'], {}), "('Volume', vol_attrs)\n", (28101, 28122), False, 'import collections\n'), ((29164, 29175), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (29173, 29175), False, 'import mock\n'), ((29220, 29231), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (29229, 29231), False, 'import mock\n'), ((31948, 31990), 'os.path.basename', 'os.path.basename', (['self._fake_snapshot_path'], {}), '(self._fake_snapshot_path)\n', (31964, 31990), False, 'import os\n'), ((32100, 32111), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (32109, 32111), False, 'import mock\n'), ((33604, 33615), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (33613, 33615), False, 'import mock\n'), ((33659, 33670), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (33668, 33670), False, 'import mock\n'), ((34821, 34849), 'cinder.volume.drivers.remotefs.RemoteFSPoolMixin', 'remotefs.RemoteFSPoolMixin', ([], {}), '()\n', (34847, 34849), False, 'from cinder.volume.drivers import remotefs\n'), ((34874, 34901), 'cinder.context.get_admin_context', 'context.get_admin_context', 
([], {}), '()\n', (34899, 34901), False, 'from cinder import context\n'), ((35714, 35807), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['self.context'], {'provider_location': '"""fake_share"""', 'host': 'fake_host'}), "(self.context, provider_location='fake_share',\n host=fake_host)\n", (35741, 35807), False, 'from cinder.tests.unit import fake_volume\n'), ((36188, 36199), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (36197, 36199), False, 'import mock\n'), ((36669, 36721), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'mock.sentinel.driver_version'}), '(return_value=mock.sentinel.driver_version)\n', (36678, 36721), False, 'import mock\n'), ((36781, 36792), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (36790, 36792), False, 'import mock\n'), ((36835, 36928), 'mock.Mock', 'mock.Mock', ([], {'return_value': '(share_total_gb << 30, share_free_gb << 30, share_used_gb << 30)'}), '(return_value=(share_total_gb << 30, share_free_gb << 30, \n share_used_gb << 30))\n', (36844, 36928), False, 'import mock\n'), ((37038, 37085), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'mock.sentinel.pool_name'}), '(return_value=mock.sentinel.pool_name)\n', (37047, 37085), False, 'import mock\n'), ((38711, 38743), 'cinder.volume.drivers.remotefs.RevertToSnapshotMixin', 'remotefs.RevertToSnapshotMixin', ([], {}), '()\n', (38741, 38743), False, 'from cinder.volume.drivers import remotefs\n'), ((38783, 38794), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (38792, 38794), False, 'import mock\n'), ((38827, 38838), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (38836, 38838), False, 'import mock\n'), ((38870, 38881), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (38879, 38881), False, 'import mock\n'), ((38906, 38933), 'cinder.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (38931, 38933), False, 'from cinder import context\n'), ((38963, 39036), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', 
(['self.context'], {'provider_location': '"""fake_share"""'}), "(self.context, provider_location='fake_share')\n", (38990, 39036), False, 'from cinder.tests.unit import fake_volume\n'), ((39083, 39141), 'os.path.join', 'os.path.join', (['self._FAKE_MNT_POINT', 'self._fake_volume.name'], {}), '(self._FAKE_MNT_POINT, self._fake_volume.name)\n', (39095, 39141), False, 'import os\n'), ((39218, 39263), 'cinder.tests.unit.fake_snapshot.fake_snapshot_obj', 'fake_snapshot.fake_snapshot_obj', (['self.context'], {}), '(self.context)\n', (39249, 39263), False, 'from cinder.tests.unit import fake_snapshot\n'), ((39426, 39468), 'os.path.basename', 'os.path.basename', (['self._fake_snapshot_path'], {}), '(self._fake_snapshot_path)\n', (39442, 39468), False, 'import os\n'), ((41003, 41014), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (41012, 41014), False, 'import mock\n'), ((42755, 42796), 'cinder.volume.drivers.remotefs.RemoteFSManageableVolumesMixin', 'remotefs.RemoteFSManageableVolumesMixin', ([], {}), '()\n', (42794, 42796), False, 'from cinder.volume.drivers import remotefs\n'), ((46356, 46406), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['mock.sentinel.context'], {}), '(mock.sentinel.context)\n', (46383, 46406), False, 'from cinder.tests.unit import fake_volume\n'), ((46485, 46540), 'os.path.join', 'os.path.join', (["vol_location['mountpoint']", 'fake_vol.name'], {}), "(vol_location['mountpoint'], fake_vol.name)\n", (46497, 46540), False, 'import os\n'), ((48003, 48050), 'mock.Mock', 'mock.Mock', ([], {'backing_file': 'None', 'file_format': '"""raw"""'}), "(backing_file=None, file_format='raw')\n", (48012, 48050), False, 'import mock\n'), ((48596, 48646), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['mock.sentinel.context'], {}), '(mock.sentinel.context)\n', (48623, 48646), False, 'from cinder.tests.unit import fake_volume\n'), ((50036, 50086), 'cinder.tests.unit.fake_volume.fake_volume_obj', 
'fake_volume.fake_volume_obj', (['mock.sentinel.context'], {}), '(mock.sentinel.context)\n', (50063, 50086), False, 'from cinder.tests.unit import fake_volume\n'), ((55174, 55202), 're.compile', 're.compile', (['""".*\\\\.(?:vhdx)$"""'], {}), "('.*\\\\.(?:vhdx)$')\n", (55184, 55202), False, 'import re\n'), ((56136, 56186), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['mock.sentinel.context'], {}), '(mock.sentinel.context)\n', (56163, 56186), False, 'from cinder.tests.unit import fake_volume\n'), ((10643, 10681), 'mock.call', 'mock.call', (['*command1'], {'run_as_root': '(True)'}), '(*command1, run_as_root=True)\n', (10652, 10681), False, 'import mock\n'), ((10700, 10738), 'mock.call', 'mock.call', (['*command2'], {'run_as_root': '(True)'}), '(*command2, run_as_root=True)\n', (10709, 10738), False, 'import mock\n'), ((10757, 10795), 'mock.call', 'mock.call', (['*command3'], {'run_as_root': '(True)'}), '(*command3, run_as_root=True)\n', (10766, 10795), False, 'import mock\n'), ((14087, 14141), 'mock.patch.object', 'mock.patch.object', (['self._driver', '"""_do_create_snapshot"""'], {}), "(self._driver, '_do_create_snapshot')\n", (14104, 14141), False, 'import mock\n'), ((32492, 32532), 'os.path.basename', 'os.path.basename', (['self._fake_volume_path'], {}), '(self._fake_volume_path)\n', (32508, 32532), False, 'import os\n'), ((43941, 43988), 'os.path.normpath', 'os.path.normpath', (['"""/fake_mountpoint/subdir/img"""'], {}), "('/fake_mountpoint/subdir/img')\n", (43957, 43988), False, 'import os\n'), ((52152, 52184), 'mock.Mock', 'mock.Mock', ([], {'size': 'mock.sentinel.sz'}), '(size=mock.sentinel.sz)\n', (52161, 52184), False, 'import mock\n'), ((6366, 6404), 'os.path.basename', 'os.path.basename', (['fake_upper_snap_path'], {}), '(fake_upper_snap_path)\n', (6382, 6404), False, 'import os\n'), ((6975, 6999), 'copy.deepcopy', 'copy.deepcopy', (['fake_info'], {}), '(fake_info)\n', (6988, 6999), False, 'import copy\n'), ((7176, 
7218), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'fake_backing_chain'}), '(return_value=fake_backing_chain)\n', (7185, 7218), False, 'import mock\n'), ((9766, 9814), 'mock.Mock', 'mock.Mock', ([], {'file_format': 'mock.sentinel.backing_fmt'}), '(file_format=mock.sentinel.backing_fmt)\n', (9775, 9814), False, 'import mock\n'), ((14651, 14663), 'mock.call', 'mock.call', (['(1)'], {}), '(1)\n', (14660, 14663), False, 'import mock\n'), ((14665, 14677), 'mock.call', 'mock.call', (['(1)'], {}), '(1)\n', (14674, 14677), False, 'import mock\n'), ((18194, 18229), 'mock.call', 'mock.call', (['mock.sentinel.image_path'], {}), '(mock.sentinel.image_path)\n', (18203, 18229), False, 'import mock\n'), ((21868, 21910), 'cinder.volume.drivers.remotefs.BackingFileTemplate', 'remotefs.BackingFileTemplate', (['ext_template'], {}), '(ext_template)\n', (21896, 21910), False, 'from cinder.volume.drivers import remotefs\n'), ((23756, 23798), 'cinder.volume.drivers.remotefs.BackingFileTemplate', 'remotefs.BackingFileTemplate', (['ext_template'], {}), '(ext_template)\n', (23784, 23798), False, 'from cinder.volume.drivers import remotefs\n'), ((46897, 46948), 'mock.Mock', 'mock.Mock', ([], {'backing_file': 'None', 'file_format': '"""fakefmt"""'}), "(backing_file=None, file_format='fakefmt')\n", (46906, 46948), False, 'import mock\n'), ((47016, 47073), 'mock.Mock', 'mock.Mock', ([], {'backing_file': '"""backing_file"""', 'file_format': '"""raw"""'}), "(backing_file='backing_file', file_format='raw')\n", (47025, 47073), False, 'import mock\n'), ((55584, 55673), 'mock.call', 'mock.call', (['mock.sentinel.share', '"""/fake-mountpoint/volume-1.vhdx"""', 'mock.sentinel.vol1'], {}), "(mock.sentinel.share, '/fake-mountpoint/volume-1.vhdx', mock.\n sentinel.vol1)\n", (55593, 55673), False, 'import mock\n'), ((55729, 55806), 'mock.call', 'mock.call', (['mock.sentinel.share', '"""/fake-mountpoint/subdir/volume-3.vhdx"""', 'None'], {}), "(mock.sentinel.share, 
'/fake-mountpoint/subdir/volume-3.vhdx', None)\n", (55738, 55806), False, 'import mock\n'), ((56978, 57017), 'mock.call', 'mock.call', (['share', 'exp_managed_vols_dict'], {}), '(share, exp_managed_vols_dict)\n', (56987, 57017), False, 'import mock\n'), ((18434, 18457), 'mock.call', 'mock.call', (['backing_file'], {}), '(backing_file)\n', (18443, 18457), False, 'import mock\n'), ((30494, 30513), 'mock.call', 'mock.call', (['src_vref'], {}), '(src_vref)\n', (30503, 30513), False, 'import mock\n'), ((30515, 30536), 'mock.call', 'mock.call', (['volume_ref'], {}), '(volume_ref)\n', (30524, 30536), False, 'import mock\n')]
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper functions for running models in a distributed setting."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import string
import tensorflow as tf
def get_distribution_strategy(num_gpus,
                              all_reduce_alg=None,
                              turn_off_distribution_strategy=False):
  """Select a DistributionStrategy matching the requested GPU count.

  Args:
    num_gpus: Number of GPUs to run this model.
    all_reduce_alg: Specify which algorithm to use when performing all-reduce.
      See tf.contrib.distribute.AllReduceCrossDeviceOps for available
      algorithms. If None, DistributionStrategy will choose based on device
      topology.
    turn_off_distribution_strategy: when set to True, do not use any
      distribution strategy. Note that when it is True, and num_gpus is
      larger than 1, it will raise a ValueError.

  Returns:
    A tf.contrib.distribute.DistributionStrategy object, or None when
    distribution strategies are turned off for 0 or 1 GPUs.

  Raises:
    ValueError: if turn_off_distribution_strategy is True and num_gpus is
      larger than 1
  """
  # Single-device cases (CPU-only or exactly one GPU).
  if num_gpus in (0, 1):
    if turn_off_distribution_strategy:
      return None
    device = "device:CPU:0" if num_gpus == 0 else "device:GPU:0"
    return tf.contrib.distribute.OneDeviceStrategy(device)

  # Multi-GPU: a distribution strategy is mandatory.
  if turn_off_distribution_strategy:
    raise ValueError("When {} GPUs are specified, "
                     "turn_off_distribution_strategy flag cannot be set to"
                     "True.".format(num_gpus))

  devices = ["device:GPU:%d" % i for i in range(num_gpus)]
  if all_reduce_alg:
    cross_device_ops = tf.contrib.distribute.AllReduceCrossDeviceOps(
        all_reduce_alg, num_packs=2)
    return tf.distribute.MirroredStrategy(
        devices=devices, cross_device_ops=cross_device_ops)
  return tf.distribute.MirroredStrategy(devices=devices)
def per_device_batch_size(batch_size, num_gpus):
  """Divide a global batch size evenly among the available GPUs.

  Note that distribution strategy handles this automatically when used with
  Keras. For using with Estimator, we need the per-GPU batch size.

  Args:
    batch_size: Global batch size to be divided among devices. This should be
      equal to num_gpus times the single-GPU batch_size for multi-gpu training.
    num_gpus: How many GPUs are used with DistributionStrategies.

  Returns:
    Batch size per device.

  Raises:
    ValueError: if batch_size is not divisible by number of devices
  """
  # With zero or one GPU there is nothing to divide.
  if num_gpus <= 1:
    return batch_size

  quotient, leftover = divmod(batch_size, num_gpus)
  if leftover:
    # Suggest the nearest smaller batch size that divides evenly.
    raise ValueError(
        ("When running with multiple GPUs, batch size "
         "must be a multiple of the number of available GPUs. Found {} "
         "GPUs with a batch size of {}; try --batch_size={} instead."
         ).format(num_gpus, batch_size, batch_size - leftover))
  return int(quotient)
# The `SyntheticDataset` is a temporary solution for generating synthetic data
# directly on devices. It is only useful for Keras with Distribution
# Strategies. We will have better support in `tf.data` or Distribution Strategy
# later.
class SyntheticDataset(object):
  """Caches one batch of a dataset in per-device variables and replays it."""

  def __init__(self, dataset, split_by=1):
    """Capture a single batch of `dataset`, split per replica, as variables.

    Args:
      dataset: the tf.data dataset to sample one element from.
      split_by: number of replicas to split the captured batch across; only
        the first shard is kept.
    """
    self._input_data = {}
    # dataset.take(1) doesn't have GPU kernel, so stage on the host CPU.
    with tf.device("device:CPU:0"):
      sample = tf.data.experimental.get_single_element(dataset.take(1))
      flat_sample = tf.nest.flatten(sample)
      cached_vars = []
      self._initializers = []
      for element in flat_sample:
        shard = tf.split(element, num_or_size_splits=split_by, axis=0)[0]
        # Variables need static shapes; fail loudly otherwise.
        assert shard.shape.is_fully_defined(), shard.shape
        var = tf.get_local_variable(self.random_name(), initializer=shard)  # pylint: disable=cell-var-from-loop
        cached_vars.append(var)
        self._initializers.append(var.initializer)
      self._input_data = tf.nest.pack_sequence_as(sample, cached_vars)

  def get_next(self):
    """Return the cached synthetic batch (same structure as the dataset)."""
    return self._input_data

  def initialize(self):
    """Return the variable initializer op(s); a no-op when executing eagerly."""
    if tf.executing_eagerly():
      return tf.no_op()
    return self._initializers

  def random_name(self, size=10, chars=string.ascii_uppercase + string.digits):
    """Return a random `size`-character name drawn from `chars`."""
    return "".join([random.choice(chars) for _ in range(size)])
def _monkey_patch_dataset_method(strategy):
"""Monkey-patch `strategy`'s `make_dataset_iterator` method."""
def make_dataset_iterator(self, dataset):
tf.logging.info("Using pure synthetic data.")
with self.scope():
if self.extended._global_batch_size: # pylint: disable=protected-access
return SyntheticDataset(dataset, self.num_replicas_in_sync)
else:
return SyntheticDataset(dataset)
strategy.org_make_dataset_iterator = strategy.make_dataset_iterator
strategy.make_dataset_iterator = make_dataset_iterator
def _undo_monkey_patch_dataset_method(strategy):
if hasattr(strategy, "org_make_dataset_iterator"):
strategy.make_dataset_iterator = strategy.org_make_dataset_iterator
def set_up_synthetic_data():
  """Patch all known strategy classes to serve synthetic data."""
  for strategy_cls in (tf.distribute.MirroredStrategy,
                       tf.contrib.distribute.MirroredStrategy,
                       tf.contrib.distribute.OneDeviceStrategy):
    _monkey_patch_dataset_method(strategy_cls)
def undo_set_up_synthetic_data():
  """Undo the synthetic-data patching applied by set_up_synthetic_data."""
  for strategy_cls in (tf.distribute.MirroredStrategy,
                       tf.contrib.distribute.MirroredStrategy,
                       tf.contrib.distribute.OneDeviceStrategy):
    _undo_monkey_patch_dataset_method(strategy_cls)
|
[
"tensorflow.contrib.distribute.AllReduceCrossDeviceOps",
"tensorflow.logging.info",
"tensorflow.device",
"tensorflow.distribute.MirroredStrategy",
"random.choice",
"tensorflow.no_op",
"tensorflow.nest.flatten",
"tensorflow.executing_eagerly",
"tensorflow.contrib.distribute.OneDeviceStrategy",
"tensorflow.split",
"tensorflow.nest.pack_sequence_as"
] |
[((4366, 4389), 'tensorflow.nest.flatten', 'tf.nest.flatten', (['tensor'], {}), '(tensor)\n', (4381, 4389), True, 'import tensorflow as tf\n'), ((4823, 4870), 'tensorflow.nest.pack_sequence_as', 'tf.nest.pack_sequence_as', (['tensor', 'variable_data'], {}), '(tensor, variable_data)\n', (4847, 4870), True, 'import tensorflow as tf\n'), ((4954, 4976), 'tensorflow.executing_eagerly', 'tf.executing_eagerly', ([], {}), '()\n', (4974, 4976), True, 'import tensorflow as tf\n'), ((5347, 5392), 'tensorflow.logging.info', 'tf.logging.info', (['"""Using pure synthetic data."""'], {}), "('Using pure synthetic data.')\n", (5362, 5392), True, 'import tensorflow as tf\n'), ((1902, 1957), 'tensorflow.contrib.distribute.OneDeviceStrategy', 'tf.contrib.distribute.OneDeviceStrategy', (['"""device:CPU:0"""'], {}), "('device:CPU:0')\n", (1941, 1957), True, 'import tensorflow as tf\n'), ((4249, 4274), 'tensorflow.device', 'tf.device', (['"""device:CPU:0"""'], {}), "('device:CPU:0')\n", (4258, 4274), True, 'import tensorflow as tf\n'), ((4991, 5001), 'tensorflow.no_op', 'tf.no_op', ([], {}), '()\n', (4999, 5001), True, 'import tensorflow as tf\n'), ((2060, 2115), 'tensorflow.contrib.distribute.OneDeviceStrategy', 'tf.contrib.distribute.OneDeviceStrategy', (['"""device:GPU:0"""'], {}), "('device:GPU:0')\n", (2099, 2115), True, 'import tensorflow as tf\n'), ((4487, 4535), 'tensorflow.split', 'tf.split', (['t'], {'num_or_size_splits': 'split_by', 'axis': '(0)'}), '(t, num_or_size_splits=split_by, axis=0)\n', (4495, 4535), True, 'import tensorflow as tf\n'), ((5144, 5164), 'random.choice', 'random.choice', (['chars'], {}), '(chars)\n', (5157, 5164), False, 'import random\n'), ((2690, 2737), 'tensorflow.distribute.MirroredStrategy', 'tf.distribute.MirroredStrategy', ([], {'devices': 'devices'}), '(devices=devices)\n', (2720, 2737), True, 'import tensorflow as tf\n'), ((2576, 2650), 'tensorflow.contrib.distribute.AllReduceCrossDeviceOps', 'tf.contrib.distribute.AllReduceCrossDeviceOps', 
(['all_reduce_alg'], {'num_packs': '(2)'}), '(all_reduce_alg, num_packs=2)\n', (2621, 2650), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/python3
import sys
import getopt
import yaml
import os
import random
import string
def randomString(stringLength=10):
    """Return a random lowercase ASCII string of `stringLength` characters."""
    return "".join(
        [random.choice(string.ascii_lowercase) for _ in range(stringLength)])
def _plugin_config(plugin_name, cr, namespace, cdrd_path, csv_path,
                   proxy_image, deploy_dir):
    """Return the scorecard settings dict for one plugin ("basic" or "olm").

    Both plugins used byte-identical settings in the original script; build
    them in one place instead of duplicating the literal.
    """
    return {plugin_name: {
        "olm-deployed": True,
        "namespace": namespace,
        "crds-dir": cdrd_path,
        "cr-manifest": [cr],
        "proxy-image": proxy_image,
        "bundle": deploy_dir,
        "proxy-pull-policy": "Never",
        "csv-path": csv_path,
        "init-timeout": 180
    }}


def main(argv):
    """Write one scorecard bundle YAML file per custom-resource manifest.

    Scans --crds for files ending in "cr.yaml" and, for each one, writes a
    randomly named "*.bundle.yaml" scorecard config into --bundle.

    Exit codes:
      0 -- at least one CR manifest was found and processed
      1 -- no CR manifests were found under --crds
      2 -- invalid command-line arguments
    """
    crds_path = None
    csv_path = None
    namespace = None
    bundle_path = None
    proxy_image = None
    deploy_dir = None
    cdrd_path = None
    try:
        opts, _ = getopt.getopt(
            argv, "c:v:n:b:p:d:r:",
            ["cdrd=", "crds=", "bundle=", "csvfile=", "namespace=",
             "proxy=", "deploy-dir="])
    except getopt.GetoptError as e:
        print(e)
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-c", "--cdrd"):
            cdrd_path = arg
        elif opt in ("-r", "--crds"):
            crds_path = arg
        elif opt in ("-v", "--csvfile"):
            csv_path = arg
        elif opt in ("-n", "--namespace"):
            namespace = arg
        elif opt in ("-b", "--bundle"):
            bundle_path = arg
        elif opt in ("-p", "--proxy"):
            proxy_image = arg
        elif opt in ("-d", "--deploy-dir"):
            deploy_dir = arg
    # One comprehension replaces the original build-then-filter pipeline
    # (which mapped non-matching names to None and filtered them back out).
    crds = [os.path.join(crds_path, filename)
            for filename in os.listdir(crds_path)
            if filename.endswith("cr.yaml")]
    for cr in crds:
        scorecard_bundle = {
            "scorecard": {
                "output": "text",
                "plugins": [
                    _plugin_config("basic", cr, namespace, cdrd_path,
                                   csv_path, proxy_image, deploy_dir),
                    _plugin_config("olm", cr, namespace, cdrd_path,
                                   csv_path, proxy_image, deploy_dir)
                ]
            }
        }
        out_path = os.path.join(bundle_path, randomString() + ".bundle.yaml")
        with open(out_path, 'w') as write_file:
            print(yaml.safe_dump(scorecard_bundle, default_flow_style=False),
                  file=write_file)
    # The original tested `crds is not None`, which is always true for a list,
    # so the failure exit was unreachable dead code. Signal failure (exit 1)
    # when no CR manifests were found, as the dead branch evidently intended.
    sys.exit(0 if crds else 1)
if __name__ == "__main__":
    # Skip argv[0] (the program name); pass only the actual CLI arguments.
    main(sys.argv[1:])
|
[
"getopt.getopt",
"yaml.safe_dump",
"random.choice",
"os.path.join",
"os.listdir",
"sys.exit"
] |
[((1304, 1325), 'os.listdir', 'os.listdir', (['crds_path'], {}), '(crds_path)\n', (1314, 1325), False, 'import os\n'), ((591, 714), 'getopt.getopt', 'getopt.getopt', (['argv', '"""c:v:n:b:p:d:r:"""', "['cdrd=', 'crds=', 'bundle=', 'csvfile=', 'namespace=', 'proxy=', 'deploy-dir='\n ]"], {}), "(argv, 'c:v:n:b:p:d:r:', ['cdrd=', 'crds=', 'bundle=',\n 'csvfile=', 'namespace=', 'proxy=', 'deploy-dir='])\n", (604, 714), False, 'import getopt\n'), ((2920, 2931), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2928, 2931), False, 'import sys\n'), ((2950, 2961), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2958, 2961), False, 'import sys\n'), ((238, 260), 'random.choice', 'random.choice', (['letters'], {}), '(letters)\n', (251, 260), False, 'import random\n'), ((772, 783), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (780, 783), False, 'import sys\n'), ((1338, 1371), 'os.path.join', 'os.path.join', (['crds_path', 'filename'], {}), '(crds_path, filename)\n', (1350, 1371), False, 'import os\n'), ((2809, 2867), 'yaml.safe_dump', 'yaml.safe_dump', (['scorecard_bundle'], {'default_flow_style': '(False)'}), '(scorecard_bundle, default_flow_style=False)\n', (2823, 2867), False, 'import yaml\n')]
|
from keras.models import Sequential, Model
from keras.layers import Dense, Activation, Input
def get_bonds_model():
    """Build the bond-energy regressor: a 17-input MLP (128-256-128)
    with a single linear output unit."""
    stack = [
        Dense(128, activation='relu', input_dim=17),
        Dense(256, activation='relu'),
        Dense(128, activation='relu'),
        Dense(1, activation='linear'),
    ]
    return Sequential(stack)
def get_angles_model():
    """Build the angle-energy regressor: a 27-input MLP (128-350-128)
    with a single linear output unit."""
    stack = [
        Dense(128, activation='relu', input_dim=27),
        Dense(350, activation='relu'),
        Dense(128, activation='relu'),
        Dense(1, activation='linear'),
    ]
    return Sequential(stack)
def get_nonbonds_model():
    """Build the non-bonded-energy regressor: a 17-input MLP (128-256-128)
    with a single linear output unit."""
    stack = [
        Dense(128, activation='relu', input_dim=17),
        Dense(256, activation='relu'),
        Dense(128, activation='relu'),
        Dense(1, activation='linear'),
    ]
    return Sequential(stack)
def get_dihedrals_model():
    """Build the dihedral-energy regressor: a 38-input MLP (128-512-128)
    with a single linear output unit."""
    stack = [
        Dense(128, activation='relu', input_dim=38),
        Dense(512, activation='relu'),
        Dense(128, activation='relu'),
        Dense(1, activation='linear'),
    ]
    return Sequential(stack)
|
[
"keras.models.Sequential",
"keras.layers.Dense"
] |
[((129, 141), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (139, 141), False, 'from keras.models import Sequential, Model\n'), ((390, 402), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (400, 402), False, 'from keras.models import Sequential, Model\n'), ((653, 665), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (663, 665), False, 'from keras.models import Sequential, Model\n'), ((917, 929), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (927, 929), False, 'from keras.models import Sequential, Model\n'), ((156, 199), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""', 'input_dim': '(17)'}), "(128, activation='relu', input_dim=17)\n", (161, 199), False, 'from keras.layers import Dense, Activation, Input\n'), ((215, 244), 'keras.layers.Dense', 'Dense', (['(256)'], {'activation': '"""relu"""'}), "(256, activation='relu')\n", (220, 244), False, 'from keras.layers import Dense, Activation, Input\n'), ((260, 289), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (265, 289), False, 'from keras.layers import Dense, Activation, Input\n'), ((305, 334), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""linear"""'}), "(1, activation='linear')\n", (310, 334), False, 'from keras.layers import Dense, Activation, Input\n'), ((417, 460), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""', 'input_dim': '(27)'}), "(128, activation='relu', input_dim=27)\n", (422, 460), False, 'from keras.layers import Dense, Activation, Input\n'), ((476, 505), 'keras.layers.Dense', 'Dense', (['(350)'], {'activation': '"""relu"""'}), "(350, activation='relu')\n", (481, 505), False, 'from keras.layers import Dense, Activation, Input\n'), ((521, 550), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (526, 550), False, 'from keras.layers import Dense, Activation, Input\n'), ((566, 595), 
'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""linear"""'}), "(1, activation='linear')\n", (571, 595), False, 'from keras.layers import Dense, Activation, Input\n'), ((680, 723), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""', 'input_dim': '(17)'}), "(128, activation='relu', input_dim=17)\n", (685, 723), False, 'from keras.layers import Dense, Activation, Input\n'), ((739, 768), 'keras.layers.Dense', 'Dense', (['(256)'], {'activation': '"""relu"""'}), "(256, activation='relu')\n", (744, 768), False, 'from keras.layers import Dense, Activation, Input\n'), ((784, 813), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (789, 813), False, 'from keras.layers import Dense, Activation, Input\n'), ((829, 858), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""linear"""'}), "(1, activation='linear')\n", (834, 858), False, 'from keras.layers import Dense, Activation, Input\n'), ((944, 987), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""', 'input_dim': '(38)'}), "(128, activation='relu', input_dim=38)\n", (949, 987), False, 'from keras.layers import Dense, Activation, Input\n'), ((1003, 1032), 'keras.layers.Dense', 'Dense', (['(512)'], {'activation': '"""relu"""'}), "(512, activation='relu')\n", (1008, 1032), False, 'from keras.layers import Dense, Activation, Input\n'), ((1048, 1077), 'keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (1053, 1077), False, 'from keras.layers import Dense, Activation, Input\n'), ((1093, 1122), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""linear"""'}), "(1, activation='linear')\n", (1098, 1122), False, 'from keras.layers import Dense, Activation, Input\n')]
|
"""
ft_console.py
==========
Adaptation of Python's console found in code.py so that it can be
used to show some "friendly" tracebacks.
"""
import os
import platform
import sys
import traceback
from code import InteractiveConsole
import codeop # need to import to exclude from tracebacks
import friendly_traceback
from . import source_cache
from .console_helpers import helpers
from .ft_gettext import current_lang
def type_friendly():
    """Return the translated hint appended to the console banner."""
    translate = current_lang.translate
    return translate("Type 'Friendly' for help on special functions/methods.")
# Banner printed when the console starts; shows both the
# friendly-traceback version and the running Python version.
BANNER = "\nFriendly-traceback Console version {}. [Python version: {}]\n".format(
    friendly_traceback.__version__, platform.python_version()
)

# Saved reference to the original displayhook so rich_displayhook can
# delegate to it for values it does not handle itself.
_old_displayhook = sys.displayhook
def rich_displayhook(value):
    """Custom display hook intended to show some brief function descriptions
    that can be translated into various languages, for functions that have
    a custom '__rich_repr__' attribute.

    Compatible with Rich (https://github.com/willmcgugan/rich)
    """
    # Local import keeps the module's top-level import block untouched.
    import types

    if value is None:
        return
    # Fix: compare against the real function type instead of the fragile
    # string form str(type(value)) == "<class 'function'>".
    if isinstance(value, types.FunctionType) and hasattr(value, "__rich_repr__"):
        print(f"{value.__name__}(): {value.__rich_repr__()[0]}")
        return
    _old_displayhook(value)
class FriendlyTracebackConsole(InteractiveConsole):
    """InteractiveConsole variant whose tracebacks are rendered by
    friendly_traceback instead of the standard machinery."""
    def __init__(self, local_vars=None, formatter="repl", displayhook=None):
        """This class builds upon Python's code.InteractiveConsole
        so as to provide friendly tracebacks. It keeps track
        of code fragment executed by treating each of them as
        an individual source file.
        """
        _ = current_lang.translate  # NOTE(review): bound but unused here
        # Frames originating in codeop are an implementation detail: hide them.
        friendly_traceback.exclude_file_from_traceback(codeop.__file__)
        self.fake_filename = "<friendly-console:%d>"
        self.counter = 1
        friendly_traceback.set_formatter(formatter)
        if displayhook is not None:
            sys.displayhook = displayhook
        super().__init__(locals=local_vars)
    def push(self, line):
        """Push a line to the interpreter.
        The line should not have a trailing newline; it may have
        internal newlines. The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source. If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended. The return
        value is True if more input is required, False if the line was dealt
        with in some way (this is the same as runsource()).
        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        # Each valid code sample is saved with its own fake filename.
        # They are numbered consecutively to help understanding
        # the traceback history.
        # If self.counter was not updated, it means that the previous
        # code sample was not valid and we reuse the same file name
        filename = self.fake_filename % self.counter
        source_cache.cache.add(filename, source)
        more = self.runsource(source, filename)
        if not more:
            self.resetbuffer()
            self.counter += 1
        return more
    def runsource(self, source, filename="<friendly-console>", symbol="single"):
        """Compile and run some source in the interpreter.
        Arguments are as for compile_command().
        One several things can happen:
        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError). A syntax traceback
        will be printed .
        2) The input is incomplete, and more input is required;
        compile_command() returned None. Nothing happens.
        3) The input is complete; compile_command() returned a code
        object. The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).
        The return value is True in case 2, False in the other cases (unless
        an exception is raised). The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.
        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            friendly_traceback.explain_traceback()
            return False
        if code is None:
            # Case 2
            return True
        # Case 3
        self.runcode(code)
        return False
    def runcode(self, code):
        """Execute a code object.
        When an exception occurs, friendly_traceback.explain_traceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which, unlike the case for the original version in the
        standard library, cleanly exits the program. This is done
        so as to avoid our Friendly's exception hook to intercept
        it and confuse the users.
        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.
        """
        _ = current_lang.translate  # NOTE(review): bound but unused here
        try:
            exec(code, self.locals)
        except SystemExit:
            # os._exit bypasses any installed excepthook so the exit is clean.
            os._exit(1)  # noqa -pycharm
        except Exception:  # noqa
            try:
                friendly_traceback.explain_traceback()
            except Exception:  # noqa
                # friendly itself failed: fall back to the stdlib traceback.
                print("Friendly Internal Error")
                print("-" * 60)
                traceback.print_exc()
                print("-" * 60)
    # The following two methods are never used in this class, but they are
    # defined in the parent class. The following are the equivalent methods
    # that can be used if an explicit call is desired for some reason.
    def showsyntaxerror(self, filename=None):
        """Render the current syntax error through friendly_traceback."""
        friendly_traceback.explain_traceback()
    def showtraceback(self):
        """Render the current exception through friendly_traceback."""
        friendly_traceback.explain_traceback()
def start_console(
    local_vars=None,
    formatter="repl",
    include="friendly_tb",
    lang="en",
    banner=None,
    displayhook=None,
):
    """Starts a console; modified from code.interact"""
    # from . import config
    chosen_banner = BANNER + type_friendly() + "\n" if banner is None else banner
    chosen_hook = rich_displayhook if displayhook is None else displayhook
    friendly_traceback.install(include=include, lang=lang)
    if local_vars is not None:
        # Make sure we don't overwrite with our own functions
        helpers.update(local_vars)
    console = FriendlyTracebackConsole(
        local_vars=helpers, formatter=formatter, displayhook=chosen_hook
    )
    console.interact(banner=chosen_banner)
|
[
"friendly_traceback.set_formatter",
"platform.python_version",
"traceback.print_exc",
"os._exit",
"friendly_traceback.install",
"friendly_traceback.exclude_file_from_traceback",
"friendly_traceback.explain_traceback"
] |
[((665, 690), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (688, 690), False, 'import platform\n'), ((6490, 6544), 'friendly_traceback.install', 'friendly_traceback.install', ([], {'include': 'include', 'lang': 'lang'}), '(include=include, lang=lang)\n', (6516, 6544), False, 'import friendly_traceback\n'), ((1665, 1728), 'friendly_traceback.exclude_file_from_traceback', 'friendly_traceback.exclude_file_from_traceback', (['codeop.__file__'], {}), '(codeop.__file__)\n', (1711, 1728), False, 'import friendly_traceback\n'), ((1815, 1858), 'friendly_traceback.set_formatter', 'friendly_traceback.set_formatter', (['formatter'], {}), '(formatter)\n', (1847, 1858), False, 'import friendly_traceback\n'), ((5998, 6036), 'friendly_traceback.explain_traceback', 'friendly_traceback.explain_traceback', ([], {}), '()\n', (6034, 6036), False, 'import friendly_traceback\n'), ((6075, 6113), 'friendly_traceback.explain_traceback', 'friendly_traceback.explain_traceback', ([], {}), '()\n', (6111, 6113), False, 'import friendly_traceback\n'), ((4420, 4458), 'friendly_traceback.explain_traceback', 'friendly_traceback.explain_traceback', ([], {}), '()\n', (4456, 4458), False, 'import friendly_traceback\n'), ((5396, 5407), 'os._exit', 'os._exit', (['(1)'], {}), '(1)\n', (5404, 5407), False, 'import os\n'), ((5492, 5530), 'friendly_traceback.explain_traceback', 'friendly_traceback.explain_traceback', ([], {}), '()\n', (5528, 5530), False, 'import friendly_traceback\n'), ((5666, 5687), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (5685, 5687), False, 'import traceback\n')]
|
from django.urls import path
from . import views
# Route table for the authentication views; the route names allow the
# routes to be referenced elsewhere by name.
urlpatterns = [
    path("login/", views.signin, name="signin"),
    path("signup/", views.signup, name="signup"),
]
|
[
"django.urls.path"
] |
[((70, 113), 'django.urls.path', 'path', (['"""login/"""', 'views.signin'], {'name': '"""signin"""'}), "('login/', views.signin, name='signin')\n", (74, 113), False, 'from django.urls import path\n'), ((119, 163), 'django.urls.path', 'path', (['"""signup/"""', 'views.signup'], {'name': '"""signup"""'}), "('signup/', views.signup, name='signup')\n", (123, 163), False, 'from django.urls import path\n')]
|
from argparse import ArgumentParser
import os
import sys
import traceback
from tempfile import mkstemp
from shutil import move, copymode
from os import fdopen, remove, path
def rem_empty_lines(filename):
    """Remove blank lines that directly follow a line ending in '{'.

    Lines ending in a backslash (line continuations, e.g. multi-line
    #define macros) do not trigger removal for the following line.
    The file is rewritten in place (via a temporary file) only when at
    least one blank line was actually dropped.
    """
    changed = False   # fix: dropped stray trailing semicolons
    ignore_next = False
    fh, abs_path = mkstemp()
    with fdopen(fh, 'w') as new_file:
        with open(filename) as old_file:
            remove_now = False
            for line in old_file:
                linestripped = line.strip()
                if not remove_now or linestripped != '':
                    new_file.write(line)
                else:
                    changed = True
                remove_now = bool(linestripped.endswith('{') and not ignore_next)
                # don't remove blank lines after continuations (defines)
                ignore_next = bool(linestripped.endswith('\\'))
    if changed:
        copymode(filename, abs_path)
        remove(filename)
        move(abs_path, filename)
        # fix: f-strings had no placeholder ("(unknown)" literal) — report
        # which file was touched.
        print(f"File {filename} changed")
    else:
        print(f"File {filename} NOT changed")
        remove(abs_path)
def main(input, ext):
    """Strip blank-after-brace lines from *input*.

    If *input* is a directory, every file under it whose name ends with
    *ext* is processed; otherwise *input* itself is processed. An empty
    *input* exits immediately with status 0.
    """
    if input == '':
        # fix: use sys.exit instead of the site-module helper exit(),
        # which is not guaranteed to exist outside interactive sessions.
        sys.exit(0)
    if os.path.isdir(input):
        for dirpath, dirnames, filenames in os.walk(input):
            for filename in filenames:
                if filename.endswith(ext):
                    rem_empty_lines(os.path.join(dirpath, filename))
    else:
        rem_empty_lines(input)
if __name__ == '__main__':
    # CLI entry point: parse the target path and extension, then clean.
    parser = ArgumentParser(description='Removes empty lines after block')
    parser.add_argument('input', nargs='?', default='',
                        help='file/dir to check')
    parser.add_argument('-ext', type=str, metavar='string', nargs='?', default=".cpp",
                        help='file extension ')
    args = parser.parse_args()
    main(args.input, args.ext)
|
[
"os.remove",
"argparse.ArgumentParser",
"tempfile.mkstemp",
"os.path.join",
"os.path.isdir",
"os.walk",
"shutil.move",
"os.fdopen",
"shutil.copymode"
] |
[((270, 279), 'tempfile.mkstemp', 'mkstemp', ([], {}), '()\n', (277, 279), False, 'from tempfile import mkstemp\n'), ((1107, 1127), 'os.path.isdir', 'os.path.isdir', (['input'], {}), '(input)\n', (1120, 1127), False, 'import os\n'), ((1430, 1491), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Removes empty lines after block"""'}), "(description='Removes empty lines after block')\n", (1444, 1491), False, 'from argparse import ArgumentParser\n'), ((289, 304), 'os.fdopen', 'fdopen', (['fh', '"""w"""'], {}), "(fh, 'w')\n", (295, 304), False, 'from os import fdopen, remove, path\n'), ((830, 858), 'shutil.copymode', 'copymode', (['filename', 'abs_path'], {}), '(filename, abs_path)\n', (838, 858), False, 'from shutil import move, copymode\n'), ((867, 883), 'os.remove', 'remove', (['filename'], {}), '(filename)\n', (873, 883), False, 'from os import fdopen, remove, path\n'), ((892, 916), 'shutil.move', 'move', (['abs_path', 'filename'], {}), '(abs_path, filename)\n', (896, 916), False, 'from shutil import move, copymode\n'), ((1023, 1039), 'os.remove', 'remove', (['abs_path'], {}), '(abs_path)\n', (1029, 1039), False, 'from os import fdopen, remove, path\n'), ((1173, 1187), 'os.walk', 'os.walk', (['input'], {}), '(input)\n', (1180, 1187), False, 'import os\n'), ((1280, 1311), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (1292, 1311), False, 'import os\n')]
|
import requests
import urllib.parse
class Tweets:
    """Minimal client for a Twitter search endpoint.

    ``getdata()`` builds the headers and URL, performs the request and
    populates ``data`` (decoded JSON, or raw text on decode failure),
    ``is_success``/``is_error`` and the ``ratelimit`` object.
    """

    class RateLimit:
        """Mirror of the ``x-rate-limit-*`` response headers."""

        def __init__(self) -> None:
            self.EndpointLimit = None
            self.Remaining = None
            self.ResetIn = None

        def toJSON(self):
            """Return the rate-limit fields as a plain dict."""
            return {'EndpointLimit': self.EndpointLimit, 'Remaining': self.Remaining, 'ResetIn': self.ResetIn}

    def __init__(self, bearer=None, tweet_search_uri=None, search_query=None, search_parameters=None, since_tweet_id=None, until_tweet_id=None, next_token=None) -> None:
        self.bearer = bearer
        self.tweet_search_uri = tweet_search_uri
        self.search_query = search_query
        self.parameters = search_parameters
        self.since_tweet_id = since_tweet_id
        self.until_tweet_id = until_tweet_id
        self.next_token = next_token
        # Fix: initialize headers so connect_to_endpoint() raises its own
        # descriptive Exception instead of an AttributeError when
        # create_headers() was never called.
        self.headers = None
        self.url = None
        self.data = None
        self.is_json = False
        self.is_error = False
        self.is_success = False
        self.ratelimit = self.RateLimit()

    def create_headers(self):
        """Build the Authorization header (empty bearer when none given)."""
        if not self.bearer:
            self.bearer = ''
        self.headers = {"Authorization": "Bearer {}".format(self.bearer)}

    def create_url(self):
        """Assemble the request URL from the configured query parts."""
        if not self.tweet_search_uri:
            raise Exception('The Twitter Search URI is missing. Please set as "tweet_search_uri"-attribute.')
        url_parameter = None
        if self.search_query:
            url_parameter = "query=" + urllib.parse.quote(self.search_query)
        # NOTE(review): the extra parameters assume a query was set; setting
        # them without search_query fails (unchanged from the original).
        if self.parameters:
            url_parameter = url_parameter + "&" + self.parameters
        if self.since_tweet_id:
            url_parameter = url_parameter + "&since_id=" + self.since_tweet_id
        if self.until_tweet_id:
            url_parameter = url_parameter + "&until_id=" + self.until_tweet_id
        if self.next_token:
            url_parameter = url_parameter + "&next_token=" + self.next_token
        if url_parameter:
            self.url = self.tweet_search_uri + url_parameter
        else:
            self.url = self.tweet_search_uri

    def _decode_body(self, response):
        """Store the response body, preferring decoded JSON over raw text."""
        try:
            self.data = response.json()
            self.is_json = True
        except ValueError:  # fix: narrow the former bare except to JSON errors
            self.data = response.text

    def connect_to_endpoint(self):
        """Perform the GET request; record body, status and rate limits."""
        if not self.headers:
            raise Exception('Headers missing. Set the headers first.')
        if not self.url:
            raise Exception('The URL is missing. Please execute "create_url()" first.')
        response = requests.request("GET", self.url, headers=self.headers)
        if response.status_code != 200:
            self.is_error = True
        else:
            self.is_success = True
        # Fix: the success/error paths duplicated the decode logic verbatim.
        self._decode_body(response)
        # Fix: .get() avoids a KeyError when a rate-limit header is absent.
        limit = response.headers.get('x-rate-limit-limit')
        if limit:
            self.ratelimit.EndpointLimit = limit
        remaining = response.headers.get('x-rate-limit-remaining')
        if remaining:
            self.ratelimit.Remaining = remaining
        reset_in = response.headers.get('x-rate-limit-reset')
        if reset_in:
            self.ratelimit.ResetIn = reset_in

    def getdata(self):
        """Convenience wrapper: headers -> URL -> request."""
        self.create_headers()
        self.create_url()
        self.connect_to_endpoint()
def main():
    """Smoke-test the client: issue one GET and report whether it succeeded."""
    client = Tweets(tweet_search_uri='https://www.twitter.com/')
    client.getdata()
    print('Connection test: {}'.format(client.is_success))


if __name__ == "__main__":
    main()
|
[
"requests.request"
] |
[((2312, 2367), 'requests.request', 'requests.request', (['"""GET"""', 'self.url'], {'headers': 'self.headers'}), "('GET', self.url, headers=self.headers)\n", (2328, 2367), False, 'import requests\n')]
|
import torch
import numpy as np
# Parser states for the token-level syntax state machine.
STATE_MANDATORY_NEXT = 0       # exactly one specific token may follow
STATE_ACT_NEXT = 1             # an action, flow keyword or closing paren may follow
STATE_CSTE_NEXT = 2            # a repeat-count constant (R=...) may follow
STATE_BOOL_NEXT = 3            # a boolean check or "not" may follow
STATE_POSTCOND_OPEN_PAREN = 4  # the body-opening paren of IF/WHILE may follow
# Token tables for the Karel DSL (paired by index: open_paren_token[i]
# is closed by close_paren_token[i]).
open_paren_token = ["m(", "c(", "r(", "w(", "i(", "e("]
close_paren_token = ["m)", "c)", "r)", "w)", "i)", "e)"]
flow_leads = ["REPEAT", "WHILE", "IF", "IFELSE"]
flow_need_bool = ["WHILE", "IF", "IFELSE"]
acts = ["move", "turnLeft", "turnRight", "pickMarker", "putMarker"]
bool_check = ["markersPresent", "noMarkersPresent", "leftIsClear", "rightIsClear", "frontIsClear"]
next_is_act = ["i(", "e(", "r(", "m(", "w("]
postcond_open_paren = ["i(", "w("]
# Tokens that can appear in STATE_MANDATORY_NEXT.
possible_mandatories = ["DEF", "run", "c)", "ELSE", "<pad>"] + open_paren_token
def check_type(var, dtype):
    """Assert that *var* is exactly of type *dtype* (no subclass match)."""
    actual = type(var)
    assert actual == dtype, 'data type should be {} but found {}'.format(dtype, actual)
class CheckerState(object):
    """Mutable cursor of the Karel-DSL syntax state machine.

    Tracks the current parser state, the single token mandatory next
    (when in STATE_MANDATORY_NEXT), a stack of closing-paren tokens
    still owed, a stack of flags recording whether an open IF block
    still needs an ELSE, and the condition-paren nesting depth.
    """
    def __init__(self, state, next_mandatory,
                 i_need_else_stack_pos, to_close_stack_pos,
                 c_deep, next_actblock_open, stack_size=128):
        """All positional arguments are token/state indices (ints).

        ``stack_size`` generalizes the previously hard-coded depth of 128
        for both internal stacks; the default preserves old behavior.
        """
        check_type(state, int)
        check_type(next_mandatory, int)
        check_type(i_need_else_stack_pos, int)
        check_type(to_close_stack_pos, int)
        check_type(c_deep, int)
        check_type(next_actblock_open, int)
        self.state = state
        self.next_mandatory = next_mandatory
        self.i_need_else_stack_pos = i_need_else_stack_pos
        self.to_close_stack_pos = to_close_stack_pos
        self.c_deep = c_deep
        self.next_actblock_open = next_actblock_open
        self.stack_size = stack_size
        self.i_need_else_stack = torch.tensor(stack_size * [False], dtype=torch.bool)
        self.to_close_stack = stack_size * [None]
    def __copy__(self):
        """Deep-enough copy: scalars plus the live prefix of both stacks."""
        new_state = CheckerState(self.state, self.next_mandatory,
                                 self.i_need_else_stack_pos, self.to_close_stack_pos,
                                 self.c_deep, self.next_actblock_open,
                                 stack_size=self.stack_size)
        for i in range(0, self.i_need_else_stack_pos+1):
            new_state.i_need_else_stack[i] = self.i_need_else_stack[i]
        for i in range(0, self.to_close_stack_pos+1):
            new_state.to_close_stack[i] = self.to_close_stack[i]
        return new_state
    def push_closeparen_to_stack(self, close_paren):
        """Record that *close_paren* must eventually close an opened paren."""
        check_type(close_paren, int)
        self.to_close_stack_pos += 1
        self.to_close_stack[self.to_close_stack_pos] = close_paren
    def pop_close_paren(self):
        """Pop and return the most recently owed closing paren token."""
        to_ret = self.to_close_stack[self.to_close_stack_pos]
        self.to_close_stack_pos -= 1
        check_type(to_ret, int)
        return to_ret
    def paren_to_close(self):
        """Peek at the closing paren owed next (without popping)."""
        return self.to_close_stack[self.to_close_stack_pos]
    def make_next_mandatory(self, next_mandatory):
        """Force the next token to be exactly *next_mandatory*."""
        check_type(next_mandatory, int)
        self.state = STATE_MANDATORY_NEXT
        self.next_mandatory = next_mandatory
    def make_bool_next(self):
        """Enter a condition: a boolean check (or 'not') must follow."""
        self.state = STATE_BOOL_NEXT
        self.c_deep += 1
    def make_act_next(self):
        """An action, flow keyword or closing paren may follow."""
        self.state = STATE_ACT_NEXT
    def close_cond_paren(self):
        """Leave one level of condition nesting."""
        self.c_deep -= 1
        if self.c_deep == 0:
            self.state = STATE_POSTCOND_OPEN_PAREN
        else:
            self.state = STATE_MANDATORY_NEXT
            # The mandatory next should already be "c)"
    def push_needelse_stack(self, need_else):
        """Remember whether the IF block just opened requires an ELSE."""
        check_type(need_else, bool)
        assert need_else == 0 or need_else == 1
        self.i_need_else_stack_pos += 1
        self.i_need_else_stack[self.i_need_else_stack_pos] = need_else
    def pop_needelse_stack(self):
        """Pop the need-else flag of the innermost IF block."""
        to_ret = self.i_need_else_stack[self.i_need_else_stack_pos]
        self.i_need_else_stack_pos -= 1
        # check_type(to_ret, torch.bool)
        return to_ret
    def set_next_actblock(self, next_actblock):
        """Record which body-opening paren must follow the condition."""
        check_type(next_actblock, int)
        self.next_actblock_open = next_actblock
    def make_next_cste(self):
        """A repeat-count constant (R=...) must follow."""
        self.state = STATE_CSTE_NEXT
class SyntaxVocabulary(object):
    """Bundles the token indices the syntax checker refers to by name."""
    _FIELDS = ("def_tkn", "run_tkn",
               "m_open_tkn", "m_close_tkn",
               "else_tkn", "e_open_tkn",
               "c_open_tkn", "c_close_tkn",
               "i_open_tkn", "i_close_tkn",
               "while_tkn", "w_open_tkn",
               "repeat_tkn", "r_open_tkn",
               "not_tkn", "pad_tkn")

    def __init__(self, def_tkn, run_tkn,
                 m_open_tkn, m_close_tkn,
                 else_tkn, e_open_tkn,
                 c_open_tkn, c_close_tkn,
                 i_open_tkn, i_close_tkn,
                 while_tkn, w_open_tkn,
                 repeat_tkn, r_open_tkn,
                 not_tkn, pad_tkn):
        """Store each token index under its matching attribute name."""
        values = (def_tkn, run_tkn, m_open_tkn, m_close_tkn,
                  else_tkn, e_open_tkn, c_open_tkn, c_close_tkn,
                  i_open_tkn, i_close_tkn, while_tkn, w_open_tkn,
                  repeat_tkn, r_open_tkn, not_tkn, pad_tkn)
        for attr_name, token_idx in zip(self._FIELDS, values):
            setattr(self, attr_name, token_idx)
class PySyntaxChecker(object):
def __init__(self, T2I, use_cuda, use_simplified_dsl=False, new_tokens=None):
# check_type(args.no_cuda, bool)
if use_simplified_dsl:
global open_paren_token, close_paren_token, flow_leads, flow_need_bool, acts, bool_check
global postcond_open_paren, possible_mandatories
open_paren_token = [prl_tkn for prl_tkn in open_paren_token if prl_tkn in new_tokens]
close_paren_token = [prl_tkn for prl_tkn in close_paren_token if prl_tkn in new_tokens]
flow_leads = [prl_tkn for prl_tkn in flow_leads if prl_tkn in new_tokens]
flow_need_bool = [prl_tkn for prl_tkn in flow_need_bool if prl_tkn in new_tokens]
acts = [prl_tkn for prl_tkn in acts if prl_tkn in new_tokens]
bool_check = [prl_tkn for prl_tkn in bool_check if prl_tkn in new_tokens]
postcond_open_paren = [prl_tkn for prl_tkn in postcond_open_paren if prl_tkn in new_tokens]
possible_mandatories = ["DEF", "run", "c)", "ELSE", "<pad>"] + open_paren_token
possible_mandatories = [prl_tkn for prl_tkn in possible_mandatories if prl_tkn in new_tokens]
# since we don't have DEF and run in simplified DSL, assign them a value that you will never see
self.vocab = SyntaxVocabulary(len(T2I)+2, len(T2I)+2,
T2I["m("], T2I["m)"], T2I["ELSE"], T2I["e("],
T2I["c("], T2I["c)"], T2I["i("], T2I["i)"],
T2I["WHILE"], T2I["w("], T2I["REPEAT"], T2I["r("],
T2I["not"], T2I["<pad>"])
else:
self.vocab = SyntaxVocabulary(T2I["DEF"], T2I["run"],
T2I["m("], T2I["m)"], T2I["ELSE"], T2I["e("],
T2I["c("], T2I["c)"], T2I["i("], T2I["i)"],
T2I["WHILE"], T2I["w("], T2I["REPEAT"], T2I["r("],
T2I["not"], T2I["<pad>"])
self.use_cuda = use_cuda
self.open_parens = set([T2I[op] for op in open_paren_token])
self.close_parens = set([T2I[op] for op in close_paren_token])
self.if_statements = set([T2I[tkn] for tkn in ["IF", "IFELSE"]])
self.op2cl = {}
for op, cl in zip(open_paren_token, close_paren_token):
self.op2cl[T2I[op]] = T2I[cl]
self.need_else = {T2I["IF"]: False,
T2I["IFELSE"]: True}
self.flow_lead = set([T2I[flow_lead_tkn] for flow_lead_tkn in flow_leads])
self.effect_acts = set([T2I[act_tkn] for act_tkn in acts])
self.act_acceptable = self.effect_acts | self.flow_lead | self.close_parens
self.flow_needs_bool = set([T2I[flow_tkn] for flow_tkn in flow_need_bool])
self.postcond_open_paren = set([T2I[op] for op in postcond_open_paren])
self.range_cste = set([idx for tkn, idx in T2I.items() if tkn.startswith("R=")])
self.bool_checks = set([T2I[bcheck] for bcheck in bool_check])
tt = torch.cuda if use_cuda else torch
self.vocab_size = len(T2I)
self.mandatories_mask = {}
for mand_tkn in possible_mandatories:
mask = tt.BoolTensor(1,1,self.vocab_size).fill_(1)
mask[0,0,T2I[mand_tkn]] = 0
self.mandatories_mask[T2I[mand_tkn]] = mask
self.act_next_masks = {}
for close_tkn in self.close_parens:
mask = tt.BoolTensor(1,1,self.vocab_size).fill_(1)
mask[0,0,close_tkn] = 0
for effect_idx in self.effect_acts:
mask[0,0,effect_idx] = 0
for flowlead_idx in self.flow_lead:
mask[0,0,flowlead_idx] = 0
self.act_next_masks[close_tkn] = mask
self.range_mask = tt.BoolTensor(1,1,self.vocab_size).fill_(1)
for ridx in self.range_cste:
self.range_mask[0,0,ridx] = 0
self.boolnext_mask = tt.BoolTensor(1,1,self.vocab_size).fill_(1)
for bcheck_idx in self.bool_checks:
self.boolnext_mask[0,0,bcheck_idx] = 0
self.boolnext_mask[0,0,self.vocab.not_tkn] = 0
self.postcond_open_paren_masks = {}
for tkn in self.postcond_open_paren:
mask = tt.BoolTensor(1,1,self.vocab_size).fill_(1)
mask[0,0,tkn] = 0
self.postcond_open_paren_masks[tkn] = mask
def forward(self, state, new_idx):
check_type(state, CheckerState)
check_type(new_idx, int)
# Whatever happens, if we open a paren, it needs to be closed
if new_idx in self.open_parens:
state.push_closeparen_to_stack(self.op2cl[new_idx])
if new_idx in self.close_parens:
paren_to_end = state.pop_close_paren()
assert(new_idx == paren_to_end)
if state.state == STATE_MANDATORY_NEXT:
assert(new_idx == state.next_mandatory)
if new_idx == self.vocab.def_tkn:
state.make_next_mandatory(self.vocab.run_tkn)
elif new_idx == self.vocab.run_tkn:
state.make_next_mandatory(self.vocab.m_open_tkn)
elif new_idx == self.vocab.else_tkn:
state.make_next_mandatory(self.vocab.e_open_tkn)
elif new_idx in self.open_parens:
if new_idx == self.vocab.c_open_tkn:
state.make_bool_next()
else:
state.make_act_next()
elif new_idx == self.vocab.c_close_tkn:
state.close_cond_paren()
elif new_idx == self.vocab.pad_tkn:
# Should this be at the top?
# Keep the state in mandatory next, targetting <pad>
# Once you go <pad>, you never go back.
pass
else:
raise NotImplementedError
elif state.state == STATE_ACT_NEXT:
assert(new_idx in self.act_acceptable)
if new_idx in self.flow_needs_bool:
state.make_next_mandatory(self.vocab.c_open_tkn)
# If we open one of the IF statements, we need to keep track if
# it's one with a else statement or not
if new_idx in self.if_statements:
state.push_needelse_stack(self.need_else[new_idx])
state.set_next_actblock(self.vocab.i_open_tkn)
elif new_idx == self.vocab.while_tkn:
state.set_next_actblock(self.vocab.w_open_tkn)
else:
raise NotImplementedError
elif new_idx == self.vocab.repeat_tkn:
state.make_next_cste()
elif new_idx in self.effect_acts:
pass
elif new_idx in self.close_parens:
if new_idx == self.vocab.i_close_tkn:
need_else = state.pop_needelse_stack()
if need_else:
state.make_next_mandatory(self.vocab.else_tkn)
else:
state.make_act_next()
elif new_idx == self.vocab.m_close_tkn:
state.make_next_mandatory(self.vocab.pad_tkn)
else:
state.make_act_next()
else:
raise NotImplementedError
elif state.state == STATE_CSTE_NEXT:
assert(new_idx in self.range_cste)
state.make_next_mandatory(self.vocab.r_open_tkn)
elif state.state == STATE_BOOL_NEXT:
if new_idx in self.bool_checks:
state.make_next_mandatory(self.vocab.c_close_tkn)
elif new_idx == self.vocab.not_tkn:
state.make_next_mandatory(self.vocab.c_open_tkn)
else:
raise NotImplementedError
elif state.state == STATE_POSTCOND_OPEN_PAREN:
assert(new_idx in self.postcond_open_paren)
assert(new_idx == state.next_actblock_open)
state.make_act_next()
else:
raise NotImplementedError
def allowed_tokens(self, state):
check_type(state, CheckerState)
if state.state == STATE_MANDATORY_NEXT:
# Only one possible token follows
return self.mandatories_mask[state.next_mandatory]
elif state.state == STATE_ACT_NEXT:
# Either an action, a control flow statement or a closing of an open-paren
return self.act_next_masks[state.paren_to_close()]
elif state.state == STATE_CSTE_NEXT:
return self.range_mask
elif state.state == STATE_BOOL_NEXT:
return self.boolnext_mask
elif state.state == STATE_POSTCOND_OPEN_PAREN:
return self.postcond_open_paren_masks[state.next_actblock_open]
def get_sequence_mask(self, state, inp_sequence):
    """Advance `state` through every token of `inp_sequence` and return the
    infeasibility mask(s): a single mask for a one-token sequence, otherwise a
    (1, len(inp_sequence), vocab_size) bool tensor with one mask per step."""
    check_type(state, CheckerState)
    check_type(inp_sequence, list)
    if len(inp_sequence) == 1:
        # Single step: advance once and report the next-token mask directly.
        self.forward(state, inp_sequence[0])
        return self.allowed_tokens(state)
    tensor_lib = torch.cuda if self.use_cuda else torch
    out_mask = tensor_lib.BoolTensor(1, 1, self.vocab_size)
    step_masks = []
    for token in inp_sequence:
        self.forward(state, token)
        step_masks.append(self.allowed_tokens(state))
    # Concatenate the per-step masks along the sequence dimension.
    torch.cat(step_masks, 1, out=out_mask)
    return out_mask
def get_initial_checker_state(self):
    """Fresh checker state whose only legal next token is `def`."""
    first_mandatory = self.vocab.def_tkn
    return CheckerState(STATE_MANDATORY_NEXT, first_mandatory, -1, -1, 0, -1)
def get_initial_checker_state2(self):
    """Fresh checker state whose only legal next token is the `m(` opener."""
    first_mandatory = self.vocab.m_open_tkn
    return CheckerState(STATE_MANDATORY_NEXT, first_mandatory, -1, -1, 0, -1)
if __name__ == '__main__':
    # Smoke-test: run the syntax checker over a short sample program and print,
    # for each step, which tokens would have been legal next.
    import argparse
    import sys
    sys.path.insert(0, '.')

    from fetch_mapping import fetch_mapping

    parser = argparse.ArgumentParser(description='RL')
    # remap prl tokens to dsl tokens
    parser.add_argument('--mapping_file',
                        default='mapping_karel2prl.txt',
                        type=str)
    args = parser.parse_args()

    # fetch the mapping from prl tokens to dsl tokens
    if args.mapping_file is not None:
        args.dsl2prl_mapping, args.prl2dsl_mapping, args.dsl_tokens, args.prl_tokens = \
            fetch_mapping(args.mapping_file)
        args.use_simplified_dsl = True
        # Redundant `True if ... else False` replaced by the boolean expression itself.
        args.use_shorter_if = 'shorter_if' in args.mapping_file
    else:
        args.use_simplified_dsl = False

    # Token <-> index lookup tables; index len(dsl_tokens) is reserved for padding.
    T2I = {token: i for i, token in enumerate(args.dsl_tokens)}
    I2T = {i: token for i, token in enumerate(args.dsl_tokens)}
    T2I['<pad>'] = len(args.dsl_tokens)
    I2T[len(args.dsl_tokens)] = '<pad>'

    # NOTE(review): a longer sample program was assigned here and immediately
    # overwritten (dead code); the dead assignment has been removed.
    sample_program = [0, 1, 2, 38]
    use_cuda = False
    syntax_checker = PySyntaxChecker(T2I, use_cuda)
    initial_state = syntax_checker.get_initial_checker_state()
    sequence_mask = syntax_checker.get_sequence_mask(initial_state, sample_program).squeeze()
    for idx, token in enumerate(sample_program):
        # Mask value 0 marks a feasible next token.
        valid_tokens = torch.where(sequence_mask[idx] == 0)[0]
        valid_tokens = [I2T[tkn.detach().cpu().numpy().tolist()] for tkn in valid_tokens]
        valid_tokens = " ".join(valid_tokens)
        print("valid tokens for {}: {}".format(I2T[token], valid_tokens))
|
[
"fetch_mapping.fetch_mapping",
"argparse.ArgumentParser",
"torch.where",
"torch.cat",
"sys.path.insert",
"torch.tensor"
] |
[((15070, 15093), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""."""'], {}), "(0, '.')\n", (15085, 15093), False, 'import sys\n'), ((15155, 15196), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""RL"""'}), "(description='RL')\n", (15178, 15196), False, 'import argparse\n'), ((1567, 1612), 'torch.tensor', 'torch.tensor', (['(128 * [False])'], {'dtype': 'torch.bool'}), '(128 * [False], dtype=torch.bool)\n', (1579, 1612), False, 'import torch\n'), ((15602, 15634), 'fetch_mapping.fetch_mapping', 'fetch_mapping', (['args.mapping_file'], {}), '(args.mapping_file)\n', (15615, 15634), False, 'from fetch_mapping import fetch_mapping\n'), ((14582, 14637), 'torch.cat', 'torch.cat', (['mask_infeasible_list', '(1)'], {'out': 'mask_infeasible'}), '(mask_infeasible_list, 1, out=mask_infeasible)\n', (14591, 14637), False, 'import torch\n'), ((16478, 16514), 'torch.where', 'torch.where', (['(sequence_mask[idx] == 0)'], {}), '(sequence_mask[idx] == 0)\n', (16489, 16514), False, 'import torch\n')]
|
import os
import sys
import time
from glob import glob
import numpy as np
import pandas as pd
import pytest
from PartSegCore.algorithm_describe_base import SegmentationProfile
from PartSegCore.analysis.batch_processing import batch_backend
from PartSegCore.analysis.batch_processing.batch_backend import CalculationManager, CalculationProcess
from PartSegCore.analysis.calculation_plan import (
Calculation,
CalculationPlan,
CalculationTree,
FileCalculation,
MaskCreate,
MaskSuffix,
MeasurementCalculate,
RootType,
)
from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent
from PartSegCore.analysis.measurement_calculation import MeasurementProfile
from PartSegCore.image_operations import RadiusType
from PartSegCore.mask_create import MaskProperty
from PartSegCore.segmentation.noise_filtering import DimensionType
from PartSegCore.universal_const import UNIT_SCALE, Units
from PartSegImage import Image, ImageWriter, TiffImageReader
class MocksCalculation:
    """Minimal stand-in for a Calculation object: carries only a file path."""

    def __init__(self, file_path):
        # The real Calculation exposes many fields; tests here need only this one.
        self.file_path = file_path
@pytest.fixture
def create_test_data(tmpdir):
    """Write 8 synthetic 2-channel TIFF images (plus mask files) into `tmpdir`
    and return the list of image paths."""
    # for future use
    spacing = tuple(x / UNIT_SCALE[Units.nm.value] for x in (210, 70, 70))
    res = []
    for i in range(8):
        # Each image is one voxel wider than the previous along the last axis.
        mask_data = np.zeros((10, 20, 20 + i), dtype=np.uint8)
        mask_data[1:-1, 2:-2, 2:-2] = 1

        data = np.zeros(mask_data.shape + (2,), dtype=np.uint16)
        data[1:-1, 2:-2, 2:-2] = 15000
        data[2:-2, 3:-3, 3:7] = 33000
        data[2:-2, 3:-3, -7:-3] = 33000

        image = Image(data, spacing, "", mask=mask_data, axes_order="ZYXC")
        image_path = os.path.join(str(tmpdir), f"file_{i}.tif")
        ImageWriter.save(image, image_path)
        ImageWriter.save_mask(image, os.path.join(str(tmpdir), f"file_{i}_mask.tif"))
        res.append(image_path)
    return res
# TODO add check of per component measurements
# noinspection DuplicatedCode
# TODO add check of per component measurements
# noinspection DuplicatedCode
class TestCalculationProcess:
    """Integration tests for the batch-processing calculation pipeline.

    The three plan-builder methods previously duplicated the watershed
    parameter dict, the three base MeasurementEntry objects and the
    Calculation construction verbatim; that shared setup now lives in
    private static helpers. Public method names and signatures are unchanged.
    """

    @staticmethod
    def _watershed_parameters(channel):
        """Parameter set for the 'Lower threshold with watershed' algorithm."""
        return {
            "channel": channel,
            "minimum_size": 200,
            "threshold": {
                "name": "Base/Core",
                "values": {
                    "core_threshold": {"name": "Manual", "values": {"threshold": 30000}},
                    "base_threshold": {"name": "Manual", "values": {"threshold": 13000}},
                },
            },
            "noise_filtering": {"name": "Gauss", "values": {"dimension_type": DimensionType.Layer, "radius": 1.0}},
            "side_connection": False,
            "sprawl_type": {"name": "Euclidean", "values": {}},
        }

    @staticmethod
    def _base_measurement_entries():
        """The three measurement entries shared by every test plan (fresh objects)."""
        return [
            MeasurementEntry(
                name="Segmentation Volume",
                calculation_tree=Leaf(name="Volume", area=AreaType.ROI, per_component=PerComponent.No),
            ),
            MeasurementEntry(
                name="Segmentation Volume/Mask Volume",
                calculation_tree=Node(
                    left=Leaf(name="Volume", area=AreaType.ROI, per_component=PerComponent.No),
                    op="/",
                    right=Leaf(name="Volume", area=AreaType.Mask, per_component=PerComponent.No),
                ),
            ),
            MeasurementEntry(
                "Segmentation Components Number",
                calculation_tree=Leaf("Components number", area=AreaType.ROI, per_component=PerComponent.No),
            ),
        ]

    @staticmethod
    def _measurement_calculate(chosen_fields, profile_name, name_prefix=""):
        """Wrap `chosen_fields` into a MeasurementCalculate plan step."""
        statistic = MeasurementProfile(name=profile_name, chosen_fields=chosen_fields, name_prefix=name_prefix)
        return MeasurementCalculate(channel=0, units=Units.µm, statistic_profile=statistic, name_prefix="")

    @staticmethod
    def _make_calculation(file_paths, data_test_dir, measurement_path, plan):
        """Build a Calculation writing its xlsx results to `measurement_path`."""
        return Calculation(
            file_paths,
            base_prefix=data_test_dir,
            result_prefix=data_test_dir,
            measurement_file_path=measurement_path,
            sheet_name="Sheet1",
            calculation_plan=plan,
            voxel_size=(1, 1, 1),
        )

    @staticmethod
    def create_calculation_plan():
        """Plan: Image root -> mask suffix -> watershed segmentation -> 3 base measurements."""
        segmentation = SegmentationProfile(
            name="test",
            algorithm="Lower threshold with watershed",
            values=TestCalculationProcess._watershed_parameters(channel=1),
        )
        mask_suffix = MaskSuffix(name="", suffix="_mask")
        statistic_calculate = TestCalculationProcess._measurement_calculate(
            TestCalculationProcess._base_measurement_entries(), "base_measure"
        )
        tree = CalculationTree(
            RootType.Image,
            [CalculationTree(mask_suffix, [CalculationTree(segmentation, [CalculationTree(statistic_calculate, [])])])],
        )
        return CalculationPlan(tree=tree, name="test")

    @staticmethod
    def create_calculation_plan2():
        """Plan: Mask-project root -> watershed segmentation (channel 0) -> 3 base measurements."""
        segmentation = SegmentationProfile(
            name="test",
            algorithm="Lower threshold with watershed",
            values=TestCalculationProcess._watershed_parameters(channel=0),
        )
        statistic_calculate = TestCalculationProcess._measurement_calculate(
            TestCalculationProcess._base_measurement_entries(), "base_measure"
        )
        tree = CalculationTree(
            RootType.Mask_project, [CalculationTree(segmentation, [CalculationTree(statistic_calculate, [])])]
        )
        return CalculationPlan(tree=tree, name="test2")

    @staticmethod
    def create_calculation_plan3():
        """Plan with nested segmentation: watershed, then a second manual-threshold
        segmentation inside a created mask, with three measurement profiles."""

        def _seg_volume_per_component():
            # Built twice so the two profiles hold distinct entry objects.
            return MeasurementEntry(
                "Segmentation Volume per component",
                calculation_tree=Leaf("Volume", area=AreaType.ROI, per_component=PerComponent.Yes),
            )

        segmentation = SegmentationProfile(
            name="test",
            algorithm="Lower threshold with watershed",
            values=TestCalculationProcess._watershed_parameters(channel=1),
        )
        mask_suffix = MaskSuffix(name="", suffix="_mask")
        statistic_calculate = TestCalculationProcess._measurement_calculate(
            TestCalculationProcess._base_measurement_entries() + [_seg_volume_per_component()],
            "base_measure",
        )
        mask_create = MaskCreate("", MaskProperty(RadiusType.NO, 0, RadiusType.NO, 0, True, False, False))
        parameters2 = {
            "channel": 1,
            "minimum_size": 200,
            "threshold": {"name": "Manual", "values": {"threshold": 30000}},
            "noise_filtering": {"name": "Gauss", "values": {"dimension_type": DimensionType.Layer, "radius": 1.0}},
            "side_connection": False,
        }
        segmentation2 = SegmentationProfile(name="test", algorithm="Lower threshold", values=parameters2)
        chosen_fields = TestCalculationProcess._base_measurement_entries()
        chosen_fields.append(
            MeasurementEntry(
                "Mask Volume per component",
                calculation_tree=Leaf("Volume", area=AreaType.Mask, per_component=PerComponent.Yes),
            )
        )
        statistic_calculate2 = TestCalculationProcess._measurement_calculate(
            chosen_fields[:], "base_measure2", name_prefix="aa_"
        )
        chosen_fields.append(_seg_volume_per_component())
        statistic_calculate3 = TestCalculationProcess._measurement_calculate(
            chosen_fields[:], "base_measure3", name_prefix="bb_"
        )
        tree = CalculationTree(
            RootType.Image,
            [
                CalculationTree(
                    mask_suffix,
                    [
                        CalculationTree(
                            segmentation,
                            [
                                CalculationTree(statistic_calculate, []),
                                CalculationTree(
                                    mask_create,
                                    [
                                        CalculationTree(
                                            segmentation2,
                                            [
                                                CalculationTree(statistic_calculate2, []),
                                                CalculationTree(statistic_calculate3, []),
                                            ],
                                        ),
                                    ],
                                ),
                            ],
                        )
                    ],
                )
            ],
        )
        return CalculationPlan(tree=tree, name="test")

    def test_one_file(self, data_test_dir):
        """Run the plan directly through a CalculationProcess on one image."""
        plan = self.create_calculation_plan()
        process = CalculationProcess()
        file_path = os.path.join(data_test_dir, "stack1_components", "stack1_component5.tif")
        calc = MocksCalculation(file_path)
        process.calculation = calc
        process.image = TiffImageReader.read_image(file_path)
        process.iterate_over(plan.execution_tree)
        assert len(process.measurement[0]) == 3

    @pytest.mark.filterwarnings("ignore:This method will be removed")
    def test_full_pipeline(self, tmpdir, data_test_dir, monkeypatch):
        # Substitute the slowed-down process so out-of-order completion is exercised.
        monkeypatch.setattr(batch_backend, "CalculationProcess", MockCalculationProcess)
        plan = self.create_calculation_plan()
        file_pattern = os.path.join(data_test_dir, "stack1_components", "stack1_component*[0-9].tif")
        file_paths = sorted(glob(file_pattern))
        assert os.path.basename(file_paths[0]) == "stack1_component1.tif"
        calc = self._make_calculation(file_paths, data_test_dir, os.path.join(tmpdir, "test.xlsx"), plan)
        manager = CalculationManager()
        manager.set_number_of_workers(3)
        manager.add_calculation(calc)
        while manager.has_work:
            time.sleep(0.1)
            manager.get_results()
        manager.writer.finish()
        # Give the background writer a moment to flush the xlsx file.
        if sys.platform == "darwin":
            time.sleep(2)
        else:
            time.sleep(0.4)
        assert os.path.exists(os.path.join(tmpdir, "test.xlsx"))
        df = pd.read_excel(os.path.join(tmpdir, "test.xlsx"), index_col=0, header=[0, 1])
        assert df.shape == (8, 4)
        for i in range(8):
            assert os.path.basename(df.name.units[i]) == f"stack1_component{i+1}.tif"

    @pytest.mark.filterwarnings("ignore:This method will be removed")
    def test_full_pipeline_mask_project(self, tmpdir, data_test_dir):
        plan = self.create_calculation_plan2()
        file_pattern = os.path.join(data_test_dir, "*nucleus.seg")
        file_paths = glob(file_pattern)
        calc = self._make_calculation(file_paths, data_test_dir, os.path.join(tmpdir, "test2.xlsx"), plan)
        manager = CalculationManager()
        manager.set_number_of_workers(2)
        manager.add_calculation(calc)
        while manager.has_work:
            time.sleep(0.1)
            manager.get_results()
        # Give the background writer a moment to flush the xlsx file.
        if sys.platform == "darwin":
            time.sleep(2)
        else:
            time.sleep(0.4)
        manager.writer.finish()
        assert os.path.exists(os.path.join(tmpdir, "test2.xlsx"))
        df = pd.read_excel(os.path.join(tmpdir, "test2.xlsx"), index_col=0, header=[0, 1])
        assert df.shape == (2, 4)

    @pytest.mark.filterwarnings("ignore:This method will be removed")
    def test_full_pipeline_component_split(self, tmpdir, data_test_dir):
        plan = self.create_calculation_plan3()
        file_pattern = os.path.join(data_test_dir, "stack1_components", "stack1_component*[0-9].tif")
        file_paths = glob(file_pattern)
        calc = self._make_calculation(file_paths, data_test_dir, os.path.join(tmpdir, "test3.xlsx"), plan)
        manager = CalculationManager()
        manager.set_number_of_workers(2)
        manager.add_calculation(calc)
        while manager.has_work:
            time.sleep(0.1)
            res = manager.get_results()
            if res.errors:
                print(res.errors, file=sys.stderr)
        # Give the background writer a moment to flush the xlsx file.
        if sys.platform == "darwin":
            time.sleep(2)
        else:
            time.sleep(0.4)
        manager.writer.finish()
        assert os.path.exists(os.path.join(tmpdir, "test3.xlsx"))
        df = pd.read_excel(os.path.join(tmpdir, "test3.xlsx"), index_col=0, header=[0, 1])
        assert df.shape == (8, 10)
        # Per-component sheets: one row per segmentation component.
        df2 = pd.read_excel(os.path.join(tmpdir, "test3.xlsx"), sheet_name=1, index_col=0, header=[0, 1])
        assert df2.shape[0] > 8
        assert df2.shape == (df["Segmentation Components Number"]["count"].sum(), 6)
        df3 = pd.read_excel(os.path.join(tmpdir, "test3.xlsx"), sheet_name=2, index_col=0, header=[0, 1])
        assert df3.shape == (df["Segmentation Components Number"]["count"].sum(), 6)
        df4 = pd.read_excel(os.path.join(tmpdir, "test3.xlsx"), sheet_name=3, index_col=0, header=[0, 1])
        assert df4.shape == (df["Segmentation Components Number"]["count"].sum(), 8)
class MockCalculationProcess(CalculationProcess):
    """CalculationProcess variant that artificially delays one input file."""

    def do_calculation(self, calculation: FileCalculation):
        # Slow down the first component file so results can complete out of order.
        delayed_file = "stack1_component1.tif"
        if os.path.basename(calculation.file_path) == delayed_file:
            time.sleep(0.5)
        return super().do_calculation(calculation)
|
[
"PartSegCore.analysis.calculation_plan.MaskSuffix",
"PartSegCore.analysis.calculation_plan.CalculationPlan",
"os.path.basename",
"PartSegCore.analysis.calculation_plan.CalculationTree",
"PartSegImage.TiffImageReader.read_image",
"PartSegImage.Image",
"numpy.zeros",
"PartSegCore.analysis.measurement_calculation.MeasurementProfile",
"PartSegCore.analysis.batch_processing.batch_backend.CalculationManager",
"PartSegCore.mask_create.MaskProperty",
"time.sleep",
"PartSegCore.analysis.measurement_base.Leaf",
"PartSegCore.analysis.calculation_plan.MeasurementCalculate",
"glob.glob",
"pytest.mark.filterwarnings",
"PartSegCore.analysis.batch_processing.batch_backend.CalculationProcess",
"os.path.join",
"PartSegCore.algorithm_describe_base.SegmentationProfile"
] |
[((12277, 12341), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:This method will be removed"""'], {}), "('ignore:This method will be removed')\n", (12303, 12341), False, 'import pytest\n'), ((13743, 13807), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:This method will be removed"""'], {}), "('ignore:This method will be removed')\n", (13769, 13807), False, 'import pytest\n'), ((14894, 14958), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:This method will be removed"""'], {}), "('ignore:This method will be removed')\n", (14920, 14958), False, 'import pytest\n'), ((1315, 1357), 'numpy.zeros', 'np.zeros', (['(10, 20, 20 + i)'], {'dtype': 'np.uint8'}), '((10, 20, 20 + i), dtype=np.uint8)\n', (1323, 1357), True, 'import numpy as np\n'), ((1413, 1462), 'numpy.zeros', 'np.zeros', (['(mask_data.shape + (2,))'], {'dtype': 'np.uint16'}), '(mask_data.shape + (2,), dtype=np.uint16)\n', (1421, 1462), True, 'import numpy as np\n'), ((1596, 1655), 'PartSegImage.Image', 'Image', (['data', 'spacing', '""""""'], {'mask': 'mask_data', 'axes_order': '"""ZYXC"""'}), "(data, spacing, '', mask=mask_data, axes_order='ZYXC')\n", (1601, 1655), False, 'from PartSegImage import Image, ImageWriter, TiffImageReader\n'), ((2699, 2798), 'PartSegCore.algorithm_describe_base.SegmentationProfile', 'SegmentationProfile', ([], {'name': '"""test"""', 'algorithm': '"""Lower threshold with watershed"""', 'values': 'parameters'}), "(name='test', algorithm='Lower threshold with watershed',\n values=parameters)\n", (2718, 2798), False, 'from PartSegCore.algorithm_describe_base import SegmentationProfile\n'), ((2817, 2852), 'PartSegCore.analysis.calculation_plan.MaskSuffix', 'MaskSuffix', ([], {'name': '""""""', 'suffix': '"""_mask"""'}), "(name='', suffix='_mask')\n", (2827, 2852), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, 
MeasurementCalculate, RootType\n'), ((3688, 3776), 'PartSegCore.analysis.measurement_calculation.MeasurementProfile', 'MeasurementProfile', ([], {'name': '"""base_measure"""', 'chosen_fields': 'chosen_fields', 'name_prefix': '""""""'}), "(name='base_measure', chosen_fields=chosen_fields,\n name_prefix='')\n", (3706, 3776), False, 'from PartSegCore.analysis.measurement_calculation import MeasurementProfile\n'), ((3803, 3899), 'PartSegCore.analysis.calculation_plan.MeasurementCalculate', 'MeasurementCalculate', ([], {'channel': '(0)', 'units': 'Units.μm', 'statistic_profile': 'statistic', 'name_prefix': '""""""'}), "(channel=0, units=Units.μm, statistic_profile=statistic,\n name_prefix='')\n", (3823, 3899), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((4124, 4163), 'PartSegCore.analysis.calculation_plan.CalculationPlan', 'CalculationPlan', ([], {'tree': 'tree', 'name': '"""test"""'}), "(tree=tree, name='test')\n", (4139, 4163), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((4859, 4958), 'PartSegCore.algorithm_describe_base.SegmentationProfile', 'SegmentationProfile', ([], {'name': '"""test"""', 'algorithm': '"""Lower threshold with watershed"""', 'values': 'parameters'}), "(name='test', algorithm='Lower threshold with watershed',\n values=parameters)\n", (4878, 4958), False, 'from PartSegCore.algorithm_describe_base import SegmentationProfile\n'), ((5790, 5878), 'PartSegCore.analysis.measurement_calculation.MeasurementProfile', 'MeasurementProfile', ([], {'name': '"""base_measure"""', 'chosen_fields': 'chosen_fields', 'name_prefix': '""""""'}), "(name='base_measure', chosen_fields=chosen_fields,\n name_prefix='')\n", (5808, 5878), False, 'from PartSegCore.analysis.measurement_calculation import 
MeasurementProfile\n'), ((5905, 6001), 'PartSegCore.analysis.calculation_plan.MeasurementCalculate', 'MeasurementCalculate', ([], {'channel': '(0)', 'units': 'Units.μm', 'statistic_profile': 'statistic', 'name_prefix': '""""""'}), "(channel=0, units=Units.μm, statistic_profile=statistic,\n name_prefix='')\n", (5925, 6001), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((6188, 6228), 'PartSegCore.analysis.calculation_plan.CalculationPlan', 'CalculationPlan', ([], {'tree': 'tree', 'name': '"""test2"""'}), "(tree=tree, name='test2')\n", (6203, 6228), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((6924, 7023), 'PartSegCore.algorithm_describe_base.SegmentationProfile', 'SegmentationProfile', ([], {'name': '"""test"""', 'algorithm': '"""Lower threshold with watershed"""', 'values': 'parameters'}), "(name='test', algorithm='Lower threshold with watershed',\n values=parameters)\n", (6943, 7023), False, 'from PartSegCore.algorithm_describe_base import SegmentationProfile\n'), ((7042, 7077), 'PartSegCore.analysis.calculation_plan.MaskSuffix', 'MaskSuffix', ([], {'name': '""""""', 'suffix': '"""_mask"""'}), "(name='', suffix='_mask')\n", (7052, 7077), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((8111, 8199), 'PartSegCore.analysis.measurement_calculation.MeasurementProfile', 'MeasurementProfile', ([], {'name': '"""base_measure"""', 'chosen_fields': 'chosen_fields', 'name_prefix': '""""""'}), "(name='base_measure', chosen_fields=chosen_fields,\n name_prefix='')\n", (8129, 8199), False, 'from PartSegCore.analysis.measurement_calculation import MeasurementProfile\n'), ((8226, 
8322), 'PartSegCore.analysis.calculation_plan.MeasurementCalculate', 'MeasurementCalculate', ([], {'channel': '(0)', 'units': 'Units.μm', 'statistic_profile': 'statistic', 'name_prefix': '""""""'}), "(channel=0, units=Units.μm, statistic_profile=statistic,\n name_prefix='')\n", (8246, 8322), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((8797, 8883), 'PartSegCore.algorithm_describe_base.SegmentationProfile', 'SegmentationProfile', ([], {'name': '"""test"""', 'algorithm': '"""Lower threshold"""', 'values': 'parameters2'}), "(name='test', algorithm='Lower threshold', values=\n parameters2)\n", (8816, 8883), False, 'from PartSegCore.algorithm_describe_base import SegmentationProfile\n'), ((9905, 10000), 'PartSegCore.analysis.measurement_calculation.MeasurementProfile', 'MeasurementProfile', ([], {'name': '"""base_measure2"""', 'chosen_fields': 'chosen_fields[:]', 'name_prefix': '"""aa_"""'}), "(name='base_measure2', chosen_fields=chosen_fields[:],\n name_prefix='aa_')\n", (9923, 10000), False, 'from PartSegCore.analysis.measurement_calculation import MeasurementProfile\n'), ((10028, 10124), 'PartSegCore.analysis.calculation_plan.MeasurementCalculate', 'MeasurementCalculate', ([], {'channel': '(0)', 'units': 'Units.μm', 'statistic_profile': 'statistic', 'name_prefix': '""""""'}), "(channel=0, units=Units.μm, statistic_profile=statistic,\n name_prefix='')\n", (10048, 10124), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((10400, 10495), 'PartSegCore.analysis.measurement_calculation.MeasurementProfile', 'MeasurementProfile', ([], {'name': '"""base_measure3"""', 'chosen_fields': 'chosen_fields[:]', 'name_prefix': '"""bb_"""'}), "(name='base_measure3', chosen_fields=chosen_fields[:],\n 
name_prefix='bb_')\n", (10418, 10495), False, 'from PartSegCore.analysis.measurement_calculation import MeasurementProfile\n'), ((10523, 10619), 'PartSegCore.analysis.calculation_plan.MeasurementCalculate', 'MeasurementCalculate', ([], {'channel': '(0)', 'units': 'Units.μm', 'statistic_profile': 'statistic', 'name_prefix': '""""""'}), "(channel=0, units=Units.μm, statistic_profile=statistic,\n name_prefix='')\n", (10543, 10619), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((11769, 11808), 'PartSegCore.analysis.calculation_plan.CalculationPlan', 'CalculationPlan', ([], {'tree': 'tree', 'name': '"""test"""'}), "(tree=tree, name='test')\n", (11784, 11808), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((11918, 11938), 'PartSegCore.analysis.batch_processing.batch_backend.CalculationProcess', 'CalculationProcess', ([], {}), '()\n', (11936, 11938), False, 'from PartSegCore.analysis.batch_processing.batch_backend import CalculationManager, CalculationProcess\n'), ((11959, 12032), 'os.path.join', 'os.path.join', (['data_test_dir', '"""stack1_components"""', '"""stack1_component5.tif"""'], {}), "(data_test_dir, 'stack1_components', 'stack1_component5.tif')\n", (11971, 12032), False, 'import os\n'), ((12135, 12172), 'PartSegImage.TiffImageReader.read_image', 'TiffImageReader.read_image', (['file_path'], {}), '(file_path)\n', (12161, 12172), False, 'from PartSegImage import Image, ImageWriter, TiffImageReader\n'), ((12570, 12648), 'os.path.join', 'os.path.join', (['data_test_dir', '"""stack1_components"""', '"""stack1_component*[0-9].tif"""'], {}), "(data_test_dir, 'stack1_components', 'stack1_component*[0-9].tif')\n", (12582, 12648), False, 'import os\n'), ((13103, 13123), 
'PartSegCore.analysis.batch_processing.batch_backend.CalculationManager', 'CalculationManager', ([], {}), '()\n', (13121, 13123), False, 'from PartSegCore.analysis.batch_processing.batch_backend import CalculationManager, CalculationProcess\n'), ((13948, 13991), 'os.path.join', 'os.path.join', (['data_test_dir', '"""*nucleus.seg"""'], {}), "(data_test_dir, '*nucleus.seg')\n", (13960, 13991), False, 'import os\n'), ((14013, 14031), 'glob.glob', 'glob', (['file_pattern'], {}), '(file_pattern)\n', (14017, 14031), False, 'from glob import glob\n'), ((14365, 14385), 'PartSegCore.analysis.batch_processing.batch_backend.CalculationManager', 'CalculationManager', ([], {}), '()\n', (14383, 14385), False, 'from PartSegCore.analysis.batch_processing.batch_backend import CalculationManager, CalculationProcess\n'), ((15102, 15180), 'os.path.join', 'os.path.join', (['data_test_dir', '"""stack1_components"""', '"""stack1_component*[0-9].tif"""'], {}), "(data_test_dir, 'stack1_components', 'stack1_component*[0-9].tif')\n", (15114, 15180), False, 'import os\n'), ((15202, 15220), 'glob.glob', 'glob', (['file_pattern'], {}), '(file_pattern)\n', (15206, 15220), False, 'from glob import glob\n'), ((15554, 15574), 'PartSegCore.analysis.batch_processing.batch_backend.CalculationManager', 'CalculationManager', ([], {}), '()\n', (15572, 15574), False, 'from PartSegCore.analysis.batch_processing.batch_backend import CalculationManager, CalculationProcess\n'), ((8378, 8446), 'PartSegCore.mask_create.MaskProperty', 'MaskProperty', (['RadiusType.NO', '(0)', 'RadiusType.NO', '(0)', '(True)', '(False)', '(False)'], {}), '(RadiusType.NO, 0, RadiusType.NO, 0, True, False, False)\n', (8390, 8446), False, 'from PartSegCore.mask_create import MaskProperty\n'), ((12677, 12695), 'glob.glob', 'glob', (['file_pattern'], {}), '(file_pattern)\n', (12681, 12695), False, 'from glob import glob\n'), ((12712, 12743), 'os.path.basename', 'os.path.basename', (['file_paths[0]'], {}), '(file_paths[0])\n', (12728, 
12743), False, 'import os\n'), ((13248, 13263), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (13258, 13263), False, 'import time\n'), ((13379, 13392), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (13389, 13392), False, 'import time\n'), ((13419, 13434), 'time.sleep', 'time.sleep', (['(0.4)'], {}), '(0.4)\n', (13429, 13434), False, 'import time\n'), ((13465, 13498), 'os.path.join', 'os.path.join', (['tmpdir', '"""test.xlsx"""'], {}), "(tmpdir, 'test.xlsx')\n", (13477, 13498), False, 'import os\n'), ((13527, 13560), 'os.path.join', 'os.path.join', (['tmpdir', '"""test.xlsx"""'], {}), "(tmpdir, 'test.xlsx')\n", (13539, 13560), False, 'import os\n'), ((14510, 14525), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (14520, 14525), False, 'import time\n'), ((14609, 14622), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (14619, 14622), False, 'import time\n'), ((14649, 14664), 'time.sleep', 'time.sleep', (['(0.4)'], {}), '(0.4)\n', (14659, 14664), False, 'import time\n'), ((14727, 14761), 'os.path.join', 'os.path.join', (['tmpdir', '"""test2.xlsx"""'], {}), "(tmpdir, 'test2.xlsx')\n", (14739, 14761), False, 'import os\n'), ((14790, 14824), 'os.path.join', 'os.path.join', (['tmpdir', '"""test2.xlsx"""'], {}), "(tmpdir, 'test2.xlsx')\n", (14802, 14824), False, 'import os\n'), ((15699, 15714), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (15709, 15714), False, 'import time\n'), ((15882, 15895), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (15892, 15895), False, 'import time\n'), ((15922, 15937), 'time.sleep', 'time.sleep', (['(0.4)'], {}), '(0.4)\n', (15932, 15937), False, 'import time\n'), ((16000, 16034), 'os.path.join', 'os.path.join', (['tmpdir', '"""test3.xlsx"""'], {}), "(tmpdir, 'test3.xlsx')\n", (16012, 16034), False, 'import os\n'), ((16063, 16097), 'os.path.join', 'os.path.join', (['tmpdir', '"""test3.xlsx"""'], {}), "(tmpdir, 'test3.xlsx')\n", (16075, 16097), False, 'import os\n'), ((16190, 16224), 'os.path.join', 
'os.path.join', (['tmpdir', '"""test3.xlsx"""'], {}), "(tmpdir, 'test3.xlsx')\n", (16202, 16224), False, 'import os\n'), ((16413, 16447), 'os.path.join', 'os.path.join', (['tmpdir', '"""test3.xlsx"""'], {}), "(tmpdir, 'test3.xlsx')\n", (16425, 16447), False, 'import os\n'), ((16604, 16638), 'os.path.join', 'os.path.join', (['tmpdir', '"""test3.xlsx"""'], {}), "(tmpdir, 'test3.xlsx')\n", (16616, 16638), False, 'import os\n'), ((16890, 16929), 'os.path.basename', 'os.path.basename', (['calculation.file_path'], {}), '(calculation.file_path)\n', (16906, 16929), False, 'import os\n'), ((16970, 16985), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (16980, 16985), False, 'import time\n'), ((12937, 12970), 'os.path.join', 'os.path.join', (['tmpdir', '"""test.xlsx"""'], {}), "(tmpdir, 'test.xlsx')\n", (12949, 12970), False, 'import os\n'), ((13670, 13704), 'os.path.basename', 'os.path.basename', (['df.name.units[i]'], {}), '(df.name.units[i])\n', (13686, 13704), False, 'import os\n'), ((14198, 14232), 'os.path.join', 'os.path.join', (['tmpdir', '"""test2.xlsx"""'], {}), "(tmpdir, 'test2.xlsx')\n", (14210, 14232), False, 'import os\n'), ((15387, 15421), 'os.path.join', 'os.path.join', (['tmpdir', '"""test3.xlsx"""'], {}), "(tmpdir, 'test3.xlsx')\n", (15399, 15421), False, 'import os\n'), ((2986, 3055), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (2990, 3055), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((3566, 3641), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', (['"""Components number"""'], {'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "('Components number', area=AreaType.ROI, per_component=PerComponent.No)\n", (3570, 3641), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, 
MeasurementEntry, Node, PerComponent\n'), ((5088, 5157), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (5092, 5157), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((5668, 5743), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', (['"""Components number"""'], {'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "('Components number', area=AreaType.ROI, per_component=PerComponent.No)\n", (5672, 5743), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((7211, 7280), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (7215, 7280), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((7791, 7866), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', (['"""Components number"""'], {'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "('Components number', area=AreaType.ROI, per_component=PerComponent.No)\n", (7795, 7866), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((7999, 8064), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', (['"""Volume"""'], {'area': 'AreaType.ROI', 'per_component': 'PerComponent.Yes'}), "('Volume', area=AreaType.ROI, per_component=PerComponent.Yes)\n", (8003, 8064), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((9012, 9081), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 
'PerComponent.No'}), "(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (9016, 9081), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((9592, 9667), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', (['"""Components number"""'], {'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "('Components number', area=AreaType.ROI, per_component=PerComponent.No)\n", (9596, 9667), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((9792, 9858), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', (['"""Volume"""'], {'area': 'AreaType.Mask', 'per_component': 'PerComponent.Yes'}), "('Volume', area=AreaType.Mask, per_component=PerComponent.Yes)\n", (9796, 9858), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((10289, 10354), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', (['"""Volume"""'], {'area': 'AreaType.ROI', 'per_component': 'PerComponent.Yes'}), "('Volume', area=AreaType.ROI, per_component=PerComponent.Yes)\n", (10293, 10354), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((6119, 6159), 'PartSegCore.analysis.calculation_plan.CalculationTree', 'CalculationTree', (['statistic_calculate', '[]'], {}), '(statistic_calculate, [])\n', (6134, 6159), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((3222, 3291), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (3226, 3291), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, 
PerComponent\n'), ((3347, 3417), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.Mask', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.Mask, per_component=PerComponent.No)\n", (3351, 3417), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((5324, 5393), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (5328, 5393), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((5449, 5519), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.Mask', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.Mask, per_component=PerComponent.No)\n", (5453, 5519), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((7447, 7516), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (7451, 7516), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((7572, 7642), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.Mask', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.Mask, per_component=PerComponent.No)\n", (7576, 7642), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((9248, 9317), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.ROI', 'per_component': 'PerComponent.No'}), 
"(name='Volume', area=AreaType.ROI, per_component=PerComponent.No)\n", (9252, 9317), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((9373, 9443), 'PartSegCore.analysis.measurement_base.Leaf', 'Leaf', ([], {'name': '"""Volume"""', 'area': 'AreaType.Mask', 'per_component': 'PerComponent.No'}), "(name='Volume', area=AreaType.Mask, per_component=PerComponent.No)\n", (9377, 9443), False, 'from PartSegCore.analysis.measurement_base import AreaType, Leaf, MeasurementEntry, Node, PerComponent\n'), ((4052, 4092), 'PartSegCore.analysis.calculation_plan.CalculationTree', 'CalculationTree', (['statistic_calculate', '[]'], {}), '(statistic_calculate, [])\n', (4067, 4092), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((10945, 10985), 'PartSegCore.analysis.calculation_plan.CalculationTree', 'CalculationTree', (['statistic_calculate', '[]'], {}), '(statistic_calculate, [])\n', (10960, 10985), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((11333, 11374), 'PartSegCore.analysis.calculation_plan.CalculationTree', 'CalculationTree', (['statistic_calculate2', '[]'], {}), '(statistic_calculate2, [])\n', (11348, 11374), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n'), ((11424, 11465), 'PartSegCore.analysis.calculation_plan.CalculationTree', 'CalculationTree', (['statistic_calculate3', '[]'], {}), '(statistic_calculate3, [])\n', (11439, 11465), False, 'from PartSegCore.analysis.calculation_plan import Calculation, CalculationPlan, CalculationTree, FileCalculation, MaskCreate, MaskSuffix, MeasurementCalculate, RootType\n')]
|
import tempfile
import os
import shutil
from regulations.generator.api_client import ApiClient
from unittest import TestCase
class ClientTest(TestCase):
    def test_local_filesystem(self):
        """ Verify that it's possible to host the files locally, where
        index.html is used in place of just the directory name. """
        tmp_root = tempfile.mkdtemp() + os.sep
        # Fix: register cleanup up front so the temp tree is removed even if
        # client.get() raises; the original only cleaned up on success.
        self.addCleanup(shutil.rmtree, tmp_root)
        notice_path = tmp_root + os.sep + "notice" + os.sep
        os.mkdir(notice_path)
        with open(notice_path + "index.html", 'w') as f:
            f.write('{"results": ["example"]}')
        client = ApiClient()
        client.base_url = tmp_root
        results = client.get('notice')
        self.assertEqual(["example"], results['results'])
|
[
"regulations.generator.api_client.ApiClient",
"os.mkdir",
"tempfile.mkdtemp",
"shutil.rmtree"
] |
[((447, 468), 'os.mkdir', 'os.mkdir', (['notice_path'], {}), '(notice_path)\n', (455, 468), False, 'import os\n'), ((591, 602), 'regulations.generator.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (600, 602), False, 'from regulations.generator.api_client import ApiClient\n'), ((685, 708), 'shutil.rmtree', 'shutil.rmtree', (['tmp_root'], {}), '(tmp_root)\n', (698, 708), False, 'import shutil\n'), ((351, 369), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (367, 369), False, 'import tempfile\n')]
|
import py._path.local
import pytest
from energuide import element
from energuide.exceptions import ElementGetValueError
@pytest.fixture
def fragment() -> str:
    """Shared XML fragment: a Foo root holding two Bar children."""
    xml_text = "<Foo><Bar id='1'>baz</Bar><Bar id='2'>qux</Bar></Foo>"
    return xml_text
@pytest.fixture
def fragment_file_path(fragment: str, tmpdir: py._path.local.LocalPath) -> str:
    """Write the XML fragment to a temp file and return the file's path.

    NOTE(review): the second write_text replaces the XML declaration written
    by the first — presumably appending was intended; confirm.
    """
    xml_file = tmpdir.join('data.xml')
    xml_file.write_text('<?xml version="1.0" encoding="UTF-8" ?>', encoding='utf-8')
    xml_file.write_text(fragment, encoding='utf-8')
    return str(xml_file)
@pytest.fixture
def fragment_node(fragment: str) -> element.Element:
    """Parsed Element for the shared XML fragment."""
    node = element.Element.from_string(fragment)
    return node
def test_from_string(fragment: str) -> None:
    """from_string yields an Element rooted at Foo."""
    parsed = element.Element.from_string(fragment)
    assert isinstance(parsed, element.Element)
    assert parsed.tag == 'Foo'
def test_findtext(fragment_node: element.Element) -> None:
    """findtext returns the first match's text, or None when nothing matches."""
    found = fragment_node.findtext('Bar')
    assert found == 'baz'
    missing = fragment_node.findtext('Baz')
    assert missing is None
def test_get_text(fragment_node: element.Element) -> None:
    """get_text returns the text of the first matching child."""
    text = fragment_node.get_text('Bar')
    assert text == 'baz'
def test_get_text_raises_when_not_found(fragment_node: element.Element) -> None:
    """get_text raises ElementGetValueError for an absent tag."""
    with pytest.raises(ElementGetValueError):
        fragment_node.get_text('Baz')
def test_attrib(fragment_node: element.Element) -> None:
    """attrib exposes XML attributes but not element text."""
    first_bar = fragment_node.find('Bar')
    assert first_bar
    assert first_bar.attrib['id'] == '1'
    assert 'baz' not in first_bar.attrib
def test_xpath_returns_elements(fragment_node: element.Element) -> None:
    """xpath results are wrapped Element instances, in document order."""
    matches = fragment_node.xpath('Bar')
    assert len(matches) == 2
    assert all(isinstance(match, element.Element) for match in matches)
    assert matches[0].attrib['id'] == '1'
def test_parse(fragment_file_path: str) -> None:
    """parse reads an Element from an open file object."""
    with open(fragment_file_path) as handle:
        parsed = element.Element.parse(handle)
    assert parsed.tag == 'Foo'
def test_iter(fragment_node: element.Element) -> None:
    """Iterating an Element yields its children as Bar Elements."""
    children = list(fragment_node)
    assert len(children) == 2
    assert all(isinstance(child, element.Element) for child in children)
    assert all(child.tag == 'Bar' for child in children)
def test_find(fragment_node: element.Element) -> None:
    """find returns the first matching child element."""
    first_bar = fragment_node.find('Bar')
    assert first_bar
    assert first_bar.tag == 'Bar'
    assert first_bar.attrib['id'] == '1'
def test_find_returns_none(fragment_node: element.Element) -> None:
    """find yields None when no child matches."""
    result = fragment_node.find('Baz')
    assert result is None
def test_to_string(fragment_node: element.Element) -> None:
    """to_string serializes an element back to XML text."""
    first_bar = fragment_node.find('Bar')
    assert first_bar
    serialized = first_bar.to_string()
    assert serialized == '<Bar id="1">baz</Bar>'
def test_tag(fragment_node: element.Element) -> None:
    """tag exposes the root element's tag name."""
    assert fragment_node.tag == 'Foo'
def test_new() -> None:
    """new builds a fresh element with the requested tag."""
    created = element.Element.new('Foo')
    assert created.tag == 'Foo'
def test_from_malformed_string() -> None:
    """Invalid XML triggers MalformedXmlError."""
    with pytest.raises(element.MalformedXmlError):
        element.Element.from_string('</Foo></Foo>')
def test_insert_node() -> None:
    """insert places child nodes under the root."""
    root = element.Element.new('Root')
    for child_name in ('Child1', 'Child2'):
        root.insert(0, element.Element.new(child_name))
    assert len(root.xpath('*')) == 2
def test_get_int(fragment_node: element.Element) -> None:
    """get with int casts the matched attribute to an int."""
    value = fragment_node.get('Bar/@id', int)
    assert value == 1
    assert isinstance(value, int)
def test_get_float(fragment_node: element.Element) -> None:
    """get with float casts the matched attribute to a float."""
    value = fragment_node.get('Bar/@id', float)
    assert value == 1.0
    assert isinstance(value, float)
def test_get_str(fragment_node: element.Element) -> None:
    """get with str returns the matched attribute unchanged."""
    value = fragment_node.get('Bar/@id', str)
    assert value == '1'
    assert isinstance(value, str)
def test_get_raises_when_not_found(fragment_node: element.Element) -> None:
    """get raises ElementGetValueError for a non-matching xpath."""
    with pytest.raises(ElementGetValueError):
        fragment_node.get('Bar/@foo', int)
def test_get_raises_when_cant_cast(fragment_node: element.Element) -> None:
    """get raises ElementGetValueError when the value cannot be cast."""
    with pytest.raises(ElementGetValueError):
        fragment_node.get('Bar/text()', int)
|
[
"energuide.element.Element.from_string",
"energuide.element.Element.new",
"pytest.raises",
"energuide.element.Element.parse"
] |
[((593, 630), 'energuide.element.Element.from_string', 'element.Element.from_string', (['fragment'], {}), '(fragment)\n', (620, 630), False, 'from energuide import element\n'), ((691, 728), 'energuide.element.Element.from_string', 'element.Element.from_string', (['fragment'], {}), '(fragment)\n', (718, 728), False, 'from energuide import element\n'), ((2786, 2812), 'energuide.element.Element.new', 'element.Element.new', (['"""Foo"""'], {}), "('Foo')\n", (2805, 2812), False, 'from energuide import element\n'), ((3036, 3063), 'energuide.element.Element.new', 'element.Element.new', (['"""Root"""'], {}), "('Root')\n", (3055, 3063), False, 'from energuide import element\n'), ((3077, 3106), 'energuide.element.Element.new', 'element.Element.new', (['"""Child1"""'], {}), "('Child1')\n", (3096, 3106), False, 'from energuide import element\n'), ((3120, 3149), 'energuide.element.Element.new', 'element.Element.new', (['"""Child2"""'], {}), "('Child2')\n", (3139, 3149), False, 'from energuide import element\n'), ((1170, 1205), 'pytest.raises', 'pytest.raises', (['ElementGetValueError'], {}), '(ElementGetValueError)\n', (1183, 1205), False, 'import pytest\n'), ((1821, 1852), 'energuide.element.Element.parse', 'element.Element.parse', (['xml_file'], {}), '(xml_file)\n', (1842, 1852), False, 'from energuide import element\n'), ((2897, 2937), 'pytest.raises', 'pytest.raises', (['element.MalformedXmlError'], {}), '(element.MalformedXmlError)\n', (2910, 2937), False, 'import pytest\n'), ((2947, 2990), 'energuide.element.Element.from_string', 'element.Element.from_string', (['"""</Foo></Foo>"""'], {}), "('</Foo></Foo>')\n", (2974, 2990), False, 'from energuide import element\n'), ((3833, 3868), 'pytest.raises', 'pytest.raises', (['ElementGetValueError'], {}), '(ElementGetValueError)\n', (3846, 3868), False, 'import pytest\n'), ((4000, 4035), 'pytest.raises', 'pytest.raises', (['ElementGetValueError'], {}), '(ElementGetValueError)\n', (4013, 4035), False, 'import pytest\n')]
|
'''
homelette
=========
homelette is an interface for various homology modelling tools, enabling the
user to easily assemble custom homology modelling pipelines.
Please check out the documentation and tutorials at
https://homelette.readthedocs.io/.
The docstring examples assume that `homelette` has been imported as `hm`. Code
snipets are indicated by three greater-than signs::
>>> import homelette as hm
Use the build-in ``help`` function to view the docstring of a function or
class::
>>> help(hm.Task)
Available subpackages
---------------------
organization
Classes for organizing workflows and models
alignment
Classes and functions for handling multiple sequence alignments
routines
Classes for homology model generation
evaluation
Classes for homology model evaluation
pdb_io
Interface for handling and modifying PDB files
extension
Interface for extending `homelette`
'''
# Names re-exported as the public API of the package.
__all__ = ['Task', 'Model', 'Alignment', 'routines', 'evaluation']
# Standard package metadata (author/email placeholders were anonymized).
__version__ = '1.3'
__author__ = '<NAME>, <NAME>'
__email__ = '<EMAIL>'
__maintainer__ = '<NAME>'
__license__ = 'MIT'
# Standard library imports
import warnings
# Local application imports
from .organization import Task, Model
from .alignment import Alignment
from . import routines
from . import evaluation
# Check third party imports and report missing modules
def _check_imports() -> None:
    '''
    Helper function that checks third-party imports and raises warnings if
    they could not be imported.

    Emits one warning per missing module (in sorted order) plus a single
    summary warning if anything at all is missing.

    Returns
    -------
    None
    '''
    for (module, imported) in sorted(_IMPORTS.items()):
        if not imported:
            msg = 'Module "{}" could not be imported.'.format(module)
            warnings.warn(msg)
    if not all(_IMPORTS.values()):
        # Fix: the package is named "homelette"; the original message said
        # "homology", which would confuse users.
        msg = ('Please install the missing modules in order to enjoy the full '
               'functionality of "homelette"')
        warnings.warn(msg)
# Gather per-module import-success flags reported by the submodules, then
# warn (once, at package import time) about anything unavailable.
_IMPORTS = {**evaluation._IMPORTS, **routines._IMPORTS}
_check_imports()
|
[
"warnings.warn"
] |
[((1896, 1914), 'warnings.warn', 'warnings.warn', (['msg'], {}), '(msg)\n', (1909, 1914), False, 'import warnings\n'), ((1707, 1725), 'warnings.warn', 'warnings.warn', (['msg'], {}), '(msg)\n', (1720, 1725), False, 'import warnings\n')]
|
'''
total number of delimeters
total number of hyphens
the length of the hostname
the length of the entire URL
the number of dots
a binary feature for each token in the hostname
a binary feature for each token in the path
'''
from urllib.parse import urlparse
import whois
import tldextract
import pandas as pd
import numpy as np
# Make pandas / numpy print full, untruncated output when inspecting data.
pd.set_option('display.max_columns', 10000)
pd.set_option('display.max_rows', 10000)
pd.set_option('display.max_colwidth', 10000)
pd.set_option('display.width',1000)
np.set_printoptions(threshold=np.inf)
# TLDs treated as "known"; anything else is flagged by is_known_tld().
All_Known_TLD = ['com', 'at', 'uk', 'pl', 'be', 'biz', 'co', 'jp', 'co_jp', 'cz', 'de', 'eu', 'fr', 'info', 'it', 'ru', 'lv', 'me', 'name', 'net', 'nz', 'org', 'us']
# List of suspicious words searched for in the URL path (is_known_path()).
Suspicious_Words=['secure','account','update','banking','login','click','confirm','password','verify','signin','ebayisapi','lucky','bonus']
# List of suspicious top-level domains (currently unused in this script).
Suspicious_TLD=['zip','cricket','link','work','party','gq','kim','country','science','tk']
# Input dataset and output feature-file locations.
dataset_path = '../Data/train_dataset.csv'
Lexical_Feature_path = 'Lexical_FeatureSet.npy'
# Calculate the total number of delimiters in a URL
def Total_delims(str):
    """Return how many delimiter characters ('-', '_', '?', '=', '&')
    appear in the given URL string.

    NOTE: the parameter name shadows the builtin ``str``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    delim = {'-', '_', '?', '=', '&'}  # set: O(1) membership per character
    return sum(1 for ch in str if ch in delim)
# Calculate the total number of hyphens in a URL
def Total_hyphens(link):
    """Return the number of '-' characters in *link*.

    ``str.count`` replaces the manual loop and runs in C.
    """
    return link.count('-')
# Calculate the length of hostname in a URL
def Hostname_len(url):
    """Return the length of the network-location (hostname) part of *url*."""
    return len(urlparse(url).netloc)
# Calculate the length of a URL
def URL_len(url):
    """Return the total number of characters in *url*."""
    length = len(url)
    return length
# Calculate the number of dots in a URL
def get_dot_num(url):
    """Return the number of '.' characters in *url* (via ``str.count``)."""
    return url.count('.')
# Binary feature for hostname tokens
def is_known_tld(url):
    """Return 0 when the URL's public suffix is in All_Known_TLD, else 1."""
    suffix = tldextract.extract(url).suffix
    return 0 if suffix in All_Known_TLD else 1
# Binary feature for path tokens
def is_known_path(url):
    """Return 1 if the URL path contains any word from Suspicious_Words,
    otherwise 0.

    The original loop's ``else: continue`` branch was dead code; ``any``
    expresses the same scan directly.
    """
    path = urlparse(url).path
    return 1 if any(word in path for word in Suspicious_Words) else 0
if __name__ == '__main__':
    ## 1. Load the training dataset
    df = pd.read_csv(dataset_path, header=0)
    # print(df.head())
    ## 2. Compute each lexical feature column over every URL
    urls = df['URL']
    feature_funcs = [Total_delims, Total_hyphens, URL_len,
                     get_dot_num, is_known_tld, is_known_path]
    feature_columns = [[func(u) for u in urls] for func in feature_funcs]
    ## 3. Assemble the feature matrix: one row per URL, one column per feature
    Lexical_Feature = np.array(feature_columns).T
    print(Lexical_Feature.shape)
    # print(Lexical_Feature[:10,:])
    ## 4. Persist the feature matrix
    np.save(Lexical_Feature_path, Lexical_Feature)
    ## 5. Reload it as a sanity check
    lexical = np.load(Lexical_Feature_path)
    print('lexical.shape=',lexical.shape)
    # print(basic)
|
[
"numpy.load",
"numpy.save",
"numpy.set_printoptions",
"tldextract.extract",
"pandas.read_csv",
"numpy.array",
"pandas.set_option",
"urllib.parse.urlparse"
] |
[((332, 375), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', '(10000)'], {}), "('display.max_columns', 10000)\n", (345, 375), True, 'import pandas as pd\n'), ((376, 416), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', '(10000)'], {}), "('display.max_rows', 10000)\n", (389, 416), True, 'import pandas as pd\n'), ((417, 461), 'pandas.set_option', 'pd.set_option', (['"""display.max_colwidth"""', '(10000)'], {}), "('display.max_colwidth', 10000)\n", (430, 461), True, 'import pandas as pd\n'), ((462, 498), 'pandas.set_option', 'pd.set_option', (['"""display.width"""', '(1000)'], {}), "('display.width', 1000)\n", (475, 498), True, 'import pandas as pd\n'), ((498, 535), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'np.inf'}), '(threshold=np.inf)\n', (517, 535), True, 'import numpy as np\n'), ((2380, 2415), 'pandas.read_csv', 'pd.read_csv', (['dataset_path'], {'header': '(0)'}), '(dataset_path, header=0)\n', (2391, 2415), True, 'import pandas as pd\n'), ((3141, 3187), 'numpy.save', 'np.save', (['Lexical_Feature_path', 'Lexical_Feature'], {}), '(Lexical_Feature_path, Lexical_Feature)\n', (3148, 3187), True, 'import numpy as np\n'), ((3241, 3270), 'numpy.load', 'np.load', (['Lexical_Feature_path'], {}), '(Lexical_Feature_path)\n', (3248, 3270), True, 'import numpy as np\n'), ((1624, 1637), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (1632, 1637), False, 'from urllib.parse import urlparse\n'), ((1985, 2008), 'tldextract.extract', 'tldextract.extract', (['url'], {}), '(url)\n', (2003, 2008), False, 'import tldextract\n'), ((2159, 2172), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (2167, 2172), False, 'from urllib.parse import urlparse\n'), ((2950, 3035), 'numpy.array', 'np.array', (['(total_delims, total_hyphens, url_len, dot_num, host_token, path_token)'], {}), '((total_delims, total_hyphens, url_len, dot_num, host_token,\n path_token))\n', (2958, 3035), True, 'import numpy as 
np\n')]
|
from Skeleton import Skeleton
if __name__ == '__main__' :
    # Delegate all work to the Skeleton entry point when run as a script.
    Skeleton.run()
|
[
"Skeleton.Skeleton.run"
] |
[((71, 85), 'Skeleton.Skeleton.run', 'Skeleton.run', ([], {}), '()\n', (83, 85), False, 'from Skeleton import Skeleton\n')]
|
#!/usr/bin/env python3
import acsuite
import lvsfunc as lvf
ac = acsuite.AC()  # audio-cut helper from acsuite
# BDMV stream for Granblue Fantasy season 2, disc 1.
path = r'BDMV/GRANBLUE_FANTASY_SEASON2_1/BDMV/STREAM/00007.m2ts'
src = lvf.src(path)  # load the clip via lvsfunc
if __name__ == "__main__":
    # Trim and export the audio track; (0, -24) presumably drops the last
    # 24 frames — confirm against the acsuite eztrim documentation.
    ac.eztrim(src, [(0, -24)], path[:-4]+"wav", "GBF2BD_NCOP1_cut.wav")
|
[
"lvsfunc.src",
"acsuite.AC"
] |
[((65, 77), 'acsuite.AC', 'acsuite.AC', ([], {}), '()\n', (75, 77), False, 'import acsuite\n'), ((151, 164), 'lvsfunc.src', 'lvf.src', (['path'], {}), '(path)\n', (158, 164), True, 'import lvsfunc as lvf\n')]
|
#!/usr/bin/env python3
#
# Copyright 2020 IBM
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.IBM Confidential
#
import typing
import fastapi.testclient as tstc
import app.core.configuration as conf
def test_get_server_capabilities(
        client: tstc.TestClient
) -> typing.NoReturn:
    """The capabilities endpoint advertises all four capability groups."""
    url = f'{conf.get_config().API_V2_STR}/capabilities'
    response = client.get(url)
    payload = response.json()
    assert response.status_code == 200
    assert payload['capabilities']
    for group in ('info', 'discover', 'manage', 'run'):
        assert group in payload['capabilities']
def test_get_managed_capabilities(
        client: tstc.TestClient,
        test_upload_size_limit
):
    """The managed-capabilities section lists supported structures, formats,
    and the configured upload size limit."""
    response = client.get(f'{conf.get_config().API_V2_STR}/capabilities')
    body = response.json()
    assert response.status_code == 200
    managed = body['managed_capabilities']
    assert managed
    for key in ('supported_input_data_structure',
                'supported_output_data_structure',
                'supported_binary_format',
                'supported_upload_format'):
        assert key in managed
    assert 'auto' in managed['supported_input_data_structure']
    # NOTE(review): the next line repeats the previous check; it was likely
    # meant to target 'supported_output_data_structure' — confirm intent.
    assert 'auto' in managed['supported_input_data_structure']
    for fmt in ('joblib', 'pickle', 'pmml'):
        assert fmt in managed['supported_binary_format']
    assert 'pmml' in managed['supported_upload_format']
    assert managed['file_size_limit'] == test_upload_size_limit
|
[
"app.core.configuration.get_config"
] |
[((816, 833), 'app.core.configuration.get_config', 'conf.get_config', ([], {}), '()\n', (831, 833), True, 'import app.core.configuration as conf\n'), ((1284, 1301), 'app.core.configuration.get_config', 'conf.get_config', ([], {}), '()\n', (1299, 1301), True, 'import app.core.configuration as conf\n')]
|
import numpy as np
from scipy import stats
from pm4py.algo.filtering.log.start_activities import start_activities_filter
def start_activities(log):
    """Compute descriptive statistics over start-activity occurrence counts.

    Returns a 12-element list: [n_unique, min, max, mean, median, std,
    variance, q1, q3, iqr, skewness, kurtosis].
    """
    counts_by_activity = start_activities_filter.get_start_activities(log)
    occurrences = list(counts_by_activity.values())
    return [
        len(counts_by_activity),
        np.min(occurrences),
        np.max(occurrences),
        np.mean(occurrences),
        np.median(occurrences),
        np.std(occurrences),
        np.var(occurrences),
        np.percentile(occurrences, 25),
        np.percentile(occurrences, 75),
        stats.iqr(occurrences),
        stats.skew(occurrences),
        stats.kurtosis(occurrences),
    ]
|
[
"scipy.stats.iqr",
"pm4py.algo.filtering.log.start_activities.start_activities_filter.get_start_activities",
"numpy.median",
"numpy.std",
"numpy.percentile",
"scipy.stats.skew",
"numpy.min",
"numpy.mean",
"numpy.max",
"scipy.stats.kurtosis",
"numpy.var"
] |
[((166, 215), 'pm4py.algo.filtering.log.start_activities.start_activities_filter.get_start_activities', 'start_activities_filter.get_start_activities', (['log'], {}), '(log)\n', (210, 215), False, 'from pm4py.algo.filtering.log.start_activities import start_activities_filter\n'), ((352, 388), 'numpy.min', 'np.min', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (358, 388), True, 'import numpy as np\n'), ((416, 452), 'numpy.max', 'np.max', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (422, 452), True, 'import numpy as np\n'), ((481, 518), 'numpy.mean', 'np.mean', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (488, 518), True, 'import numpy as np\n'), ((549, 588), 'numpy.median', 'np.median', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (558, 588), True, 'import numpy as np\n'), ((616, 652), 'numpy.std', 'np.std', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (622, 652), True, 'import numpy as np\n'), ((685, 721), 'numpy.var', 'np.var', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (691, 721), True, 'import numpy as np\n'), ((748, 795), 'numpy.percentile', 'np.percentile', (['start_activities_occurrences', '(25)'], {}), '(start_activities_occurrences, 25)\n', (761, 795), True, 'import numpy as np\n'), ((822, 869), 'numpy.percentile', 'np.percentile', (['start_activities_occurrences', '(75)'], {}), '(start_activities_occurrences, 75)\n', (835, 869), True, 'import numpy as np\n'), ((897, 936), 'scipy.stats.iqr', 'stats.iqr', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (906, 936), False, 'from scipy import stats\n'), ((969, 1009), 'scipy.stats.skew', 'stats.skew', (['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (979, 1009), False, 'from scipy import stats\n'), ((1042, 1086), 'scipy.stats.kurtosis', 'stats.kurtosis', 
(['start_activities_occurrences'], {}), '(start_activities_occurrences)\n', (1056, 1086), False, 'from scipy import stats\n')]
|
"""Setup of Spark Framework"""
from setuptools import setup
with open("requirements.txt") as f:
    # Strip whitespace and skip blank lines: the original kept trailing
    # newlines on every requirement and turned blank lines into empty
    # requirement strings.
    requirements = [
        line.strip()
        for line in f
        if line.strip() and not line.startswith("#")
    ]
setup(
    name="spark_framework",
    version="1.24",
    description="Alternative pythonic style API to work with Apache Spark",
    long_description="The project is a collection of useful functions "
                     "that allow to write PySpark code in a more convenient way",
    author="<NAME>",
    author_email="<EMAIL>",
    license="Apache 2.0",
    packages=["spark_framework"],
    platforms=["any"],
    install_requires=requirements,
)
|
[
"setuptools.setup"
] |
[((174, 588), 'setuptools.setup', 'setup', ([], {'name': '"""spark_framework"""', 'version': '"""1.24"""', 'description': '"""Alternative pythonic style API to work with Apache Spark"""', 'long_description': '"""The project is a collection of useful functions that allow to write PySpark code in a more convenient way"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""Apache 2.0"""', 'packages': "['spark_framework']", 'platforms': "['any']", 'install_requires': 'requirements'}), "(name='spark_framework', version='1.24', description=\n 'Alternative pythonic style API to work with Apache Spark',\n long_description=\n 'The project is a collection of useful functions that allow to write PySpark code in a more convenient way'\n , author='<NAME>', author_email='<EMAIL>', license='Apache 2.0',\n packages=['spark_framework'], platforms=['any'], install_requires=\n requirements)\n", (179, 588), False, 'from setuptools import setup\n')]
|
#!/usr/bin/env python
import os.path as op
import nibabel as nib
from glob import glob
from scipy.ndimage import gaussian_filter
from pydra import mark, Workflow, Submitter
import click
from numpy.typing import NDArray
@mark.task
@mark.annotate({"return": {"outfile": str}})
def filter(filename: str, outdir: str) -> str:
    # Pydra task: Gaussian-smooth a NIfTI image with a fixed sigma and save
    # the result into `outdir` under a "smooth<sigma>_" prefixed name.
    # NOTE(review): the name shadows the builtin ``filter``.
    sigma = 5
    img = nib.load(filename)
    data = img.get_fdata()
    smooth = gaussian_filter(data, sigma=sigma)
    # Reuse the input image's affine and header so orientation is preserved.
    smooth_img = nib.Nifti1Image(smooth, img.affine, img.header)
    outfile = op.join(outdir, f"smooth{sigma}_{op.basename(filename)}")
    nib.save(smooth_img, outfile)
    return outfile
@click.command()
@click.argument("dir", type=click.Path(exists=True, resolve_path=True))
@click.argument("output_dir", type=click.Path(resolve_path=True))
@click.option("--range", nargs=2, type=int, default=None)
def main(dir, output_dir, range):
    # Collect every file in DIR; --range optionally restricts to a
    # [start, end) slice. NOTE(review): "dir" and "range" shadow builtins
    # (click derives parameter names from the CLI argument names).
    paths = glob(op.join(dir, "*"))
    if range is not None:
        paths = paths[range[0] : range[1]]
    # Build a pydra workflow that fans the smoothing task out over all paths.
    wf = Workflow(name="filterbb", input_spec=["fn", "outdir"], outdir=output_dir)
    wf.split(["fn"], fn=paths)
    wf.add(filter(name="smooth", filename=wf.lzin.fn, outdir=wf.lzin.outdir))
    wf.set_output([("result", wf.smooth.lzout.outfile)])
    # Execute with the concurrent-futures plugin, then print each output path.
    with Submitter(plugin="cf") as sub:
        sub(wf)
    for res in wf.result():
        print(res.output.result)
if __name__ == "__main__":
main()
|
[
"pydra.mark.annotate",
"nibabel.Nifti1Image",
"nibabel.load",
"os.path.basename",
"scipy.ndimage.gaussian_filter",
"click.option",
"click.command",
"nibabel.save",
"click.Path",
"pydra.Workflow",
"pydra.Submitter",
"os.path.join"
] |
[((234, 277), 'pydra.mark.annotate', 'mark.annotate', (["{'return': {'outfile': str}}"], {}), "({'return': {'outfile': str}})\n", (247, 277), False, 'from pydra import mark, Workflow, Submitter\n'), ((640, 655), 'click.command', 'click.command', ([], {}), '()\n', (653, 655), False, 'import click\n'), ((795, 851), 'click.option', 'click.option', (['"""--range"""'], {'nargs': '(2)', 'type': 'int', 'default': 'None'}), "('--range', nargs=2, type=int, default=None)\n", (807, 851), False, 'import click\n'), ((350, 368), 'nibabel.load', 'nib.load', (['filename'], {}), '(filename)\n', (358, 368), True, 'import nibabel as nib\n'), ((409, 443), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['data'], {'sigma': 'sigma'}), '(data, sigma=sigma)\n', (424, 443), False, 'from scipy.ndimage import gaussian_filter\n'), ((461, 508), 'nibabel.Nifti1Image', 'nib.Nifti1Image', (['smooth', 'img.affine', 'img.header'], {}), '(smooth, img.affine, img.header)\n', (476, 508), True, 'import nibabel as nib\n'), ((587, 616), 'nibabel.save', 'nib.save', (['smooth_img', 'outfile'], {}), '(smooth_img, outfile)\n', (595, 616), True, 'import nibabel as nib\n'), ((1003, 1076), 'pydra.Workflow', 'Workflow', ([], {'name': '"""filterbb"""', 'input_spec': "['fn', 'outdir']", 'outdir': 'output_dir'}), "(name='filterbb', input_spec=['fn', 'outdir'], outdir=output_dir)\n", (1011, 1076), False, 'from pydra import mark, Workflow, Submitter\n'), ((904, 921), 'os.path.join', 'op.join', (['dir', '"""*"""'], {}), "(dir, '*')\n", (911, 921), True, 'import os.path as op\n'), ((1253, 1275), 'pydra.Submitter', 'Submitter', ([], {'plugin': '"""cf"""'}), "(plugin='cf')\n", (1262, 1275), False, 'from pydra import mark, Workflow, Submitter\n'), ((684, 726), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'resolve_path': '(True)'}), '(exists=True, resolve_path=True)\n', (694, 726), False, 'import click\n'), ((763, 792), 'click.Path', 'click.Path', ([], {'resolve_path': '(True)'}), '(resolve_path=True)\n', 
(773, 792), False, 'import click\n'), ((557, 578), 'os.path.basename', 'op.basename', (['filename'], {}), '(filename)\n', (568, 578), True, 'import os.path as op\n')]
|
#!/usr/bin/env python
import itertools
EXAMPLE = """mxmxvkd kfcds sqjhc nhms (contains dairy, fish)
trh fvjkl sbzzf mxmxvkd (contains dairy)
sqjhc fvjkl (contains soy)
sqjhc mxmxvkd sbzzf (contains fish)
"""
def parse_recipe(line):
"""Meh
>>> parse_recipe(EXAMPLE.split("\\n")[0])
{'ingredients': ['mxmxvkd', 'kfcds', 'sqjhc', 'nhms'], 'allergens': ['dairy', 'fish']}
"""
line = line.replace(")", "")
ing, allr = line.split(" (contains")
return {"ingredients": ing.strip().split(), "allergens": allr.strip().split(", ")}
def parse_recipes(text):
"""
>>> recipes = parse_recipes(EXAMPLE)
>>> len(recipes)
4
>>> list((len(recipe["ingredients"]) for recipe in recipes))
[4, 4, 2, 3]
>>> list((len(recipe["allergens"]) for recipe in recipes))
[2, 1, 1, 1]
"""
return [parse_recipe(line) for line in text.strip().split("\n")]
def ingredients_for_allergens(recipes):
"""
>>> buckets = ingredients_for_allergens(parse_recipes(EXAMPLE))
>>> sorted(buckets.keys())
['dairy', 'fish', 'soy']
>>> sorted(buckets["soy"])
['fvjkl', 'sqjhc']
>>> sorted(buckets["dairy"])
['mxmxvkd']
"""
all_alrgns = {alrgn: set() for r in recipes for alrgn in r["allergens"]}
for ingrs, allrgs in ((r["ingredients"], r["allergens"]) for r in recipes):
for allrg in allrgs:
if len(all_alrgns[allrg]) == 0:
all_alrgns[allrg] = set(ingrs)
else:
all_alrgns[allrg] &= set(ingrs)
return all_alrgns
def ingredients_without_allergens(recipes):
"""
>>> recipes = parse_recipes(EXAMPLE)
>>> sorted(ingredients_without_allergens(recipes))
['kfcds', 'nhms', 'sbzzf', 'trh']
"""
buckets = ingredients_for_allergens(recipes)
all_ingrs = [ingr for r in recipes for ingr in r["ingredients"]]
no_alrgn = set()
for ingr in set(all_ingrs):
if not any((ingr in bucket for bucket in buckets.values())):
no_alrgn.add(ingr)
return list(no_alrgn)
def count_ingredients_occurrences(recipes, ingredients):
"""
>>> recipes = parse_recipes(EXAMPLE)
>>> count_ingredients_occurrences(recipes, ["kfcds"])
1
>>> count_ingredients_occurrences(recipes, ["sbzzf"])
2
>>> count_ingredients_occurrences(recipes, ["kfcds", "nhms", "sbzzf", "trh"])
5
"""
all_ingrs = [ingr for r in recipes for ingr in r["ingredients"]]
return sum((all_ingrs.count(ingr) for ingr in ingredients))
def reduce_allergens(bkts):
"""
>>> recipes = parse_recipes(EXAMPLE)
>>> alrgns = ingredients_for_allergens(recipes)
>>> reduce_allergens(alrgns)
{'dairy': {'mxmxvkd'}, 'fish': {'sqjhc'}, 'soy': {'fvjkl'}}
"""
while not all((len(b) == 1 for b in bkts.values())):
with_one = [b for b in bkts if len(bkts[b]) == 1]
for o, b in itertools.product(with_one, bkts):
if o == b:
continue
bkts[b] -= bkts[o]
return bkts
if __name__ == "__main__":
with open("input") as fd:
recipes = parse_recipes(fd.read().strip())
w_out_alrgns = ingredients_without_allergens(recipes)
nb_w_out = count_ingredients_occurrences(recipes, w_out_alrgns)
print(f"Part1: {nb_w_out} ingredients without allergens")
ingr_for_alrgn = reduce_allergens(ingredients_for_allergens(recipes))
p2 = ",".join([list(ingr_for_alrgn[k])[0] for k in sorted(ingr_for_alrgn.keys())])
print(f"Part2: {p2}")
|
[
"itertools.product"
] |
[((2882, 2915), 'itertools.product', 'itertools.product', (['with_one', 'bkts'], {}), '(with_one, bkts)\n', (2899, 2915), False, 'import itertools\n')]
|
from gym_minigrid.minigrid import *
from gym_minigrid.register import register
class Room:
def __init__(self,
top,
size
):
# Top-left corner and size (tuples)
self.top = top
self.size = size
# List of door objects and door positions
self.doors = []
self.doorPos = []
# Indicates if this room is locked
self.locked = False
# Set of rooms this is connected to
self.neighbors = set()
# List of objects contained
self.objs = []
def randPos(self, env):
topX, topY = self.top
sizeX, sizeY = self.size
return env._randPos(
topX + 1, topX + sizeX - 1,
topY + 1, topY + sizeY - 1
)
class RoomGrid(MiniGridEnv):
"""
Environment with multiple rooms and random objects.
This is meant to serve as a base class for other environments.
"""
def __init__(
self,
roomSize=6,
numCols=4,
maxObsPerRoom=3,
lockedRooms=False
):
assert roomSize > 0
assert roomSize >= 4
assert numCols > 0
self.roomSize = roomSize
self.numCols = numCols
self.numRows = numCols
self.maxObsPerRoom = maxObsPerRoom
self.lockedRooms = False
gridSize = (roomSize - 1) * numCols + 1
super().__init__(gridSize=gridSize, maxSteps=6*gridSize)
self.reward_range = (0, 1)
def getRoom(self, x, y):
"""Get the room a given position maps to"""
assert x >= 0
assert y >= 0
i = x // self.roomSize
j = y // self.roomSize
assert i < self.numCols
assert j < self.numRows
return self.roomGrid[j][i]
def _genGrid(self, width, height):
# Create the grid
self.grid = Grid(width, height)
self.roomGrid = []
self.rooms = []
# For each row of rooms
for j in range(0, self.numRows):
row = []
# For each column of rooms
for i in range(0, self.numCols):
room = Room(
(i * (self.roomSize-1), j * (self.roomSize-1)),
(self.roomSize, self.roomSize)
)
row.append(room)
self.rooms.append(room)
# Generate the walls for this room
self.grid.wallRect(*room.top, *room.size)
self.roomGrid.append(row)
# Randomize the player start position and orientation
self.placeAgent()
# Find which room the agent was placed in
startRoom = self.getRoom(*self.startPos)
# TODO: respect maxObsPerRoom
# Place random objects in the world
types = ['key', 'ball', 'box']
for i in range(0, 12):
objType = self._randElem(types)
objColor = self._randElem(COLOR_NAMES)
if objType == 'key':
obj = Key(objColor)
elif objType == 'ball':
obj = Ball(objColor)
elif objType == 'box':
obj = Box(objColor)
self.placeObj(obj)
# TODO: curriculum generation
self.mission = ''
def step(self, action):
obs, reward, done, info = super().step(action)
return obs, reward, done, info
register(
id='MiniGrid-RoomGrid-v0',
entry_point='gym_minigrid.envs:RoomGrid'
)
|
[
"gym_minigrid.register.register"
] |
[((3379, 3456), 'gym_minigrid.register.register', 'register', ([], {'id': '"""MiniGrid-RoomGrid-v0"""', 'entry_point': '"""gym_minigrid.envs:RoomGrid"""'}), "(id='MiniGrid-RoomGrid-v0', entry_point='gym_minigrid.envs:RoomGrid')\n", (3387, 3456), False, 'from gym_minigrid.register import register\n')]
|
from ete3 import Tree, TreeStyle
import pandas as pd
# load jordan predictions
jor = pd.read_csv(snakemake.input.jor, index_col=0)
sample = jor.sample(100)
sample_genomes = sample['genome']
root = 'NC_000913.3'
tree = Tree(snakemake.input.tree)
tree.set_outgroup(root)
# keep_list = []
# for n, node in enumerate(tree.get_leaf_names()):
# if node in sample_genomes:
# keep_list.append(node)
#
# tree.prune(keep_list)
cs = TreeStyle()
cs.mode = 'c' # draw tree in circular mode
cs.arc_start = -180
cs.arc_span = 180
cs.show_leaf_name = True
tree.render("mytree.pdf", w=183, units="mm", tree_style=cs)
|
[
"pandas.read_csv",
"ete3.TreeStyle",
"ete3.Tree"
] |
[((86, 131), 'pandas.read_csv', 'pd.read_csv', (['snakemake.input.jor'], {'index_col': '(0)'}), '(snakemake.input.jor, index_col=0)\n', (97, 131), True, 'import pandas as pd\n'), ((220, 246), 'ete3.Tree', 'Tree', (['snakemake.input.tree'], {}), '(snakemake.input.tree)\n', (224, 246), False, 'from ete3 import Tree, TreeStyle\n'), ((439, 450), 'ete3.TreeStyle', 'TreeStyle', ([], {}), '()\n', (448, 450), False, 'from ete3 import Tree, TreeStyle\n')]
|
#! /usr/bin/env python
__author__ = "<NAME>"
__email__ = "<EMAIL>"
"""
Code to store and manipulate data.
-------------------------------------------------------------------------------
class StatusInfo
-------------------------------------------------------------------------------
This is the only class implemented that is meant to be public.
The data stored by instances of class StatusInfo must be a list of items.
These items can be anything, including objects.
A typical example is data is a list of HTCondor ClassAds, where each
item in the data list represents an HTCondor job.
Class StatusInfo has several methods to manipulate the data,
but in all cases the output of the method is a new instance of one of the
classes implemented: StatusInfo, _DictStatusInfo, etc.
Methods never modify the current instance data.
This allows to perform different manipulations from the same source object.
There are two types of methods in class StatusInfo:
- methods whose object output accepts further processing.
Examples are methods indexby(), filter(), and map().
- methods whose object output can not be processed anymore.
An attempt to call any method on these instances
will raise an Exception.
Examples are methods reduce(), and process().
The method indexby() is somehow special.
It is being used to split the stored data into a dictionary,
according to whatever rule is provided.
The values of this dictionary are themselves new StatusInfo instances.
Therefore, the output of calling indexby() once is an _DictStatusInfo object
with data:
self.data = {
key1: <StatusInfo>,
key2: <StatusInfo>,
...
keyN: <StatusInfo>
}
-------------------------------------------------------------------------------
The UML source for the classes is as follows:
@startuml
object <|-- _Base
_Base <|-- _BaseDict
_Base <|-- StatusInfo
_Base <|-- _NonMutableStatusInfo
_AnalysisInterface <|-- StatusInfo
_AnalysisInterface <|-- _DictStatusInfo
_BaseDict <|-- _DictStatusInfo
_BaseDict <|-- _NonMutableDictStatusInfo
_GetRawBase <|-- StatusInfo
_GetRawBase <|-- _NonMutableStatusInfo
@enduml
+--------+
| object |
+--------+
^
|
+--------------------+ +-------+
| _AnalysisInterface | +------------------------------->| _Base |<-----------------+
+--------------------+ | +-------+ |
^ ^ | +-------------+ ^ +-----------+
| | | | _GetRawBase | | | _BaseDict |
| | | +-------------+ | +-----------+
| | | ^ ^ | ^ ^
| | | | | | | |
| | | | | | | |
| | | | | | | |
| | | | | | | |
| +------------+ | | +-----------------------+ | |
| | StatusInfo |-------+ +---| _NonMutableStatusInfo | | |
| +------------+ +-----------------------+ | |
| +-----------------+ | +---------------------------+
+------------------------------------| _DictStatusInfo |-----------------------+ | _NonMutableDictStatusInfo |
+-----------------+ +---------------------------+
-------------------------------------------------------------------------------
Analyzers
-------------------------------------------------------------------------------
The input to all methods is an object of type Analyzer.
Analyzers are classes that implement the rules or policies to be used
for each method call.
For example:
- a call to method indexby() expects an object of type AnalyzerIndexBy
- a call to method map() expects an object of type AnalyzerMap
- a call to method reduce() expects an object of type AnalyzerReduce
- etc.
Each Analyzer object must have implemented a method
with the same name that the StatusInfo's method it is intended for.
For exmple:
- classes AnalyzerIndexBy must implement method indexby()
- classes AnalyzerMap must implement method map()
- classes AnalyzerReduce must implement method reduce()
- ...
Passing an analyzer object that does not implement the right method will
raise an IncorrectAnalyzer Exception.
A few basic pre-made Analyzers have been implemented, ready to use.
"""
import copy
import datetime
import inspect
import logging
import logging.handlers
import threading
import time
import traceback
import os
import pwd
import sys
# =============================================================================
# Decorators
#
# Note:
# the decorator must be implemented before the classes using it
# otherwise, they do not find it
# =============================================================================
def validate_call(method):
"""
validates calls to the processing methods.
Checks:
* if the StatusInfo object is mutable or not,
* if a method is being called with the right type of Analyzer
Exceptions are raised with some criteria is not met.
"""
def wrapper(self, analyzer, *k, **kw):
method_name = method.__name__
analyzertype = analyzer.analyzertype
if not analyzertype == method_name:
msg = 'Analyzer object {obj} is not type {name}. Raising exception.'
msg = msg.format(obj = analyzer,
name = method_name)
self.log.error(msg)
raise IncorrectAnalyzer(analyzer, analyzertype, method_name)
out = method(self, analyzer, *k, **kw)
return out
return wrapper
def catch_exception(method):
"""
catches any exception during data processing
and raises an AnalyzerFailure exception
"""
def wrapper(self, analyzer):
try:
out = method(self, analyzer)
except Exception as ex:
msg = 'Exception of type "%s" ' %ex.__class__.__name__
msg += 'with content "%s" ' %ex
msg += 'while calling "%s" ' %method.__name__
msg += 'with analyzer "%s"' %analyzer
raise AnalyzerFailure(msg)
else:
return out
return wrapper
# =============================================================================
# Base classes and interfaces
# =============================================================================
class _Base(object):
def __init__(self, data, timestamp=None):
"""
:param data: the data to be recorded
:param timestamp: the time when this object was created
"""
self.log = logging.getLogger('info')
self.log.addHandler(logging.NullHandler())
msg ='Initializing object with input options: \
data={data}, timestamp={timestamp}'
msg = msg.format(data=data,
timestamp=timestamp)
self.log.debug(msg)
self.data = data
if not timestamp:
timestamp = int(time.time())
msg = 'Setting timestamp to %s' %timestamp
self.log.debug(msg)
self.timestamp = timestamp
self.log.debug('Object initialized')
def get(self, *key_l):
"""
returns the data hosted by the Info object in the
tree structure pointed by all keys
The output is the data, either a dictionary or the original raw list
:param key_l list: list of keys for each nested dictionary
:rtype data:
"""
if len(key_l) == 0:
return self.data
else:
key = key_l[0]
if key not in self.data.keys():
raise MissingKey(key)
data = self.data[key]
return data.get(*key_l[1:])
class _BaseDict(_Base):
"""
adds an extra check for the input data
"""
def __init__(self, data, timestamp=None):
super(_BaseDict, self).__init__(data, timestamp)
if type(self.data) is not dict:
raise IncorrectInputDataType(dict)
def getraw(self):
out = {}
for key, value in self.data.items():
out[key] = value.getraw()
return out
def __getitem__(self, key):
"""
returns the Info object pointed by the key
:param key: the key in the higher level dictionary
:rtype StatusInfo:
"""
if key not in self.data.keys():
raise MissingKey(key)
return self.data[key]
# extra get methods
class _GetRawBase:
def getraw(self):
return self.data
# interfaces
class _AnalysisInterface:
def indexby(self, analyzer):
raise NotImplementedError
def map(self, analyzer):
raise NotImplementedError
def filter(self, analyzer):
raise NotImplementedError
def reduce(self, analyzer):
raise NotImplementedError
def transform(self, analyzer):
raise NotImplementedError
def process(self, analyzer):
raise NotImplementedError
# =============================================================================
# Info class
# =============================================================================
class StatusInfo(_Base, _AnalysisInterface, _GetRawBase):
def __init__(self, data, timestamp=None):
super(StatusInfo, self).__init__(data, timestamp)
if type(self.data) is not list:
msg = 'Input data %s is not a dict. Raising exception' %data
self.log.error(msg)
raise IncorrectInputDataType(list)
def analyze(self, analyzer):
"""
generic method that picks the right one
based on the type of analyzer
:param analyzer: an Analyzer object
:rtype StatusInfo:
"""
self.log.debug('Starting')
if analyzer.analyzertype == 'indexby':
return self.indexby(analyzer)
elif analyzer.analyzertype == 'filter':
return self.filter(analyzer)
elif analyzer.analyzertype == 'map':
return self.map(analyzer)
elif analyzer.analyzertype == 'reduce':
return self.reduce(analyzer)
elif analyzer.analyzertype == 'transform':
return self.transform(analyzer)
elif analyzer.analyzertype == 'process':
return self.process(analyzer)
else:
msg = 'Input object %s is not a valid analyzer. Raising exception.'
self.log.error(msg)
raise NotAnAnalyzer()
def apply_algorithm(self, algorithm):
"""
invoke all steps in an Algorithm object
and returns the final output
:param Algorithm algorithm:
:rtype StatusInfo:
"""
return algorithm.analyze(self)
# -------------------------------------------------------------------------
# methods to manipulate the data
# -------------------------------------------------------------------------
@validate_call
def indexby(self, analyzer):
"""
groups the items recorded in self.data into a dictionary
and creates a new StatusInfo object with it.
1. make a dictinary grouping items according to rules in analyzer
2. convert that dictionary into a dictionary of StatusInfo objects
3. make a new StatusInfo with that dictionary
:param analyzer: an instance of AnalyzerIndexBy-type class
implementing method indexby()
:rtype StatusInfo:
"""
self.log.debug('Starting with analyzer %s' %analyzer)
new_data = self.__indexby(analyzer)
new_info = _DictStatusInfo(new_data, timestamp=self.timestamp)
return new_info
@catch_exception
def __indexby(self, analyzer):
# 1
tmp_new_data = {}
for item in self.data:
key = analyzer.indexby(item)
if key is not None:
if key not in tmp_new_data.keys():
tmp_new_data[key] = []
tmp_new_data[key].append(item)
# 2
new_data = {}
for k, v in tmp_new_data.items():
new_data[k] = StatusInfo(v, timestamp=self.timestamp)
return new_data
# -------------------------------------------------------------------------
@validate_call
def map(self, analyzer):
"""
modifies each item in self.data according to rules
in analyzer
:param analyzer: an instance of AnalyzerMap-type class
implementing method map()
:rtype StatusInfo:
"""
self.log.debug('Starting with analyzer %s' %analyzer)
new_data = self.__map(analyzer)
new_info = StatusInfo(new_data, timestamp=self.timestamp)
return new_info
@catch_exception
def __map(self, analyzer):
new_data = []
for item in self.data:
new_item = analyzer.map(item)
new_data.append(new_item)
return new_data
# -------------------------------------------------------------------------
@validate_call
def filter(self, analyzer):
"""
eliminates the items in self.data that do not pass
the filter implemented in analyzer
:param analyzer: an instance of AnalyzerFilter-type class
implementing method filter()
:rtype StatusInfo:
"""
self.log.debug('Starting with analyzer %s' %analyzer)
new_data = self.__filter(analyzer)
new_info = StatusInfo(new_data, timestamp=self.timestamp)
return new_info
@catch_exception
def __filter(self, analyzer):
new_data = []
for item in self.data:
if analyzer.filter(item):
new_data.append(item)
return new_data
# -------------------------------------------------------------------------
@validate_call
def reduce(self, analyzer):
"""
process the entire self.data at the raw level and accumulate values
:param analyzer: an instance of AnalyzerReduce-type class
implementing method reduce()
:rtype StatusInfo:
"""
self.log.debug('Starting with analyzer %s' %analyzer)
new_data = self.__reduce(analyzer)
new_info = _NonMutableStatusInfo(new_data,
timestamp=self.timestamp)
return new_info
@catch_exception
def __reduce(self, analyzer):
value = analyzer.init_value
for item in self.data:
value = analyzer.reduce(value, item)
return value
# -------------------------------------------------------------------------
@validate_call
def transform(self, analyzer):
"""
process the entire self.data at the raw level
:param analyzer: an instance of AnalyzerTransform-type class
implementing method transform()
:rtype StatusInfo:
"""
self.log.debug('Starting with analyzer %s' %analyzer)
new_data = self.__transform(analyzer)
new_info = StatusInfo(new_data, timestamp=self.timestamp)
return new_info
@catch_exception
def __transform(self, analyzer):
new_data = analyzer.transform(self.data)
return new_data
# -------------------------------------------------------------------------
@validate_call
def process(self, analyzer):
"""
process the entire self.data at the raw level
:param analyzer: an instance of AnalyzerProcess-type class
implementing method process()
:rtype StatusInfo:
"""
self.log.debug('Starting with analyzer %s' %analyzer)
new_data = self.__process(analyzer)
new_info = _NonMutableStatusInfo(new_data, timestamp=self.timestamp)
return new_info
@catch_exception
def __process(self, analyzer):
new_data = analyzer.process(self.data)
return new_data
# =============================================================================
class _DictStatusInfo(_BaseDict, _AnalysisInterface):
# -------------------------------------------------------------------------
# methods to manipulate the data
# -------------------------------------------------------------------------
@validate_call
def indexby(self, analyzer):
new_data = {}
for key, statusinfo in self.data.items():
self.log.debug('calling indexby() for content in key %s'%key)
new_data[key] = statusinfo.indexby(analyzer)
new_info = _DictStatusInfo(new_data, timestamp=self.timestamp)
return new_info
@validate_call
def map(self, analyzer):
new_data = {}
for key, statusinfo in self.data.items():
self.log.debug('calling map() for content in key %s'%key)
new_data[key] = statusinfo.map(analyzer)
new_info = _DictStatusInfo(new_data, timestamp=self.timestamp)
return new_info
@validate_call
def filter(self, analyzer):
new_data = {}
for key, statusinfo in self.data.items():
self.log.debug('calling filter() for content in key %s'%key)
new_data[key] = statusinfo.filter(analyzer)
new_info = _DictStatusInfo(new_data, timestamp=self.timestamp)
return new_info
@validate_call
def reduce(self, analyzer):
new_data = {}
for key, statusinfo in self.data.items():
self.log.debug('calling reduce() for content in key %s'%key)
new_data[key] = statusinfo.reduce(analyzer)
new_info = _NonMutableDictStatusInfo(new_data, timestamp=self.timestamp)
return new_info
@validate_call
def transform(self, analyzer):
new_data = {}
for key, statusinfo in self.data.items():
self.log.debug('calling transform() for content in key %s'%key)
new_data[key] = statusinfo.transform(analyzer)
new_info = _DictStatusInfo(new_data, timestamp=self.timestamp)
return new_info
@validate_call
def process(self, analyzer):
new_data = {}
for key, statusinfo in self.data.items():
self.log.debug('calling process() for content in key %s'%key)
new_data[key] = statusinfo.process(analyzer)
new_info = _NonMutableDictStatusInfo(new_data, timestamp=self.timestamp)
return new_info
class _NonMutableStatusInfo(_Base, _GetRawBase):
pass
class _NonMutableDictStatusInfo(_BaseDict):
pass
# =============================================================================
# Analyzers
# =============================================================================
class Analyzer(object):
pass
class AnalyzerIndexBy(Analyzer):
analyzertype = "indexby"
def indexby(self):
raise NotImplementedError
class AnalyzerFilter(Analyzer):
analyzertype = "filter"
def filter(self):
raise NotImplementedError
class AnalyzerMap(Analyzer):
analyzertype = "map"
def map(self):
raise NotImplementedError
class AnalyzerReduce(Analyzer):
analyzertype = "reduce"
def __init__(self, init_value=None):
self.init_value = init_value
def reduce(self):
raise NotImplementedError
class AnalyzerTransform(Analyzer):
analyzertype = "transform"
def transform(self):
raise NotImplementedError
class AnalyzerProcess(Analyzer):
analyzertype = "process"
def process(self):
raise NotImplementedError
class Algorithm(object):
"""
container for multiple Analyzer objects
"""
def __init__(self):
self.analyzer_l= []
def add(self, analyzer):
self.analyzer_l.append(analyzer)
def analyze(self, input_data):
tmp_out = input_data
for analyzer in self.analyzer_l:
tmp_out = tmp_out.analyze(analyzer)
return tmp_out
# =============================================================================
# Some basic pre-made Analyzers
# =============================================================================
class IndexByKey(AnalyzerIndexBy):
def __init__(self, key):
self.key = key
def indexby(self, job):
try:
return job[self.key]
except Exception:
return None
class IndexByKeyRemap(AnalyzerIndexBy):
def __init__(self, key, mapping_d):
self.key = key
self.mapping_d = mapping_d
def indexby(self, job):
try:
value = str(job[self.key])
except Exception:
return None
if value in self.mapping_d.keys():
return self.mapping_d[value]
else:
return None
class AttributeValue(AnalyzerFilter):
def __init__(self, attribute, value):
self.attribute = attribute
self.value = value
def filter(self, job):
if self.attribute not in job.keys():
msg = 'job {job} does not have key {key}.'
msg = msg.format(job=job,
key=self.attribute)
logmsg = msg + ' Raising Exception.'
self.log.error(logmsg)
raise AnalyzerFailure(msg)
return job[self.attribute] == self.value
class Count(AnalyzerProcess):
def process(self, data):
return len(data)
class TotalRunningTimeFromRunningJobs(AnalyzerReduce):
def __init__(self):
self.now = int(time.time())
super(TotalRunningTimeFromRunningJobs, self).__init__(0)
def reduce(self, value, job):
running = self.now - int(job['enteredcurrentstatus'])
if value:
running += value
return running
class TotalRunningTimeFromRunningAndFinishedJobs(AnalyzerReduce):
def __init__(self):
self.now = int(time.time())
super(TotalRunningTimeFromRunningAndFinishedJobs, self).__init__(0)
def reduce(self, value, job):
if job['jobstatus'] == 2:
running = self.now - int(job['enteredcurrentstatus'])
elif job['jobstatus'] == 3 or \
job['jobstatus'] == 4:
try:
running = int(job['remotewallclocktime'])
except:
# unclear if a finished job that is still in condor_q
# but not yet in condor_history
# has classad remotewallclocktime
running = 0
else:
running = 0
if value:
running += value
return running
class IdleTime(AnalyzerMap):
def __init__(self):
self.now = int(time.time())
def map(self, job):
return self.now - int(job['enteredcurrentstatus'])
class ApplyFunction(AnalyzerProcess):
def __init__(self, func):
self.func = func
def process(self, data):
if data:
return self.func(data)
else:
return None
class CreateANY(AnalyzerTransform):
"""
duplicates the list of jobs,
adding a class MATCH_APF_QUEUE=ANY to the new ones
"""
def transform(self, job_l):
new_job_l = []
for job in job_l:
new_job = copy.copy(job)
new_job['match_apf_queue'] = 'ANY'
new_job_l.append(job)
new_job_l.append(new_job)
return new_job_l
# =============================================================================
# Exceptions
# =============================================================================
class IncorrectInputDataType(Exception):
def __init__(self, type):
self.value = 'Type of input data is not %s' %type
def __str__(self):
return repr(self.value)
class NotAnAnalyzer(Exception):
def __init__(self):
self.value = 'object does not have a valid analyzertype value'
def __str__(self):
return repr(self.value)
class IncorrectAnalyzer(Exception):
def __init__(self, analyzer, analyzertype, methodname):
value = "Analyzer object {ana} is of type '{atype}' but used for '{call}()'"
self.value = value.format(ana=analyzer,
atype=analyzertype,
call=methodname)
def __str__(self):
return repr(self.value)
class MissingKey(Exception):
def __init__(self, key):
self.value = "Key %s is not in the data dictionary" %key
def __str__(self):
return repr(self.value)
class AnalyzerFailure(Exception):
"""
generic Exception for any unclassified failure
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
# =============================================================================
# class DataItem
# =============================================================================
class DataItem(object):
"""
class to store an arbitrary dictionary,
and read them as they were attributes
"""
def __init__(self, data_d={}, default=0, timestamp=None):
"""
:param dict data_d: input data
:param default: default value to return when the attribute
is being tried to read
is not a key in the dictionary
"""
self.log = logging.getLogger('info')
self.log.addHandler(logging.NullHandler())
msg ='Initializing object with input options: \
data_d={data_d}, default={default}, timestamp={timestamp}'
msg = msg.format(data_d=data_d,
default=default,
timestamp=timestamp)
self.log.debug(msg)
self._data_d = data_d
self._default = default
if not timestamp:
timestamp = int(time.time())
msg = 'Setting timestamp to %s' %timestamp
self.log.debug(msg)
self.timestamp = timestamp
def __getattr__(self, attr):
"""
read the values in the dictionary
as the keys of the dictionary were
attributes of the class.
For example, self.foo allows to read
the content of self.data_d['foo']
"""
return self._data_d.get(attr, self._default)
def __setitem__(self, attr, value):
"""
to allow using [] as if this class were actually a dict.
:param attr: the key
:param value: the value
"""
self._data_d[attr] = value
def __getitem__(self, attr):
"""
to allow using [] as if this class were actually a dict.
:param attr: the key
"""
return self.__getattr__(attr)
def __str__(self):
str_l = []
for pair in self._data_d.items():
s = '%s: %s' %pair
str_l.append(s)
return ', '.join(str_l)
def __repr__(self):
s = str(self)
return s
|
[
"time.time",
"copy.copy",
"logging.getLogger",
"logging.NullHandler"
] |
[((7775, 7800), 'logging.getLogger', 'logging.getLogger', (['"""info"""'], {}), "('info')\n", (7792, 7800), False, 'import logging\n'), ((26476, 26501), 'logging.getLogger', 'logging.getLogger', (['"""info"""'], {}), "('info')\n", (26493, 26501), False, 'import logging\n'), ((7829, 7850), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (7848, 7850), False, 'import logging\n'), ((22659, 22670), 'time.time', 'time.time', ([], {}), '()\n', (22668, 22670), False, 'import time\n'), ((23020, 23031), 'time.time', 'time.time', ([], {}), '()\n', (23029, 23031), False, 'import time\n'), ((23799, 23810), 'time.time', 'time.time', ([], {}), '()\n', (23808, 23810), False, 'import time\n'), ((24358, 24372), 'copy.copy', 'copy.copy', (['job'], {}), '(job)\n', (24367, 24372), False, 'import copy\n'), ((26530, 26551), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (26549, 26551), False, 'import logging\n'), ((8137, 8148), 'time.time', 'time.time', ([], {}), '()\n', (8146, 8148), False, 'import time\n'), ((26942, 26953), 'time.time', 'time.time', ([], {}), '()\n', (26951, 26953), False, 'import time\n')]
|
import os
import numpy as np
from demo_utils import plot_image
import svmbir
"""
This file demonstrates the generation of a 3D microscopy phantom followed by sinogram projection and reconstruction using MBIR.
The phantom, sinogram, and reconstruction are then displayed.
"""
# Simulated image parameters
num_rows = 256
num_cols = 64
num_slices = 33
display_slice = 16 # Display slice at z=-0.0
# Simulated sinogram parameters
num_views = 64
tilt_angle = np.pi/3 # Tilt range of +-60deg
# Reconstruction parameters
sharpness = 2.0
T = 0.25
snr_db = 30.0
p = 1.2
# Multi-resolution works much better for limited and sparse view reconstruction
max_resolutions=2 # Use 2 additional resolutions to do reconstruction
# Display parameters
vmin = 0.0
vmax = 1.1
# Generate phantom
phantom = svmbir.phantom.gen_microscopy_sample_3d(num_rows,num_cols,num_slices)
# Generate the array of view angles
angles = np.linspace(-tilt_angle, tilt_angle, num_views)
# Generate sinogram by projecting phantom
sino = svmbir.project(phantom, angles, max(num_rows, num_cols))
# Determine resulting number of views, slices, and channels
(num_views, num_slices, num_channels) = sino.shape
# Perform MBIR reconstruction
recon = svmbir.recon(sino, angles, num_rows=num_rows, num_cols=num_cols, max_resolutions=max_resolutions, T=T, p=p, sharpness=sharpness, snr_db=snr_db )
# Compute Normalized Root Mean Squared Error
nrmse = svmbir.phantom.nrmse(recon, phantom)
# create output folder
os.makedirs('output', exist_ok=True)
# display phantom
plot_image(phantom[display_slice], title='Shepp Logan Phantom', filename='output/3D_microscopy_phantom.png', vmin=vmin, vmax=vmax)
# display reconstruction
title = f'Slice {display_slice:d} of Reconstruction with NRMSE={nrmse:.3f}.'
plot_image(recon[display_slice], title=title, filename='output/3D_microscopy_recon.png', vmin=vmin, vmax=vmax)
input("press Enter")
|
[
"os.makedirs",
"svmbir.phantom.nrmse",
"svmbir.recon",
"numpy.linspace",
"demo_utils.plot_image",
"svmbir.phantom.gen_microscopy_sample_3d"
] |
[((792, 863), 'svmbir.phantom.gen_microscopy_sample_3d', 'svmbir.phantom.gen_microscopy_sample_3d', (['num_rows', 'num_cols', 'num_slices'], {}), '(num_rows, num_cols, num_slices)\n', (831, 863), False, 'import svmbir\n'), ((908, 955), 'numpy.linspace', 'np.linspace', (['(-tilt_angle)', 'tilt_angle', 'num_views'], {}), '(-tilt_angle, tilt_angle, num_views)\n', (919, 955), True, 'import numpy as np\n'), ((1214, 1366), 'svmbir.recon', 'svmbir.recon', (['sino', 'angles'], {'num_rows': 'num_rows', 'num_cols': 'num_cols', 'max_resolutions': 'max_resolutions', 'T': 'T', 'p': 'p', 'sharpness': 'sharpness', 'snr_db': 'snr_db'}), '(sino, angles, num_rows=num_rows, num_cols=num_cols,\n max_resolutions=max_resolutions, T=T, p=p, sharpness=sharpness, snr_db=\n snr_db)\n', (1226, 1366), False, 'import svmbir\n'), ((1413, 1449), 'svmbir.phantom.nrmse', 'svmbir.phantom.nrmse', (['recon', 'phantom'], {}), '(recon, phantom)\n', (1433, 1449), False, 'import svmbir\n'), ((1474, 1510), 'os.makedirs', 'os.makedirs', (['"""output"""'], {'exist_ok': '(True)'}), "('output', exist_ok=True)\n", (1485, 1510), False, 'import os\n'), ((1530, 1665), 'demo_utils.plot_image', 'plot_image', (['phantom[display_slice]'], {'title': '"""Shepp Logan Phantom"""', 'filename': '"""output/3D_microscopy_phantom.png"""', 'vmin': 'vmin', 'vmax': 'vmax'}), "(phantom[display_slice], title='Shepp Logan Phantom', filename=\n 'output/3D_microscopy_phantom.png', vmin=vmin, vmax=vmax)\n", (1540, 1665), False, 'from demo_utils import plot_image\n'), ((1764, 1879), 'demo_utils.plot_image', 'plot_image', (['recon[display_slice]'], {'title': 'title', 'filename': '"""output/3D_microscopy_recon.png"""', 'vmin': 'vmin', 'vmax': 'vmax'}), "(recon[display_slice], title=title, filename=\n 'output/3D_microscopy_recon.png', vmin=vmin, vmax=vmax)\n", (1774, 1879), False, 'from demo_utils import plot_image\n')]
|
import setuptools
from cyanobyte import __version__
"""
Run bundler:
$ python3 setup.py sdist
Install via Pip:
$ pip3 install dist/cyanobyte-<version>.tar.gz
Upload via twine:
$ twine check dist/*
$ twine upload dist/*
"""
with open("README.md", "r") as fh:
long_description = fh.read()
with open("requirements.txt", "r") as fh:
install_requires = [line.rstrip() for line in fh]
setuptools.setup(
name="cyanobyte",
version=__version__,
author="Google Inc.",
author_email="<EMAIL>",
description="A package that generates library files for a peripheral given an intermediary layer (YAML files)",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/google/cyanobyte",
packages=setuptools.find_packages(exclude=("test",)),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
install_requires=install_requires,
entry_points={
"console_scripts": [
"cyanobyte-codegen=cyanobyte.codegen:gen",
"cyanobyte-validator=cyanobyte.validator:click_validate"
],
},
setup_requires=['setuptools_scm'],
include_package_data = True,
)
|
[
"setuptools.find_packages"
] |
[((776, 819), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'exclude': "('test',)"}), "(exclude=('test',))\n", (800, 819), False, 'import setuptools\n')]
|
import json
from django.http import HttpRequest, JsonResponse
from connect.models import Connection
from .models import Workgroup
def schedule(request: HttpRequest):
if not Connection.authorize(request):
return JsonResponse({}, status=401)
try:
payload = json.loads(request.body)
worker_id = payload['worker_id']
script = payload['script']
class_name = payload['className']
method_name = payload['methodName']
args = payload['args']
Workgroup.allocate_workgroup(worker_id)
Workgroup.work(script, class_name, method_name, args)
Workgroup.free_workgroup(worker_id)
return JsonResponse({
"message": "success"
})
except ValueError as err:
return JsonResponse(err, 400)
def schedule_game(request: HttpRequest):
if not Connection.authorize(request):
return JsonResponse({}, status=401)
try:
payload = json.loads(request.body)
worker_id = payload['worker_id']
script = payload['script']
class_name = payload['className']
args = payload['args']
Workgroup.allocate_workgroup(worker_id)
Workgroup.game(script, class_name, args)
Workgroup.free_workgroup(worker_id)
return JsonResponse({
"message": "success"
})
except ValueError as err:
return JsonResponse(err, 400)
|
[
"json.loads",
"django.http.JsonResponse",
"connect.models.Connection.authorize"
] |
[((179, 208), 'connect.models.Connection.authorize', 'Connection.authorize', (['request'], {}), '(request)\n', (199, 208), False, 'from connect.models import Connection\n'), ((225, 253), 'django.http.JsonResponse', 'JsonResponse', (['{}'], {'status': '(401)'}), '({}, status=401)\n', (237, 253), False, 'from django.http import HttpRequest, JsonResponse\n'), ((282, 306), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (292, 306), False, 'import json\n'), ((669, 705), 'django.http.JsonResponse', 'JsonResponse', (["{'message': 'success'}"], {}), "({'message': 'success'})\n", (681, 705), False, 'from django.http import HttpRequest, JsonResponse\n'), ((850, 879), 'connect.models.Connection.authorize', 'Connection.authorize', (['request'], {}), '(request)\n', (870, 879), False, 'from connect.models import Connection\n'), ((896, 924), 'django.http.JsonResponse', 'JsonResponse', (['{}'], {'status': '(401)'}), '({}, status=401)\n', (908, 924), False, 'from django.http import HttpRequest, JsonResponse\n'), ((953, 977), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (963, 977), False, 'import json\n'), ((1283, 1319), 'django.http.JsonResponse', 'JsonResponse', (["{'message': 'success'}"], {}), "({'message': 'success'})\n", (1295, 1319), False, 'from django.http import HttpRequest, JsonResponse\n'), ((773, 795), 'django.http.JsonResponse', 'JsonResponse', (['err', '(400)'], {}), '(err, 400)\n', (785, 795), False, 'from django.http import HttpRequest, JsonResponse\n'), ((1387, 1409), 'django.http.JsonResponse', 'JsonResponse', (['err', '(400)'], {}), '(err, 400)\n', (1399, 1409), False, 'from django.http import HttpRequest, JsonResponse\n')]
|
from fastapi import BackgroundTasks
from dispatch.conversation import service as conversation_service
from dispatch.conversation.enums import ConversationButtonActions
from dispatch.database import SessionLocal
from dispatch.decorators import background_task
from dispatch.incident import flows as incident_flows
from dispatch.incident import service as incident_service
from dispatch.incident.enums import IncidentStatus
from dispatch.incident.models import IncidentUpdate, IncidentRead
from dispatch.plugin import service as plugin_service
from dispatch.plugins.dispatch_slack import service as dispatch_slack_service
from dispatch.report import flows as report_flows
from dispatch.task import service as task_service
from dispatch.task.models import TaskStatus
from .config import (
SLACK_COMMAND_ASSIGN_ROLE_SLUG,
SLACK_COMMAND_ENGAGE_ONCALL_SLUG,
SLACK_COMMAND_REPORT_EXECUTIVE_SLUG,
SLACK_COMMAND_REPORT_TACTICAL_SLUG,
SLACK_COMMAND_UPDATE_INCIDENT_SLUG,
)
from .service import get_user_email
slack_client = dispatch_slack_service.create_slack_client()
@background_task
def add_user_to_conversation(
user_id: str, user_email: str, incident_id: int, action: dict, db_session=None
):
"""Adds a user to a conversation."""
incident = incident_service.get(db_session=db_session, incident_id=incident_id)
if incident.status == IncidentStatus.closed:
message = f"Sorry, we cannot add you to a closed incident. Please reach out to the incident commander ({incident.commander.name}) for details."
dispatch_slack_service.send_ephemeral_message(
slack_client, action["container"]["channel_id"], user_id, message
)
else:
dispatch_slack_service.add_users_to_conversation(
slack_client, incident.conversation.channel_id, [user_id]
)
message = f"Success! We've added you to incident {incident.name}. Please check your side bar for the new channel."
dispatch_slack_service.send_ephemeral_message(
slack_client, action["container"]["channel_id"], user_id, message
)
@background_task
def update_task_status(
user_id: str, user_email: str, incident_id: int, action: dict, db_session=None
):
"""Updates a task based on user input."""
action_type, external_task_id = action["actions"][0]["value"].split("-")
resolve = True
if action_type == "reopen":
resolve = False
# we only update the external task allowing syncing to care of propagation to dispatch
task = task_service.get_by_resource_id(db_session=db_session, resource_id=external_task_id)
# avoid external calls if we are already in the desired state
if resolve and task.status == TaskStatus.resolved:
message = "Task is already resolved."
dispatch_slack_service.send_ephemeral_message(
slack_client, action["container"]["channel_id"], user_id, message
)
return
if not resolve and task.status == TaskStatus.open:
message = "Task is already open."
dispatch_slack_service.send_ephemeral_message(
slack_client, action["container"]["channel_id"], user_id, message
)
return
# we don't currently have a good way to get the correct file_id (we don't store a task <-> relationship)
# lets try in both the incident doc and PIR doc
drive_task_plugin = plugin_service.get_active(db_session=db_session, plugin_type="task")
try:
file_id = task.incident.incident_document.resource_id
drive_task_plugin.instance.update(file_id, external_task_id, resolved=resolve)
except Exception:
file_id = task.incident.incident_review_document.resource_id
drive_task_plugin.instance.update(file_id, external_task_id, resolved=resolve)
status = "resolved" if task.status == TaskStatus.open else "re-opened"
message = f"Task successfully {status}."
dispatch_slack_service.send_ephemeral_message(
slack_client, action["container"]["channel_id"], user_id, message
)
@background_task
def handle_update_incident_action(user_id, user_email, incident_id, action, db_session=None):
"""Messages slack dialog data into something that Dispatch can use."""
submission = action["submission"]
notify = True if submission["notify"] == "Yes" else False
incident_in = IncidentUpdate(
title=submission["title"],
description=submission["description"],
incident_type={"name": submission["type"]},
incident_priority={"name": submission["priority"]},
status=submission["status"],
visibility=submission["visibility"],
)
incident = incident_service.get(db_session=db_session, incident_id=incident_id)
existing_incident = IncidentRead.from_orm(incident)
incident_service.update(db_session=db_session, incident=incident, incident_in=incident_in)
incident_flows.incident_update_flow(user_email, incident_id, existing_incident, notify)
@background_task
def handle_assign_role_action(user_id, user_email, incident_id, action, db_session=None):
"""Messages slack dialog data into some thing that Dispatch can use."""
assignee_user_id = action["submission"]["participant"]
assignee_role = action["submission"]["role"]
assignee_email = get_user_email(client=slack_client, user_id=assignee_user_id)
incident_flows.incident_assign_role_flow(user_email, incident_id, assignee_email, assignee_role)
def dialog_action_functions(action: str):
"""Interprets the action and routes it to the appropriate function."""
action_mappings = {
SLACK_COMMAND_ASSIGN_ROLE_SLUG: [handle_assign_role_action],
SLACK_COMMAND_ENGAGE_ONCALL_SLUG: [incident_flows.incident_engage_oncall_flow],
SLACK_COMMAND_REPORT_EXECUTIVE_SLUG: [report_flows.create_executive_report],
SLACK_COMMAND_REPORT_TACTICAL_SLUG: [report_flows.create_tactical_report],
SLACK_COMMAND_UPDATE_INCIDENT_SLUG: [handle_update_incident_action],
}
# this allows for unique action blocks e.g. invite-user or invite-user-1, etc
for key in action_mappings.keys():
if key in action:
return action_mappings[key]
return []
def block_action_functions(action: str):
"""Interprets the action and routes it to the appropriate function."""
action_mappings = {
ConversationButtonActions.invite_user: [add_user_to_conversation],
ConversationButtonActions.update_task_status: [update_task_status],
}
# this allows for unique action blocks e.g. invite-user or invite-user-1, etc
for key in action_mappings.keys():
if key in action:
return action_mappings[key]
return []
def handle_dialog_action(action: dict, background_tasks: BackgroundTasks, db_session: SessionLocal):
"""Handles all dialog actions."""
channel_id = action["channel"]["id"]
conversation = conversation_service.get_by_channel_id_ignoring_channel_type(
db_session=db_session, channel_id=channel_id
)
incident_id = conversation.incident_id
user_id = action["user"]["id"]
user_email = action["user"]["email"]
action_id = action["callback_id"]
for f in dialog_action_functions(action_id):
background_tasks.add_task(f, user_id, user_email, incident_id, action)
def handle_block_action(action: dict, background_tasks: BackgroundTasks):
"""Handles a standalone block action."""
action_id = action["actions"][0]["block_id"]
incident_id = action["actions"][0]["value"]
user_id = action["user"]["id"]
user_email = action["user"]["email"]
for f in block_action_functions(action_id):
background_tasks.add_task(f, user_id, user_email, incident_id, action)
|
[
"dispatch.plugins.dispatch_slack.service.create_slack_client",
"dispatch.conversation.service.get_by_channel_id_ignoring_channel_type",
"dispatch.incident.models.IncidentRead.from_orm",
"dispatch.plugins.dispatch_slack.service.send_ephemeral_message",
"dispatch.plugin.service.get_active",
"dispatch.incident.service.update",
"dispatch.incident.models.IncidentUpdate",
"dispatch.incident.flows.incident_assign_role_flow",
"dispatch.incident.flows.incident_update_flow",
"dispatch.incident.service.get",
"dispatch.plugins.dispatch_slack.service.add_users_to_conversation",
"dispatch.task.service.get_by_resource_id"
] |
[((1039, 1083), 'dispatch.plugins.dispatch_slack.service.create_slack_client', 'dispatch_slack_service.create_slack_client', ([], {}), '()\n', (1081, 1083), True, 'from dispatch.plugins.dispatch_slack import service as dispatch_slack_service\n'), ((1275, 1343), 'dispatch.incident.service.get', 'incident_service.get', ([], {'db_session': 'db_session', 'incident_id': 'incident_id'}), '(db_session=db_session, incident_id=incident_id)\n', (1295, 1343), True, 'from dispatch.incident import service as incident_service\n'), ((2534, 2623), 'dispatch.task.service.get_by_resource_id', 'task_service.get_by_resource_id', ([], {'db_session': 'db_session', 'resource_id': 'external_task_id'}), '(db_session=db_session, resource_id=\n external_task_id)\n', (2565, 2623), True, 'from dispatch.task import service as task_service\n'), ((3387, 3455), 'dispatch.plugin.service.get_active', 'plugin_service.get_active', ([], {'db_session': 'db_session', 'plugin_type': '"""task"""'}), "(db_session=db_session, plugin_type='task')\n", (3412, 3455), True, 'from dispatch.plugin import service as plugin_service\n'), ((3918, 4035), 'dispatch.plugins.dispatch_slack.service.send_ephemeral_message', 'dispatch_slack_service.send_ephemeral_message', (['slack_client', "action['container']['channel_id']", 'user_id', 'message'], {}), "(slack_client, action[\n 'container']['channel_id'], user_id, message)\n", (3963, 4035), True, 'from dispatch.plugins.dispatch_slack import service as dispatch_slack_service\n'), ((4351, 4607), 'dispatch.incident.models.IncidentUpdate', 'IncidentUpdate', ([], {'title': "submission['title']", 'description': "submission['description']", 'incident_type': "{'name': submission['type']}", 'incident_priority': "{'name': submission['priority']}", 'status': "submission['status']", 'visibility': "submission['visibility']"}), "(title=submission['title'], description=submission[\n 'description'], incident_type={'name': submission['type']},\n incident_priority={'name': 
submission['priority']}, status=submission[\n 'status'], visibility=submission['visibility'])\n", (4365, 4607), False, 'from dispatch.incident.models import IncidentUpdate, IncidentRead\n'), ((4665, 4733), 'dispatch.incident.service.get', 'incident_service.get', ([], {'db_session': 'db_session', 'incident_id': 'incident_id'}), '(db_session=db_session, incident_id=incident_id)\n', (4685, 4733), True, 'from dispatch.incident import service as incident_service\n'), ((4758, 4789), 'dispatch.incident.models.IncidentRead.from_orm', 'IncidentRead.from_orm', (['incident'], {}), '(incident)\n', (4779, 4789), False, 'from dispatch.incident.models import IncidentUpdate, IncidentRead\n'), ((4794, 4888), 'dispatch.incident.service.update', 'incident_service.update', ([], {'db_session': 'db_session', 'incident': 'incident', 'incident_in': 'incident_in'}), '(db_session=db_session, incident=incident,\n incident_in=incident_in)\n', (4817, 4888), True, 'from dispatch.incident import service as incident_service\n'), ((4889, 4980), 'dispatch.incident.flows.incident_update_flow', 'incident_flows.incident_update_flow', (['user_email', 'incident_id', 'existing_incident', 'notify'], {}), '(user_email, incident_id,\n existing_incident, notify)\n', (4924, 4980), True, 'from dispatch.incident import flows as incident_flows\n'), ((5357, 5457), 'dispatch.incident.flows.incident_assign_role_flow', 'incident_flows.incident_assign_role_flow', (['user_email', 'incident_id', 'assignee_email', 'assignee_role'], {}), '(user_email, incident_id,\n assignee_email, assignee_role)\n', (5397, 5457), True, 'from dispatch.incident import flows as incident_flows\n'), ((6909, 7020), 'dispatch.conversation.service.get_by_channel_id_ignoring_channel_type', 'conversation_service.get_by_channel_id_ignoring_channel_type', ([], {'db_session': 'db_session', 'channel_id': 'channel_id'}), '(db_session=\n db_session, channel_id=channel_id)\n', (6969, 7020), True, 'from dispatch.conversation import service as 
conversation_service\n'), ((1554, 1671), 'dispatch.plugins.dispatch_slack.service.send_ephemeral_message', 'dispatch_slack_service.send_ephemeral_message', (['slack_client', "action['container']['channel_id']", 'user_id', 'message'], {}), "(slack_client, action[\n 'container']['channel_id'], user_id, message)\n", (1599, 1671), True, 'from dispatch.plugins.dispatch_slack import service as dispatch_slack_service\n'), ((1707, 1819), 'dispatch.plugins.dispatch_slack.service.add_users_to_conversation', 'dispatch_slack_service.add_users_to_conversation', (['slack_client', 'incident.conversation.channel_id', '[user_id]'], {}), '(slack_client, incident.\n conversation.channel_id, [user_id])\n', (1755, 1819), True, 'from dispatch.plugins.dispatch_slack import service as dispatch_slack_service\n'), ((1968, 2085), 'dispatch.plugins.dispatch_slack.service.send_ephemeral_message', 'dispatch_slack_service.send_ephemeral_message', (['slack_client', "action['container']['channel_id']", 'user_id', 'message'], {}), "(slack_client, action[\n 'container']['channel_id'], user_id, message)\n", (2013, 2085), True, 'from dispatch.plugins.dispatch_slack import service as dispatch_slack_service\n'), ((2795, 2912), 'dispatch.plugins.dispatch_slack.service.send_ephemeral_message', 'dispatch_slack_service.send_ephemeral_message', (['slack_client', "action['container']['channel_id']", 'user_id', 'message'], {}), "(slack_client, action[\n 'container']['channel_id'], user_id, message)\n", (2840, 2912), True, 'from dispatch.plugins.dispatch_slack import service as dispatch_slack_service\n'), ((3051, 3168), 'dispatch.plugins.dispatch_slack.service.send_ephemeral_message', 'dispatch_slack_service.send_ephemeral_message', (['slack_client', "action['container']['channel_id']", 'user_id', 'message'], {}), "(slack_client, action[\n 'container']['channel_id'], user_id, message)\n", (3096, 3168), True, 'from dispatch.plugins.dispatch_slack import service as dispatch_slack_service\n')]
|
import torch
import torch.nn as nn
import torch.nn.init as init
from torchvision.models import resnet50
def mixup_data(p, n, lam):
mixed_x = lam * p + (1 - lam) * n
return mixed_x
class Resnet50(nn.Module):
def __init__(self,embedding_size, pretrained=True, is_norm=True, bn_freeze = True):
super(Resnet50, self).__init__()
self.model = resnet50(pretrained)
self.is_norm = is_norm
self.embedding_size = embedding_size
self.num_ftrs = self.model.fc.in_features
self.model.gap = nn.AdaptiveAvgPool2d(1)
self.model.gmp = nn.AdaptiveMaxPool2d(1)
self.model.embedding = nn.Linear(self.num_ftrs, self.embedding_size)
self._initialize_weights()
if bn_freeze:
for m in self.model.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
m.weight.requires_grad_(False)
m.bias.requires_grad_(False)
def l2_norm(self,input):
input_size = input.size()
buffer = torch.pow(input, 2)
normp = torch.sum(buffer, 1).add_(1e-12)
norm = torch.sqrt(normp)
_output = torch.div(input, norm.view(-1, 1).expand_as(input))
output = _output.view(input_size)
return output
def forward(self, x, a2, pos, neg, lam, mode, type):
x = self.model.conv1(x)
x = self.model.bn1(x)
x = self.model.relu(x)
x = self.model.maxpool(x)
x = self.model.layer1(x)
x = self.model.layer2(x)
x = self.model.layer3(x)
x = self.model.layer4(x)
if(mode == 'pos_neg_mixup'):
if(type == 'clean_anchor'):
x = x[a2.long()]
if(type == 'mixed'):
x = mixup_data(x[pos.long()], x[neg.long()], lam)
if(mode == 'anc_neg_mixup'):
if(type == 'clean_anchor'):
x = x[a2.long()]
if(type == 'mixed'):
x = mixup_data(x[a2.long()], x[neg.long()], lam)
avg_x = self.model.gap(x)
max_x = self.model.gmp(x)
x = max_x + avg_x
x = x.view(x.size(0), -1)
x = self.model.embedding(x)
if self.is_norm:
x = self.l2_norm(x)
return x
def _initialize_weights(self):
init.kaiming_normal_(self.model.embedding.weight, mode='fan_out')
init.constant_(self.model.embedding.bias, 0)
|
[
"torch.nn.AdaptiveAvgPool2d",
"torch.nn.init.kaiming_normal_",
"torch.nn.AdaptiveMaxPool2d",
"torch.sqrt",
"torchvision.models.resnet50",
"torch.nn.init.constant_",
"torch.pow",
"torch.nn.Linear",
"torch.sum"
] |
[((353, 373), 'torchvision.models.resnet50', 'resnet50', (['pretrained'], {}), '(pretrained)\n', (361, 373), False, 'from torchvision.models import resnet50\n'), ((501, 524), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (521, 524), True, 'import torch.nn as nn\n'), ((544, 567), 'torch.nn.AdaptiveMaxPool2d', 'nn.AdaptiveMaxPool2d', (['(1)'], {}), '(1)\n', (564, 567), True, 'import torch.nn as nn\n'), ((593, 638), 'torch.nn.Linear', 'nn.Linear', (['self.num_ftrs', 'self.embedding_size'], {}), '(self.num_ftrs, self.embedding_size)\n', (602, 638), True, 'import torch.nn as nn\n'), ((907, 926), 'torch.pow', 'torch.pow', (['input', '(2)'], {}), '(input, 2)\n', (916, 926), False, 'import torch\n'), ((980, 997), 'torch.sqrt', 'torch.sqrt', (['normp'], {}), '(normp)\n', (990, 997), False, 'import torch\n'), ((1966, 2031), 'torch.nn.init.kaiming_normal_', 'init.kaiming_normal_', (['self.model.embedding.weight'], {'mode': '"""fan_out"""'}), "(self.model.embedding.weight, mode='fan_out')\n", (1986, 2031), True, 'import torch.nn.init as init\n'), ((2034, 2078), 'torch.nn.init.constant_', 'init.constant_', (['self.model.embedding.bias', '(0)'], {}), '(self.model.embedding.bias, 0)\n', (2048, 2078), True, 'import torch.nn.init as init\n'), ((938, 958), 'torch.sum', 'torch.sum', (['buffer', '(1)'], {}), '(buffer, 1)\n', (947, 958), False, 'import torch\n')]
|
import os
import shutil
import tempfile
import pytest
import prefect
from prefect import Flow, Parameter, Task
from prefect.environments import DockerEnvironment
#################################
##### Docker Environment Tests
#################################
class TestDockerEnvironment:
def test_create_docker_environment(self):
docker = DockerEnvironment(base_image=None, registry_url=None)
assert docker
@pytest.mark.skip("Circle will need to handle container building")
def test_build_image_process(self):
docker = DockerEnvironment(
base_image="python:3.6", image_tag="tag", registry_url=""
)
image = docker.build(Flow(name="test"))
assert image
def test_basic_create_dockerfile(self):
docker = DockerEnvironment(base_image="python:3.6", registry_url="")
with tempfile.TemporaryDirectory(prefix="prefect-tests") as tmp:
docker.create_dockerfile(Flow(name="test"), directory=tmp)
with open(os.path.join(tmp, "Dockerfile"), "r") as f:
dockerfile = f.read()
assert "FROM python:3.6" in dockerfile
assert " FROM python:3.6" not in dockerfile
assert "RUN pip install prefect" in dockerfile
assert "RUN mkdir /root/.prefect/" in dockerfile
def test_create_dockerfile_with_environment_variables(self):
docker = DockerEnvironment(
base_image="python:3.6",
registry_url="",
env_vars=dict(X=2, Y='"/a/quoted/string/path"'),
)
with tempfile.TemporaryDirectory(prefix="prefect-tests") as tmp:
docker.create_dockerfile(Flow(name="test"), directory=tmp)
with open(os.path.join(tmp, "Dockerfile"), "r") as f:
dockerfile = f.read()
var_orders = [
'X=2 \\ \n Y="/a/quoted/string/path"',
'Y="/a/quoted/string/path" \\ \n X=2',
]
assert any(["ENV {}".format(v) in dockerfile for v in var_orders])
def test_create_dockerfile_with_copy_files(self):
with tempfile.NamedTemporaryFile() as t1, tempfile.NamedTemporaryFile() as t2:
docker = DockerEnvironment(
base_image="python:3.6",
registry_url="",
files={t1.name: "/root/dockerconfig", t2.name: "./.secret_file"},
)
base1, base2 = os.path.basename(t1.name), os.path.basename(t2.name)
with tempfile.TemporaryDirectory(prefix="prefect-tests") as tmp:
docker.create_dockerfile(Flow(name="test"), directory=tmp)
## ensure create_dockerfile copied the files over
assert os.path.exists(os.path.join(tmp, base1))
assert os.path.exists(os.path.join(tmp, base2))
with open(os.path.join(tmp, "Dockerfile"), "r") as f:
dockerfile = f.read()
assert "COPY {} /root/dockerconfig".format(base1) in dockerfile
assert "COPY {} ./.secret_file".format(base2) in dockerfile
def test_create_dockerfile_with_copy_files_doesnt_raise_if_file_exists_and_is_same(
self
):
with tempfile.NamedTemporaryFile() as t1, tempfile.NamedTemporaryFile() as t2:
docker = DockerEnvironment(
base_image="python:3.6",
registry_url="",
files={t1.name: "/root/dockerconfig", t2.name: "./.secret_file"},
)
base1, base2 = os.path.basename(t1.name), os.path.basename(t2.name)
with tempfile.TemporaryDirectory(prefix="prefect-tests") as tmp:
shutil.copy(t1.name, os.path.join(tmp, base1))
docker.create_dockerfile(Flow(name="test"), directory=tmp)
def test_create_dockerfile_with_copy_files_raises_if_file_exists_and_different(
self
):
with tempfile.NamedTemporaryFile() as t1, tempfile.NamedTemporaryFile() as t2:
docker = DockerEnvironment(
base_image="python:3.6",
registry_url="",
files={t1.name: "/root/dockerconfig", t2.name: "./.secret_file"},
)
base1, base2 = os.path.basename(t1.name), os.path.basename(t2.name)
with tempfile.TemporaryDirectory(prefix="prefect-tests") as tmp:
new_file = os.path.join(tmp, base1)
shutil.copy(t1.name, new_file)
with open(new_file, "w+") as f:
f.write("a few lines\n")
with pytest.raises(ValueError) as exc:
docker.create_dockerfile(Flow(name="test"), directory=tmp)
assert "already exists" in str(exc.value)
def test_init_with_copy_files_raises_informative_error_if_not_absolute(self):
with pytest.raises(ValueError) as exc:
docker = DockerEnvironment(
base_image="python:3.6",
registry_url="",
files={
".secret_file": "./.secret_file",
"~/.prefect": ".prefect",
"/def/abs": "/def/abs",
},
)
file_list = [".secret_file, ~/.prefect", "~/.prefect, .secret_file"]
assert any(
[
"{} are not absolute file paths".format(fs) in str(exc.value)
for fs in file_list
]
)
|
[
"tempfile.NamedTemporaryFile",
"tempfile.TemporaryDirectory",
"os.path.basename",
"pytest.raises",
"prefect.environments.DockerEnvironment",
"pytest.mark.skip",
"os.path.join",
"shutil.copy",
"prefect.Flow"
] |
[((440, 505), 'pytest.mark.skip', 'pytest.mark.skip', (['"""Circle will need to handle container building"""'], {}), "('Circle will need to handle container building')\n", (456, 505), False, 'import pytest\n'), ((358, 411), 'prefect.environments.DockerEnvironment', 'DockerEnvironment', ([], {'base_image': 'None', 'registry_url': 'None'}), '(base_image=None, registry_url=None)\n', (375, 411), False, 'from prefect.environments import DockerEnvironment\n'), ((564, 640), 'prefect.environments.DockerEnvironment', 'DockerEnvironment', ([], {'base_image': '"""python:3.6"""', 'image_tag': '"""tag"""', 'registry_url': '""""""'}), "(base_image='python:3.6', image_tag='tag', registry_url='')\n", (581, 640), False, 'from prefect.environments import DockerEnvironment\n'), ((794, 853), 'prefect.environments.DockerEnvironment', 'DockerEnvironment', ([], {'base_image': '"""python:3.6"""', 'registry_url': '""""""'}), "(base_image='python:3.6', registry_url='')\n", (811, 853), False, 'from prefect.environments import DockerEnvironment\n'), ((692, 709), 'prefect.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (696, 709), False, 'from prefect import Flow, Parameter, Task\n'), ((867, 918), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'prefix': '"""prefect-tests"""'}), "(prefix='prefect-tests')\n", (894, 918), False, 'import tempfile\n'), ((1566, 1617), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'prefix': '"""prefect-tests"""'}), "(prefix='prefect-tests')\n", (1593, 1617), False, 'import tempfile\n'), ((2086, 2115), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (2113, 2115), False, 'import tempfile\n'), ((2123, 2152), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (2150, 2152), False, 'import tempfile\n'), ((2181, 2310), 'prefect.environments.DockerEnvironment', 'DockerEnvironment', ([], {'base_image': '"""python:3.6"""', 'registry_url': '""""""', 'files': 
"{t1.name: '/root/dockerconfig', t2.name: './.secret_file'}"}), "(base_image='python:3.6', registry_url='', files={t1.name:\n '/root/dockerconfig', t2.name: './.secret_file'})\n", (2198, 2310), False, 'from prefect.environments import DockerEnvironment\n'), ((3175, 3204), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (3202, 3204), False, 'import tempfile\n'), ((3212, 3241), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (3239, 3241), False, 'import tempfile\n'), ((3270, 3399), 'prefect.environments.DockerEnvironment', 'DockerEnvironment', ([], {'base_image': '"""python:3.6"""', 'registry_url': '""""""', 'files': "{t1.name: '/root/dockerconfig', t2.name: './.secret_file'}"}), "(base_image='python:3.6', registry_url='', files={t1.name:\n '/root/dockerconfig', t2.name: './.secret_file'})\n", (3287, 3399), False, 'from prefect.environments import DockerEnvironment\n'), ((3874, 3903), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (3901, 3903), False, 'import tempfile\n'), ((3911, 3940), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (3938, 3940), False, 'import tempfile\n'), ((3969, 4098), 'prefect.environments.DockerEnvironment', 'DockerEnvironment', ([], {'base_image': '"""python:3.6"""', 'registry_url': '""""""', 'files': "{t1.name: '/root/dockerconfig', t2.name: './.secret_file'}"}), "(base_image='python:3.6', registry_url='', files={t1.name:\n '/root/dockerconfig', t2.name: './.secret_file'})\n", (3986, 4098), False, 'from prefect.environments import DockerEnvironment\n'), ((4790, 4815), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4803, 4815), False, 'import pytest\n'), ((4845, 5005), 'prefect.environments.DockerEnvironment', 'DockerEnvironment', ([], {'base_image': '"""python:3.6"""', 'registry_url': '""""""', 'files': "{'.secret_file': './.secret_file', '~/.prefect': '.prefect', '/def/abs':\n 
'/def/abs'}"}), "(base_image='python:3.6', registry_url='', files={\n '.secret_file': './.secret_file', '~/.prefect': '.prefect', '/def/abs':\n '/def/abs'})\n", (4862, 5005), False, 'from prefect.environments import DockerEnvironment\n'), ((964, 981), 'prefect.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (968, 981), False, 'from prefect import Flow, Parameter, Task\n'), ((1663, 1680), 'prefect.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (1667, 1680), False, 'from prefect import Flow, Parameter, Task\n'), ((2398, 2423), 'os.path.basename', 'os.path.basename', (['t1.name'], {}), '(t1.name)\n', (2414, 2423), False, 'import os\n'), ((2425, 2450), 'os.path.basename', 'os.path.basename', (['t2.name'], {}), '(t2.name)\n', (2441, 2450), False, 'import os\n'), ((2469, 2520), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'prefix': '"""prefect-tests"""'}), "(prefix='prefect-tests')\n", (2496, 2520), False, 'import tempfile\n'), ((3487, 3512), 'os.path.basename', 'os.path.basename', (['t1.name'], {}), '(t1.name)\n', (3503, 3512), False, 'import os\n'), ((3514, 3539), 'os.path.basename', 'os.path.basename', (['t2.name'], {}), '(t2.name)\n', (3530, 3539), False, 'import os\n'), ((3558, 3609), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'prefix': '"""prefect-tests"""'}), "(prefix='prefect-tests')\n", (3585, 3609), False, 'import tempfile\n'), ((4186, 4211), 'os.path.basename', 'os.path.basename', (['t1.name'], {}), '(t1.name)\n', (4202, 4211), False, 'import os\n'), ((4213, 4238), 'os.path.basename', 'os.path.basename', (['t2.name'], {}), '(t2.name)\n', (4229, 4238), False, 'import os\n'), ((4257, 4308), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'prefix': '"""prefect-tests"""'}), "(prefix='prefect-tests')\n", (4284, 4308), False, 'import tempfile\n'), ((4344, 4368), 'os.path.join', 'os.path.join', (['tmp', 'base1'], {}), '(tmp, base1)\n', (4356, 4368), False, 'import 
os\n'), ((4385, 4415), 'shutil.copy', 'shutil.copy', (['t1.name', 'new_file'], {}), '(t1.name, new_file)\n', (4396, 4415), False, 'import shutil\n'), ((1020, 1051), 'os.path.join', 'os.path.join', (['tmp', '"""Dockerfile"""'], {}), "(tmp, 'Dockerfile')\n", (1032, 1051), False, 'import os\n'), ((1719, 1750), 'os.path.join', 'os.path.join', (['tmp', '"""Dockerfile"""'], {}), "(tmp, 'Dockerfile')\n", (1731, 1750), False, 'import os\n'), ((2570, 2587), 'prefect.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (2574, 2587), False, 'from prefect import Flow, Parameter, Task\n'), ((2709, 2733), 'os.path.join', 'os.path.join', (['tmp', 'base1'], {}), '(tmp, base1)\n', (2721, 2733), False, 'import os\n'), ((2773, 2797), 'os.path.join', 'os.path.join', (['tmp', 'base2'], {}), '(tmp, base2)\n', (2785, 2797), False, 'import os\n'), ((3655, 3679), 'os.path.join', 'os.path.join', (['tmp', 'base1'], {}), '(tmp, base1)\n', (3667, 3679), False, 'import os\n'), ((3722, 3739), 'prefect.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (3726, 3739), False, 'from prefect import Flow, Parameter, Task\n'), ((4530, 4555), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4543, 4555), False, 'import pytest\n'), ((2826, 2857), 'os.path.join', 'os.path.join', (['tmp', '"""Dockerfile"""'], {}), "(tmp, 'Dockerfile')\n", (2838, 2857), False, 'import os\n'), ((4609, 4626), 'prefect.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (4613, 4626), False, 'from prefect import Flow, Parameter, Task\n')]
|
from setuptools import setup
# Dependencies.
# Read the pinned requirements; open with an explicit encoding so the build
# does not depend on the platform's locale default. Skip blank lines and
# comment-only lines so empty strings never end up in install_requires.
with open("requirements.txt", encoding="utf-8") as f:
    tests_require = f.readlines()
install_requires = [
    t.strip()
    for t in tests_require
    if t.strip() and not t.lstrip().startswith("#")
]
# Long description for PyPI comes straight from the README.
with open("README.md", encoding="utf-8") as f:
    long_description = f.read()
setup(
    name="contextily",
    version="1.0.0",
    description="Context geo-tiles in Python",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/darribas/contextily",
    author="<NAME>",
    author_email="<EMAIL>",
    license="3-Clause BSD",
    packages=["contextily"],
    package_data={"": ["requirements.txt"]},
    classifiers=[
        "License :: OSI Approved :: BSD License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: Implementation :: CPython",
        "Framework :: Matplotlib",
    ],
    python_requires=">=3.6",
    install_requires=install_requires,
    zip_safe=False,
)
|
[
"setuptools.setup"
] |
[((233, 1068), 'setuptools.setup', 'setup', ([], {'name': '"""contextily"""', 'version': '"""1.0.0"""', 'description': '"""Context geo-tiles in Python"""', 'long_description': 'long_description', 'long_description_content_type': '"""text/markdown"""', 'url': '"""https://github.com/darribas/contextily"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""3-Clause BSD"""', 'packages': "['contextily']", 'package_data': "{'': ['requirements.txt']}", 'classifiers': "['License :: OSI Approved :: BSD License',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Framework :: Matplotlib']", 'python_requires': '""">=3.6"""', 'install_requires': 'install_requires', 'zip_safe': '(False)'}), "(name='contextily', version='1.0.0', description=\n 'Context geo-tiles in Python', long_description=long_description,\n long_description_content_type='text/markdown', url=\n 'https://github.com/darribas/contextily', author='<NAME>', author_email\n ='<EMAIL>', license='3-Clause BSD', packages=['contextily'],\n package_data={'': ['requirements.txt']}, classifiers=[\n 'License :: OSI Approved :: BSD License',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Framework :: Matplotlib'], python_requires='>=3.6', install_requires=\n install_requires, zip_safe=False)\n", (238, 1068), False, 'from setuptools import setup\n')]
|
from direct.gui.DirectGui import *
from direct.task import Task
from pandac.PandaModules import *
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui import InventoryPage
from pirates.piratesgui import WeaponPanel
from pirates.piratesgui.SkillButton import SkillButton
from pirates.piratesgui import InventoryItemGui
from pirates.piratesgui import InventoryItemList
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import PLocalizer
from pirates.piratesgui.CombatTray import WeaponButton
from pirates.economy import EconomyGlobals
from pirates.economy.EconomyGlobals import *
from pirates.battle import WeaponGlobals
from pirates.reputation import ReputationGlobals
from pirates.piratesgui.ReputationMeter import ReputationMeter
import copy
from pirates.inventory import ItemGlobals, InventoryGlobals
from GuiButton import GuiButton
# Weapon-category skill tokens; rePanel() iterates this list in order, so it
# also fixes the top-to-bottom display order of the weapon panels.
TOKEN_LIST = [
    InventoryType.CutlassToken, InventoryType.PistolToken, InventoryType.DollToken, InventoryType.DaggerToken, InventoryType.GrenadeToken, InventoryType.WandToken]
class WeaponPage(InventoryPage.InventoryPage):
    """Inventory page listing the player's weapons, plus optional fishing
    and potion sections (icons, reputation meters) and usable tonic buttons.

    Panels are rebuilt lazily: refreshList() tears everything down and
    rePanel() recreates it, but only while the page is actually showing.
    """
    def __init__(self):
        InventoryPage.InventoryPage.__init__(self)
        self.initialiseoptions(WeaponPage)
        # rep-category id -> WeaponPanel created in rePanel()
        self.weaponPanels = {}
        # tonic item id -> SkillButton created in rePanel()
        self.tonicButtons = {}
        self.fishingIcon = None
        self.potionIcon = None
        self.fishingRepMeter = None
        self.potionRepMeter = None
        self.fishingPoleName = None
        # inventory-change event name we are currently listening to for rod updates
        self.fishingChangeMsg = None
        # when set, the panel list is rebuilt the next time show() runs
        self.needRefresh = 1
        self.showing = 0
        return
    def show(self):
        # Defer the expensive rebuild until the page is actually visible.
        self.showing = 1
        InventoryPage.InventoryPage.show(self)
        if self.needRefresh:
            self.refreshList()
            self.needRefresh = 0
    def hide(self):
        self.showing = 0
        # NOTE(review): equipStatus is not initialised in __init__; presumably
        # defined by the base class or another caller -- confirm before use.
        self.equipStatus = 0
        InventoryPage.InventoryPage.hide(self)
    def tonicCallback(self, skillId):
        # Fire the clicked tonic through the combat tray as a UseItem skill.
        localAvatar.guiMgr.combatTray.trySkill(InventoryType.UseItem, skillId, 0)
    def rePanel(self, inventory):
        """Rebuild every panel on the page from the given inventory.

        If the page is hidden, just mark it dirty and bail; show() will
        trigger the rebuild later.
        """
        if not self.showing:
            self.needRefresh = 1
            return
        # Map each weapon skill token to the weapon id whose panel represents
        # that category (presumably the starter weapon of each category).
        skillTokens = {InventoryType.CutlassToken: (ItemGlobals.RUSTY_CUTLASS,),InventoryType.PistolToken: (ItemGlobals.FLINTLOCK_PISTOL,),InventoryType.DollToken: (ItemGlobals.VOODOO_DOLL,),InventoryType.DaggerToken: (ItemGlobals.BASIC_DAGGER,),InventoryType.GrenadeToken: (ItemGlobals.GRENADE_POUCH,),InventoryType.WandToken: (ItemGlobals.CURSED_STAFF,)}
        # zIndex counts rows top-down; each row is offset by one panel height.
        zIndex = 1
        for skillTokenKey in TOKEN_LIST:
            # quantity is 1 if the player owns the category's token, else 0.
            quantity = 0
            if localAvatar.getInventory().stacks.get(skillTokenKey):
                quantity = 1
            skillData = skillTokens[skillTokenKey]
            weaponId = skillData[0]
            key = None
            panel = WeaponPanel.WeaponPanel((weaponId, quantity), key)
            panel.reparentTo(self)
            panel.setZ(PiratesGuiGlobals.InventoryPanelHeight - 0.18 - zIndex * panel.height)
            zIndex += 1
            repCat = WeaponGlobals.getRepId(weaponId)
            self.weaponPanels[repCat] = panel
            # Re-arm a one-shot refresh for each token's quantity-change event.
            self.ignore('inventoryQuantity-%s' % inventory.getDoId())
            self.acceptOnce('inventoryQuantity-%s-%s' % (inventory.getDoId(), skillTokenKey), self.refreshList)
        repIcon_gui = loader.loadModel('models/textureCards/skillIcons')
        repIcon = repIcon_gui.find('**/box_base')
        # NOTE(review): 'panel' below is the last WeaponPanel from the loop
        # above; its height is reused as the row spacing for these sections.
        if config.GetBool('want-fishing-game', 0):
            self.fishingIcon = GuiButton(pos=(0.166, 0, 0.045 + (PiratesGuiGlobals.InventoryPanelHeight - 0.18) - zIndex * panel.height), helpText=PLocalizer.FishingRepDescription, helpOpaque=True, image=(repIcon, repIcon, repIcon, repIcon), image_scale=(0.144,
                0.144,
                0.144))
            fishIconCard = loader.loadModel('models/textureCards/fishing_icons')
            inv = localAvatar.getInventory()
            # Swap the listener over to the current inventory's rod-change event.
            fishingChangeMsg = InventoryGlobals.getCategoryQuantChangeMsg(inv.doId, InventoryType.FishingRod)
            if self.fishingChangeMsg:
                self.ignore(fishingChangeMsg)
            self.fishingChangeMsg = fishingChangeMsg
            self.acceptOnce(fishingChangeMsg, self.refreshList)
            rodIcons = [
                'pir_t_gui_fsh_smRodIcon', 'pir_t_gui_fsh_mdRodIcon', 'pir_t_gui_fsh_lgRodIcon']
            rodLvl = inv.getStackQuantity(InventoryType.FishingRod)
            rodIcon = rodIcons[rodLvl - 1]
            rodText = PLocalizer.FishingRodNames[rodLvl]
            if rodLvl >= 1:
                self.fishingIcon['geom'] = fishIconCard.find('**/' + rodIcon)
                self.fishingIcon['geom_scale'] = 0.1
                self.fishingIcon['geom_pos'] = (0, 0, 0)
            self.fishingIcon.reparentTo(self)
            fishingRepValue = localAvatar.getInventory().getReputation(InventoryType.FishingRep)
            self.fishingRepMeter = ReputationMeter(InventoryType.FishingRep, width=0.66)
            self.fishingRepMeter.setPos(0.62, 0, 0.041 + (PiratesGuiGlobals.InventoryPanelHeight - 0.18) - zIndex * panel.height)
            self.fishingRepMeter.update(fishingRepValue)
            self.fishingRepMeter.reparentTo(self)
            self.fishingRepMeter.flattenLight()
            # NOTE(review): the label row uses a hard-coded 7 instead of zIndex.
            self.fishingPoleName = DirectLabel(parent=self, relief=None, state=DGG.DISABLED, text=rodText, text_scale=PiratesGuiGlobals.TextScaleSmall, text_align=TextNode.ALeft, text_fg=PiratesGuiGlobals.TextFG2, text_shadow=PiratesGuiGlobals.TextShadow, pos=(0.29, 0, -0.005 + (PiratesGuiGlobals.InventoryPanelHeight - 0.18) - 7 * panel.height), text_font=PiratesGlobals.getInterfaceFont())
            self.fishingPoleName.reparentTo(self)
            zIndex += 1
        iconCard = loader.loadModel('models/textureCards/skillIcons')
        if config.GetBool('want-potion-game', 0):
            self.potionIcon = GuiButton(pos=(0.166, 0, 0.045 + (PiratesGuiGlobals.InventoryPanelHeight - 0.18) - zIndex * panel.height), helpText=PLocalizer.PotionRepDescription, helpOpaque=True, image=(repIcon, repIcon, repIcon, repIcon), image_scale=(0.144,
                0.144,
                0.144))
            self.potionIcon['geom'] = iconCard.find('**/pir_t_gui_pot_base')
            self.potionIcon['geom_scale'] = 0.1
            self.potionIcon['geom_pos'] = (0, 0, 0)
            self.potionIcon.reparentTo(self)
            potionRepValue = localAvatar.getInventory().getReputation(InventoryType.PotionsRep)
            self.potionRepMeter = ReputationMeter(InventoryType.PotionsRep, width=0.66)
            self.potionRepMeter.setPos(0.62, 0, 0.041 + (PiratesGuiGlobals.InventoryPanelHeight - 0.18) - zIndex * panel.height)
            self.potionRepMeter.update(potionRepValue)
            self.potionRepMeter.reparentTo(self)
            self.potionRepMeter.flattenLight()
            zIndex += 1
        items = dict(map(lambda x: (x.getType(), x.getCount()), inventory.getConsumables().values()))
        possibleItems = ItemGlobals.getAllHealthIds()
        havePorky = items.get(ItemGlobals.ROAST_PORK)
        if not havePorky and ItemGlobals.ROAST_PORK in possibleItems:
            possibleItems.remove(ItemGlobals.ROAST_PORK)
        # NOTE(review): 'offset' and the filtered 'possibleItems' above are
        # never read again below -- possibly leftovers from older layout code.
        offset = 0
        if base.config.GetBool('want-potion-game', 0):
            items = inventory.getConsumables()
            listLength = len(InventoryType.PotionMinigamePotions)
            count = 0
            # Lay owned tonics out in a 6-per-row grid, left to right.
            for i in range(listLength):
                tonicId = InventoryType.PotionMinigamePotions[i]
                if items.get(tonicId):
                    button = SkillButton(tonicId, self.tonicCallback, items.get(tonicId), showQuantity=True, showHelp=True, showRing=True)
                    button.skillButton['geom_scale'] = 0.08
                    x = 0.16 * (count % 6) + -1.2
                    z = 1.0 - int(count / 6) * 0.16
                    button.setPos(x, 0, z)
                    button.reparentTo(self)
                    self.tonicButtons[tonicId] = button
                    count += 1
        return
    def refreshList(self, newWeaponId=None):
        """Destroy every widget on the page, then rebuild via rePanel().

        newWeaponId is accepted (event callbacks pass it) but unused here.
        """
        for panel in self.weaponPanels.values():
            panel.destroy()
        for panel in self.tonicButtons.values():
            panel.destroy()
        if self.fishingIcon is not None:
            self.fishingIcon.destroy()
        if self.potionIcon is not None:
            self.potionIcon.destroy()
        if self.fishingRepMeter is not None:
            self.fishingRepMeter.destroy()
        if self.potionRepMeter is not None:
            self.potionRepMeter.destroy()
        if self.fishingPoleName is not None:
            self.fishingPoleName.destroy()
        inventory = localAvatar.getInventory()
        if inventory:
            if inventory.isReady():
                self.rePanel(inventory)
            else:
                # Inventory still loading: rebuild once it announces readiness.
                self.ignore('inventoryReady-%s' % inventory.getDoId())
                self.acceptOnce('inventoryReady-%s' % inventory.getDoId(), self.rePanel)
        return
    def destroy(self):
        # Drop the rod-change listener before tearing down the base page.
        if self.fishingChangeMsg:
            self.ignore(self.fishingChangeMsg)
        InventoryPage.InventoryPage.destroy(self)
    def updateTonics(self):
        """Refresh the displayed quantity of every visible tonic button."""
        if not hasattr(base, 'localAvatar'):
            return
        inv = localAvatar.getInventory()
        if not inv:
            return
        possibleTonics = ItemGlobals.getAllHealthIds()
        for tonicId in possibleTonics:
            tonicAmt = inv.getItemQuantity(InventoryType.ItemTypeConsumable, tonicId)
            if self.tonicButtons.has_key(tonicId):
                self.tonicButtons[tonicId].updateQuantity(tonicAmt)
                self.tonicButtons[tonicId].checkAmount()
|
[
"pirates.piratesgui.InventoryPage.InventoryPage.destroy",
"pirates.piratesgui.InventoryPage.InventoryPage.hide",
"pirates.piratesgui.InventoryPage.InventoryPage.show",
"pirates.inventory.ItemGlobals.getAllHealthIds",
"pirates.piratesgui.InventoryPage.InventoryPage.__init__",
"pirates.piratesbase.PiratesGlobals.getInterfaceFont",
"pirates.inventory.InventoryGlobals.getCategoryQuantChangeMsg",
"pirates.piratesgui.ReputationMeter.ReputationMeter",
"GuiButton.GuiButton",
"pirates.battle.WeaponGlobals.getRepId",
"pirates.piratesgui.WeaponPanel.WeaponPanel"
] |
[((1134, 1176), 'pirates.piratesgui.InventoryPage.InventoryPage.__init__', 'InventoryPage.InventoryPage.__init__', (['self'], {}), '(self)\n', (1170, 1176), False, 'from pirates.piratesgui import InventoryPage\n'), ((1612, 1650), 'pirates.piratesgui.InventoryPage.InventoryPage.show', 'InventoryPage.InventoryPage.show', (['self'], {}), '(self)\n', (1644, 1650), False, 'from pirates.piratesgui import InventoryPage\n'), ((1827, 1865), 'pirates.piratesgui.InventoryPage.InventoryPage.hide', 'InventoryPage.InventoryPage.hide', (['self'], {}), '(self)\n', (1859, 1865), False, 'from pirates.piratesgui import InventoryPage\n'), ((7874, 7903), 'pirates.inventory.ItemGlobals.getAllHealthIds', 'ItemGlobals.getAllHealthIds', ([], {}), '()\n', (7901, 7903), False, 'from pirates.inventory import ItemGlobals, InventoryGlobals\n'), ((10002, 10043), 'pirates.piratesgui.InventoryPage.InventoryPage.destroy', 'InventoryPage.InventoryPage.destroy', (['self'], {}), '(self)\n', (10037, 10043), False, 'from pirates.piratesgui import InventoryPage\n'), ((10242, 10271), 'pirates.inventory.ItemGlobals.getAllHealthIds', 'ItemGlobals.getAllHealthIds', ([], {}), '()\n', (10269, 10271), False, 'from pirates.inventory import ItemGlobals, InventoryGlobals\n'), ((2773, 2823), 'pirates.piratesgui.WeaponPanel.WeaponPanel', 'WeaponPanel.WeaponPanel', (['(weaponId, quantity)', 'key'], {}), '((weaponId, quantity), key)\n', (2796, 2823), False, 'from pirates.piratesgui import WeaponPanel\n'), ((2998, 3030), 'pirates.battle.WeaponGlobals.getRepId', 'WeaponGlobals.getRepId', (['weaponId'], {}), '(weaponId)\n', (3020, 3030), False, 'from pirates.battle import WeaponGlobals\n'), ((3465, 3724), 'GuiButton.GuiButton', 'GuiButton', ([], {'pos': '(0.166, 0, 0.045 + (PiratesGuiGlobals.InventoryPanelHeight - 0.18) - zIndex *\n panel.height)', 'helpText': 'PLocalizer.FishingRepDescription', 'helpOpaque': '(True)', 'image': '(repIcon, repIcon, repIcon, repIcon)', 'image_scale': '(0.144, 0.144, 0.144)'}), 
'(pos=(0.166, 0, 0.045 + (PiratesGuiGlobals.InventoryPanelHeight - \n 0.18) - zIndex * panel.height), helpText=PLocalizer.\n FishingRepDescription, helpOpaque=True, image=(repIcon, repIcon,\n repIcon, repIcon), image_scale=(0.144, 0.144, 0.144))\n', (3474, 3724), False, 'from GuiButton import GuiButton\n'), ((4378, 4456), 'pirates.inventory.InventoryGlobals.getCategoryQuantChangeMsg', 'InventoryGlobals.getCategoryQuantChangeMsg', (['inv.doId', 'InventoryType.FishingRod'], {}), '(inv.doId, InventoryType.FishingRod)\n', (4420, 4456), False, 'from pirates.inventory import ItemGlobals, InventoryGlobals\n'), ((5331, 5384), 'pirates.piratesgui.ReputationMeter.ReputationMeter', 'ReputationMeter', (['InventoryType.FishingRep'], {'width': '(0.66)'}), '(InventoryType.FishingRep, width=0.66)\n', (5346, 5384), False, 'from pirates.piratesgui.ReputationMeter import ReputationMeter\n'), ((6287, 6545), 'GuiButton.GuiButton', 'GuiButton', ([], {'pos': '(0.166, 0, 0.045 + (PiratesGuiGlobals.InventoryPanelHeight - 0.18) - zIndex *\n panel.height)', 'helpText': 'PLocalizer.PotionRepDescription', 'helpOpaque': '(True)', 'image': '(repIcon, repIcon, repIcon, repIcon)', 'image_scale': '(0.144, 0.144, 0.144)'}), '(pos=(0.166, 0, 0.045 + (PiratesGuiGlobals.InventoryPanelHeight - \n 0.18) - zIndex * panel.height), helpText=PLocalizer.\n PotionRepDescription, helpOpaque=True, image=(repIcon, repIcon, repIcon,\n repIcon), image_scale=(0.144, 0.144, 0.144))\n', (6296, 6545), False, 'from GuiButton import GuiButton\n'), ((7390, 7443), 'pirates.piratesgui.ReputationMeter.ReputationMeter', 'ReputationMeter', (['InventoryType.PotionsRep'], {'width': '(0.66)'}), '(InventoryType.PotionsRep, width=0.66)\n', (7405, 7443), False, 'from pirates.piratesgui.ReputationMeter import ReputationMeter\n'), ((6028, 6061), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (6059, 6061), False, 'from pirates.piratesbase import PiratesGlobals\n')]
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
#
# Usage:
# run './build_docs.py' to generate the documentation and keep it updated
# open 'http://localhost:1313/' to check live update (this opens the top
# level index page). you can also directly access a specific document by
# accessing 'http://localhost:1313/path/to/doc',
# e.g. http://localhost:1313/hw/ip/uart/doc
import argparse
import logging
import os
import platform
import re
import subprocess
import sys
import textwrap
from pathlib import Path
import hjson
import check_tool_requirements
import dashboard.gen_dashboard_entry as gen_dashboard_entry
import difgen.gen_dif_listing as gen_dif_listing
import reggen.gen_cfg_html as gen_cfg_html
import reggen.gen_html as gen_html
import reggen.validate as validate
import reggen.gen_selfdoc as reggen_selfdoc
import dvsim.testplanner.testplan_utils as testplan_utils
import tlgen
# Usage string passed to argparse in main().
USAGE = """
  build_docs [options]
"""
# Version of hugo extended to be used to build the docs
try:
    tool_requirements = check_tool_requirements.read_tool_requirements()
    HUGO_EXTENDED_VERSION = tool_requirements['hugo_extended']
except Exception as e:
    # Fail fast: without a pinned hugo version the build is not reproducible.
    print("Unable to get required hugo version: %s" % str(e), file=sys.stderr)
    sys.exit(1)
# Configurations
# TODO: Move to config.yaml
# Absolute, resolved path of the repository root (this file lives in util/).
SRCTREE_TOP = Path(__file__).parent.joinpath('..').resolve()
config = {
    # Toplevel source directory
    "topdir":
    SRCTREE_TOP,
    # Pre-generate register and hwcfg fragments from these files.
    "hardware_definitions": [
        "hw/ip/aes/data/aes.hjson",
        "hw/top_earlgrey/ip/alert_handler/data/autogen/alert_handler.hjson",
        "hw/ip/entropy_src/data/entropy_src.hjson",
        "hw/ip/csrng/data/csrng.hjson",
        "hw/ip/edn/data/edn.hjson",
        "hw/ip/flash_ctrl/data/flash_ctrl.hjson",
        "hw/ip/gpio/data/gpio.hjson",
        "hw/ip/hmac/data/hmac.hjson",
        "hw/ip/i2c/data/i2c.hjson",
        "hw/ip/keymgr/data/keymgr.hjson",
        "hw/ip/kmac/data/kmac.hjson",
        "hw/ip/lc_ctrl/data/lc_ctrl.hjson",
        "hw/ip/nmi_gen/data/nmi_gen.hjson",
        "hw/ip/otbn/data/otbn.hjson",
        "hw/ip/otp_ctrl/data/otp_ctrl.hjson",
        "hw/ip/padctrl/data/padctrl.hjson",
        "hw/ip/pattgen/data/pattgen.hjson",
        "hw/top_earlgrey/ip/pinmux/data/autogen/pinmux.hjson",
        "hw/top_earlgrey/ip/clkmgr/data/autogen/clkmgr.hjson",
        "hw/top_earlgrey/ip/pwrmgr/data/autogen/pwrmgr.hjson",
        "hw/top_earlgrey/ip/rstmgr/data/autogen/rstmgr.hjson",
        "hw/top_earlgrey/ip/rv_plic/data/autogen/rv_plic.hjson",
        "hw/ip/rv_timer/data/rv_timer.hjson",
        "hw/ip/spi_device/data/spi_device.hjson",
        "hw/ip/sram_ctrl/data/sram_ctrl.hjson",
        "hw/ip/uart/data/uart.hjson",
        "hw/ip/usbdev/data/usbdev.hjson",
        "hw/ip/usbuart/data/usbuart.hjson",
    ],
    # Pre-generate dashboard fragments from these directories.
    "dashboard_definitions": [
        "hw/ip",
    ],
    # Pre-generate testplan fragments from these files.
    "testplan_definitions": [
        "hw/ip/aes/data/aes_testplan.hjson",
        "hw/ip/alert_handler/data/alert_handler_testplan.hjson",
        "hw/ip/entropy_src/data/entropy_src_testplan.hjson",
        "hw/ip/flash_ctrl/data/flash_ctrl_testplan.hjson",
        "hw/ip/gpio/data/gpio_testplan.hjson",
        "hw/ip/hmac/data/hmac_testplan.hjson",
        "hw/ip/i2c/data/i2c_testplan.hjson",
        "hw/ip/keymgr/data/keymgr_testplan.hjson",
        "hw/ip/otp_ctrl/data/otp_ctrl_testplan.hjson",
        "hw/ip/padctrl/data/padctrl_fpv_testplan.hjson",
        "hw/ip/pattgen/data/pattgen_testplan.hjson",
        "hw/ip/pinmux/data/pinmux_fpv_testplan.hjson",
        "hw/ip/rv_plic/data/rv_plic_fpv_testplan.hjson",
        "hw/ip/rv_timer/data/rv_timer_testplan.hjson",
        "hw/ip/spi_device/data/spi_device_testplan.hjson",
        "hw/ip/uart/data/uart_testplan.hjson",
        "hw/ip/usbdev/data/usbdev_testplan.hjson",
        "hw/ip/tlul/data/tlul_testplan.hjson",
        "hw/top_earlgrey/data/chip_testplan.hjson",
        "hw/top_earlgrey/data/standalone_sw_testplan.hjson",
        "util/dvsim/testplanner/examples/foo_testplan.hjson",
    ],
    # Pre-generated utility selfdoc
    "selfdoc_tools": ["tlgen", "reggen"],
    # DIF Docs
    "difs-directory": "sw/device/lib/dif",
    # Output directory for documents
    "outdir":
    SRCTREE_TOP.joinpath('build', 'docs'),
    # Intermediate fragments consumed while building the final site.
    "outdir-generated":
    SRCTREE_TOP.joinpath('build', 'docs-generated'),
    "verbose":
    False,
}
def generate_dashboards():
    """Pre-generate one dashboard HTML fragment per configured IP directory.

    For each directory in config["dashboard_definitions"], collect all
    *.prj.hjson files below it (sorted for a stable ordering) and render
    them into a single 'dashboard' fragment under outdir-generated.
    """
    for dashboard in config["dashboard_definitions"]:
        hjson_paths = sorted(
            SRCTREE_TOP.joinpath(dashboard).rglob('*.prj.hjson'))
        dashboard_path = config["outdir-generated"].joinpath(
            dashboard, 'dashboard')
        # The sibling generators create their own output directories; do the
        # same here so this function does not rely on being called after them.
        dashboard_path.parent.mkdir(parents=True, exist_ok=True)
        # Context manager guarantees the fragment is closed even if a
        # dashboard entry fails to render.
        with open(str(dashboard_path), mode='w') as dashboard_html:
            for hjson_path in hjson_paths:
                gen_dashboard_entry.gen_dashboard_html(hjson_path, dashboard_html)
def generate_hardware_blocks():
    """Validate each register hjson file and emit HTML fragments.

    For every file in config["hardware_definitions"], parse + validate the
    register description, then write '<name>.registers' and '<name>.hwcfg'
    fragments under outdir-generated.
    """
    for hardware in config["hardware_definitions"]:
        # The original leaked this handle; a context manager closes it
        # deterministically.
        with open(str(SRCTREE_TOP.joinpath(hardware))) as hardware_file:
            regs = hjson.load(hardware_file,
                              use_decimal=True,
                              object_pairs_hook=validate.checking_dict)
        if validate.validate(regs) == 0:
            logging.info("Parsed %s", hardware)
        else:
            logging.fatal("Failed to parse %s", hardware)
        base_path = config["outdir-generated"].joinpath(hardware)
        base_path.parent.mkdir(parents=True, exist_ok=True)
        # Context managers flush and close the fragments even if the HTML
        # generators raise.
        with open(str(base_path.parent.joinpath(base_path.name + '.registers')),
                  mode='w') as regs_html:
            gen_html.gen_html(regs, regs_html)
        with open(str(base_path.parent.joinpath(base_path.name + '.hwcfg')),
                  mode='w') as hwcfg_html:
            gen_cfg_html.gen_cfg_html(regs, hwcfg_html)
def generate_testplans():
    """Render each testplan hjson into an HTML table fragment.

    Output goes to '<testplan path>.testplan' under outdir-generated.
    """
    for testplan in config["testplan_definitions"]:
        plan = testplan_utils.parse_testplan(SRCTREE_TOP.joinpath(testplan))
        plan_path = config["outdir-generated"].joinpath(testplan + '.testplan')
        plan_path.parent.mkdir(parents=True, exist_ok=True)
        # Context manager closes the fragment even if table generation fails.
        with open(str(plan_path), mode='w') as testplan_html:
            testplan_utils.gen_html_testplan_table(plan, testplan_html)
def generate_selfdocs():
    """Write self-documentation fragments for the `util/` tools.

    Each tool listed in config["selfdoc_tools"] produces its documentation
    in a tool-specific way, so every one is dispatched explicitly here.
    """
    out_root = config["outdir-generated"]
    for tool_name in config["selfdoc_tools"]:
        doc_path = out_root.joinpath(tool_name + '.selfdoc')
        doc_path.parent.mkdir(parents=True, exist_ok=True)
        with open(str(doc_path), mode='w') as doc_file:
            if tool_name == "reggen":
                reggen_selfdoc.document(doc_file)
            elif tool_name == "tlgen":
                doc_file.write(tlgen.selfdoc(heading=3, cmd='tlgen.py --doc'))
def generate_apt_reqs():
    """Generate an apt-get command line invocation from apt-requirements.txt

    This will be saved in outdir-generated/apt_cmd.txt
    """
    # Collect package names, ignoring everything after '#' on each line and
    # skipping blank lines. The original left this file handle open; the
    # context manager closes it deterministically.
    apt_requirements = []
    with open(str(SRCTREE_TOP.joinpath("apt-requirements.txt"))) as requirements_file:
        for package_line in requirements_file:
            package = package_line.split('#', 1)[0].strip()
            if package:
                # only add non-empty lines to packages
                apt_requirements.append(package)
    apt_cmd = "$ sudo apt-get install " + " ".join(apt_requirements)
    apt_cmd_lines = textwrap.wrap(apt_cmd,
                                  width=78,
                                  replace_whitespace=True,
                                  subsequent_indent='    ')
    # Newlines need to be escaped so the wrapped command stays one shell line.
    apt_cmd = " \\\n".join(apt_cmd_lines)
    # And then to write the generated string directly to the file.
    apt_cmd_path = config["outdir-generated"].joinpath('apt_cmd.txt')
    apt_cmd_path.parent.mkdir(parents=True, exist_ok=True)
    with open(str(apt_cmd_path), mode='w') as fout:
        fout.write(apt_cmd)
def generate_tool_versions():
    """Generate a tool version number requirement from tool_requirements.py

    The version number per tool will be saved in
    outdir-generated/version_$TOOL_NAME.txt
    """
    # Populate __TOOL_REQUIREMENTS__ by executing the (trusted, in-repo)
    # requirements file in this module's globals. The original left the file
    # handle open; read it inside a context manager instead.
    requirements_file = str(SRCTREE_TOP.joinpath("tool_requirements.py"))
    with open(requirements_file) as req_file:
        exec(req_file.read(), globals())
    # And then write a version file for every tool.
    for tool in __TOOL_REQUIREMENTS__:  # noqa: F821
        version_path = config["outdir-generated"].joinpath('version_' + tool + '.txt')
        version_path.parent.mkdir(parents=True, exist_ok=True)
        with open(str(version_path), mode='w') as fout:
            fout.write(__TOOL_REQUIREMENTS__[tool])  # noqa: F821
def generate_dif_docs():
    """Generate doxygen documentation and DIF listings from DIF source comments.

    This invokes Doxygen, and a few other things. Be careful of changing any
    paths here, some correspond to paths in other configuration files.
    """
    logging.info("Generating Software API Documentation (Doxygen)...")
    doxygen_out_path = config["outdir-generated"].joinpath("sw")
    # The next two paths correspond to relative paths specified in the Doxyfile
    doxygen_xml_path = doxygen_out_path.joinpath("api-xml")
    # We need to prepare this path because doxygen won't `mkdir -p`
    doxygen_sw_path = doxygen_out_path.joinpath("public-api/sw/apis")
    doxygen_sw_path.mkdir(parents=True, exist_ok=True)
    # This is where warnings will be generated
    doxygen_warnings_path = doxygen_out_path.joinpath("doxygen_warnings.log")
    # Remove any stale warning log so only this run's warnings are reported.
    if doxygen_warnings_path.exists():
        doxygen_warnings_path.unlink()
    doxygen_args = [
        "doxygen",
        str(SRCTREE_TOP.joinpath("util/doxygen/Doxyfile")),
    ]
    # The Doxyfile reads SRCTREE_TOP and DOXYGEN_OUT from the environment.
    doxygen_results = subprocess.run( # noqa: F841
        doxygen_args, check=True,
        cwd=str(SRCTREE_TOP), stdout=subprocess.PIPE,
        env=dict(
            os.environ,
            SRCTREE_TOP=str(SRCTREE_TOP),
            DOXYGEN_OUT=str(doxygen_out_path),
        ))
    logging.info("Generated Software API Documentation (Doxygen)")
    if doxygen_warnings_path.exists():
        logging.warning("Doxygen Generated Warnings "
                        "(saved in {})".format(str(doxygen_warnings_path)))
    # Merge doxygen's per-file XML output, then emit one source listing and
    # one per-function reference page per DIF header.
    combined_xml = gen_dif_listing.get_combined_xml(doxygen_xml_path)
    dif_paths = []
    dif_paths.extend(sorted(SRCTREE_TOP.joinpath(config["difs-directory"]).glob("dif_*.h")))
    dif_listings_root_path = config["outdir-generated"].joinpath("sw/difs_listings")
    difrefs_root_path = config["outdir-generated"].joinpath("sw/difref")
    for dif_header_path in dif_paths:
        dif_header = str(dif_header_path.relative_to(SRCTREE_TOP))
        dif_listings_filename = dif_listings_root_path.joinpath(dif_header + ".html")
        dif_listings_filename.parent.mkdir(parents=True, exist_ok=True)
        with open(str(dif_listings_filename), mode='w') as dif_listings_html:
            gen_dif_listing.gen_listing_html(combined_xml, dif_header,
                                             dif_listings_html)
        difref_functions = gen_dif_listing.get_difref_info(combined_xml, dif_header)
        for function in difref_functions:
            difref_filename = difrefs_root_path.joinpath(function["name"] + '.html')
            difref_filename.parent.mkdir(parents=True, exist_ok=True)
            with open(str(difref_filename), mode='w') as difref_html:
                gen_dif_listing.gen_difref_html(function, difref_html)
        logging.info("Generated DIF Listing for {}".format(dif_header))
def generate_otbn_isa():
    """Generate the OTBN ISA documentation fragment.

    Runs hw/ip/otbn/util/yaml_to_doc.py on the instruction description and
    writes the resulting Markdown under outdir-generated/otbn-isa.
    """
    otbn_root = SRCTREE_TOP / 'hw/ip/otbn'
    cmd = [
        str(otbn_root / 'util/yaml_to_doc.py'),
        str(otbn_root / 'data/insns.yml'),
        str(config['outdir-generated'].joinpath('otbn-isa')),
    ]
    subprocess.run(cmd, check=True)
def hugo_match_version(hugo_bin_path, version):
    """Return True iff `hugo_bin_path` reports the extended build of `version`."""
    logging.info("Hugo binary path: %s", hugo_bin_path)
    result = subprocess.run([str(hugo_bin_path), "version"],
                            universal_newlines=True,
                            stdout=subprocess.PIPE,
                            check=True,
                            cwd=str(SRCTREE_TOP))
    logging.info("Checking for correct Hugo version: %s", version)
    # Hugo version string example:
    # "Hugo Static Site Generator v0.59.0-1DD0C69C/extended linux/amd64 BuildDate: 2019-10-21T09:45:38Z" # noqa: E501
    pattern = "v" + version + ".*/extended"
    return re.search(pattern, result.stdout) is not None
def install_hugo(install_dir):
    """Download and "install" hugo into `install_dir`.

    `install_dir` is created if it doesn't exist yet. Returns the path of the
    hugo binary on success and False when the platform is unsupported.

    Limitations:
        Currently only 64-bit x86 Linux and macOS is supported.
    """
    # TODO: Support more configurations
    if platform.system() == 'Linux' and platform.machine() == 'x86_64':
        download_url = ('https://github.com/gohugoio/hugo/releases/download/v{version}'
                        '/hugo_extended_{version}_Linux-64bit.tar.gz').format(
                            version=HUGO_EXTENDED_VERSION)
    elif platform.system() == 'Darwin' and platform.machine() == 'x86_64':
        download_url = ('https://github.com/gohugoio/hugo/releases/download/v{version}'
                        '/hugo_extended_{version}_macOS-64bit.tar.gz').format(
                            version=HUGO_EXTENDED_VERSION)
    else:
        logging.fatal(
            "Auto-install of hugo only supported for 64-bit x86 Linux and "
            "macOS. Manually install hugo and re-run this script with --force-global.")
        return False
    install_dir.mkdir(exist_ok=True, parents=True)
    hugo_bin_path = install_dir / 'hugo'
    # Reuse an already-downloaded binary if it matches the pinned version;
    # any other outcome (wrong version, missing, unreadable) falls through to
    # a fresh download that overwrites it.
    try:
        if hugo_match_version(hugo_bin_path, HUGO_EXTENDED_VERSION):
            return hugo_bin_path
    except PermissionError:
        # If there is an error checking the version just continue to download
        logging.info("Hugo version could not be verified. Continue to download.")
    except FileNotFoundError:
        pass
    # TODO: Investigate the use of Python builtins for downloading. Extracting
    # the archive will probably will be a call to tar.
    # shell=True runs a curl | tar pipeline; download_url is built above from
    # pinned constants, not from user input.
    cmd = 'curl -sL {download_url} | tar -xzO hugo > {hugo_bin_file}'.format(
        hugo_bin_file=str(hugo_bin_path), download_url=download_url)
    logging.info("Calling %s to download hugo.", cmd)
    subprocess.run(cmd, shell=True, check=True, cwd=str(SRCTREE_TOP))
    hugo_bin_path.chmod(0o755)
    return hugo_bin_path
def invoke_hugo(preview, hugo_bin_path):
    """Run hugo to build the site (or serve it live when |preview| is set)."""
    site_docs = SRCTREE_TOP.joinpath('site', 'docs')
    hugo_args = [
        str(hugo_bin_path),
        "--config", str(site_docs.joinpath('config.toml')),
        "--destination", str(config["outdir"]),
        "--contentDir", str(SRCTREE_TOP),
        "--layoutDir", str(site_docs.joinpath('layouts')),
    ]
    if preview:
        # "hugo server" builds and serves with live reload.
        hugo_args.append("server")
    subprocess.run(hugo_args, check=True, cwd=str(SRCTREE_TOP))
def main():
    """CLI entry point: regenerate derived docs, obtain hugo, build the site.

    Parses command-line flags, regenerates all derived documentation inputs,
    ensures a pinned-version hugo binary is available (unless
    --force-global), then invokes hugo to build or preview the site.
    Exits with an error message on build/permission failures.
    """
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s - %(message)s",
                        datefmt="%Y-%m-%d %H:%M")

    parser = argparse.ArgumentParser(
        prog="build_docs",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE)
    parser.add_argument(
        '--preview',
        action='store_true',
        help="""Starts a local server with live reload (updates triggered upon
             changes in the documentation files). This feature is intended
             to preview the documentation locally.""")
    parser.add_argument(
        '--force-global',
        action='store_true',
        help="""Use a global installation of Hugo. This skips the version
             check and relies on Hugo to be available from the environment.""")
    parser.add_argument('--hugo', help="""TODO""")
    args = parser.parse_args()

    # Regenerate all derived documentation inputs before invoking hugo.
    generate_hardware_blocks()
    generate_dashboards()
    generate_testplans()
    generate_selfdocs()
    generate_apt_reqs()
    generate_tool_versions()
    generate_dif_docs()
    generate_otbn_isa()

    # Make a locally-installed hugo (if any) visible on PATH.
    hugo_localinstall_dir = SRCTREE_TOP / 'build' / 'docs-hugo'
    os.environ["PATH"] += os.pathsep + str(hugo_localinstall_dir)

    hugo_bin_path = "hugo"
    if not args.force_global:
        try:
            hugo_bin_path = install_hugo(hugo_localinstall_dir)
        except KeyboardInterrupt:
            pass
        # install_hugo() returns False on unsupported platforms. Previously
        # that False leaked into invoke_hugo() and crashed with a confusing
        # FileNotFoundError; exit with a clear message instead.
        if not hugo_bin_path:
            sys.exit("Error installing Hugo")
    try:
        invoke_hugo(args.preview, hugo_bin_path)
    except subprocess.CalledProcessError:
        sys.exit("Error building site")
    except PermissionError:
        sys.exit("Error running Hugo")
    except KeyboardInterrupt:
        pass
# Run the build only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
[
"argparse.ArgumentParser",
"textwrap.wrap",
"pathlib.Path",
"difgen.gen_dif_listing.gen_listing_html",
"check_tool_requirements.read_tool_requirements",
"difgen.gen_dif_listing.get_difref_info",
"reggen.gen_html.gen_html",
"difgen.gen_dif_listing.gen_difref_html",
"platform.machine",
"re.search",
"tlgen.selfdoc",
"dashboard.gen_dashboard_entry.gen_dashboard_html",
"logging.fatal",
"platform.system",
"reggen.gen_cfg_html.gen_cfg_html",
"sys.exit",
"logging.basicConfig",
"difgen.gen_dif_listing.get_combined_xml",
"hjson.load",
"reggen.validate.validate",
"logging.info",
"reggen.gen_selfdoc.document",
"dvsim.testplanner.testplan_utils.gen_html_testplan_table"
] |
[((1155, 1203), 'check_tool_requirements.read_tool_requirements', 'check_tool_requirements.read_tool_requirements', ([], {}), '()\n', (1201, 1203), False, 'import check_tool_requirements\n'), ((7999, 8087), 'textwrap.wrap', 'textwrap.wrap', (['apt_cmd'], {'width': '(78)', 'replace_whitespace': '(True)', 'subsequent_indent': '""" """'}), "(apt_cmd, width=78, replace_whitespace=True, subsequent_indent\n =' ')\n", (8012, 8087), False, 'import textwrap\n'), ((9557, 9623), 'logging.info', 'logging.info', (['"""Generating Software API Documentation (Doxygen)..."""'], {}), "('Generating Software API Documentation (Doxygen)...')\n", (9569, 9623), False, 'import logging\n'), ((10624, 10686), 'logging.info', 'logging.info', (['"""Generated Software API Documentation (Doxygen)"""'], {}), "('Generated Software API Documentation (Doxygen)')\n", (10636, 10686), False, 'import logging\n'), ((10877, 10927), 'difgen.gen_dif_listing.get_combined_xml', 'gen_dif_listing.get_combined_xml', (['doxygen_xml_path'], {}), '(doxygen_xml_path)\n', (10909, 10927), True, 'import difgen.gen_dif_listing as gen_dif_listing\n'), ((12679, 12730), 'logging.info', 'logging.info', (['"""Hugo binary path: %s"""', 'hugo_bin_path'], {}), "('Hugo binary path: %s', hugo_bin_path)\n", (12691, 12730), False, 'import logging\n'), ((13013, 13075), 'logging.info', 'logging.info', (['"""Checking for correct Hugo version: %s"""', 'version'], {}), "('Checking for correct Hugo version: %s', version)\n", (13025, 13075), False, 'import logging\n'), ((15086, 15135), 'logging.info', 'logging.info', (['"""Calling %s to download hugo."""', 'cmd'], {}), "('Calling %s to download hugo.', cmd)\n", (15098, 15135), False, 'import logging\n'), ((15824, 15929), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s - %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M"""'}), "(level=logging.INFO, format='%(asctime)s - %(message)s',\n datefmt='%Y-%m-%d %H:%M')\n", (15843, 15929), False, 
'import logging\n'), ((15988, 16102), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""build_docs"""', 'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'usage': 'USAGE'}), "(prog='build_docs', formatter_class=argparse.\n RawDescriptionHelpFormatter, usage=USAGE)\n", (16011, 16102), False, 'import argparse\n'), ((1373, 1384), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1381, 1384), False, 'import sys\n'), ((5366, 5456), 'hjson.load', 'hjson.load', (['hardware_file'], {'use_decimal': '(True)', 'object_pairs_hook': 'validate.checking_dict'}), '(hardware_file, use_decimal=True, object_pairs_hook=validate.\n checking_dict)\n', (5376, 5456), False, 'import hjson\n'), ((5985, 6019), 'reggen.gen_html.gen_html', 'gen_html.gen_html', (['regs', 'regs_html'], {}), '(regs, regs_html)\n', (6002, 6019), True, 'import reggen.gen_html as gen_html\n'), ((6176, 6219), 'reggen.gen_cfg_html.gen_cfg_html', 'gen_cfg_html.gen_cfg_html', (['regs', 'hwcfg_html'], {}), '(regs, hwcfg_html)\n', (6201, 6219), True, 'import reggen.gen_cfg_html as gen_cfg_html\n'), ((6609, 6668), 'dvsim.testplanner.testplan_utils.gen_html_testplan_table', 'testplan_utils.gen_html_testplan_table', (['plan', 'testplan_html'], {}), '(plan, testplan_html)\n', (6647, 6668), True, 'import dvsim.testplanner.testplan_utils as testplan_utils\n'), ((11707, 11764), 'difgen.gen_dif_listing.get_difref_info', 'gen_dif_listing.get_difref_info', (['combined_xml', 'dif_header'], {}), '(combined_xml, dif_header)\n', (11738, 11764), True, 'import difgen.gen_dif_listing as gen_dif_listing\n'), ((13246, 13302), 're.search', 're.search', (["('v' + version + '.*/extended')", 'process.stdout'], {}), "('v' + version + '.*/extended', process.stdout)\n", (13255, 13302), False, 'import re\n'), ((5101, 5167), 'dashboard.gen_dashboard_entry.gen_dashboard_html', 'gen_dashboard_entry.gen_dashboard_html', (['hjson_path', 'dashboard_html'], {}), '(hjson_path, dashboard_html)\n', (5139, 5167), True, 'import 
dashboard.gen_dashboard_entry as gen_dashboard_entry\n'), ((5515, 5538), 'reggen.validate.validate', 'validate.validate', (['regs'], {}), '(regs)\n', (5532, 5538), True, 'import reggen.validate as validate\n'), ((5557, 5593), 'logging.info', 'logging.info', (["('Parsed %s' % hardware)"], {}), "('Parsed %s' % hardware)\n", (5569, 5593), False, 'import logging\n'), ((5622, 5668), 'logging.fatal', 'logging.fatal', (["('Failed to parse %s' % hardware)"], {}), "('Failed to parse %s' % hardware)\n", (5635, 5668), False, 'import logging\n'), ((11556, 11633), 'difgen.gen_dif_listing.gen_listing_html', 'gen_dif_listing.gen_listing_html', (['combined_xml', 'dif_header', 'dif_listings_html'], {}), '(combined_xml, dif_header, dif_listings_html)\n', (11588, 11633), True, 'import difgen.gen_dif_listing as gen_dif_listing\n'), ((13575, 13592), 'platform.system', 'platform.system', ([], {}), '()\n', (13590, 13592), False, 'import platform\n'), ((13608, 13626), 'platform.machine', 'platform.machine', ([], {}), '()\n', (13624, 13626), False, 'import platform\n'), ((14164, 14324), 'logging.fatal', 'logging.fatal', (['"""Auto-install of hugo only supported for 64-bit x86 Linux and macOS. Manually install hugo and re-run this script with --force-global."""'], {}), "(\n 'Auto-install of hugo only supported for 64-bit x86 Linux and macOS. Manually install hugo and re-run this script with --force-global.'\n )\n", (14177, 14324), False, 'import logging\n'), ((14683, 14756), 'logging.info', 'logging.info', (['"""Hugo version could not be verified. Continue to download."""'], {}), "('Hugo version could not be verified. 
Continue to download.')\n", (14695, 14756), False, 'import logging\n'), ((17357, 17388), 'sys.exit', 'sys.exit', (['"""Error building site"""'], {}), "('Error building site')\n", (17365, 17388), False, 'import sys\n'), ((17425, 17455), 'sys.exit', 'sys.exit', (['"""Error running Hugo"""'], {}), "('Error running Hugo')\n", (17433, 17455), False, 'import sys\n'), ((7160, 7189), 'reggen.gen_selfdoc.document', 'reggen_selfdoc.document', (['fout'], {}), '(fout)\n', (7183, 7189), True, 'import reggen.gen_selfdoc as reggen_selfdoc\n'), ((12049, 12103), 'difgen.gen_dif_listing.gen_difref_html', 'gen_dif_listing.gen_difref_html', (['function', 'difref_html'], {}), '(function, difref_html)\n', (12080, 12103), True, 'import difgen.gen_dif_listing as gen_dif_listing\n'), ((13864, 13881), 'platform.system', 'platform.system', ([], {}), '()\n', (13879, 13881), False, 'import platform\n'), ((13898, 13916), 'platform.machine', 'platform.machine', ([], {}), '()\n', (13914, 13916), False, 'import platform\n'), ((1445, 1459), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1449, 1459), False, 'from pathlib import Path\n'), ((7251, 7297), 'tlgen.selfdoc', 'tlgen.selfdoc', ([], {'heading': '(3)', 'cmd': '"""tlgen.py --doc"""'}), "(heading=3, cmd='tlgen.py --doc')\n", (7264, 7297), False, 'import tlgen\n')]
|
import csv
import math
import bokeh
from bokeh.plotting import figure
from bokeh.io import show
from bokeh.models import Span
def main():
    """Plot soil stress distribution and settlement curves with bokeh.

    Prompts on stdin for two file paths: "output:" (the computed results
    CSV) and "input:" (the soil profile definition). Renders three stacked
    dark-themed plots — stress over depth, settlement in per-mille, and
    cumulative settlement in cm — and opens them in the browser.
    """
    data = _read_results(input("output: "))
    foundation_depth, soils = _read_soil_profile(input("input: "))

    bokeh.io.curdoc().theme = "dark_minimal"

    # Stress distribution: columns 5-7 plotted against depth (column 3).
    p1 = figure(
        x_axis_label="Spannung [kN/m²]",
        y_axis_label="Tiefe [m]",
        x_axis_type="auto",
        width=750, height=900,
        title="Spannungsverteilung"
    )
    for col, label, color in ((5, "σQ", "lightblue"),
                              (6, "σ0", "lightgreen"),
                              (7, "0.2 * σ0", "pink")):
        x, y = to_xy(data, col, 3)
        # Depth is plotted downwards, hence the negation.
        p1.line(x, [-depth for depth in y],
                legend_label=label, line_color=color, line_width=2)
    _mark_depths(p1, soils, foundation_depth)
    p1.legend.location = "bottom_right"

    # Settlement per layer in per-mille (column 9); the final row is
    # excluded, matching the original data layout.
    p2 = figure(
        x_axis_label="Setzung [‰]",
        y_axis_label="Tiefe [m]",
        x_axis_type="auto",
        width=1500, height=900,
        title="Setzungen"
    )
    x, y = to_xy(data, 9, 3)
    p2.line([1000 * v for v in x[:-1]], [-depth for depth in y[:-1]],
            line_color="white", line_width=2)
    _mark_depths(p2, soils, foundation_depth)

    # Cumulative settlement in cm: running sum of column 10.
    p3 = figure(
        x_axis_label="Setzung [cm]",
        y_axis_label="Tiefe [m]",
        x_axis_type="auto",
        width=1500, height=900,
        title="Setzungen"
    )
    x, y = to_xy(data, 10, 3)
    x = [100 * v for v in x]
    for i in range(1, len(x)):
        x[i] += x[i - 1]
    p3.line(x[:-1], [-depth for depth in y[:-1]],
            line_color="white", line_width=2)
    _mark_depths(p3, soils, foundation_depth)

    show(bokeh.layouts.column(p1, p2, p3))


def _read_results(filepath):
    """Parse the results CSV into a list of float rows (empty cells -> 0)."""
    with open(filepath, "r") as file:
        return [[float(cell) if cell != "" else 0 for cell in row]
                for row in csv.reader(file)]


def _read_soil_profile(filepath):
    """Parse the input file into (foundation_depth, soil layer rows).

    The first line holds the foundation depth in its third comma-separated
    field; soil layer rows start at the third line. All spaces are stripped
    before splitting.
    """
    with open(filepath, "r") as file:
        lines = file.read().split("\n")
    foundation_depth = float(lines[0].replace(" ", "").split(",")[2])
    soils = [[float(v) for v in line.replace(" ", "").split(",")]
             for line in lines[2:]]
    return foundation_depth, soils


def _mark_depths(fig, soils, foundation_depth):
    """Draw horizontal dashed markers: grey per soil-layer boundary,
    white at the foundation depth (two rays per depth span the full width)."""
    for soil in soils:
        for angle in (0, math.pi):
            fig.ray(x=[0], y=[-soil[0]], length=0, angle=angle,
                    line_color="grey", line_dash="dashed")
    for angle in (0, math.pi):
        fig.ray(x=[0], y=[-foundation_depth], length=0, angle=angle,
                line_color="white", line_dash="dashed")
def to_xy(data, ix, iy):
    """Extract two columns from row-oriented data as a pair of parallel lists.

    Returns [xs, ys] where xs holds column ix and ys holds column iy of
    every row in data.
    """
    return [[row[ix] for row in data], [row[iy] for row in data]]
# Run the plotting routine only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
[
"bokeh.io.curdoc",
"bokeh.plotting.figure",
"csv.reader",
"bokeh.layouts.column"
] |
[((749, 890), 'bokeh.plotting.figure', 'figure', ([], {'x_axis_label': '"""Spannung [kN/m²]"""', 'y_axis_label': '"""Tiefe [m]"""', 'x_axis_type': '"""auto"""', 'width': '(750)', 'height': '(900)', 'title': '"""Spannungsverteilung"""'}), "(x_axis_label='Spannung [kN/m²]', y_axis_label='Tiefe [m]',\n x_axis_type='auto', width=750, height=900, title='Spannungsverteilung')\n", (755, 890), False, 'from bokeh.plotting import figure\n'), ((1774, 1902), 'bokeh.plotting.figure', 'figure', ([], {'x_axis_label': '"""Setzung [‰]"""', 'y_axis_label': '"""Tiefe [m]"""', 'x_axis_type': '"""auto"""', 'width': '(1500)', 'height': '(900)', 'title': '"""Setzungen"""'}), "(x_axis_label='Setzung [‰]', y_axis_label='Tiefe [m]', x_axis_type=\n 'auto', width=1500, height=900, title='Setzungen')\n", (1780, 1902), False, 'from bokeh.plotting import figure\n'), ((2522, 2651), 'bokeh.plotting.figure', 'figure', ([], {'x_axis_label': '"""Setzung [cm]"""', 'y_axis_label': '"""Tiefe [m]"""', 'x_axis_type': '"""auto"""', 'width': '(1500)', 'height': '(900)', 'title': '"""Setzungen"""'}), "(x_axis_label='Setzung [cm]', y_axis_label='Tiefe [m]', x_axis_type=\n 'auto', width=1500, height=900, title='Setzungen')\n", (2528, 2651), False, 'from bokeh.plotting import figure\n'), ((226, 242), 'csv.reader', 'csv.reader', (['file'], {}), '(file)\n', (236, 242), False, 'import csv\n'), ((701, 718), 'bokeh.io.curdoc', 'bokeh.io.curdoc', ([], {}), '()\n', (716, 718), False, 'import bokeh\n'), ((3318, 3350), 'bokeh.layouts.column', 'bokeh.layouts.column', (['p1', 'p2', 'p3'], {}), '(p1, p2, p3)\n', (3338, 3350), False, 'import bokeh\n')]
|
# ---
# jupyter:
# jupytext:
# formats: ipynb,py
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.9.1+dev
# kernelspec:
# display_name: Python [conda env:biovectors]
# language: python
# name: conda-env-biovectors-py
# ---
# # Pubtator Central Update
# Pubtator Central updates their data monthly; however, they changed their data to be xml format instead of common text.
# Based on this update it is imperative to know what changes have been made/format this data to make future experiments easier to work with.
# This notebook is being created to make training word2vec a whole lot easier.
# +
# %load_ext autoreload
# %autoreload 2
from collections import defaultdict, Counter
import csv
from datetime import datetime
import itertools
import lzma
from pathlib import Path
import pickle
import tarfile
import lxml.etree as ET
import numpy as np
import pandas as pd
import plotnine as p9
import tqdm
from biovectors_modules.word2vec_run_helper import (
PubMedSentencesIterator,
PubtatorTarIterator,
chunks,
)
# -
# # Look at xml example
# Let's look at an example entry for each tagged document. Looks like it is
# regular BioCXML format which is good for processing.

# Move up a level in the repository, enter the folder with all the pubmed
# abstracts, and grab all files with the .gz extension for processing.
pubtator_abstract_batch = list(Path("../pubtator_abstracts").rglob("*.gz"))
print(len(pubtator_abstract_batch))

# Print the second passage of the first document of the first batch as
# pretty-printed XML, then stop (both breaks exit after one example).
for batch_directory in pubtator_abstract_batch:
    for doc_obj in PubtatorTarIterator(batch_directory, return_ibatch_file=False):
        passages = doc_obj.xpath("//passage")
        lxml_str = ET.tostring(passages[1], pretty_print=True)
        print(lxml_str.decode("utf-8"))
        break
    break
# # Grab Document Metadata

# Extract per-document metadata (identifiers, section headers, publication
# year) from every batch into a compressed TSV. Skipped entirely if the
# output file already exists.
if not Path("output/pmc_metadata.tsv.xz").exists():
    with lzma.open("output/pmc_metadata.tsv.xz", "wt") as outfile:
        writer = csv.DictWriter(
            outfile,
            fieldnames=[
                "batch_folder",
                "batch_file",
                "doc_id",
                "doi",
                "pmc",
                "pmid",
                "section",
                "published_year",
            ],
            delimiter="\t",
        )
        writer.writeheader()

        # Cycle through each batch file
        for batch_directory in pubtator_abstract_batch:
            # Cycle through each document
            for file_name, doc_obj in tqdm.tqdm(
                PubtatorTarIterator(batch_directory, return_ibatch_file=True)
            ):
                # Each xpath returns a (possibly empty) list of text nodes.
                doc_id = doc_obj.xpath("id/text()")
                doi = doc_obj.xpath("passage/infon[@key='article-id_doi']/text()")
                pmc_id = doc_obj.xpath("passage/infon[@key='article-id_pmc']/text()")
                pmid = doc_obj.xpath("passage/infon[@key='article-id_pmid']/text()")
                sections = doc_obj.xpath("passage/infon[@key='section_type']/text()")
                section_type = doc_obj.xpath("passage/infon[@key='type']/text()")
                year = doc_obj.xpath("passage/infon[@key='year']/text()")

                # Deduplicate + sort headers; '|' joins multiple sections.
                section_headers = "|".join(sorted(list(set(sections))))
                section_headers_type = "|".join(
                    sorted(list(set(map(lambda x: x.upper(), section_type))))
                )

                writer.writerow(
                    {
                        "batch_folder": batch_directory.name,
                        "batch_file": file_name,
                        "doc_id": doc_id[0],
                        "doi": doi[0] if len(doi) > 0 else "",
                        "pmc": pmc_id[0] if len(pmc_id) > 0 else "",
                        "pmid": pmid[0] if len(pmid) > 0 else "",
                        # Fall back to the generic 'type' infon when no
                        # explicit 'section_type' infons are present.
                        "section": section_headers
                        if len(section_headers) > 0
                        else section_headers_type,
                        "published_year": year[0]
                        if len(year) > 0
                        else 0,  # Shouldn't get to the else statement
                    }
                )
# # Analyze Abstract/Full Text Dataset

# NOTE(review): this reads pmc_metadata_2.tsv.xz while the extraction cell
# above writes pmc_metadata.tsv.xz — confirm which file is current.
pubtator_central_metadata_df = pd.read_csv("output/pmc_metadata_2.tsv.xz", sep="\t")
print(pubtator_central_metadata_df.shape)
pubtator_central_metadata_df.head()

# ## Sanity Check the data

# Sanity check that all documents have a published year
(pubtator_central_metadata_df.query("published_year.isnull()").shape)

# Do all documents have an id?
(pubtator_central_metadata_df.query("doc_id.isnull()").shape)

# Do all documents have a pmid?
(pubtator_central_metadata_df.query("pmid.isnull()").shape)

# Do all documents have a pmc id?
(pubtator_central_metadata_df.query("pmc.isnull()").shape)

# Do all documents have a doi?
(pubtator_central_metadata_df.query("doi.isnull()").shape)

# ## Published Year Distribution

(pubtator_central_metadata_df.sort_values("published_year").published_year.unique())

# Count documents per publication year.
doc_count_df = (
    pubtator_central_metadata_df.groupby("published_year")
    .agg({"published_year": "size"})
    .rename(index=str, columns={"published_year": "doc_count"})
    .reset_index()
    .astype({"published_year": int, "doc_count": int})
)
doc_count_df.head()

# Histogram of documents published before 1950 (year 0 = unknown, excluded).
g = (
    p9.ggplot(
        doc_count_df.query("published_year > 0& published_year < 1950"),
        p9.aes(x="published_year", y="doc_count"),
    )
    + p9.geom_col(position=p9.position_dodge(width=0.9), fill="#1f78b4")
    + p9.labs(
        title="Number of Documents Pre 1950", x="Publication Year", y="Document Count"
    )
)
g.save("output/figures/pre_1950_doc_count.png", dpi=500)
print(g)

# Histogram of documents published 1950 and later.
g = (
    p9.ggplot(
        doc_count_df.query("published_year >= 1950"),
        p9.aes(x="published_year", y="doc_count"),
    )
    + p9.geom_col(position=p9.position_dodge(width=0.9), fill="#1f78b4")
    + p9.labs(
        title="Number of Documents Post 1950", x="Publication Year", y="Document Count"
    )
)
g.save("output/figures/post_1950_doc_count.png", dpi=500)
print(g)

# Restrict to full-text documents (rows whose section field contains INTRO)
# and count those per publication year.
full_text_doc_count_df = (
    pubtator_central_metadata_df[
        pubtator_central_metadata_df.apply(lambda x: "INTRO" in x.section, axis=1)
    ]
    .groupby("published_year")
    .agg({"published_year": "size"})
    .rename(index=str, columns={"published_year": "doc_count"})
    .reset_index()
    .astype({"published_year": int, "doc_count": int})
)
full_text_doc_count_df.head()

g = (
    p9.ggplot(
        full_text_doc_count_df.query("published_year > 0& published_year < 2000"),
        p9.aes(x="published_year", y="doc_count"),
    )
    + p9.geom_col(position=p9.position_dodge(width=0.9), fill="#1f78b4")
    + p9.labs(
        title="Number of Full Text Documents Pre 2000",
        x="Publication Year",
        y="Document Count",
    )
)
g.save("output/figures/pre_2000_full_text_doc_count.png", dpi=500)
print(g)

g = (
    p9.ggplot(
        full_text_doc_count_df.query("published_year >= 2000"),
        p9.aes(x="published_year", y="doc_count"),
    )
    + p9.geom_col(position=p9.position_dodge(width=0.9), fill="#1f78b4")
    + p9.labs(
        title="Number of Full Text Documents Post 2000",
        x="Publication Year",
        y="Document Count",
    )
)
g.save("output/figures/post_2000_full_text_doc_count.png", dpi=500)
print(g)
# # Shared Tokens Across Time - Abstract Only

# Per-year token frequency Counters built from abstract sentences
# (years 1990 .. current year).
tokens_by_year = defaultdict(Counter)
sentence_iterator = PubMedSentencesIterator(
    pubtator_abstract_batch,
    year_filter=list(range(1990, datetime.now().year + 1, 1)),
    return_year=True,
    tag_entities=False,
    jobs=3,
)
# Only run the (expensive) counting pass when no cached pickle exists;
# the second identical check below then dumps the fresh counts, otherwise
# the cached counts are loaded.
if not Path("output/unique_tokens_by_year_replace.pkl").exists():
    for year, sentence in tqdm.tqdm(sentence_iterator):
        tokens_by_year[year].update(Counter(sentence))

if not Path("output/unique_tokens_by_year_replace.pkl").exists():
    pickle.dump(tokens_by_year, open("output/unique_tokens_by_year_replace.pkl", "wb"))
else:
    tokens_by_year = pickle.load(open("output/unique_tokens_by_year_replace.pkl", "rb"))

# ## Unique Tokens Available per Year

# +
data_rows = []
for query_year in tokens_by_year:
    data_rows.append(
        {
            "year": query_year,
            "num_tokens": len(tokens_by_year[query_year]),
        }
    )
# -

unique_token_df = pd.DataFrame.from_records(data_rows)
unique_token_df

g = (
    p9.ggplot(unique_token_df, p9.aes(x="year", y="num_tokens"))
    + p9.geom_col(fill="#1f78b4")
    + p9.coord_flip()
    + p9.labs(
        title="Number of Abstract Tokens Available Post 1990",
        x="Year",
        y="# Unique Tokens",
    )
)
g.save("output/figures/post_1990_unique_tokens_abstracts.png", dpi=500)
print(g)

# ## Shared tokens across years

# +
# For each earlier year, compute the fraction of the 2020-21 vocabulary
# that also appears in that year's vocabulary.
data_rows = []
reversed_tokens = list(sorted(tokens_by_year.keys()))[::-1]
all_tokens = set(tokens_by_year[2021].keys()) | set(tokens_by_year[2020].keys())
for query_year in reversed_tokens[1:]:
    query_year_vocab_set = set(tokens_by_year[query_year].keys())
    tokens_matched = all_tokens & query_year_vocab_set
    data_rows.append(
        {
            "years": str(query_year) if query_year != 2020 else "2020-21",
            "percentage_tokens_mapped": len(tokens_matched) / len(all_tokens),
            "num_tokens_matched": len(tokens_matched),
            "num_tokens_total": len(all_tokens),
        }
    )
# -

token_overlap_df = pd.DataFrame.from_dict(data_rows)
token_overlap_df

g = (
    p9.ggplot(
        token_overlap_df.iloc[1:, :], p9.aes(x="years", y="percentage_tokens_mapped")
    )
    + p9.geom_col(fill="#1f78b4")
    + p9.coord_flip()
    + p9.labs(
        title="Token Overlap with 2020-2021 Abstracts",
        x="Year",
        y="Fraction of Tokens Overlapped",
    )
)
g.save("output/figures/tokens_overlap_with_2020-21_abstracts.png", dpi=500)
print(g)
# # Shared Tokens Across Time - Full Text Only

# Grab sentences within the full text documents
# (same pipeline as the abstract-only analysis above, but restricted to
# full-text sections via section_filter).
tokens_by_year_full_text = defaultdict(Counter)
sentence_iterator = PubMedSentencesIterator(
    pubtator_abstract_batch,
    section_filter=["INTRO", "METHODS", "RESULTS", "DISCUSS", "CONCL", "SUPPL"],
    year_filter=list(range(1990, datetime.now().year + 1, 1)),
    return_year=True,
    tag_entities=False,
    jobs=3,
)
# Only run the (expensive) counting pass when no cached pickle exists;
# the second identical check then dumps fresh counts, otherwise the
# cached counts are loaded.
if not Path("output/unique_tokens_by_year_full_text_replace.pkl").exists():
    for year, sentence in tqdm.tqdm(sentence_iterator):
        tokens_by_year_full_text[year].update(Counter(sentence))

if not Path("output/unique_tokens_by_year_full_text_replace.pkl").exists():
    pickle.dump(
        tokens_by_year_full_text,
        open("output/unique_tokens_by_year_full_text_replace.pkl", "wb"),
    )
else:
    tokens_by_year_full_text = pickle.load(
        open("output/unique_tokens_by_year_full_text_replace.pkl", "rb")
    )

# ## Unique Tokens Available per Year

# +
data_rows = []
for query_year in tokens_by_year_full_text:
    data_rows.append(
        {
            "year": query_year,
            "num_tokens": len(tokens_by_year_full_text[query_year]),
        }
    )
# -

unique_token_full_text_df = pd.DataFrame.from_records(data_rows)
unique_token_full_text_df

g = (
    p9.ggplot(unique_token_full_text_df, p9.aes(x="year", y="num_tokens"))
    + p9.geom_col(fill="#1f78b4")
    + p9.coord_flip()
    + p9.labs(
        title="Number of Full Text Tokens Available Post 1990",
        x="Year",
        y="# Unique Tokens",
    )
)
g.save("output/figures/post_1990_unique_tokens_full_text.png", dpi=500)
print(g)

# ## Shared tokens across years

# +
# For each earlier year, compute the fraction of the 2020-21 full-text
# vocabulary that also appears in that year's vocabulary.
data_rows = []
reversed_tokens = list(sorted(tokens_by_year_full_text.keys()))[::-1]
all_tokens = set(tokens_by_year_full_text[2021].keys()) | set(
    tokens_by_year_full_text[2020].keys()
)
for query_year in reversed_tokens[1:]:
    query_year_vocab_set = set(tokens_by_year_full_text[query_year].keys())
    tokens_matched = all_tokens & query_year_vocab_set
    data_rows.append(
        {
            "years": str(query_year) if query_year != 2020 else "2020-21",
            "percentage_tokens_mapped": len(tokens_matched) / len(all_tokens),
            "num_tokens_matched": len(tokens_matched),
            "num_tokens_total": len(all_tokens),
        }
    )
# -

token_overlap_full_text_df = pd.DataFrame.from_dict(data_rows)
token_overlap_full_text_df

g = (
    p9.ggplot(
        token_overlap_full_text_df.iloc[1:, :],
        p9.aes(x="years", y="percentage_tokens_mapped"),
    )
    + p9.geom_col(fill="#1f78b4")
    + p9.coord_flip()
    + p9.labs(
        title="Token Overlap with 2020-2021 Full Text",
        x="Year",
        y="Fraction of Tokens Overlapped",
    )
)
g.save("output/figures/tokens_overlap_with_2020-21_full_text.png", dpi=500)
print(g)
|
[
"lzma.open",
"tqdm.tqdm",
"plotnine.coord_flip",
"pandas.DataFrame.from_dict",
"plotnine.geom_col",
"pandas.read_csv",
"plotnine.labs",
"collections.Counter",
"collections.defaultdict",
"biovectors_modules.word2vec_run_helper.PubtatorTarIterator",
"pathlib.Path",
"pandas.DataFrame.from_records",
"plotnine.position_dodge",
"lxml.etree.tostring",
"plotnine.aes",
"datetime.datetime.now",
"csv.DictWriter"
] |
[((4264, 4317), 'pandas.read_csv', 'pd.read_csv', (['"""output/pmc_metadata_2.tsv.xz"""'], {'sep': '"""\t"""'}), "('output/pmc_metadata_2.tsv.xz', sep='\\t')\n", (4275, 4317), True, 'import pandas as pd\n'), ((7435, 7455), 'collections.defaultdict', 'defaultdict', (['Counter'], {}), '(Counter)\n', (7446, 7455), False, 'from collections import defaultdict, Counter\n'), ((8338, 8374), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (['data_rows'], {}), '(data_rows)\n', (8363, 8374), True, 'import pandas as pd\n'), ((9419, 9452), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['data_rows'], {}), '(data_rows)\n', (9441, 9452), True, 'import pandas as pd\n'), ((9990, 10010), 'collections.defaultdict', 'defaultdict', (['Counter'], {}), '(Counter)\n', (10001, 10010), False, 'from collections import defaultdict, Counter\n'), ((11111, 11147), 'pandas.DataFrame.from_records', 'pd.DataFrame.from_records', (['data_rows'], {}), '(data_rows)\n', (11136, 11147), True, 'import pandas as pd\n'), ((12269, 12302), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['data_rows'], {}), '(data_rows)\n', (12291, 12302), True, 'import pandas as pd\n'), ((1613, 1675), 'biovectors_modules.word2vec_run_helper.PubtatorTarIterator', 'PubtatorTarIterator', (['batch_directory'], {'return_ibatch_file': '(False)'}), '(batch_directory, return_ibatch_file=False)\n', (1632, 1675), False, 'from biovectors_modules.word2vec_run_helper import PubMedSentencesIterator, PubtatorTarIterator, chunks\n'), ((5548, 5640), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Number of Documents Pre 1950"""', 'x': '"""Publication Year"""', 'y': '"""Document Count"""'}), "(title='Number of Documents Pre 1950', x='Publication Year', y=\n 'Document Count')\n", (5555, 5640), True, 'import plotnine as p9\n'), ((5930, 6023), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Number of Documents Post 1950"""', 'x': '"""Publication Year"""', 'y': '"""Document Count"""'}), "(title='Number of Documents 
Post 1950', x='Publication Year', y=\n 'Document Count')\n", (5937, 6023), True, 'import plotnine as p9\n'), ((6732, 6834), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Number of Full Text Documents Pre 2000"""', 'x': '"""Publication Year"""', 'y': '"""Document Count"""'}), "(title='Number of Full Text Documents Pre 2000', x=\n 'Publication Year', y='Document Count')\n", (6739, 6834), True, 'import plotnine as p9\n'), ((7161, 7264), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Number of Full Text Documents Post 2000"""', 'x': '"""Publication Year"""', 'y': '"""Document Count"""'}), "(title='Number of Full Text Documents Post 2000', x=\n 'Publication Year', y='Document Count')\n", (7168, 7264), True, 'import plotnine as p9\n'), ((7746, 7774), 'tqdm.tqdm', 'tqdm.tqdm', (['sentence_iterator'], {}), '(sentence_iterator)\n', (7755, 7774), False, 'import tqdm\n'), ((8525, 8623), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Number of Abstract Tokens Available Post 1990"""', 'x': '"""Year"""', 'y': '"""# Unique Tokens"""'}), "(title='Number of Abstract Tokens Available Post 1990', x='Year', y=\n '# Unique Tokens')\n", (8532, 8623), True, 'import plotnine as p9\n'), ((9646, 9751), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Token Overlap with 2020-2021 Abstracts"""', 'x': '"""Year"""', 'y': '"""Fraction of Tokens Overlapped"""'}), "(title='Token Overlap with 2020-2021 Abstracts', x='Year', y=\n 'Fraction of Tokens Overlapped')\n", (9653, 9751), True, 'import plotnine as p9\n'), ((10392, 10420), 'tqdm.tqdm', 'tqdm.tqdm', (['sentence_iterator'], {}), '(sentence_iterator)\n', (10401, 10420), False, 'import tqdm\n'), ((11318, 11417), 'plotnine.labs', 'p9.labs', ([], {'title': '"""Number of Full Text Tokens Available Post 1990"""', 'x': '"""Year"""', 'y': '"""# Unique Tokens"""'}), "(title='Number of Full Text Tokens Available Post 1990', x='Year', y\n ='# Unique Tokens')\n", (11325, 11417), True, 'import plotnine as p9\n'), ((12525, 12630), 'plotnine.labs', 'p9.labs', 
([], {'title': '"""Token Overlap with 2020-2021 Full Text"""', 'x': '"""Year"""', 'y': '"""Fraction of Tokens Overlapped"""'}), "(title='Token Overlap with 2020-2021 Full Text', x='Year', y=\n 'Fraction of Tokens Overlapped')\n", (12532, 12630), True, 'import plotnine as p9\n'), ((1742, 1785), 'lxml.etree.tostring', 'ET.tostring', (['passages[1]'], {'pretty_print': '(True)'}), '(passages[1], pretty_print=True)\n', (1753, 1785), True, 'import lxml.etree as ET\n'), ((1940, 1985), 'lzma.open', 'lzma.open', (['"""output/pmc_metadata.tsv.xz"""', '"""wt"""'], {}), "('output/pmc_metadata.tsv.xz', 'wt')\n", (1949, 1985), False, 'import lzma\n'), ((2015, 2162), 'csv.DictWriter', 'csv.DictWriter', (['outfile'], {'fieldnames': "['batch_folder', 'batch_file', 'doc_id', 'doi', 'pmc', 'pmid', 'section',\n 'published_year']", 'delimiter': '"""\t"""'}), "(outfile, fieldnames=['batch_folder', 'batch_file', 'doc_id',\n 'doi', 'pmc', 'pmid', 'section', 'published_year'], delimiter='\\t')\n", (2029, 2162), False, 'import csv\n'), ((8503, 8518), 'plotnine.coord_flip', 'p9.coord_flip', ([], {}), '()\n', (8516, 8518), True, 'import plotnine as p9\n'), ((9624, 9639), 'plotnine.coord_flip', 'p9.coord_flip', ([], {}), '()\n', (9637, 9639), True, 'import plotnine as p9\n'), ((11296, 11311), 'plotnine.coord_flip', 'p9.coord_flip', ([], {}), '()\n', (11309, 11311), True, 'import plotnine as p9\n'), ((12503, 12518), 'plotnine.coord_flip', 'p9.coord_flip', ([], {}), '()\n', (12516, 12518), True, 'import plotnine as p9\n'), ((1464, 1493), 'pathlib.Path', 'Path', (['"""../pubtator_abstracts"""'], {}), "('../pubtator_abstracts')\n", (1468, 1493), False, 'from pathlib import Path\n'), ((1886, 1920), 'pathlib.Path', 'Path', (['"""output/pmc_metadata.tsv.xz"""'], {}), "('output/pmc_metadata.tsv.xz')\n", (1890, 1920), False, 'from pathlib import Path\n'), ((5420, 5461), 'plotnine.aes', 'p9.aes', ([], {'x': '"""published_year"""', 'y': '"""doc_count"""'}), "(x='published_year', y='doc_count')\n", (5426, 
5461), True, 'import plotnine as p9\n'), ((5802, 5843), 'plotnine.aes', 'p9.aes', ([], {'x': '"""published_year"""', 'y': '"""doc_count"""'}), "(x='published_year', y='doc_count')\n", (5808, 5843), True, 'import plotnine as p9\n'), ((6604, 6645), 'plotnine.aes', 'p9.aes', ([], {'x': '"""published_year"""', 'y': '"""doc_count"""'}), "(x='published_year', y='doc_count')\n", (6610, 6645), True, 'import plotnine as p9\n'), ((7033, 7074), 'plotnine.aes', 'p9.aes', ([], {'x': '"""published_year"""', 'y': '"""doc_count"""'}), "(x='published_year', y='doc_count')\n", (7039, 7074), True, 'import plotnine as p9\n'), ((7661, 7709), 'pathlib.Path', 'Path', (['"""output/unique_tokens_by_year_replace.pkl"""'], {}), "('output/unique_tokens_by_year_replace.pkl')\n", (7665, 7709), False, 'from pathlib import Path\n'), ((7812, 7829), 'collections.Counter', 'Counter', (['sentence'], {}), '(sentence)\n', (7819, 7829), False, 'from collections import defaultdict, Counter\n'), ((7839, 7887), 'pathlib.Path', 'Path', (['"""output/unique_tokens_by_year_replace.pkl"""'], {}), "('output/unique_tokens_by_year_replace.pkl')\n", (7843, 7887), False, 'from pathlib import Path\n'), ((8469, 8496), 'plotnine.geom_col', 'p9.geom_col', ([], {'fill': '"""#1f78b4"""'}), "(fill='#1f78b4')\n", (8480, 8496), True, 'import plotnine as p9\n'), ((9590, 9617), 'plotnine.geom_col', 'p9.geom_col', ([], {'fill': '"""#1f78b4"""'}), "(fill='#1f78b4')\n", (9601, 9617), True, 'import plotnine as p9\n'), ((10297, 10355), 'pathlib.Path', 'Path', (['"""output/unique_tokens_by_year_full_text_replace.pkl"""'], {}), "('output/unique_tokens_by_year_full_text_replace.pkl')\n", (10301, 10355), False, 'from pathlib import Path\n'), ((10468, 10485), 'collections.Counter', 'Counter', (['sentence'], {}), '(sentence)\n', (10475, 10485), False, 'from collections import defaultdict, Counter\n'), ((10495, 10553), 'pathlib.Path', 'Path', (['"""output/unique_tokens_by_year_full_text_replace.pkl"""'], {}), 
"('output/unique_tokens_by_year_full_text_replace.pkl')\n", (10499, 10553), False, 'from pathlib import Path\n'), ((11262, 11289), 'plotnine.geom_col', 'p9.geom_col', ([], {'fill': '"""#1f78b4"""'}), "(fill='#1f78b4')\n", (11273, 11289), True, 'import plotnine as p9\n'), ((12469, 12496), 'plotnine.geom_col', 'p9.geom_col', ([], {'fill': '"""#1f78b4"""'}), "(fill='#1f78b4')\n", (12480, 12496), True, 'import plotnine as p9\n'), ((2583, 2644), 'biovectors_modules.word2vec_run_helper.PubtatorTarIterator', 'PubtatorTarIterator', (['batch_directory'], {'return_ibatch_file': '(True)'}), '(batch_directory, return_ibatch_file=True)\n', (2602, 2644), False, 'from biovectors_modules.word2vec_run_helper import PubMedSentencesIterator, PubtatorTarIterator, chunks\n'), ((5496, 5524), 'plotnine.position_dodge', 'p9.position_dodge', ([], {'width': '(0.9)'}), '(width=0.9)\n', (5513, 5524), True, 'import plotnine as p9\n'), ((5878, 5906), 'plotnine.position_dodge', 'p9.position_dodge', ([], {'width': '(0.9)'}), '(width=0.9)\n', (5895, 5906), True, 'import plotnine as p9\n'), ((6680, 6708), 'plotnine.position_dodge', 'p9.position_dodge', ([], {'width': '(0.9)'}), '(width=0.9)\n', (6697, 6708), True, 'import plotnine as p9\n'), ((7109, 7137), 'plotnine.position_dodge', 'p9.position_dodge', ([], {'width': '(0.9)'}), '(width=0.9)\n', (7126, 7137), True, 'import plotnine as p9\n'), ((8429, 8461), 'plotnine.aes', 'p9.aes', ([], {'x': '"""year"""', 'y': '"""num_tokens"""'}), "(x='year', y='num_tokens')\n", (8435, 8461), True, 'import plotnine as p9\n'), ((9530, 9577), 'plotnine.aes', 'p9.aes', ([], {'x': '"""years"""', 'y': '"""percentage_tokens_mapped"""'}), "(x='years', y='percentage_tokens_mapped')\n", (9536, 9577), True, 'import plotnine as p9\n'), ((11222, 11254), 'plotnine.aes', 'p9.aes', ([], {'x': '"""year"""', 'y': '"""num_tokens"""'}), "(x='year', y='num_tokens')\n", (11228, 11254), True, 'import plotnine as p9\n'), ((12408, 12455), 'plotnine.aes', 'p9.aes', ([], {'x': 
'"""years"""', 'y': '"""percentage_tokens_mapped"""'}), "(x='years', y='percentage_tokens_mapped')\n", (12414, 12455), True, 'import plotnine as p9\n'), ((7563, 7577), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7575, 7577), False, 'from datetime import datetime\n'), ((10199, 10213), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10211, 10213), False, 'from datetime import datetime\n')]
|
# import libraries
import requests, shelve
from bs4 import BeautifulSoup
def getKDA(id):
    """Fetch a player's KDA record from their Dotabuff profile page.

    Args:
        id: Dotabuff numeric player id (anything ``str()``-able).

    Returns:
        str: text of the profile page's ``kda-record`` span.

    Raises:
        requests.HTTPError: if Dotabuff responds with an error status.
        AttributeError: if the page has no ``kda-record`` element.
    """
    quote_page = 'https://www.dotabuff.com/players/' + str(id)
    # Dotabuff rejects requests without a User-Agent header.
    page = requests.get(quote_page, headers={'User-agent': 'my bot 0.1'})
    # Fail loudly on 4xx/5xx instead of scraping an error page
    # (the original evaluated page.status_code as a no-op statement).
    page.raise_for_status()
    soup = BeautifulSoup(page.content, features="html.parser")
    kda = soup.find('span', attrs={'class': 'kda-record'})
    return kda.text
|
[
"bs4.BeautifulSoup",
"requests.get"
] |
[((167, 229), 'requests.get', 'requests.get', (['quote_page'], {'headers': "{'User-agent': 'my bot 0.1'}"}), "(quote_page, headers={'User-agent': 'my bot 0.1'})\n", (179, 229), False, 'import requests, shelve\n'), ((265, 316), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page.content'], {'features': '"""html.parser"""'}), "(page.content, features='html.parser')\n", (278, 316), False, 'from bs4 import BeautifulSoup\n')]
|
from bisect import insort
from collections import Counter
from hashlib import md5
class KetamaRing:
    """Implement a ketama compatible consistent hashing ring."""

    def __init__(self):
        """Create a new, empty HashRing."""
        self._distribution = Counter()
        self._keys = []
        self._nodes = {}
        self._replicas = 4
        self._ring = {}
        # On Python 3 the md5 digest already iterates as ints; the
        # staticmethod below remains as the Python 2 fallback.
        self._listbytes = lambda x: x

    def hashi(self, key, replica=0):
        """Returns a ketama compatible hash from the given key."""
        digest = self._listbytes(md5(str(key).encode("utf-8")).digest())
        offset = replica * 4
        # Little-endian 32-bit word taken from 4 bytes of the digest.
        return (
            (digest[offset + 3] << 24)
            | (digest[offset + 2] << 16)
            | (digest[offset + 1] << 8)
            | digest[offset]
        )

    def _hashi_weight_generator(self, node_name, node_conf):
        """Calculate the weight factor of the given node and
        yield its hash key for every configured replica.

        :param node_name: the node name.
        """
        factor = (
            node_conf["vnodes"] * len(self._nodes) * node_conf["weight"]
        ) // self._weight_sum
        for vnode_index in range(factor):
            vnode_name = f"{node_name}-{vnode_index}"
            for replica_index in range(self._replicas):
                yield self.hashi(vnode_name, replica=replica_index)

    @staticmethod
    def _listbytes(data):
        """Python 2 compatible int iterator from str.

        :param data: the string to int iterate upon.
        """
        return map(ord, data)

    def _create_ring(self, nodes):
        """Generate a ketama compatible continuum/ring."""
        # The generator above divides by the weight sum, so compute it first.
        self._weight_sum = sum(
            node_conf["weight"] for node_conf in self._nodes.values()
        )
        distribution = Counter()
        keys = []
        ring = {}
        for node_name, node_conf in self._nodes.items():
            for hash_key in self._hashi_weight_generator(node_name, node_conf):
                ring[hash_key] = node_name
                insort(keys, hash_key)
                distribution[node_name] += 1
        self._distribution = distribution
        self._keys = keys
        self._ring = ring

    def _remove_node(self, node_name):
        """Remove the given node from the continuum/ring.

        :param node_name: the node name.
        """
        try:
            self._nodes.pop(node_name)
        except Exception:
            raise KeyError(
                "node '{}' not found, available nodes: {}".format(
                    node_name, self._nodes.keys()
                )
            )
        else:
            self._create_ring(self._nodes)
|
[
"collections.Counter",
"bisect.insort"
] |
[((258, 267), 'collections.Counter', 'Counter', ([], {}), '()\n', (265, 267), False, 'from collections import Counter\n'), ((1698, 1707), 'collections.Counter', 'Counter', ([], {}), '()\n', (1705, 1707), False, 'from collections import Counter\n'), ((1929, 1945), 'bisect.insort', 'insort', (['_keys', 'h'], {}), '(_keys, h)\n', (1935, 1945), False, 'from bisect import insort\n')]
|
import codecs
import os
from multiprocessing.pool import Pool
import soundfile as sf
import librosa
import sys
from pydub import AudioSegment
def detect_leading_silence(sound, silence_threshold=-50.0, chunk_size=10):
    """Return the length (in ms) of the silent lead-in of *sound*.

    Scans *sound* (a pydub.AudioSegment) in steps of ``chunk_size``
    milliseconds and stops at the first chunk whose loudness reaches
    ``silence_threshold`` dBFS, or once the whole segment is consumed.
    """
    assert chunk_size > 0  # a non-positive step would never terminate
    position = 0  # milliseconds of silence consumed so far
    while True:
        chunk = sound[position:position + chunk_size]
        if not (chunk.dBFS < silence_threshold and position < len(sound)):
            break
        position += chunk_size
    return position
def convert_to_16k(path_file):
    """Trim leading/trailing silence from one wav file and re-export it.

    NOTE(review): despite the name, both outputs are written at 22050 Hz,
    not 16 kHz -- presumably the function kept its old name after a
    sample-rate change; confirm the intended rate with the pipeline.

    Writes two files mirroring the input path:
      * .../speech_data_22050/...   - trimmed, mono, 22050 Hz (via pydub)
      * .../speech_data_22050_1/... - same audio reloaded and rewritten as
                                      16-bit PCM via librosa/soundfile
    """
    in_path = path_file
    out_path = path_file.replace("/speech_data/", "/speech_data_22050/")
    out_folder = os.path.dirname(out_path)
    os.makedirs(out_folder, exist_ok=True)
    sound = AudioSegment.from_file(in_path, format="wav")
    # Downmix to mono and force the 22050 Hz frame rate.
    sound = sound.set_frame_rate(22050).set_channels(1)
    # reverse() lets us reuse the leading-silence detector for the tail.
    start_trim = detect_leading_silence(sound)
    end_trim = detect_leading_silence(sound.reverse())
    duration = len(sound)
    trimmed_sound = sound[start_trim:duration - end_trim]
    trimmed_sound.export(out_path, format="wav")
    out_path_2 = path_file.replace("/speech_data/", "/speech_data_22050_1/")
    out_folder_2 = os.path.dirname(out_path_2)
    os.makedirs(out_folder_2, exist_ok=True)
    y, s = librosa.load(out_path, sr=22050)
    # NOTE(review): resampling from 22050 to 22050 is a no-op; either this
    # target rate or the load rate above looks unintended -- confirm.
    y_16k = librosa.resample(y, s, 22050)
    sf.write(out_path_2, y_16k, 22050, format='WAV', subtype='PCM_16')
if __name__ == '__main__':
    # Usage: python this_script.py <folder with manifest .lst files>
    input_folder = sys.argv[1]
    files = os.listdir(input_folder)
    for file in files:
        lst_file = os.path.join(input_folder, file)
        # 'with' guarantees the handle is closed (the original leaked one
        # open file per manifest).
        with codecs.open(lst_file, "r", encoding="utf-8") as f_in:
            data = f_in.readlines()
        # Map manifest paths ("path|transcript|...") to their location on
        # this machine.
        audio_names = []
        for line in data:
            info = line.split("|")[0]
            audio_name = info.replace("/root/src/data", "/media/fit/storage5/VietAnh")
            audio_names.append(audio_name)
        # Convert in parallel; the context manager terminates the workers
        # (the original spawned a new, never-closed Pool per manifest).
        with Pool(20) as pool:
            pool.map(convert_to_16k, audio_names)
|
[
"multiprocessing.pool.Pool",
"os.makedirs",
"codecs.open",
"os.path.dirname",
"librosa.resample",
"librosa.load",
"soundfile.write",
"pydub.AudioSegment.from_file",
"os.path.join",
"os.listdir"
] |
[((745, 770), 'os.path.dirname', 'os.path.dirname', (['out_path'], {}), '(out_path)\n', (760, 770), False, 'import os\n'), ((775, 813), 'os.makedirs', 'os.makedirs', (['out_folder'], {'exist_ok': '(True)'}), '(out_folder, exist_ok=True)\n', (786, 813), False, 'import os\n'), ((826, 871), 'pydub.AudioSegment.from_file', 'AudioSegment.from_file', (['in_path'], {'format': '"""wav"""'}), "(in_path, format='wav')\n", (848, 871), False, 'from pydub import AudioSegment\n'), ((1260, 1287), 'os.path.dirname', 'os.path.dirname', (['out_path_2'], {}), '(out_path_2)\n', (1275, 1287), False, 'import os\n'), ((1292, 1332), 'os.makedirs', 'os.makedirs', (['out_folder_2'], {'exist_ok': '(True)'}), '(out_folder_2, exist_ok=True)\n', (1303, 1332), False, 'import os\n'), ((1344, 1376), 'librosa.load', 'librosa.load', (['out_path'], {'sr': '(22050)'}), '(out_path, sr=22050)\n', (1356, 1376), False, 'import librosa\n'), ((1389, 1418), 'librosa.resample', 'librosa.resample', (['y', 's', '(22050)'], {}), '(y, s, 22050)\n', (1405, 1418), False, 'import librosa\n'), ((1423, 1489), 'soundfile.write', 'sf.write', (['out_path_2', 'y_16k', '(22050)'], {'format': '"""WAV"""', 'subtype': '"""PCM_16"""'}), "(out_path_2, y_16k, 22050, format='WAV', subtype='PCM_16')\n", (1431, 1489), True, 'import soundfile as sf\n'), ((1561, 1585), 'os.listdir', 'os.listdir', (['input_folder'], {}), '(input_folder)\n', (1571, 1585), False, 'import os\n'), ((1628, 1660), 'os.path.join', 'os.path.join', (['input_folder', 'file'], {}), '(input_folder, file)\n', (1640, 1660), False, 'import os\n'), ((1676, 1720), 'codecs.open', 'codecs.open', (['lst_file', '"""r"""'], {'encoding': '"""utf-8"""'}), "(lst_file, 'r', encoding='utf-8')\n", (1687, 1720), False, 'import codecs\n'), ((2022, 2030), 'multiprocessing.pool.Pool', 'Pool', (['(20)'], {}), '(20)\n', (2026, 2030), False, 'from multiprocessing.pool import Pool\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2019 The Blueoil Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import os
import six
import math
import click
import tensorflow as tf
import multiprocessing
from easydict import EasyDict
from lmnet.utils import executor, config as config_util
from lmnet.datasets.base import ObjectDetectionBase
from lmnet.datasets.dataset_iterator import DatasetIterator
from lmnet.datasets.tfds import TFDSClassification, TFDSObjectDetection
import ray
from ray.tune import run_experiments, register_trainable, Trainable
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest import HyperOptSearch
if six.PY2:
import subprocess32 as subprocess
else:
import subprocess
def subproc_call(cmd, timeout=None):
    """
    Execute a command with timeout, and return both STDOUT/STDERR.

    Args:
        cmd(str): the command to execute.
        timeout(float): timeout in seconds.

    Returns:
        output(bytes), retcode(int). If timeout, retcode is -1.
    """
    try:
        output = subprocess.check_output(
            cmd, stderr=subprocess.STDOUT,
            shell=True, timeout=timeout)
        return output, 0
    except subprocess.TimeoutExpired as e:
        print("Command '{}' timeout!".format(cmd))
        print(e.output.decode('utf-8'))
        return e.output, -1
    except subprocess.CalledProcessError as e:
        print("Command '{}' failed, return code={}".format(cmd, e.returncode))
        print(e.output.decode('utf-8'))
        return e.output, e.returncode
    except Exception:
        print("Command '{}' failed to run.".format(cmd))
        # Return bytes for consistency with the documented contract
        # (the original returned a str "" here).
        return b"", -2
def get_num_gpu():
    """
    Returns:
        int: #available GPUs in CUDA_VISIBLE_DEVICES, or in the system.
    """
    def _checked_count(count, message):
        # Warn when GPUs are advertised but this TF build cannot use them;
        # the count is still reported as-is.
        built_with_cuda = tf.test.is_built_with_cuda()
        if count > 0 and not built_with_cuda:
            print(message + "But TensorFlow was not built with CUDA support and could not use GPUs!")
        return count

    visible = os.environ.get('CUDA_VISIBLE_DEVICES', None)
    if visible:
        return _checked_count(len(visible.split(',')), "Found non-empty CUDA_VISIBLE_DEVICES. ")
    output, code = subproc_call("nvidia-smi -L", timeout=5)
    if code != 0:
        print('Not working for this one... But there are other methods you can try...')
        raise NotImplementedError
    gpu_lines = output.decode('utf-8').strip().split('\n')
    return _checked_count(len(gpu_lines), "Found nvidia-smi. ")
def get_best_trial(trial_list, metric):
    """Retrieve the best trial."""
    def _score(trial):
        # Trials that never reported the metric rank lowest.
        return trial.last_result.get(metric, 0)
    return max(trial_list, key=_score)
def trial_str_creator(trial):
    """Rename trial to shorter string"""
    # Combine the trainable name and the trial id into "<name>_<id>".
    short_name = "{}_{}".format(trial.trainable_name, trial.trial_id)
    return short_name
def get_best_result(trial_list, metric, param):
    """Retrieve the last result from the best trial.

    Returns:
        dict mapping *metric* and *param* to their values in the best
        trial's last reported result.
    """
    # Scan the trial list once instead of twice; max() is deterministic,
    # so this is behavior-preserving.
    best_result = get_best_trial(trial_list, metric).last_result
    return {metric: best_result[metric], param: best_result[param]}
def update_parameters_for_each_trial(network_kwargs, chosen_kwargs):
    """Update selected parameters to the configuration of each trial

    Merges the hyper-parameters sampled for one Ray Tune trial
    (``chosen_kwargs``) into the network keyword arguments
    (``network_kwargs``), which are mutated in place and also returned.
    """
    # The sampled optimizer dict carries the class under 'optimizer' plus
    # any optimizer hyper-parameters alongside it.
    network_kwargs['optimizer_class'] = chosen_kwargs['optimizer_class']['optimizer']
    for key in list(chosen_kwargs['optimizer_class'].keys()):
        if key != 'optimizer':
            network_kwargs['optimizer_kwargs'][key] = chosen_kwargs['optimizer_class'][key]
    network_kwargs['learning_rate_func'] = chosen_kwargs['learning_rate_func']['scheduler']
    base_lr = chosen_kwargs['learning_rate']
    if network_kwargs['learning_rate_func'] is tf.train.piecewise_constant:
        # Step schedule: derive the per-boundary values by repeatedly
        # applying the sampled decay factor to the base learning rate.
        lr_factor = chosen_kwargs['learning_rate_func']['scheduler_factor']
        network_kwargs['learning_rate_kwargs']['values'] = [base_lr,
                                                            base_lr * lr_factor,
                                                            base_lr * lr_factor * lr_factor,
                                                            base_lr * lr_factor * lr_factor * lr_factor]
        network_kwargs['learning_rate_kwargs']['boundaries'] = chosen_kwargs['learning_rate_func']['scheduler_steps']
    elif network_kwargs['learning_rate_func'] is tf.train.polynomial_decay:
        network_kwargs['learning_rate_kwargs']['learning_rate'] = base_lr
        network_kwargs['learning_rate_kwargs']['power'] = chosen_kwargs['learning_rate_func']['scheduler_power']
        network_kwargs['learning_rate_kwargs']['decay_steps'] = chosen_kwargs['learning_rate_func']['scheduler_decay']
    else:
        # Constant (or any other) schedule: only the base rate is needed.
        network_kwargs['learning_rate_kwargs']['learning_rate'] = base_lr
    # Weight decay is optional in the search space.
    if 'weight_decay_rate' in chosen_kwargs:
        network_kwargs['weight_decay_rate'] = chosen_kwargs['weight_decay_rate']
    return network_kwargs
def setup_dataset(config, subset, rank):
    """helper function from lmnet/train.py to setup the data iterator

    Args:
        config: experiment configuration (provides DATASET_CLASS/DATASET).
        subset: dataset split name, e.g. "train" or "validation".
        rank: worker rank; used to seed the iterator.

    Returns:
        DatasetIterator over the requested subset.
    """
    dataset_class = config.DATASET_CLASS
    dataset_kwargs = {key.lower(): val for key, val in config.DATASET.items()}
    # If there is a settings for TFDS, TFDS dataset class will be used.
    tfds_kwargs = dataset_kwargs.pop("tfds_kwargs", {})
    if tfds_kwargs:
        if issubclass(dataset_class, ObjectDetectionBase):
            dataset_class = TFDSObjectDetection
        else:
            dataset_class = TFDSClassification
    dataset = dataset_class(subset=subset, **dataset_kwargs, **tfds_kwargs)
    # TODO (Neil): Enable both train and validation
    # For some reasons processes are not terminated cleanly, enable prefetch ONLY for the train dataset.
    enable_prefetch = dataset_kwargs.pop("enable_prefetch", False) if subset == 'train' else False
    return DatasetIterator(dataset, seed=rank, enable_prefetch=enable_prefetch)
class TrainTunable(Trainable):
    """ TrainTunable class interfaces with Ray framework """
    def _setup(self, config):
        # Called once per trial: build the dataset iterators, the TF graph
        # and the session. `self.config` carries the path to the lmnet
        # config plus the hyper-parameters sampled for this trial.
        self.lm_config = config_util.load(self.config['lm_config'])
        executor.init_logging(self.lm_config)
        model_class = self.lm_config.NETWORK_CLASS
        network_kwargs = {key.lower(): val for key, val in self.lm_config.NETWORK.items()}
        # Overlay the sampled hyper-parameters onto the base network config.
        network_kwargs = update_parameters_for_each_trial(network_kwargs, self.config)
        # No distributed training was implemented, therefore rank set to 0
        self.train_dataset = setup_dataset(self.lm_config, "train", 0)
        self.validation_dataset = setup_dataset(self.lm_config, "validation", 0)
        # Detection and segmentation networks need extra dataset metadata
        # at construction time; classification needs only the class list.
        if model_class.__module__.startswith("lmnet.networks.object_detection"):
            model = model_class(
                classes=self.train_dataset.classes,
                num_max_boxes=self.train_dataset.num_max_boxes,
                is_debug=self.lm_config.IS_DEBUG,
                **network_kwargs,
            )
        elif model_class.__module__.startswith("lmnet.networks.segmentation"):
            model = model_class(
                classes=self.train_dataset.classes,
                label_colors=self.train_dataset.label_colors,
                is_debug=self.lm_config.IS_DEBUG,
                **network_kwargs,
            )
        else:
            model = model_class(
                classes=self.train_dataset.classes,
                is_debug=self.lm_config.IS_DEBUG,
                **network_kwargs,
            )
        self.global_step = tf.Variable(0, name="global_step", trainable=False)
        self.is_training_placeholder = tf.placeholder(tf.bool, name="is_training_placeholder")
        # NOTE: "placeholderes" is the (misspelled) lmnet model API name.
        self.images_placeholder, self.labels_placeholder = model.placeholderes()
        output = model.inference(self.images_placeholder, self.is_training_placeholder)
        # Detection losses additionally depend on the training flag.
        if model_class.__module__.startswith("lmnet.networks.object_detection"):
            loss = model.loss(output, self.labels_placeholder, self.is_training_placeholder)
        else:
            loss = model.loss(output, self.labels_placeholder)
        opt = model.optimizer(self.global_step)
        train_op = model.train(loss, opt, self.global_step)
        metrics_ops_dict, metrics_update_op = model.metrics(output, self.labels_placeholder)
        self.train_op = train_op
        self.metrics_ops_dict = metrics_ops_dict
        self.metrics_update_op = metrics_update_op
        init_op = tf.global_variables_initializer()
        # Metrics live in local variables; re-running this op resets them.
        self.reset_metrics_op = tf.local_variables_initializer()
        session_config = tf.ConfigProto(
            gpu_options=tf.GPUOptions(allow_growth=True))
        self.sess = tf.Session(config=session_config)
        self.sess.run([init_op, self.reset_metrics_op])
        self.iterations = 0
        self.saver = tf.train.Saver()
    def _train(self):
        # One Ray "training iteration" == one epoch of training followed
        # by a full pass over the validation set.
        step_per_epoch = int(self.train_dataset.num_per_epoch / self.lm_config.BATCH_SIZE)
        for _ in range(step_per_epoch):
            images, labels = self.train_dataset.feed()
            feed_dict = {
                self.is_training_placeholder: True,
                self.images_placeholder: images,
                self.labels_placeholder: labels,
            }
            self.sess.run([self.train_op], feed_dict=feed_dict)
        # Reset streaming metrics before accumulating over validation.
        self.sess.run(self.reset_metrics_op)
        test_step_size = int(math.ceil(self.validation_dataset.num_per_epoch / self.lm_config.BATCH_SIZE))
        for _ in range(test_step_size):
            images, labels = self.validation_dataset.feed()
            feed_dict = {
                self.is_training_placeholder: False,
                self.images_placeholder: images,
                self.labels_placeholder: labels,
            }
            self.sess.run([self.metrics_update_op], feed_dict=feed_dict)
        # Segmentation reports mean IoU; everything else reports accuracy.
        if self.lm_config.NETWORK_CLASS.__module__.startswith("lmnet.networks.segmentation"):
            metric_accuracy = self.sess.run(self.metrics_ops_dict["mean_iou"])
        else:
            metric_accuracy = self.sess.run(self.metrics_ops_dict["accuracy"])
        self.iterations += 1
        # "mean_accuracy" is the reward attribute used by the scheduler.
        return {"mean_accuracy": metric_accuracy}
    def _save(self, checkpoint_dir):
        # Ray checkpointing hook: persist the TF session state.
        return self.saver.save(
            self.sess, checkpoint_dir + "/save", global_step=self.iterations)
    def _restore(self, path):
        # Ray restore hook: reload a checkpoint written by _save().
        return self.saver.restore(self.sess, path)
def run(config_file, tunable_id, local_dir):
    """Run a Ray Tune hyper-parameter search for the given lmnet config.

    Args:
        config_file: path to the lmnet experiment config; must define
            TUNE_SPACE and TUNE_SPEC.
        tunable_id: name under which the Trainable is registered with Ray.
        local_dir: where Ray stores trial results (None -> ~/ray_results).
    """
    register_trainable(tunable_id, TrainTunable)
    lm_config = config_util.load(config_file)
    def easydict_to_dict(config):
        # Recursively convert EasyDict instances to plain dicts so Ray can
        # serialize the search space/spec.
        if isinstance(config, EasyDict):
            config = dict(config)
        for key, value in config.items():
            if isinstance(value, EasyDict):
                value = dict(value)
                easydict_to_dict(value)
                config[key] = value
        return config
    tune_space = easydict_to_dict(lm_config['TUNE_SPACE'])
    tune_spec = easydict_to_dict(lm_config['TUNE_SPEC'])
    tune_spec['run'] = tunable_id
    # Workers may run in a different cwd, so pass an absolute config path.
    tune_spec['config'] = {'lm_config': os.path.join(os.getcwd(), config_file)}
    tune_spec['local_dir'] = local_dir
    tune_spec['trial_name_creator'] = ray.tune.function(trial_str_creator)
    # Expecting use of gpus to do parameter search
    ray.init(num_cpus=multiprocessing.cpu_count() // 2, num_gpus=max(get_num_gpu(), 1))
    algo = HyperOptSearch(tune_space, max_concurrent=4, reward_attr="mean_accuracy")
    scheduler = AsyncHyperBandScheduler(time_attr="training_iteration", reward_attr="mean_accuracy", max_t=200)
    trials = run_experiments(experiments={'exp_tune': tune_spec},
                              search_alg=algo,
                              scheduler=scheduler)
    print("The best result is", get_best_result(trials, metric="mean_accuracy", param='config'))
# CLI wrapper around run(). A docstring is intentionally not added to
# main(): click would surface it as the command help text.
@click.command(context_settings=dict(help_option_names=['-h', '--help']))
@click.option(
    '-c',
    '--config_file',
    help="config file path for this training",
    default=os.path.join('configs', 'example.py'),
    required=True,
)
@click.option(
    '-i',
    '--tunable_id',
    help='[optional] id of this tuning',
    default="tunable",
)
@click.option(
    '-s',
    '--local_dir',
    help='[optional] result saving directory of training results, defaults in ~/ray_results',
    default=None,
)
def main(config_file, tunable_id, local_dir):
    run(config_file, tunable_id, local_dir)
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
|
[
"click.option",
"lmnet.datasets.dataset_iterator.DatasetIterator",
"tensorflow.local_variables_initializer",
"tensorflow.Variable",
"tensorflow.GPUOptions",
"os.path.join",
"multiprocessing.cpu_count",
"tensorflow.test.is_built_with_cuda",
"ray.tune.suggest.HyperOptSearch",
"tensorflow.placeholder",
"tensorflow.train.Saver",
"lmnet.utils.config.load",
"ray.tune.schedulers.AsyncHyperBandScheduler",
"tensorflow.global_variables_initializer",
"subprocess.check_output",
"math.ceil",
"tensorflow.Session",
"ray.tune.function",
"ray.tune.run_experiments",
"os.getcwd",
"ray.tune.register_trainable",
"os.environ.get",
"lmnet.utils.executor.init_logging"
] |
[((12512, 12606), 'click.option', 'click.option', (['"""-i"""', '"""--tunable_id"""'], {'help': '"""[optional] id of this tuning"""', 'default': '"""tunable"""'}), "('-i', '--tunable_id', help='[optional] id of this tuning',\n default='tunable')\n", (12524, 12606), False, 'import click\n'), ((12623, 12770), 'click.option', 'click.option', (['"""-s"""', '"""--local_dir"""'], {'help': '"""[optional] result saving directory of training results, defaults in ~/ray_results"""', 'default': 'None'}), "('-s', '--local_dir', help=\n '[optional] result saving directory of training results, defaults in ~/ray_results'\n , default=None)\n", (12635, 12770), False, 'import click\n'), ((2643, 2687), 'os.environ.get', 'os.environ.get', (['"""CUDA_VISIBLE_DEVICES"""', 'None'], {}), "('CUDA_VISIBLE_DEVICES', None)\n", (2657, 2687), False, 'import os\n'), ((6358, 6426), 'lmnet.datasets.dataset_iterator.DatasetIterator', 'DatasetIterator', (['dataset'], {'seed': 'rank', 'enable_prefetch': 'enable_prefetch'}), '(dataset, seed=rank, enable_prefetch=enable_prefetch)\n', (6373, 6426), False, 'from lmnet.datasets.dataset_iterator import DatasetIterator\n'), ((10911, 10955), 'ray.tune.register_trainable', 'register_trainable', (['tunable_id', 'TrainTunable'], {}), '(tunable_id, TrainTunable)\n', (10929, 10955), False, 'from ray.tune import run_experiments, register_trainable, Trainable\n'), ((10972, 11001), 'lmnet.utils.config.load', 'config_util.load', (['config_file'], {}), '(config_file)\n', (10988, 11001), True, 'from lmnet.utils import executor, config as config_util\n'), ((11637, 11673), 'ray.tune.function', 'ray.tune.function', (['trial_str_creator'], {}), '(trial_str_creator)\n', (11654, 11673), False, 'import ray\n'), ((11825, 11898), 'ray.tune.suggest.HyperOptSearch', 'HyperOptSearch', (['tune_space'], {'max_concurrent': '(4)', 'reward_attr': '"""mean_accuracy"""'}), "(tune_space, max_concurrent=4, reward_attr='mean_accuracy')\n", (11839, 11898), False, 'from ray.tune.suggest import 
HyperOptSearch\n'), ((11915, 12015), 'ray.tune.schedulers.AsyncHyperBandScheduler', 'AsyncHyperBandScheduler', ([], {'time_attr': '"""training_iteration"""', 'reward_attr': '"""mean_accuracy"""', 'max_t': '(200)'}), "(time_attr='training_iteration', reward_attr=\n 'mean_accuracy', max_t=200)\n", (11938, 12015), False, 'from ray.tune.schedulers import AsyncHyperBandScheduler\n'), ((12024, 12118), 'ray.tune.run_experiments', 'run_experiments', ([], {'experiments': "{'exp_tune': tune_spec}", 'search_alg': 'algo', 'scheduler': 'scheduler'}), "(experiments={'exp_tune': tune_spec}, search_alg=algo,\n scheduler=scheduler)\n", (12039, 12118), False, 'from ray.tune import run_experiments, register_trainable, Trainable\n'), ((1653, 1741), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'stderr': 'subprocess.STDOUT', 'shell': '(True)', 'timeout': 'timeout'}), '(cmd, stderr=subprocess.STDOUT, shell=True, timeout=\n timeout)\n', (1676, 1741), False, 'import subprocess\n'), ((2438, 2466), 'tensorflow.test.is_built_with_cuda', 'tf.test.is_built_with_cuda', ([], {}), '()\n', (2464, 2466), True, 'import tensorflow as tf\n'), ((6576, 6618), 'lmnet.utils.config.load', 'config_util.load', (["self.config['lm_config']"], {}), "(self.config['lm_config'])\n", (6592, 6618), True, 'from lmnet.utils import executor, config as config_util\n'), ((6627, 6664), 'lmnet.utils.executor.init_logging', 'executor.init_logging', (['self.lm_config'], {}), '(self.lm_config)\n', (6648, 6664), False, 'from lmnet.utils import executor, config as config_util\n'), ((8001, 8052), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'name': '"""global_step"""', 'trainable': '(False)'}), "(0, name='global_step', trainable=False)\n", (8012, 8052), True, 'import tensorflow as tf\n'), ((8092, 8147), 'tensorflow.placeholder', 'tf.placeholder', (['tf.bool'], {'name': '"""is_training_placeholder"""'}), "(tf.bool, name='is_training_placeholder')\n", (8106, 8147), True, 'import tensorflow as tf\n'), ((8924, 
8957), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (8955, 8957), True, 'import tensorflow as tf\n'), ((8990, 9022), 'tensorflow.local_variables_initializer', 'tf.local_variables_initializer', ([], {}), '()\n', (9020, 9022), True, 'import tensorflow as tf\n'), ((9143, 9176), 'tensorflow.Session', 'tf.Session', ([], {'config': 'session_config'}), '(config=session_config)\n', (9153, 9176), True, 'import tensorflow as tf\n'), ((9282, 9298), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (9296, 9298), True, 'import tensorflow as tf\n'), ((12451, 12488), 'os.path.join', 'os.path.join', (['"""configs"""', '"""example.py"""'], {}), "('configs', 'example.py')\n", (12463, 12488), False, 'import os\n'), ((9840, 9916), 'math.ceil', 'math.ceil', (['(self.validation_dataset.num_per_epoch / self.lm_config.BATCH_SIZE)'], {}), '(self.validation_dataset.num_per_epoch / self.lm_config.BATCH_SIZE)\n', (9849, 9916), False, 'import math\n'), ((11533, 11544), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11542, 11544), False, 'import os\n'), ((9089, 9121), 'tensorflow.GPUOptions', 'tf.GPUOptions', ([], {'allow_growth': '(True)'}), '(allow_growth=True)\n', (9102, 9121), True, 'import tensorflow as tf\n'), ((11748, 11775), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (11773, 11775), False, 'import multiprocessing\n')]
|
import os
from itertools import islice
import random
from test_framework.base58 import b58encode_chk, b58decode_chk, b58chars
from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY
# Prepare test data for key_io_tests.cpp
# python test/functional/feature_address.py valid 50 > src/test/data/key_io_valid.json
# key types (base58check version bytes)
PUBKEY_ADDRESS = 33
SCRIPT_ADDRESS = 15
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 128
PRIVKEY_TEST = 239
metadata_keys = ['isPrivkey', 'chain', 'addrType']
# templates for valid sequences
templates = [
    # prefix, payload_size, suffix, metadata
    # None = N/A
    ((PUBKEY_ADDRESS,), 20, (), (False, 'main', 'pubkey')),
    ((SCRIPT_ADDRESS,), 20, (), (False, 'main', 'script')),
    ((PUBKEY_ADDRESS_TEST,), 20, (), (False, 'test', 'pubkey')),
    ((SCRIPT_ADDRESS_TEST,), 20, (), (False, 'test', 'script')),
    # NOTE(review): a 64-byte private-key payload is unusual (Bitcoin uses
    # 32, plus an optional compression flag) -- confirm this coin's format.
    ((PRIVKEY,), 64, (), (True, 'main', None)),
    ((PRIVKEY_TEST,), 64, (), (True, 'test', None)),
]
def is_valid(v):
    """Check vector v for validity."""
    decoded = b58decode_chk(v)
    if decoded is None:
        # Bad base58 characters or a failed checksum.
        return False
    # Valid iff some template's prefix/suffix bracket a payload of the
    # expected size.
    return any(
        decoded.startswith(bytearray(prefix))
        and decoded.endswith(bytearray(suffix))
        and len(decoded) == len(prefix) + payload_size + len(suffix)
        for prefix, payload_size, suffix, _metadata in templates
    )
def gen_valid_vectors():
    '''Generate valid test vectors

    Infinite generator. For each template it builds a random payload,
    base58check-encodes prefix+payload+suffix, and yields a tuple of
    (base58 string, expected hex, metadata dict): raw key hex for
    private keys, or the expected scriptPubKey hex for addresses.
    '''
    while True:
        for template in templates:
            prefix = bytearray(template[0])
            payload = os.urandom(template[1])
            suffix = bytearray(template[2])
            chk_b = bytearray()
            chk_b.extend(prefix)
            chk_b.extend(payload)
            chk_b.extend(suffix)
            # Drop None entries (e.g. addrType for private keys).
            metadata = dict([(x,y) for (x,y) in zip(metadata_keys,template[3]) if y is not None])
            rv = b58encode_chk(chk_b)
            # Sanity check: everything we emit must round-trip as valid.
            assert is_valid(rv)
            if (metadata['isPrivkey']):
                yield (rv, payload.hex(), metadata)
            else:
                if (metadata['addrType'] == 'pubkey'):
                    # OP_DUP << OP_HASH160 << ToByteVector(keyID) << OP_EQUALVERIFY << OP_CHECKSIG;
                    p2pkh = CScript([OP_DUP, OP_HASH160, payload, OP_EQUALVERIFY, OP_CHECKSIG])
                    yield (rv, p2pkh.hex(), metadata)
                else:
                    # OP_HASH160 << ToByteVector(scriptID) << OP_EQUAL;
                    p2sh = CScript([OP_HASH160, payload, OP_EQUAL])
                    yield (rv, p2sh.hex(), metadata)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
    """Generate possibly invalid vector."""
    # Each flag independently replaces one section with random bytes.
    prefix = os.urandom(1) if corrupt_prefix else bytearray(template[0])
    if randomize_payload_size:
        # NOTE(review): max(..., 50) clamps the random length to at least
        # 50, so it is almost always exactly 50 -- min() may have been the
        # intent upstream; behavior preserved here.
        payload = os.urandom(max(int(random.expovariate(0.5)), 50))
    else:
        payload = os.urandom(template[1])
    suffix = os.urandom(len(template[2])) if corrupt_suffix else bytearray(template[2])
    raw = bytearray()
    raw.extend(prefix)
    raw.extend(payload)
    raw.extend(suffix)
    return b58encode_chk(raw)
def randbool(p=0.5):
    """Return True with probability *p* (a Bernoulli draw)."""
    return p > random.random()
def gen_invalid_vectors():
    '''Generate invalid test vectors

    Infinite generator of 1-tuples (note the trailing commas on the
    yields) containing strings that must NOT decode as valid.
    '''
    # start with some manual edge-cases
    yield "",
    yield "x",
    while True:
        # kinds of invalid vectors:
        # invalid prefix
        # invalid payload length
        # invalid (randomized) suffix (add random data)
        # corrupt checksum
        for template in templates:
            val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
            if random.randint(0,10)<1: # line corruption
                if randbool(): # add random character to end
                    val += random.choice(b58chars)
                else: # replace random character in the middle
                    n = random.randint(0, len(val))
                    val = val[0:n] + random.choice(b58chars) + val[n+1:]
            # Corruption may accidentally produce a valid string (e.g. an
            # untouched checksum); only emit genuinely invalid ones.
            if not is_valid(val):
                yield val,
if __name__ == '__main__':
    # Usage: feature_address.py [valid|invalid] [count]
    import sys, json
    iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
    try:
        # NOTE(review): an unknown mode string raises KeyError here (only
        # a missing argument falls back to 'valid') -- confirm intended.
        uiter = iters[sys.argv[1]]
    except IndexError:
        uiter = gen_valid_vectors
    try:
        count = int(sys.argv[2])
    except IndexError:
        # No count given: emit an empty JSON list.
        count = 0
    # The generators are infinite; islice takes exactly `count` vectors.
    data = list(islice(uiter(), count))
    json.dump(data, sys.stdout, sort_keys=True, indent=4)
    sys.stdout.write('\n')
|
[
"sys.stdout.write",
"json.dump",
"test_framework.base58.b58encode_chk",
"random.expovariate",
"random.randint",
"test_framework.script.CScript",
"test_framework.base58.b58decode_chk",
"random.choice",
"random.random",
"os.urandom"
] |
[((1134, 1150), 'test_framework.base58.b58decode_chk', 'b58decode_chk', (['v'], {}), '(v)\n', (1147, 1150), False, 'from test_framework.base58 import b58encode_chk, b58decode_chk, b58chars\n'), ((3303, 3323), 'test_framework.base58.b58encode_chk', 'b58encode_chk', (['chk_b'], {}), '(chk_b)\n', (3316, 3323), False, 'from test_framework.base58 import b58encode_chk, b58decode_chk, b58chars\n'), ((4646, 4699), 'json.dump', 'json.dump', (['data', 'sys.stdout'], {'sort_keys': '(True)', 'indent': '(4)'}), '(data, sys.stdout, sort_keys=True, indent=4)\n', (4655, 4699), False, 'import sys, json\n'), ((4704, 4726), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (4720, 4726), False, 'import sys, json\n'), ((2847, 2860), 'os.urandom', 'os.urandom', (['(1)'], {}), '(1)\n', (2857, 2860), False, 'import os\n'), ((3043, 3066), 'os.urandom', 'os.urandom', (['template[1]'], {}), '(template[1])\n', (3053, 3066), False, 'import os\n'), ((3391, 3406), 'random.random', 'random.random', ([], {}), '()\n', (3404, 3406), False, 'import random\n'), ((1674, 1697), 'os.urandom', 'os.urandom', (['template[1]'], {}), '(template[1])\n', (1684, 1697), False, 'import os\n'), ((1989, 2009), 'test_framework.base58.b58encode_chk', 'b58encode_chk', (['chk_b'], {}), '(chk_b)\n', (2002, 2009), False, 'from test_framework.base58 import b58encode_chk, b58decode_chk, b58chars\n'), ((3891, 3912), 'random.randint', 'random.randint', (['(0)', '(10)'], {}), '(0, 10)\n', (3905, 3912), False, 'import random\n'), ((2335, 2402), 'test_framework.script.CScript', 'CScript', (['[OP_DUP, OP_HASH160, payload, OP_EQUALVERIFY, OP_CHECKSIG]'], {}), '([OP_DUP, OP_HASH160, payload, OP_EQUALVERIFY, OP_CHECKSIG])\n', (2342, 2402), False, 'from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY\n'), ((2578, 2618), 'test_framework.script.CScript', 'CScript', (['[OP_HASH160, payload, OP_EQUAL]'], {}), '([OP_HASH160, payload, OP_EQUAL])\n', (2585, 
2618), False, 'from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY\n'), ((2984, 3007), 'random.expovariate', 'random.expovariate', (['(0.5)'], {}), '(0.5)\n', (3002, 3007), False, 'import random\n'), ((4021, 4044), 'random.choice', 'random.choice', (['b58chars'], {}), '(b58chars)\n', (4034, 4044), False, 'import random\n'), ((4197, 4220), 'random.choice', 'random.choice', (['b58chars'], {}), '(b58chars)\n', (4210, 4220), False, 'import random\n')]
|
"""
Copyright 2013 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cvxpy.constraints.constraint import Constraint
import numpy as np
class Zero(Constraint):
    """A constraint of the form :math:`x = 0`.

    The preferred way of creating a ``Zero`` constraint is through
    operator overloading: writing ``x == 0`` builds a ``Zero`` constraint
    with ``x`` as its only argument.
    """

    def __init__(self, expr, constr_id=None):
        super(Zero, self).__init__([expr], constr_id)

    def __str__(self):
        """Return a string showing the mathematical constraint."""
        return self.name()

    def __repr__(self):
        """Return a string with information about the constraint."""
        arg_repr = repr(self.args[0])
        return "%s(%s)" % (type(self).__name__, arg_repr)

    @property
    def shape(self):
        """int : The shape of the constrained expression."""
        return self.args[0].shape

    @property
    def size(self):
        """int : The size of the constrained expression."""
        return self.args[0].size

    def name(self):
        """Return the human-readable form of the constraint."""
        return "%s == 0" % self.args[0]

    def is_dcp(self):
        """A zero constraint is DCP exactly when its argument is affine."""
        return self.args[0].is_affine()

    def is_dgp(self):
        """Zero constraints are never DGP."""
        return False

    def is_dqcp(self):
        """DQCP reduces to DCP for this constraint type."""
        return self.is_dcp()

    @property
    def residual(self):
        """The residual of the constraint.

        Returns
        -------
        Expression
        """
        expr_value = self.expr.value
        return None if expr_value is None else np.abs(expr_value)

    # The value of the dual variable.
    @property
    def dual_value(self):
        """NumPy.ndarray : The value of the dual variable."""
        return self.dual_variables[0].value

    # TODO(akshayka): Rename to save_dual_value to avoid collision with
    # value as defined above.
    def save_value(self, value):
        """Save the value of the dual variable for the constraint's parent.

        Args:
            value: The value of the dual variable.
        """
        self.dual_variables[0].save_value(value)
class Equality(Constraint):
    """A constraint of the form :math:`x = y`."""

    def __init__(self, lhs, rhs, constr_id=None):
        # Internally the constraint is tracked through the difference
        # lhs - rhs, which must equal zero.
        self._expr = lhs - rhs
        super(Equality, self).__init__([lhs, rhs], constr_id)

    def __str__(self):
        """Return a string showing the mathematical constraint."""
        return self.name()

    def __repr__(self):
        """Return a string with information about the constraint."""
        lhs_repr = repr(self.args[0])
        rhs_repr = repr(self.args[1])
        return "%s(%s, %s)" % (type(self).__name__, lhs_repr, rhs_repr)

    def _construct_dual_variables(self, args):
        # The dual variable is attached to lhs - rhs, not to the raw args.
        super(Equality, self)._construct_dual_variables([self._expr])

    @property
    def expr(self):
        """The difference lhs - rhs that must equal zero."""
        return self._expr

    @property
    def shape(self):
        """int : The shape of the constrained expression."""
        return self.expr.shape

    @property
    def size(self):
        """int : The size of the constrained expression."""
        return self.expr.size

    def name(self):
        """Return the human-readable form of the constraint."""
        return "%s == %s" % (self.args[0], self.args[1])

    def is_dcp(self):
        """An equality constraint is DCP exactly when lhs - rhs is affine."""
        return self.expr.is_affine()

    def is_dpp(self):
        """DPP additionally requires the difference expression to be DPP."""
        return self.is_dcp() and self.expr.is_dpp()

    def is_dgp(self):
        """DGP requires both sides to be log-log affine."""
        lhs_ok = self.args[0].is_log_log_affine()
        return lhs_ok and self.args[1].is_log_log_affine()

    def is_dqcp(self):
        """DQCP reduces to DCP for this constraint type."""
        return self.is_dcp()

    @property
    def residual(self):
        """The residual of the constraint.

        Returns
        -------
        Expression
        """
        expr_value = self.expr.value
        return None if expr_value is None else np.abs(expr_value)

    @property
    def dual_value(self):
        """NumPy.ndarray : The value of the dual variable."""
        return self.dual_variables[0].value

    def save_value(self, value):
        """Save the value of the dual variable for the constraint's parent.

        Args:
            value: The value of the dual variable.
        """
        self.dual_variables[0].save_value(value)
|
[
"numpy.abs"
] |
[((2147, 2170), 'numpy.abs', 'np.abs', (['self.expr.value'], {}), '(self.expr.value)\n', (2153, 2170), True, 'import numpy as np\n'), ((4407, 4430), 'numpy.abs', 'np.abs', (['self.expr.value'], {}), '(self.expr.value)\n', (4413, 4430), True, 'import numpy as np\n')]
|
"""Test the cross_validation module"""
from __future__ import division
import warnings
import numpy as np
from scipy.sparse import coo_matrix
from scipy.sparse import csr_matrix
from scipy import stats
from sklearn.exceptions import ConvergenceWarning
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_greater_equal
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.mocking import CheckingClassifier, MockDataFrame
with warnings.catch_warnings():
warnings.simplefilter('ignore')
from sklearn import cross_validation as cval
from sklearn.datasets import make_regression
from sklearn.datasets import load_boston
from sklearn.datasets import load_digits
from sklearn.datasets import load_iris
from sklearn.datasets import make_multilabel_classification
from sklearn.metrics import explained_variance_score
from sklearn.metrics import make_scorer
from sklearn.metrics import precision_score
from sklearn.externals import six
from sklearn.externals.six.moves import zip
from sklearn.linear_model import Ridge
from sklearn.multiclass import OneVsRestClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.cluster import KMeans
from sklearn.preprocessing import Imputer
from sklearn.pipeline import Pipeline
class MockClassifier(object):
    """Dummy classifier to test the cross-validation.

    Records the dummy fit parameters it receives and validates the shapes of
    the array-like ones so that tests can verify fit_params plumbing.
    NOTE: ``fit`` reads the module-level globals ``y`` and ``P_sparse``
    defined below this class.
    """
    def __init__(self, a=0, allow_nd=False):
        # a: arbitrary parameter echoed back by score(); allow_nd: whether
        # fit/predict flatten inputs with more than 2 dimensions.
        self.a = a
        self.allow_nd = allow_nd
    def fit(self, X, Y=None, sample_weight=None, class_prior=None,
            sparse_sample_weight=None, sparse_param=None, dummy_int=None,
            dummy_str=None, dummy_obj=None, callback=None):
        """The dummy arguments are to test that this fit function can
        accept non-array arguments through cross-validation, such as:
            - int
            - str (this is actually array-like)
            - object
            - function
        """
        # Stash the non-array params so a callback can assert on them.
        self.dummy_int = dummy_int
        self.dummy_str = dummy_str
        self.dummy_obj = dummy_obj
        if callback is not None:
            callback(self)
        if self.allow_nd:
            X = X.reshape(len(X), -1)
        if X.ndim >= 3 and not self.allow_nd:
            # NOTE(review): message looks truncated ('X cannot be d') --
            # kept byte-identical since tests may match on it.
            raise ValueError('X cannot be d')
        # Each array-like fit_param must have been sliced consistently
        # with the CV fold given to fit.
        if sample_weight is not None:
            assert_true(sample_weight.shape[0] == X.shape[0],
                        'MockClassifier extra fit_param sample_weight.shape[0]'
                        ' is {0}, should be {1}'.format(sample_weight.shape[0],
                                                        X.shape[0]))
        if class_prior is not None:
            # Compares against the module-level y, not the Y argument.
            assert_true(class_prior.shape[0] == len(np.unique(y)),
                        'MockClassifier extra fit_param class_prior.shape[0]'
                        ' is {0}, should be {1}'.format(class_prior.shape[0],
                                                        len(np.unique(y))))
        if sparse_sample_weight is not None:
            fmt = ('MockClassifier extra fit_param sparse_sample_weight'
                   '.shape[0] is {0}, should be {1}')
            assert_true(sparse_sample_weight.shape[0] == X.shape[0],
                        fmt.format(sparse_sample_weight.shape[0], X.shape[0]))
        if sparse_param is not None:
            fmt = ('MockClassifier extra fit_param sparse_param.shape '
                   'is ({0}, {1}), should be ({2}, {3})')
            # sparse_param must not be sliced: compared to global P_sparse.
            assert_true(sparse_param.shape == P_sparse.shape,
                        fmt.format(sparse_param.shape[0],
                                   sparse_param.shape[1],
                                   P_sparse.shape[0], P_sparse.shape[1]))
        return self
    def predict(self, T):
        # Predict the first feature column (arbitrary deterministic output).
        if self.allow_nd:
            T = T.reshape(len(T), -1)
        return T[:, 0]
    def score(self, X=None, Y=None):
        # Deterministic score in (0, 1] that depends only on parameter a.
        return 1. / (1 + np.abs(self.a))
    def get_params(self, deep=False):
        return {'a': self.a, 'allow_nd': self.allow_nd}
# Shared fixtures used throughout the tests below.
X = np.ones((10, 2))  # dense design matrix: 10 samples, 2 constant features
X_sparse = coo_matrix(X)  # the same data in sparse COO format
# Sparse (10, 1) per-sample weight column with a single nonzero entry.
W_sparse = coo_matrix((np.array([1]), (np.array([1]), np.array([0]))),
                      shape=(10, 1))
P_sparse = coo_matrix(np.eye(5))  # sparse 5x5 identity used as a fit_param
# avoid StratifiedKFold's Warning about least populated class in y
y = np.arange(10) % 3
##############################################################################
# Tests
def check_valid_split(train, test, n_samples=None):
    """Assert that (train, test) is a disjoint and, if n_samples is given,
    exhaustive split of range(n_samples)."""
    # Python sets give more informative assertion failure messages
    # than raw index arrays.
    train_set = set(train)
    test_set = set(test)
    # The two sides of the split must not overlap.
    assert_equal(train_set.intersection(test_set), set())
    if n_samples is not None:
        # Together, train and test must cover every sample index.
        assert_equal(train_set.union(test_set), set(range(n_samples)))
def check_cv_coverage(cv, expected_n_iter=None, n_samples=None):
    """Assert that every sample appears at least once in a test fold and
    that cv yields the expected number of valid splits."""
    if expected_n_iter is None:
        expected_n_iter = len(cv)
    else:
        assert_equal(len(cv), expected_n_iter)
    seen_test = set()
    n_iterations = 0
    for train, test in cv:
        check_valid_split(train, test, n_samples=n_samples)
        seen_test.update(test)
        n_iterations += 1
    assert_equal(n_iterations, expected_n_iter)
    if n_samples is not None:
        # The accumulated test samples must cover the whole dataset.
        assert_equal(seen_test, set(range(n_samples)))
def test_kfold_valueerrors():
    # KFold / StratifiedKFold must reject invalid (n, n_folds) combinations
    # with ValueError and warn on degenerate stratifications.
    # Check that errors are raised if there is not enough samples
    assert_raises(ValueError, cval.KFold, 3, 4)
    # Check that a warning is raised if the least populated class has too few
    # members.
    y = [3, 3, -1, -1, 3]
    cv = assert_warns_message(Warning, "The least populated class",
                              cval.StratifiedKFold, y, 3)
    # Check that despite the warning the folds are still computed even
    # though all the classes are not necessarily represented on each
    # side of the split at each split
    check_cv_coverage(cv, expected_n_iter=3, n_samples=len(y))
    # Check that errors are raised if all n_labels for individual
    # classes are less than n_folds.
    y = [3, 3, -1, -1, 2]
    assert_raises(ValueError, cval.StratifiedKFold, y, 3)
    # Error when number of folds is <= 1
    assert_raises(ValueError, cval.KFold, 2, 0)
    assert_raises(ValueError, cval.KFold, 2, 1)
    error_string = ("k-fold cross validation requires at least one"
                    " train / test split")
    assert_raise_message(ValueError, error_string,
                         cval.StratifiedKFold, y, 0)
    assert_raise_message(ValueError, error_string,
                         cval.StratifiedKFold, y, 1)
    # When n is not integer:
    assert_raises(ValueError, cval.KFold, 2.5, 2)
    # When n_folds is not integer:
    assert_raises(ValueError, cval.KFold, 5, 1.5)
    assert_raises(ValueError, cval.StratifiedKFold, y, 1.5)
def test_kfold_indices():
    # All indices must be returned in the test folds, both when the folds
    # can be equal-sized (300 samples / 3 folds) and when they cannot
    # (17 samples / 3 folds).
    for n_samples in (300, 17):
        kf = cval.KFold(n_samples, 3)
        check_cv_coverage(kf, expected_n_iter=3, n_samples=n_samples)
def test_kfold_no_shuffle():
    # Without shuffling, KFold must slice the samples in their original
    # order; check against hand-computed splits on toy datasets.
    for n, expected in [
            (4, [([2, 3], [0, 1]), ([0, 1], [2, 3])]),
            (5, [([3, 4], [0, 1, 2]), ([0, 1, 2], [3, 4])])]:
        for (exp_train, exp_test), (train, test) in zip(expected,
                                                        cval.KFold(n, 2)):
            assert_array_equal(test, exp_test)
            assert_array_equal(train, exp_train)
def test_stratified_kfold_no_shuffle():
    # Without shuffling, StratifiedKFold should keep the data ordering as
    # stable as possible so sample dependencies are not hidden; check
    # against hand-computed splits on toy datasets.
    cases = [
        ([1, 1, 0, 0],
         [([1, 3], [0, 2]), ([0, 2], [1, 3])]),
        ([1, 1, 1, 0, 0, 0, 0],
         [([2, 5, 6], [0, 1, 3, 4]), ([0, 1, 3, 4], [2, 5, 6])]),
    ]
    for labels, expected in cases:
        for (exp_train, exp_test), (train, test) in zip(
                expected, cval.StratifiedKFold(labels, 2)):
            assert_array_equal(test, exp_test)
            assert_array_equal(train, exp_train)
def test_stratified_kfold_ratios():
    # Stratified kfold must preserve the class ratios in each individual
    # split; repeat with shuffling turned off and on.
    n_samples = 1000
    labels = np.array([4] * int(0.10 * n_samples) +
                      [0] * int(0.89 * n_samples) +
                      [1] * int(0.01 * n_samples))
    # (class label, expected proportion) pairs to verify on each side.
    class_ratios = [(4, 0.10), (0, 0.89), (1, 0.01)]
    for shuffle in [False, True]:
        for train, test in cval.StratifiedKFold(labels, 5, shuffle=shuffle):
            for klass, ratio in class_ratios:
                assert_almost_equal(
                    np.sum(labels[train] == klass) / len(train), ratio, 2)
                assert_almost_equal(
                    np.sum(labels[test] == klass) / len(test), ratio, 2)
def test_kfold_balance():
    # KFold must return folds whose sizes differ by at most one and whose
    # sizes sum to the total number of samples.
    for n in range(11, 17):
        kf = cval.KFold(n, 5)
        fold_sizes = [len(test) for _, test in kf]
        assert_true(max(fold_sizes) - min(fold_sizes) <= 1)
        assert_equal(sum(fold_sizes), kf.n)
def test_stratifiedkfold_balance():
    # StratifiedKFold must return balanced fold sizes whenever the
    # stratification constraints allow it; repeat with shuffling off and on.
    labels = [0] * 3 + [1] * 14
    for shuffle in [False, True]:
        for n in range(11, 17):
            skf = cval.StratifiedKFold(labels[:n], 3, shuffle=shuffle)
            fold_sizes = [len(test) for _, test in skf]
            assert_true(max(fold_sizes) - min(fold_sizes) <= 1)
            assert_equal(sum(fold_sizes), skf.n)
def test_shuffle_kfold():
    # The indices must be shuffled properly, and every index must still be
    # returned in exactly one of the test folds.
    kf = cval.KFold(300, 3, shuffle=True, random_state=0)
    ind = np.arange(300)
    all_folds = None
    for train, test in kf:
        # No test fold may be an unshuffled contiguous third of the data.
        for start, stop in ((0, 100), (100, 200), (200, 300)):
            assert_true(np.any(np.arange(start, stop) != ind[test]))
        if all_folds is None:
            all_folds = ind[test].copy()
        else:
            all_folds = np.concatenate((all_folds, ind[test]))
    # The union of the test folds must be the full index range.
    all_folds.sort()
    assert_array_equal(all_folds, ind)
def test_shuffle_stratifiedkfold():
    # Two different random seeds must produce different test folds, while
    # the shuffled CV still covers every sample exactly once.
    labels = [0] * 20 + [1] * 20
    kf0 = list(cval.StratifiedKFold(labels, 5, shuffle=True, random_state=0))
    kf1 = list(cval.StratifiedKFold(labels, 5, shuffle=True, random_state=1))
    for (_, test0), (_, test1) in zip(kf0, kf1):
        assert_not_equal(set(test0), set(test1))
    check_cv_coverage(kf0, expected_n_iter=5, n_samples=40)
def test_kfold_can_detect_dependent_samples_on_digits():  # see #2372
    # The digits samples are dependent: they are apparently grouped by authors
    # although we don't have any information on the groups segment locations
    # for this data. We can highlight this fact be computing k-fold cross-
    # validation with and without shuffling: we observe that the shuffling case
    # wrongly makes the IID assumption and is therefore too optimistic: it
    # estimates a much higher accuracy (around 0.96) than the non
    # shuffling variant (around 0.86).
    digits = load_digits()
    X, y = digits.data[:800], digits.target[:800]
    model = SVC(C=10, gamma=0.005)
    n = len(y)
    # Baseline: contiguous (non-shuffled) folds respect the author grouping.
    cv = cval.KFold(n, 5, shuffle=False)
    mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
    assert_greater(0.88, mean_score)
    assert_greater(mean_score, 0.85)
    # Shuffling the data artificially breaks the dependency and hides the
    # overfitting of the model with regards to the writing style of the authors
    # by yielding a seriously overestimated score:
    cv = cval.KFold(n, 5, shuffle=True, random_state=0)
    mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
    assert_greater(mean_score, 0.95)
    # Same check with a second seed to guard against a lucky permutation.
    cv = cval.KFold(n, 5, shuffle=True, random_state=1)
    mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
    assert_greater(mean_score, 0.95)
    # Similarly, StratifiedKFold should try to shuffle the data as little
    # as possible (while respecting the balanced class constraints)
    # and thus be able to detect the dependency by not overestimating
    # the CV score either. As the digits dataset is approximately balanced
    # the estimated mean score is close to the score measured with
    # non-shuffled KFold
    cv = cval.StratifiedKFold(y, 5)
    mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
    assert_greater(0.88, mean_score)
    assert_greater(mean_score, 0.85)
def test_label_kfold():
    # LabelKFold must produce approximately balanced folds while never
    # assigning one label to more than one fold.  The same invariants are
    # verified for random integer labels and for repeated string labels, so
    # the duplicated verification code is factored into a local helper.
    rng = np.random.RandomState(0)

    def check_label_kfold(labels, n_folds, tolerance):
        # Verify the LabelKFold invariants for one label array.
        labels = np.asarray(labels, dtype=object)
        folds = cval.LabelKFold(labels, n_folds=n_folds).idxs
        ideal_n_labels_per_fold = len(labels) // n_folds
        # 1) every sample gets a fold and the folds have approximately
        #    the same size (within `tolerance` samples of the ideal size),
        assert_equal(len(folds), len(labels))
        for i in np.unique(folds):
            assert_greater_equal(
                tolerance, abs(sum(folds == i) - ideal_n_labels_per_fold))
        # 2) each label appears in exactly one fold,
        for label in np.unique(labels):
            assert_equal(len(np.unique(folds[labels == label])), 1)
        # 3) no label is on both sides of any train/test split.
        for train, test in cval.LabelKFold(labels, n_folds=n_folds):
            assert_equal(len(np.intersect1d(labels[train], labels[test])), 0)

    # Random integer labels.
    n_labels = 15
    n_samples = 1000
    n_folds = 5
    labels = rng.randint(0, n_labels, n_samples)
    check_label_kfold(labels, n_folds,
                      tolerance=0.05 * n_samples)  # 5 percent error allowed

    # Repeated string labels.
    labels = ['Albert', 'Jean', 'Bertrand', 'Michel', 'Jean',
              'Francis', 'Robert', 'Michel', 'Rachel', 'Lois',
              'Michelle', 'Bernard', 'Marion', 'Laura', 'Jean',
              'Rachel', 'Franck', 'John', 'Gael', 'Anna', 'Alix',
              'Robert', 'Marion', 'David', 'Tony', 'Abel', 'Becky',
              'Madmood', 'Cary', 'Mary', 'Alexandre', 'David', 'Francis',
              'Barack', 'Abdoul', 'Rasha', 'Xi', 'Silvia']
    check_label_kfold(labels, n_folds,
                      tolerance=0.05 * len(labels))  # 5 percent error allowed

    # Should fail if there are more folds than labels
    labels = np.array([1, 1, 1, 2, 2])
    assert_raises(ValueError, cval.LabelKFold, labels, n_folds=3)
def test_shuffle_split():
    # Equivalent float, int, numpy-int and six integer-type test_size
    # specifications must all generate identical splits.
    ss1 = cval.ShuffleSplit(10, test_size=0.2, random_state=0)
    ss2 = cval.ShuffleSplit(10, test_size=2, random_state=0)
    ss3 = cval.ShuffleSplit(10, test_size=np.int32(2), random_state=0)
    for typ in six.integer_types:
        ss4 = cval.ShuffleSplit(10, test_size=typ(2), random_state=0)
    for t1, t2, t3, t4 in zip(ss1, ss2, ss3, ss4):
        # part 0 is the train side, part 1 the test side.
        for part in (0, 1):
            assert_array_equal(t1[part], t2[part])
            assert_array_equal(t2[part], t3[part])
            assert_array_equal(t3[part], t4[part])
def test_stratified_shuffle_split_init():
    # StratifiedShuffleSplit must validate its size parameters eagerly,
    # at construction time, with ValueError.
    y = np.asarray([0, 1, 1, 1, 2, 2, 2])
    # Check that error is raised if there is a class with only one sample
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.2)
    # Check that error is raised if the test set size is smaller than n_classes
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 2)
    # Check that error is raised if the train set size is smaller than
    # n_classes
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 3, 2)
    y = np.asarray([0, 0, 0, 1, 1, 1, 2, 2, 2])
    # Check that errors are raised if there is not enough samples
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.5, 0.6)
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 8, 0.6)
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.6, 8)
    # Train size or test size too small
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, train_size=2)
    assert_raises(ValueError, cval.StratifiedShuffleSplit, y, test_size=2)
def test_stratified_shuffle_split_iter():
    # Each generated split must contain every class on both sides, keep the
    # class proportions, have the requested sizes, and be disjoint.
    ys = [np.array([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]),
          np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]),
          np.array([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2] * 2),
          np.array([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]),
          np.array([-1] * 800 + [1] * 50)
          ]
    for y in ys:
        sss = cval.StratifiedShuffleSplit(y, 6, test_size=0.33,
                                          random_state=0)
        # Expected side sizes for test_size=0.33 (test size rounds up).
        test_size = np.ceil(0.33 * len(y))
        train_size = len(y) - test_size
        for train, test in sss:
            # Both sides must contain the same set of classes.
            assert_array_equal(np.unique(y[train]), np.unique(y[test]))
            # Checks if folds keep classes proportions
            p_train = (np.bincount(np.unique(y[train],
                                   return_inverse=True)[1]) /
                       float(len(y[train])))
            p_test = (np.bincount(np.unique(y[test],
                                  return_inverse=True)[1]) /
                      float(len(y[test])))
            assert_array_almost_equal(p_train, p_test, 1)
            assert_equal(len(train) + len(test), y.size)
            assert_equal(len(train), train_size)
            assert_equal(len(test), test_size)
            # train and test must be disjoint.
            assert_array_equal(np.lib.arraysetops.intersect1d(train, test), [])
def test_stratified_shuffle_split_even():
    # Test the StratifiedShuffleSplit, indices are drawn with a
    # equal chance
    n_folds = 5
    n_iter = 1000
    def assert_counts_are_ok(idx_counts, p):
        # Here we test that the distribution of the counts
        # per index is close enough to a binomial
        # NOTE: n_splits is read late-bound from the enclosing scope; it is
        # assigned in the loop below before this helper is ever called.
        threshold = 0.05 / n_splits
        bf = stats.binom(n_splits, p)
        for count in idx_counts:
            p = bf.pmf(count)
            assert_true(p > threshold,
                        "An index is not drawn with chance corresponding "
                        "to even draws")
    for n_samples in (6, 22):
        # Perfectly balanced binary labels.
        labels = np.array((n_samples // 2) * [0, 1])
        splits = cval.StratifiedShuffleSplit(labels, n_iter=n_iter,
                                             test_size=1. / n_folds,
                                             random_state=0)
        # Count how often each sample index lands on each side of the split.
        train_counts = [0] * n_samples
        test_counts = [0] * n_samples
        n_splits = 0
        for train, test in splits:
            n_splits += 1
            for counter, ids in [(train_counts, train), (test_counts, test)]:
                for id in ids:  # NOTE: `id` shadows the builtin here
                    counter[id] += 1
        assert_equal(n_splits, n_iter)
        # The checks below use the train/test of the last iteration.
        assert_equal(len(train), splits.n_train)
        assert_equal(len(test), splits.n_test)
        assert_equal(len(set(train).intersection(test)), 0)
        label_counts = np.unique(labels)
        assert_equal(splits.test_size, 1.0 / n_folds)
        assert_equal(splits.n_train + splits.n_test, len(labels))
        assert_equal(len(label_counts), 2)
        # Each index should be drawn with a frequency close to binomial.
        ex_test_p = float(splits.n_test) / n_samples
        ex_train_p = float(splits.n_train) / n_samples
        assert_counts_are_ok(train_counts, ex_train_p)
        assert_counts_are_ok(test_counts, ex_test_p)
def test_stratified_shuffle_split_overlap_train_test_bug():
    # Non-regression test for the train/test overlap reported in
    # https://github.com/scikit-learn/scikit-learn/issues/6121
    labels = [0, 1, 2, 3] * 3 + [4, 5] * 5
    sss = cval.StratifiedShuffleSplit(labels, n_iter=1,
                                      test_size=0.5, random_state=0)
    train, test = next(iter(sss))
    # The two sides of the split must share no index.
    assert_array_equal(np.intersect1d(train, test), [])
def test_predefinedsplit_with_kfold_split():
    # PredefinedSplit must be able to reproduce an arbitrary KFold split
    # when fed the fold membership array of that split.
    folds = -1 * np.ones(10)
    kf_splits = []
    for i, (train_ind, test_ind) in enumerate(cval.KFold(10, 5, shuffle=True)):
        kf_splits.append((train_ind, test_ind))
        folds[test_ind] = i
    ps_splits = list(cval.PredefinedSplit(folds))
    kf_train = [tr for tr, _ in kf_splits]
    kf_test = [te for _, te in kf_splits]
    ps_train = [tr for tr, _ in ps_splits]
    ps_test = [te for _, te in ps_splits]
    assert_array_equal(ps_train, kf_train)
    assert_array_equal(ps_test, kf_test)
def test_label_shuffle_split():
    # LabelShuffleSplit must keep whole labels on one side of the split and
    # respect the requested label-level test_size.
    ys = [np.array([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]),
          np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]),
          np.array([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]),
          np.array([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]),
          ]
    for y in ys:
        n_iter = 6
        test_size = 1. / 3
        slo = cval.LabelShuffleSplit(y, n_iter, test_size=test_size,
                                     random_state=0)
        # Make sure the repr works
        repr(slo)
        # Test that the length is correct
        assert_equal(len(slo), n_iter)
        y_unique = np.unique(y)
        for train, test in slo:
            # First test: no train label is in the test set and vice versa
            y_train_unique = np.unique(y[train])
            y_test_unique = np.unique(y[test])
            assert_false(np.any(np.in1d(y[train], y_test_unique)))
            assert_false(np.any(np.in1d(y[test], y_train_unique)))
            # Second test: train and test add up to all the data
            assert_equal(y[train].size + y[test].size, y.size)
            # Third test: train and test are disjoint
            assert_array_equal(np.intersect1d(train, test), [])
            # Fourth test: # unique train and test labels are correct,
            # +- 1 for rounding error
            assert_true(abs(len(y_test_unique) -
                            round(test_size * len(y_unique))) <= 1)
            assert_true(abs(len(y_train_unique) -
                            round((1.0 - test_size) * len(y_unique))) <= 1)
def test_leave_label_out_changing_labels():
    # Check that LeaveOneLabelOut and LeavePLabelOut work normally if
    # the labels variable is changed before calling __iter__
    labels = np.array([0, 1, 2, 1, 1, 2, 0, 0])
    labels_changing = np.array(labels, copy=True)
    lolo = cval.LeaveOneLabelOut(labels)
    lolo_changing = cval.LeaveOneLabelOut(labels_changing)
    lplo = cval.LeavePLabelOut(labels, p=2)
    lplo_changing = cval.LeavePLabelOut(labels_changing, p=2)
    # Mutate the second copy after construction: the CV objects must have
    # captured the labels at construction time, so splits stay identical.
    labels_changing[:] = 0
    for llo, llo_changing in [(lolo, lolo_changing), (lplo, lplo_changing)]:
        for (train, test), (train_chan, test_chan) in zip(llo, llo_changing):
            assert_array_equal(train, train_chan)
            assert_array_equal(test, test_chan)
def test_cross_val_score():
    # cross_val_score must reproduce MockClassifier's constant per-fold
    # score and accept list inputs, sparse inputs and (opt-in) nd arrays.
    clf = MockClassifier()
    for a in range(-10, 10):
        clf.a = a
        # Smoke test
        scores = cval.cross_val_score(clf, X, y)
        assert_array_equal(scores, clf.score(X, y))
        # test with multioutput y
        scores = cval.cross_val_score(clf, X_sparse, X)
        assert_array_equal(scores, clf.score(X_sparse, X))
        scores = cval.cross_val_score(clf, X_sparse, y)
        assert_array_equal(scores, clf.score(X_sparse, y))
        # test with multioutput y
        scores = cval.cross_val_score(clf, X_sparse, X)
        assert_array_equal(scores, clf.score(X_sparse, X))
    # test with X and y as list
    list_check = lambda x: isinstance(x, list)
    clf = CheckingClassifier(check_X=list_check)
    scores = cval.cross_val_score(clf, X.tolist(), y.tolist())
    clf = CheckingClassifier(check_y=list_check)
    scores = cval.cross_val_score(clf, X, y.tolist())
    # An unknown scoring string must raise ValueError.
    assert_raises(ValueError, cval.cross_val_score, clf, X, y,
                  scoring="sklearn")
    # test with 3d X and
    X_3d = X[:, :, np.newaxis]
    clf = MockClassifier(allow_nd=True)
    scores = cval.cross_val_score(clf, X_3d, y)
    # Without allow_nd, a 3d X must be rejected.
    clf = MockClassifier(allow_nd=False)
    assert_raises(ValueError, cval.cross_val_score, clf, X_3d, y)
def test_cross_val_score_pandas():
    # check cross_val_score doesn't destroy pandas dataframe
    types = [(MockDataFrame, MockDataFrame)]
    try:
        # Only exercise real pandas containers when pandas is installed.
        from pandas import Series, DataFrame
        types.append((Series, DataFrame))
    except ImportError:
        pass
    for TargetType, InputFeatureType in types:
        # X dataframe, y series
        X_df, y_ser = InputFeatureType(X), TargetType(y)
        # CheckingClassifier asserts that fit still receives the original
        # container types, i.e. cross_val_score did not convert them.
        check_df = lambda x: isinstance(x, InputFeatureType)
        check_series = lambda x: isinstance(x, TargetType)
        clf = CheckingClassifier(check_X=check_df, check_y=check_series)
        cval.cross_val_score(clf, X_df, y_ser)
def test_cross_val_score_mask():
    # Check that cross_val_score produces identical scores whether the CV
    # folds are passed as index arrays or as boolean masks.
    svm = SVC(kernel="linear")
    iris = load_iris()
    X, y = iris.data, iris.target
    cv_indices = cval.KFold(len(y), 5)
    scores_indices = cval.cross_val_score(svm, X, y, cv=cv_indices)
    cv_indices = cval.KFold(len(y), 5)
    cv_masks = []
    for train, test in cv_indices:
        # np.bool is a deprecated alias for the builtin bool.
        mask_train = np.zeros(len(y), dtype=bool)
        mask_test = np.zeros(len(y), dtype=bool)
        mask_train[train] = 1
        mask_test[test] = 1
        # Bug fix: append the boolean masks, not the original index arrays.
        # Appending (train, test) made the second cross_val_score call run
        # the exact same index-based code path, so masks were never tested.
        cv_masks.append((mask_train, mask_test))
    scores_masks = cval.cross_val_score(svm, X, y, cv=cv_masks)
    assert_array_equal(scores_indices, scores_masks)
def test_cross_val_score_precomputed():
    # test for svm with precomputed kernel
    svm = SVC(kernel="precomputed")
    iris = load_iris()
    X, y = iris.data, iris.target
    # A linear precomputed Gram matrix must score exactly like the
    # equivalent linear-kernel SVC on the raw features.
    linear_kernel = np.dot(X, X.T)
    score_precomputed = cval.cross_val_score(svm, linear_kernel, y)
    svm = SVC(kernel="linear")
    score_linear = cval.cross_val_score(svm, X, y)
    assert_array_equal(score_precomputed, score_linear)
    # Error raised for non-square X
    svm = SVC(kernel="precomputed")
    assert_raises(ValueError, cval.cross_val_score, svm, X, y)
    # test error is raised when the precomputed kernel is not array-like
    # or sparse
    assert_raises(ValueError, cval.cross_val_score, svm,
                  linear_kernel.tolist(), y)
def test_cross_val_score_fit_params():
    # fit_params of every supported kind (arrays, sparse matrices, scalars,
    # strings, objects and callables) must reach the estimator's fit intact;
    # MockClassifier.fit validates the array shapes per fold itself.
    clf = MockClassifier()
    n_samples = X.shape[0]
    n_classes = len(np.unique(y))
    DUMMY_INT = 42
    DUMMY_STR = '42'
    DUMMY_OBJ = object()
    def assert_fit_params(clf):
        # Function to test that the values are passed correctly to the
        # classifier arguments for non-array type
        assert_equal(clf.dummy_int, DUMMY_INT)
        assert_equal(clf.dummy_str, DUMMY_STR)
        assert_equal(clf.dummy_obj, DUMMY_OBJ)
    fit_params = {'sample_weight': np.ones(n_samples),
                  'class_prior': np.ones(n_classes) / n_classes,
                  'sparse_sample_weight': W_sparse,
                  'sparse_param': P_sparse,
                  'dummy_int': DUMMY_INT,
                  'dummy_str': DUMMY_STR,
                  'dummy_obj': DUMMY_OBJ,
                  'callback': assert_fit_params}
    cval.cross_val_score(clf, X, y, fit_params=fit_params)
def test_cross_val_score_score_func():
    # A custom scorer built via make_scorer must be called once per fold
    # and its return values collected into the score array.
    clf = MockClassifier()
    _score_func_args = []
    def score_func(y_test, y_predict):
        # Record every invocation so we can count the folds afterwards.
        _score_func_args.append((y_test, y_predict))
        return 1.0
    with warnings.catch_warnings(record=True):
        scoring = make_scorer(score_func)
        score = cval.cross_val_score(clf, X, y, scoring=scoring)
    assert_array_equal(score, [1.0, 1.0, 1.0])
    # Default 3-fold CV: the scorer must have been called exactly 3 times.
    assert len(_score_func_args) == 3
def test_cross_val_score_errors():
    # An object without fit/score methods must be rejected with TypeError.
    class BrokenEstimator:
        pass
    assert_raises(TypeError, cval.cross_val_score, BrokenEstimator(), X)
def test_train_test_split_errors():
    # train_test_split must reject missing arrays, inconsistent sizes and
    # invalid train/test size specifications.
    assert_raises(ValueError, cval.train_test_split)
    # train_size as a fraction must be <= 1.
    assert_raises(ValueError, cval.train_test_split, range(3), train_size=1.1)
    # Fractions summing to more than 1 are invalid.
    assert_raises(ValueError, cval.train_test_split, range(3), test_size=0.6,
                  train_size=0.6)
    assert_raises(ValueError, cval.train_test_split, range(3),
                  test_size=np.float32(0.6), train_size=np.float32(0.6))
    assert_raises(ValueError, cval.train_test_split, range(3),
                  test_size="wrong_type")
    # Absolute sizes exceeding the number of samples are invalid.
    assert_raises(ValueError, cval.train_test_split, range(3), test_size=2,
                  train_size=4)
    # Unknown keyword arguments must raise TypeError.
    assert_raises(TypeError, cval.train_test_split, range(3),
                  some_argument=1.1)
    # Arrays of different lengths cannot be split together.
    assert_raises(ValueError, cval.train_test_split, range(3), range(42))
def test_train_test_split():
    # train_test_split must split dense/sparse/list inputs consistently,
    # support nd arrays, and honor the stratify option.
    X = np.arange(100).reshape((10, 10))
    X_s = coo_matrix(X)
    y = np.arange(10)
    # simple test
    split = cval.train_test_split(X, y, test_size=None, train_size=.5)
    X_train, X_test, y_train, y_test = split
    assert_equal(len(y_test), len(y_train))
    # test correspondence of X and y
    assert_array_equal(X_train[:, 0], y_train * 10)
    assert_array_equal(X_test[:, 0], y_test * 10)
    # conversion of lists to arrays (deprecated?)
    with warnings.catch_warnings(record=True):
        split = cval.train_test_split(X, X_s, y.tolist())
    X_train, X_test, X_s_train, X_s_test, y_train, y_test = split
    assert_array_equal(X_train, X_s_train.toarray())
    assert_array_equal(X_test, X_s_test.toarray())
    # don't convert lists to anything else by default
    split = cval.train_test_split(X, X_s, y.tolist())
    X_train, X_test, X_s_train, X_s_test, y_train, y_test = split
    assert_true(isinstance(y_train, list))
    assert_true(isinstance(y_test, list))
    # allow nd-arrays
    X_4d = np.arange(10 * 5 * 3 * 2).reshape(10, 5, 3, 2)
    y_3d = np.arange(10 * 7 * 11).reshape(10, 7, 11)
    split = cval.train_test_split(X_4d, y_3d)
    # Only the first axis (samples) is split; trailing shapes are kept.
    assert_equal(split[0].shape, (7, 5, 3, 2))
    assert_equal(split[1].shape, (3, 5, 3, 2))
    assert_equal(split[2].shape, (7, 7, 11))
    assert_equal(split[3].shape, (3, 7, 11))
    # test stratification option
    y = np.array([1, 1, 1, 1, 2, 2, 2, 2])
    for test_size, exp_test_size in zip([2, 4, 0.25, 0.5, 0.75],
                                        [2, 4, 2, 4, 6]):
        train, test = cval.train_test_split(y,
                                            test_size=test_size,
                                            stratify=y,
                                            random_state=0)
        assert_equal(len(test), exp_test_size)
        assert_equal(len(test) + len(train), len(y))
        # check the 1:1 ratio of ones and twos in the data is preserved
        assert_equal(np.sum(train == 1), np.sum(train == 2))
def train_test_split_pandas():
    # check cross_val_score doesn't destroy pandas dataframe
    # NOTE(review): name lacks the `test_` prefix, so standard test
    # collectors skip it -- confirm whether this is intentional.
    types = [MockDataFrame]
    try:
        # Only exercise a real DataFrame when pandas is installed.
        from pandas import DataFrame
        types.append(DataFrame)
    except ImportError:
        pass
    for InputFeatureType in types:
        # X dataframe
        X_df = InputFeatureType(X)
        X_train, X_test = cval.train_test_split(X_df)
        # The container type must be preserved on both sides of the split.
        assert_true(isinstance(X_train, InputFeatureType))
        assert_true(isinstance(X_test, InputFeatureType))
def train_test_split_mock_pandas():
    """train_test_split must pass a mock dataframe through type-intact.

    NOTE(review): the name lacks the ``test_`` prefix, so standard test
    collectors will skip it -- confirm whether that is intentional.
    """
    mock_df = MockDataFrame(X)
    train_part, test_part = cval.train_test_split(mock_df)
    for part in (train_part, test_part):
        assert_true(isinstance(part, MockDataFrame))
def test_cross_val_score_with_score_func_classification():
    """Default, explicit-accuracy, and weighted-F1 scorers agree on iris."""
    iris = load_iris()
    clf = SVC(kernel='linear')
    expected = [0.97, 1., 0.97, 0.97, 1.]
    # Default score for a classifier is accuracy.
    default_scores = cval.cross_val_score(clf, iris.data, iris.target, cv=5)
    assert_array_almost_equal(default_scores, expected, 2)
    # Explicit "accuracy" scoring must reproduce the default estimator score.
    acc_scores = cval.cross_val_score(clf, iris.data, iris.target,
                                      scoring="accuracy", cv=5)
    assert_array_almost_equal(acc_scores, expected, 2)
    # Classes are balanced, so weighted F1 equals accuracy here.
    f1_scores = cval.cross_val_score(clf, iris.data, iris.target,
                                     scoring="f1_weighted", cv=5)
    assert_array_almost_equal(f1_scores, expected, 2)
def test_cross_val_score_with_score_func_regression():
    """Default R^2, explicit "r2", neg-MSE, and explained-variance scorers."""
    X, y = make_regression(n_samples=30, n_features=20, n_informative=5,
                           random_state=0)
    reg = Ridge()
    expected_r2 = [0.94, 0.97, 0.97, 0.99, 0.92]
    # Default Ridge score is the coefficient of determination (R^2).
    assert_array_almost_equal(cval.cross_val_score(reg, X, y, cv=5),
                              expected_r2, 2)
    # Asking for "r2" explicitly must give the same numbers.
    assert_array_almost_equal(
        cval.cross_val_score(reg, X, y, scoring="r2", cv=5), expected_r2, 2)
    # MSE is a loss function, so the reported "scores" are negated.
    neg_mse = cval.cross_val_score(reg, X, y, cv=5,
                                   scoring="neg_mean_squared_error")
    assert_array_almost_equal(
        neg_mse, np.array([-763.07, -553.16, -274.38, -273.26, -1681.99]), 2)
    # Explained variance via a custom scorer object.
    ev_scores = cval.cross_val_score(
        reg, X, y, cv=5, scoring=make_scorer(explained_variance_score))
    assert_array_almost_equal(ev_scores, expected_r2, 2)
def test_permutation_score():
    """Sanity-check permutation_test_score on iris.

    Verifies that: the true-label score is high with a tiny p-value,
    dense and sparse inputs give identical results, a custom scorer
    object is accepted, and random labels drive the score down and the
    p-value up.
    """
    iris = load_iris()
    X = iris.data
    X_sparse = coo_matrix(X)
    y = iris.target
    svm = SVC(kernel='linear')
    cv = cval.StratifiedKFold(y, 2)
    # Real labels: the classifier should beat the permuted baselines easily.
    score, scores, pvalue = cval.permutation_test_score(
        svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")
    assert_greater(score, 0.9)
    assert_almost_equal(pvalue, 0.0, 1)
    # A single constant label group must not change the outcome.
    score_label, _, pvalue_label = cval.permutation_test_score(
        svm, X, y, n_permutations=30, cv=cv, scoring="accuracy",
        labels=np.ones(y.size), random_state=0)
    assert_true(score_label == score)
    assert_true(pvalue_label == pvalue)
    # check that we obtain the same results with a sparse representation
    svm_sparse = SVC(kernel='linear')
    cv_sparse = cval.StratifiedKFold(y, 2)
    score_label, _, pvalue_label = cval.permutation_test_score(
        svm_sparse, X_sparse, y, n_permutations=30, cv=cv_sparse,
        scoring="accuracy", labels=np.ones(y.size), random_state=0)
    assert_true(score_label == score)
    assert_true(pvalue_label == pvalue)
    # test with custom scoring object
    def custom_score(y_true, y_pred):
        # Fraction correct minus fraction incorrect, in [-1, 1].
        return (((y_true == y_pred).sum() - (y_true != y_pred).sum())
                / y_true.shape[0])
    scorer = make_scorer(custom_score)
    score, _, pvalue = cval.permutation_test_score(
        svm, X, y, n_permutations=100, scoring=scorer, cv=cv, random_state=0)
    assert_almost_equal(score, .93, 2)
    assert_almost_equal(pvalue, 0.01, 3)
    # set random y
    y = np.mod(np.arange(len(y)), 3)
    # Labels unrelated to X: score near chance, permutation p-value large.
    score, scores, pvalue = cval.permutation_test_score(
        svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")
    assert_less(score, 0.5)
    assert_greater(pvalue, 0.2)
def test_cross_val_generator_with_indices():
    """Every CV generator must yield integer index arrays, never boolean
    masks, and the indices must be usable for fancy indexing."""
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    # explicitly passing indices value is deprecated
    loo = cval.LeaveOneOut(4)
    lpo = cval.LeavePOut(4, 2)
    kf = cval.KFold(4, 2)
    skf = cval.StratifiedKFold(y, 2)
    lolo = cval.LeaveOneLabelOut(labels)
    lopo = cval.LeavePLabelOut(labels, 2)
    ps = cval.PredefinedSplit([1, 1, 2, 2])
    ss = cval.ShuffleSplit(2)
    for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # BUG FIX: this previously re-checked ``train``; the *test*
            # indices must be validated as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            # Smoke-check that both index arrays support fancy indexing.
            X[train], X[test]
            y[train], y[test]
@ignore_warnings
def test_cross_val_generator_with_default_indices():
    """Same as test_cross_val_generator_with_indices but relying on the
    generators' default index behavior (deprecation warnings ignored)."""
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    loo = cval.LeaveOneOut(4)
    lpo = cval.LeavePOut(4, 2)
    kf = cval.KFold(4, 2)
    skf = cval.StratifiedKFold(y, 2)
    lolo = cval.LeaveOneLabelOut(labels)
    lopo = cval.LeavePLabelOut(labels, 2)
    ss = cval.ShuffleSplit(2)
    ps = cval.PredefinedSplit([1, 1, 2, 2])
    for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # BUG FIX: this previously re-checked ``train``; the *test*
            # indices must be validated as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            # Smoke-check that both index arrays support fancy indexing.
            X[train], X[test]
            y[train], y[test]
def test_shufflesplit_errors():
    """ShuffleSplit rejects inconsistent test_size/train_size settings."""
    bad_kwargs = [
        dict(test_size=2.0),                     # fraction > 1
        dict(test_size=1.0),                     # leaves no training data
        dict(test_size=0.1, train_size=0.95),    # fractions sum past 1
        dict(test_size=11),                      # more than n samples
        dict(test_size=10),                      # leaves no training data
        dict(test_size=8, train_size=3),         # counts sum past n
        dict(train_size=1j),                     # non-real size
        dict(test_size=None, train_size=None),   # both unspecified
    ]
    for kwargs in bad_kwargs:
        assert_raises(ValueError, cval.ShuffleSplit, 10, **kwargs)
def test_shufflesplit_reproducible():
    """A seeded ShuffleSplit must yield the same train sets on every pass."""
    ss = cval.ShuffleSplit(10, random_state=21)
    first_pass = [train for train, _ in ss]
    second_pass = [train for train, _ in ss]
    assert_array_equal(first_pass, second_pass)
def test_safe_split_with_precomputed_kernel():
    """_safe_split must slice a precomputed Gram matrix consistently with X."""
    clf = SVC(gamma="scale")
    clfp = SVC(kernel="precomputed")
    iris = load_iris()
    X, y = iris.data, iris.target
    K = np.dot(X, X.T)
    cv = cval.ShuffleSplit(X.shape[0], test_size=0.25, random_state=0)
    tr, te = next(iter(cv))
    # Training slice: kernel rows/cols must match the raw-feature rows.
    X_tr, y_tr = cval._safe_split(clf, X, y, tr)
    K_tr, y_tr2 = cval._safe_split(clfp, K, y, tr)
    assert_array_almost_equal(K_tr, np.dot(X_tr, X_tr.T))
    # Test slice: kernel is evaluated between test and *train* rows.
    X_te, y_te = cval._safe_split(clf, X, y, te, tr)
    K_te, y_te2 = cval._safe_split(clfp, K, y, te, tr)
    assert_array_almost_equal(K_te, np.dot(X_te, X_tr.T))
def test_cross_val_score_allow_nans():
    """cross_val_score must accept NaN-bearing input when a pipeline
    imputes missing values before the classifier."""
    X = np.arange(200, dtype=np.float64).reshape(10, -1)
    X[2, :] = np.nan
    # FIX: use floor division so the repeat count is an int under
    # Python 3 true division (np.repeat rejects float counts).
    y = np.repeat([0, 1], X.shape[0] // 2)
    p = Pipeline([
        ('imputer', Imputer(strategy='mean', missing_values='NaN')),
        ('classifier', MockClassifier()),
    ])
    # Must run without raising on the NaN row.
    cval.cross_val_score(p, X, y, cv=5)
def test_train_test_split_allow_nans():
    """train_test_split must not choke on NaNs in the feature matrix."""
    X = np.arange(200, dtype=np.float64).reshape(10, -1)
    X[2, :] = np.nan
    # FIX: use floor division so the repeat count is an int under
    # Python 3 true division (np.repeat rejects float counts).
    y = np.repeat([0, 1], X.shape[0] // 2)
    # Must run without raising on the NaN row.
    cval.train_test_split(X, y, test_size=0.2, random_state=42)
def test_permutation_test_score_allow_nans():
    """permutation_test_score must accept NaN-bearing input when a
    pipeline imputes missing values before the classifier."""
    X = np.arange(200, dtype=np.float64).reshape(10, -1)
    X[2, :] = np.nan
    # FIX: use floor division so the repeat count is an int under
    # Python 3 true division (np.repeat rejects float counts).
    y = np.repeat([0, 1], X.shape[0] // 2)
    p = Pipeline([
        ('imputer', Imputer(strategy='mean', missing_values='NaN')),
        ('classifier', MockClassifier()),
    ])
    # Must run without raising on the NaN row.
    cval.permutation_test_score(p, X, y, cv=5)
def test_check_cv_return_types():
    """check_cv stratifies only single-output classification targets."""
    X9 = np.ones((9, 2))
    X5 = np.ones((5, 2))
    cases = [
        # (features, target, classifier flag, expected CV class)
        (X9, None, False, cval.KFold),
        # Binary and multiclass targets can be stratified.
        (X9, np.array([0, 1, 0, 1, 0, 0, 1, 1, 1]), True,
         cval.StratifiedKFold),
        (X9, np.array([0, 1, 0, 1, 2, 1, 2, 0, 2]), True,
         cval.StratifiedKFold),
        # Multilabel / multioutput targets fall back to plain KFold.
        (X5, [[1, 0, 1], [1, 1, 0], [0, 0, 0], [0, 1, 1], [1, 0, 0]], True,
         cval.KFold),
        (X5, np.array([[1, 2], [0, 3], [0, 0], [3, 1], [2, 0]]), True,
         cval.KFold),
    ]
    for features, target, is_classifier, expected_cls in cases:
        cv = cval.check_cv(3, features, target, classifier=is_classifier)
        assert_true(isinstance(cv, expected_cls))
def test_cross_val_score_multilabel():
    """Precision scorers with each averaging mode run under cross_val_score
    on a small multilabel problem and yield the expected fold scores."""
    X = np.array([[-3, 4], [2, 4], [3, 3], [0, 2], [-3, 1],
                  [-2, 1], [0, 0], [-2, -1], [-1, -2], [1, -2]])
    y = np.array([[1, 1], [0, 1], [0, 1], [0, 1], [1, 1],
                  [0, 1], [1, 0], [1, 1], [1, 0], [0, 0]])
    clf = KNeighborsClassifier(n_neighbors=1)
    expected = [
        ('micro', [1, 1 / 2, 3 / 4, 1 / 2, 1 / 3]),
        ('macro', [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4]),
        ('samples', [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4]),
    ]
    for average, want in expected:
        scorer = make_scorer(precision_score, average=average)
        got = cval.cross_val_score(clf, X, y, scoring=scorer, cv=5)
        assert_almost_equal(got, want)
def test_cross_val_predict():
    """cross_val_predict must match a manual out-of-fold loop and cope with
    several CV generators, sparse input, and an unsupervised estimator."""
    boston = load_boston()
    X, y = boston.data, boston.target
    kfold = cval.KFold(len(boston.target))
    est = Ridge()
    # Reference: hand-rolled out-of-fold predictions over the same folds.
    expected = np.zeros_like(y)
    for train_idx, test_idx in kfold:
        est.fit(X[train_idx], y[train_idx])
        expected[test_idx] = est.predict(X[test_idx])
    assert_array_almost_equal(
        cval.cross_val_predict(est, X, y, cv=kfold), expected)
    # Default cv: one prediction per sample.
    assert_equal(len(cval.cross_val_predict(est, X, y)), len(y))
    # Leave-one-out.
    loo = cval.LeaveOneOut(len(y))
    assert_equal(len(cval.cross_val_predict(est, X, y, cv=loo)), len(y))
    # Sparse features (zero out small entries before sparsifying).
    Xsp = X.copy()
    Xsp *= (Xsp > np.median(Xsp))
    sparse_preds = cval.cross_val_predict(est, coo_matrix(Xsp), y)
    assert_array_almost_equal(len(sparse_preds), len(y))
    # Unsupervised estimator: no y is passed to fit.
    assert_equal(len(cval.cross_val_predict(KMeans(), X)), len(y))

    def bad_cv():
        # Folds whose test sets do not form a partition of the samples.
        for _ in range(4):
            yield np.array([0, 1, 2, 3]), np.array([4, 5, 6, 7, 8])
    assert_raises(ValueError, cval.cross_val_predict, est, X, y, cv=bad_cv())
def test_cross_val_predict_input_types():
    """cross_val_predict accepts sparse, list, and 3-d array inputs."""
    clf = Ridge()
    # Smoke test with dense input.
    predictions = cval.cross_val_predict(clf, X, y)
    assert_equal(predictions.shape, (10,))
    # Multioutput y (X reused as a 2-column target).
    with ignore_warnings(category=ConvergenceWarning):
        predictions = cval.cross_val_predict(clf, X_sparse, X)
    assert_equal(predictions.shape, (10, 2))
    # Sparse features with a 1-d target.
    predictions = cval.cross_val_predict(clf, X_sparse, y)
    assert_array_equal(predictions.shape, (10,))
    # Multioutput y again, checked with the array-equality helper.
    with ignore_warnings(category=ConvergenceWarning):
        predictions = cval.cross_val_predict(clf, X_sparse, X)
    assert_array_equal(predictions.shape, (10, 2))

    # X and y supplied as plain lists must reach the estimator as lists.
    def list_check(value):
        return isinstance(value, list)
    clf = CheckingClassifier(check_X=list_check)
    cval.cross_val_predict(clf, X.tolist(), y.tolist())
    clf = CheckingClassifier(check_y=list_check)
    cval.cross_val_predict(clf, X, y.tolist())

    # A 3-d feature array must be passed through with its rank intact.
    def check_3d(value):
        return value.ndim == 3
    clf = CheckingClassifier(check_X=check_3d)
    predictions = cval.cross_val_predict(clf, X[:, :, np.newaxis], y)
    assert_array_equal(predictions.shape, (10,))
def test_cross_val_predict_pandas():
    """cross_val_predict must hand dataframe/series inputs to the estimator
    with their original types intact."""
    type_pairs = [(MockDataFrame, MockDataFrame)]
    try:
        from pandas import Series, DataFrame
    except ImportError:
        pass
    else:
        type_pairs.append((Series, DataFrame))
    for target_cls, frame_cls in type_pairs:
        X_df, y_ser = frame_cls(X), target_cls(y)

        # Bind the loop variables as defaults so the checks are stable.
        def check_df(value, frame_cls=frame_cls):
            return isinstance(value, frame_cls)

        def check_series(value, target_cls=target_cls):
            return isinstance(value, target_cls)

        clf = CheckingClassifier(check_X=check_df, check_y=check_series)
        cval.cross_val_predict(clf, X_df, y_ser)
def test_sparse_fit_params():
    """Sparse matrices inside fit_params must be split per fold and passed
    through without breaking cross_val_score."""
    iris = load_iris()
    X, y = iris.data, iris.target
    clf = MockClassifier()
    sparse_weights = coo_matrix(np.eye(X.shape[0]))
    scores = cval.cross_val_score(
        clf, X, y, fit_params={'sparse_sample_weight': sparse_weights})
    # MockClassifier scores 1.0 on each of the default 3 folds.
    assert_array_equal(scores, np.ones(3))
def test_check_is_partition():
    """_check_is_partition detects both missing and duplicated indices."""
    indices = np.arange(100)
    assert_true(cval._check_is_partition(indices, 100))
    # Removing one element leaves a hole in the range.
    assert_false(cval._check_is_partition(np.delete(indices, 23), 100))
    # Overwriting position 0 makes the value 23 appear twice.
    indices[0] = 23
    assert_false(cval._check_is_partition(indices, 100))
def test_cross_val_predict_sparse_prediction():
    """Sparse and dense inputs to cross_val_predict must agree elementwise."""
    X, y = make_multilabel_classification(n_classes=2, n_labels=1,
                                          allow_unlabeled=False,
                                          return_indicator=True,
                                          random_state=1)
    classif = OneVsRestClassifier(SVC(kernel='linear'))
    dense_preds = cval.cross_val_predict(classif, X, y, cv=10)
    sparse_preds = cval.cross_val_predict(
        classif, csr_matrix(X), csr_matrix(y), cv=10)
    assert_array_almost_equal(sparse_preds.toarray(), dense_preds)
|
[
"sklearn.datasets.load_digits",
"sklearn.datasets.load_iris",
"numpy.sum",
"numpy.abs",
"sklearn.utils.testing.assert_raise_message",
"sklearn.datasets.make_multilabel_classification",
"sklearn.utils.testing.assert_equal",
"sklearn.utils.mocking.CheckingClassifier",
"numpy.ones",
"sklearn.utils.testing.assert_true",
"sklearn.datasets.load_boston",
"numpy.arange",
"sklearn.cross_validation.StratifiedShuffleSplit",
"sklearn.cross_validation.LeavePOut",
"sklearn.svm.SVC",
"sklearn.cross_validation.LeaveOneOut",
"numpy.unique",
"sklearn.utils.testing.assert_raises",
"sklearn.cross_validation.permutation_test_score",
"numpy.lib.arraysetops.intersect1d",
"numpy.zeros_like",
"warnings.simplefilter",
"sklearn.utils.testing.ignore_warnings",
"sklearn.cluster.KMeans",
"sklearn.datasets.make_regression",
"sklearn.cross_validation._safe_split",
"numpy.random.RandomState",
"sklearn.cross_validation.check_cv",
"sklearn.cross_validation._check_is_partition",
"sklearn.utils.testing.assert_warns_message",
"scipy.sparse.coo_matrix",
"sklearn.utils.testing.assert_array_equal",
"warnings.catch_warnings",
"sklearn.metrics.make_scorer",
"numpy.int32",
"numpy.max",
"numpy.intersect1d",
"sklearn.linear_model.Ridge",
"numpy.repeat",
"sklearn.cross_validation.LeavePLabelOut",
"sklearn.cross_validation.cross_val_predict",
"sklearn.externals.six.moves.zip",
"numpy.median",
"sklearn.preprocessing.Imputer",
"numpy.asarray",
"scipy.stats.binom",
"numpy.min",
"sklearn.utils.mocking.MockDataFrame",
"scipy.sparse.csr_matrix",
"sklearn.utils.testing.assert_almost_equal",
"sklearn.cross_validation.KFold",
"numpy.dot",
"sklearn.utils.testing.assert_array_almost_equal",
"numpy.concatenate",
"numpy.delete",
"sklearn.cross_validation.train_test_split",
"sklearn.cross_validation.cross_val_score",
"sklearn.cross_validation.LeaveOneLabelOut",
"sklearn.utils.testing.assert_greater",
"numpy.float32",
"sklearn.utils.testing.assert_less",
"sklearn.cross_validation.LabelKFold",
"sklearn.cross_validation.ShuffleSplit",
"sklearn.cross_validation.LabelShuffleSplit",
"sklearn.neighbors.KNeighborsClassifier",
"numpy.array",
"sklearn.cross_validation.PredefinedSplit",
"numpy.eye",
"sklearn.cross_validation.StratifiedKFold",
"numpy.in1d"
] |
[((4572, 4588), 'numpy.ones', 'np.ones', (['(10, 2)'], {}), '((10, 2))\n', (4579, 4588), True, 'import numpy as np\n'), ((4600, 4613), 'scipy.sparse.coo_matrix', 'coo_matrix', (['X'], {}), '(X)\n', (4610, 4613), False, 'from scipy.sparse import coo_matrix\n'), ((1044, 1069), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (1067, 1069), False, 'import warnings\n'), ((1075, 1106), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (1096, 1106), False, 'import warnings\n'), ((4744, 4753), 'numpy.eye', 'np.eye', (['(5)'], {}), '(5)\n', (4750, 4753), True, 'import numpy as np\n'), ((4827, 4840), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (4836, 4840), True, 'import numpy as np\n'), ((5917, 5958), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['iterations', 'expected_n_iter'], {}), '(iterations, expected_n_iter)\n', (5929, 5958), False, 'from sklearn.utils.testing import assert_equal\n'), ((6159, 6202), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.KFold', '(3)', '(4)'], {}), '(ValueError, cval.KFold, 3, 4)\n', (6172, 6202), False, 'from sklearn.utils.testing import assert_raises\n'), ((6333, 6424), 'sklearn.utils.testing.assert_warns_message', 'assert_warns_message', (['Warning', '"""The least populated class"""', 'cval.StratifiedKFold', 'y', '(3)'], {}), "(Warning, 'The least populated class', cval.\n StratifiedKFold, y, 3)\n", (6353, 6424), False, 'from sklearn.utils.testing import assert_warns_message\n'), ((6830, 6883), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedKFold', 'y', '(3)'], {}), '(ValueError, cval.StratifiedKFold, y, 3)\n', (6843, 6883), False, 'from sklearn.utils.testing import assert_raises\n'), ((6930, 6973), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.KFold', '(2)', '(0)'], {}), '(ValueError, cval.KFold, 2, 0)\n', (6943, 6973), False, 'from sklearn.utils.testing 
import assert_raises\n'), ((6978, 7021), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.KFold', '(2)', '(1)'], {}), '(ValueError, cval.KFold, 2, 1)\n', (6991, 7021), False, 'from sklearn.utils.testing import assert_raises\n'), ((7137, 7211), 'sklearn.utils.testing.assert_raise_message', 'assert_raise_message', (['ValueError', 'error_string', 'cval.StratifiedKFold', 'y', '(0)'], {}), '(ValueError, error_string, cval.StratifiedKFold, y, 0)\n', (7157, 7211), False, 'from sklearn.utils.testing import assert_raise_message\n'), ((7241, 7315), 'sklearn.utils.testing.assert_raise_message', 'assert_raise_message', (['ValueError', 'error_string', 'cval.StratifiedKFold', 'y', '(1)'], {}), '(ValueError, error_string, cval.StratifiedKFold, y, 1)\n', (7261, 7315), False, 'from sklearn.utils.testing import assert_raise_message\n'), ((7375, 7420), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.KFold', '(2.5)', '(2)'], {}), '(ValueError, cval.KFold, 2.5, 2)\n', (7388, 7420), False, 'from sklearn.utils.testing import assert_raises\n'), ((7461, 7506), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.KFold', '(5)', '(1.5)'], {}), '(ValueError, cval.KFold, 5, 1.5)\n', (7474, 7506), False, 'from sklearn.utils.testing import assert_raises\n'), ((7511, 7566), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedKFold', 'y', '(1.5)'], {}), '(ValueError, cval.StratifiedKFold, y, 1.5)\n', (7524, 7566), False, 'from sklearn.utils.testing import assert_raises\n'), ((7659, 7677), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(300)', '(3)'], {}), '(300, 3)\n', (7669, 7677), True, 'from sklearn import cross_validation as cval\n'), ((7854, 7871), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(17)', '(3)'], {}), '(17, 3)\n', (7864, 7871), True, 'from sklearn import cross_validation as cval\n'), ((8109, 8141), 'sklearn.utils.testing.assert_array_equal', 
'assert_array_equal', (['test', '[0, 1]'], {}), '(test, [0, 1])\n', (8127, 8141), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8146, 8179), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[2, 3]'], {}), '(train, [2, 3])\n', (8164, 8179), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8216, 8248), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', '[2, 3]'], {}), '(test, [2, 3])\n', (8234, 8248), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8253, 8286), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[0, 1]'], {}), '(train, [0, 1])\n', (8271, 8286), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8359, 8394), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', '[0, 1, 2]'], {}), '(test, [0, 1, 2])\n', (8377, 8394), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8399, 8432), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[3, 4]'], {}), '(train, [3, 4])\n', (8417, 8432), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8469, 8501), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', '[3, 4]'], {}), '(test, [3, 4])\n', (8487, 8501), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8506, 8542), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[0, 1, 2]'], {}), '(train, [0, 1, 2])\n', (8524, 8542), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8854, 8886), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', '[0, 2]'], {}), '(test, [0, 2])\n', (8872, 8886), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8891, 8924), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[1, 3]'], {}), '(train, [1, 3])\n', (8909, 8924), False, 'from 
sklearn.utils.testing import assert_array_equal\n'), ((8961, 8993), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', '[1, 3]'], {}), '(test, [1, 3])\n', (8979, 8993), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((8998, 9031), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[0, 2]'], {}), '(train, [0, 2])\n', (9016, 9031), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((9134, 9172), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', '[0, 1, 3, 4]'], {}), '(test, [0, 1, 3, 4])\n', (9152, 9172), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((9177, 9213), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[2, 5, 6]'], {}), '(train, [2, 5, 6])\n', (9195, 9213), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((9250, 9285), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', '[2, 5, 6]'], {}), '(test, [2, 5, 6])\n', (9268, 9285), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((9290, 9329), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', '[0, 1, 3, 4]'], {}), '(train, [0, 1, 3, 4])\n', (9308, 9329), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((11413, 11461), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(300)', '(3)'], {'shuffle': '(True)', 'random_state': '(0)'}), '(300, 3, shuffle=True, random_state=0)\n', (11423, 11461), True, 'from sklearn import cross_validation as cval\n'), ((11472, 11486), 'numpy.arange', 'np.arange', (['(300)'], {}), '(300)\n', (11481, 11486), True, 'import numpy as np\n'), ((11892, 11926), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['all_folds', 'ind'], {}), '(all_folds, ind)\n', (11910, 11926), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((12281, 12294), 'sklearn.externals.six.moves.zip', 'zip', (['kf0', 'kf1'], 
{}), '(kf0, kf1)\n', (12284, 12294), False, 'from sklearn.externals.six.moves import zip\n'), ((12979, 12992), 'sklearn.datasets.load_digits', 'load_digits', ([], {}), '()\n', (12990, 12992), False, 'from sklearn.datasets import load_digits\n'), ((13055, 13077), 'sklearn.svm.SVC', 'SVC', ([], {'C': '(10)', 'gamma': '(0.005)'}), '(C=10, gamma=0.005)\n', (13058, 13077), False, 'from sklearn.svm import SVC\n'), ((13103, 13134), 'sklearn.cross_validation.KFold', 'cval.KFold', (['n', '(5)'], {'shuffle': '(False)'}), '(n, 5, shuffle=False)\n', (13113, 13134), True, 'from sklearn import cross_validation as cval\n'), ((13204, 13236), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['(0.88)', 'mean_score'], {}), '(0.88, mean_score)\n', (13218, 13236), False, 'from sklearn.utils.testing import assert_greater\n'), ((13241, 13273), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['mean_score', '(0.85)'], {}), '(mean_score, 0.85)\n', (13255, 13273), False, 'from sklearn.utils.testing import assert_greater\n'), ((13490, 13536), 'sklearn.cross_validation.KFold', 'cval.KFold', (['n', '(5)'], {'shuffle': '(True)', 'random_state': '(0)'}), '(n, 5, shuffle=True, random_state=0)\n', (13500, 13536), True, 'from sklearn import cross_validation as cval\n'), ((13606, 13638), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['mean_score', '(0.95)'], {}), '(mean_score, 0.95)\n', (13620, 13638), False, 'from sklearn.utils.testing import assert_greater\n'), ((13649, 13695), 'sklearn.cross_validation.KFold', 'cval.KFold', (['n', '(5)'], {'shuffle': '(True)', 'random_state': '(1)'}), '(n, 5, shuffle=True, random_state=1)\n', (13659, 13695), True, 'from sklearn import cross_validation as cval\n'), ((13765, 13797), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['mean_score', '(0.95)'], {}), '(mean_score, 0.95)\n', (13779, 13797), False, 'from sklearn.utils.testing import assert_greater\n'), ((14188, 14214), 'sklearn.cross_validation.StratifiedKFold', 
'cval.StratifiedKFold', (['y', '(5)'], {}), '(y, 5)\n', (14208, 14214), True, 'from sklearn import cross_validation as cval\n'), ((14284, 14316), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['(0.88)', 'mean_score'], {}), '(0.88, mean_score)\n', (14298, 14316), False, 'from sklearn.utils.testing import assert_greater\n'), ((14321, 14353), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['mean_score', '(0.85)'], {}), '(mean_score, 0.85)\n', (14335, 14353), False, 'from sklearn.utils.testing import assert_greater\n'), ((14390, 14414), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (14411, 14414), True, 'import numpy as np\n'), ((14861, 14877), 'numpy.unique', 'np.unique', (['folds'], {}), '(folds)\n', (14870, 14877), True, 'import numpy as np\n'), ((15065, 15082), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (15074, 15082), True, 'import numpy as np\n'), ((15218, 15250), 'numpy.asarray', 'np.asarray', (['labels'], {'dtype': 'object'}), '(labels, dtype=object)\n', (15228, 15250), True, 'import numpy as np\n'), ((15274, 15314), 'sklearn.cross_validation.LabelKFold', 'cval.LabelKFold', (['labels'], {'n_folds': 'n_folds'}), '(labels, n_folds=n_folds)\n', (15289, 15314), True, 'from sklearn import cross_validation as cval\n'), ((15890, 15922), 'numpy.asarray', 'np.asarray', (['labels'], {'dtype': 'object'}), '(labels, dtype=object)\n', (15900, 15922), True, 'import numpy as np\n'), ((16287, 16303), 'numpy.unique', 'np.unique', (['folds'], {}), '(folds)\n', (16296, 16303), True, 'import numpy as np\n'), ((16491, 16508), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (16500, 16508), True, 'import numpy as np\n'), ((16654, 16694), 'sklearn.cross_validation.LabelKFold', 'cval.LabelKFold', (['labels'], {'n_folds': 'n_folds'}), '(labels, n_folds=n_folds)\n', (16669, 16694), True, 'from sklearn import cross_validation as cval\n'), ((16838, 16863), 'numpy.array', 'np.array', (['[1, 1, 1, 2, 2]'], 
{}), '([1, 1, 1, 2, 2])\n', (16846, 16863), True, 'import numpy as np\n'), ((16868, 16929), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.LabelKFold', 'labels'], {'n_folds': '(3)'}), '(ValueError, cval.LabelKFold, labels, n_folds=3)\n', (16881, 16929), False, 'from sklearn.utils.testing import assert_raises\n'), ((16968, 17020), 'sklearn.cross_validation.ShuffleSplit', 'cval.ShuffleSplit', (['(10)'], {'test_size': '(0.2)', 'random_state': '(0)'}), '(10, test_size=0.2, random_state=0)\n', (16985, 17020), True, 'from sklearn import cross_validation as cval\n'), ((17031, 17081), 'sklearn.cross_validation.ShuffleSplit', 'cval.ShuffleSplit', (['(10)'], {'test_size': '(2)', 'random_state': '(0)'}), '(10, test_size=2, random_state=0)\n', (17048, 17081), True, 'from sklearn import cross_validation as cval\n'), ((17283, 17306), 'sklearn.externals.six.moves.zip', 'zip', (['ss1', 'ss2', 'ss3', 'ss4'], {}), '(ss1, ss2, ss3, ss4)\n', (17286, 17306), False, 'from sklearn.externals.six.moves import zip\n'), ((17606, 17639), 'numpy.asarray', 'np.asarray', (['[0, 1, 1, 1, 2, 2, 2]'], {}), '([0, 1, 1, 1, 2, 2, 2])\n', (17616, 17639), True, 'import numpy as np\n'), ((17718, 17783), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y', '(3)', '(0.2)'], {}), '(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.2)\n', (17731, 17783), False, 'from sklearn.utils.testing import assert_raises\n'), ((17869, 17932), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y', '(3)', '(2)'], {}), '(ValueError, cval.StratifiedShuffleSplit, y, 3, 2)\n', (17882, 17932), False, 'from sklearn.utils.testing import assert_raises\n'), ((18024, 18090), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y', '(3)', '(3)', '(2)'], {}), '(ValueError, cval.StratifiedShuffleSplit, y, 3, 3, 2)\n', (18037, 18090), False, 'from 
sklearn.utils.testing import assert_raises\n'), ((18100, 18139), 'numpy.asarray', 'np.asarray', (['[0, 0, 0, 1, 1, 1, 2, 2, 2]'], {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2])\n', (18110, 18139), True, 'import numpy as np\n'), ((18210, 18280), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y', '(3)', '(0.5)', '(0.6)'], {}), '(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.5, 0.6)\n', (18223, 18280), False, 'from sklearn.utils.testing import assert_raises\n'), ((18285, 18353), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y', '(3)', '(8)', '(0.6)'], {}), '(ValueError, cval.StratifiedShuffleSplit, y, 3, 8, 0.6)\n', (18298, 18353), False, 'from sklearn.utils.testing import assert_raises\n'), ((18358, 18426), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y', '(3)', '(0.6)', '(8)'], {}), '(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.6, 8)\n', (18371, 18426), False, 'from sklearn.utils.testing import assert_raises\n'), ((18472, 18543), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y'], {'train_size': '(2)'}), '(ValueError, cval.StratifiedShuffleSplit, y, train_size=2)\n', (18485, 18543), False, 'from sklearn.utils.testing import assert_raises\n'), ((18548, 18618), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.StratifiedShuffleSplit', 'y'], {'test_size': '(2)'}), '(ValueError, cval.StratifiedShuffleSplit, y, test_size=2)\n', (18561, 18618), False, 'from sklearn.utils.testing import assert_raises\n'), ((22000, 22076), 'sklearn.cross_validation.StratifiedShuffleSplit', 'cval.StratifiedShuffleSplit', (['labels'], {'n_iter': '(1)', 'test_size': '(0.5)', 'random_state': '(0)'}), '(labels, n_iter=1, test_size=0.5, random_state=0)\n', (22027, 22076), True, 'from sklearn import cross_validation as cval\n'), ((22618, 22645), 
'sklearn.cross_validation.PredefinedSplit', 'cval.PredefinedSplit', (['folds'], {}), '(folds)\n', (22638, 22645), True, 'from sklearn import cross_validation as cval\n'), ((22753, 22791), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['ps_train', 'kf_train'], {}), '(ps_train, kf_train)\n', (22771, 22791), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((22796, 22832), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['ps_test', 'kf_test'], {}), '(ps_test, kf_test)\n', (22814, 22832), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((24629, 24663), 'numpy.array', 'np.array', (['[0, 1, 2, 1, 1, 2, 0, 0]'], {}), '([0, 1, 2, 1, 1, 2, 0, 0])\n', (24637, 24663), True, 'import numpy as np\n'), ((24686, 24713), 'numpy.array', 'np.array', (['labels'], {'copy': '(True)'}), '(labels, copy=True)\n', (24694, 24713), True, 'import numpy as np\n'), ((24725, 24754), 'sklearn.cross_validation.LeaveOneLabelOut', 'cval.LeaveOneLabelOut', (['labels'], {}), '(labels)\n', (24746, 24754), True, 'from sklearn import cross_validation as cval\n'), ((24775, 24813), 'sklearn.cross_validation.LeaveOneLabelOut', 'cval.LeaveOneLabelOut', (['labels_changing'], {}), '(labels_changing)\n', (24796, 24813), True, 'from sklearn import cross_validation as cval\n'), ((24825, 24857), 'sklearn.cross_validation.LeavePLabelOut', 'cval.LeavePLabelOut', (['labels'], {'p': '(2)'}), '(labels, p=2)\n', (24844, 24857), True, 'from sklearn import cross_validation as cval\n'), ((24878, 24919), 'sklearn.cross_validation.LeavePLabelOut', 'cval.LeavePLabelOut', (['labels_changing'], {'p': '(2)'}), '(labels_changing, p=2)\n', (24897, 24919), True, 'from sklearn import cross_validation as cval\n'), ((25932, 25970), 'sklearn.utils.mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_X': 'list_check'}), '(check_X=list_check)\n', (25950, 25970), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((26045, 
26083), 'sklearn.utils.mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_y': 'list_check'}), '(check_y=list_check)\n', (26063, 26083), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((26143, 26220), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.cross_val_score', 'clf', 'X', 'y'], {'scoring': '"""sklearn"""'}), "(ValueError, cval.cross_val_score, clf, X, y, scoring='sklearn')\n", (26156, 26220), False, 'from sklearn.utils.testing import assert_raises\n'), ((26349, 26383), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X_3d', 'y'], {}), '(clf, X_3d, y)\n', (26369, 26383), True, 'from sklearn import cross_validation as cval\n'), ((26430, 26491), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.cross_val_score', 'clf', 'X_3d', 'y'], {}), '(ValueError, cval.cross_val_score, clf, X_3d, y)\n', (26443, 26491), False, 'from sklearn.utils.testing import assert_raises\n'), ((27246, 27266), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (27249, 27266), False, 'from sklearn.svm import SVC\n'), ((27278, 27289), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (27287, 27289), False, 'from sklearn.datasets import load_iris\n'), ((27384, 27430), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['svm', 'X', 'y'], {'cv': 'cv_indices'}), '(svm, X, y, cv=cv_indices)\n', (27404, 27430), True, 'from sklearn import cross_validation as cval\n'), ((27744, 27788), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['svm', 'X', 'y'], {'cv': 'cv_masks'}), '(svm, X, y, cv=cv_masks)\n', (27764, 27788), True, 'from sklearn import cross_validation as cval\n'), ((27793, 27841), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['scores_indices', 'scores_masks'], {}), '(scores_indices, scores_masks)\n', (27811, 27841), False, 'from sklearn.utils.testing import 
assert_array_equal\n'), ((27937, 27962), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""precomputed"""'}), "(kernel='precomputed')\n", (27940, 27962), False, 'from sklearn.svm import SVC\n'), ((27974, 27985), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (27983, 27985), False, 'from sklearn.datasets import load_iris\n'), ((28040, 28054), 'numpy.dot', 'np.dot', (['X', 'X.T'], {}), '(X, X.T)\n', (28046, 28054), True, 'import numpy as np\n'), ((28079, 28122), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['svm', 'linear_kernel', 'y'], {}), '(svm, linear_kernel, y)\n', (28099, 28122), True, 'from sklearn import cross_validation as cval\n'), ((28133, 28153), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (28136, 28153), False, 'from sklearn.svm import SVC\n'), ((28173, 28204), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['svm', 'X', 'y'], {}), '(svm, X, y)\n', (28193, 28204), True, 'from sklearn import cross_validation as cval\n'), ((28209, 28260), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['score_precomputed', 'score_linear'], {}), '(score_precomputed, score_linear)\n', (28227, 28260), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((28308, 28333), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""precomputed"""'}), "(kernel='precomputed')\n", (28311, 28333), False, 'from sklearn.svm import SVC\n'), ((28338, 28396), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.cross_val_score', 'svm', 'X', 'y'], {}), '(ValueError, cval.cross_val_score, svm, X, y)\n', (28351, 28396), False, 'from sklearn.utils.testing import assert_raises\n'), ((29476, 29530), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X', 'y'], {'fit_params': 'fit_params'}), '(clf, X, y, fit_params=fit_params)\n', (29496, 29530), True, 'from sklearn import cross_validation as cval\n'), ((29896, 29938), 
'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['score', '[1.0, 1.0, 1.0]'], {}), '(score, [1.0, 1.0, 1.0])\n', (29914, 29938), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((30170, 30218), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.train_test_split'], {}), '(ValueError, cval.train_test_split)\n', (30183, 30218), False, 'from sklearn.utils.testing import assert_raises\n'), ((31014, 31027), 'scipy.sparse.coo_matrix', 'coo_matrix', (['X'], {}), '(X)\n', (31024, 31027), False, 'from scipy.sparse import coo_matrix\n'), ((31036, 31049), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (31045, 31049), True, 'import numpy as np\n'), ((31081, 31140), 'sklearn.cross_validation.train_test_split', 'cval.train_test_split', (['X', 'y'], {'test_size': 'None', 'train_size': '(0.5)'}), '(X, y, test_size=None, train_size=0.5)\n', (31102, 31140), True, 'from sklearn import cross_validation as cval\n'), ((31270, 31317), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['X_train[:, 0]', '(y_train * 10)'], {}), '(X_train[:, 0], y_train * 10)\n', (31288, 31317), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((31322, 31367), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['X_test[:, 0]', '(y_test * 10)'], {}), '(X_test[:, 0], y_test * 10)\n', (31340, 31367), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((32100, 32133), 'sklearn.cross_validation.train_test_split', 'cval.train_test_split', (['X_4d', 'y_3d'], {}), '(X_4d, y_3d)\n', (32121, 32133), True, 'from sklearn import cross_validation as cval\n'), ((32138, 32180), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['split[0].shape', '(7, 5, 3, 2)'], {}), '(split[0].shape, (7, 5, 3, 2))\n', (32150, 32180), False, 'from sklearn.utils.testing import assert_equal\n'), ((32185, 32227), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['split[1].shape', '(3, 5, 3, 2)'], 
{}), '(split[1].shape, (3, 5, 3, 2))\n', (32197, 32227), False, 'from sklearn.utils.testing import assert_equal\n'), ((32232, 32272), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['split[2].shape', '(7, 7, 11)'], {}), '(split[2].shape, (7, 7, 11))\n', (32244, 32272), False, 'from sklearn.utils.testing import assert_equal\n'), ((32277, 32317), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['split[3].shape', '(3, 7, 11)'], {}), '(split[3].shape, (3, 7, 11))\n', (32289, 32317), False, 'from sklearn.utils.testing import assert_equal\n'), ((32360, 32394), 'numpy.array', 'np.array', (['[1, 1, 1, 1, 2, 2, 2, 2]'], {}), '([1, 1, 1, 1, 2, 2, 2, 2])\n', (32368, 32394), True, 'import numpy as np\n'), ((32431, 32476), 'sklearn.externals.six.moves.zip', 'zip', (['[2, 4, 0.25, 0.5, 0.75]', '[2, 4, 2, 4, 6]'], {}), '([2, 4, 0.25, 0.5, 0.75], [2, 4, 2, 4, 6])\n', (32434, 32476), False, 'from sklearn.externals.six.moves import zip\n'), ((33550, 33566), 'sklearn.utils.mocking.MockDataFrame', 'MockDataFrame', (['X'], {}), '(X)\n', (33563, 33566), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((33589, 33616), 'sklearn.cross_validation.train_test_split', 'cval.train_test_split', (['X_df'], {}), '(X_df)\n', (33610, 33616), True, 'from sklearn import cross_validation as cval\n'), ((33792, 33803), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (33801, 33803), False, 'from sklearn.datasets import load_iris\n'), ((33814, 33834), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (33817, 33834), False, 'from sklearn.svm import SVC\n'), ((33900, 33955), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'iris.data', 'iris.target'], {'cv': '(5)'}), '(clf, iris.data, iris.target, cv=5)\n', (33920, 33955), True, 'from sklearn import cross_validation as cval\n'), ((33960, 34026), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['scores', 
'[0.97, 1.0, 0.97, 0.97, 1.0]', '(2)'], {}), '(scores, [0.97, 1.0, 0.97, 0.97, 1.0], 2)\n', (33985, 34026), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((34159, 34234), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'iris.data', 'iris.target'], {'scoring': '"""accuracy"""', 'cv': '(5)'}), "(clf, iris.data, iris.target, scoring='accuracy', cv=5)\n", (34179, 34234), True, 'from sklearn import cross_validation as cval\n'), ((34276, 34345), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['zo_scores', '[0.97, 1.0, 0.97, 0.97, 1.0]', '(2)'], {}), '(zo_scores, [0.97, 1.0, 0.97, 0.97, 1.0], 2)\n', (34301, 34345), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((34448, 34526), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'iris.data', 'iris.target'], {'scoring': '"""f1_weighted"""', 'cv': '(5)'}), "(clf, iris.data, iris.target, scoring='f1_weighted', cv=5)\n", (34468, 34526), True, 'from sklearn import cross_validation as cval\n'), ((34568, 34637), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['f1_scores', '[0.97, 1.0, 0.97, 0.97, 1.0]', '(2)'], {}), '(f1_scores, [0.97, 1.0, 0.97, 0.97, 1.0], 2)\n', (34593, 34637), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((34704, 34781), 'sklearn.datasets.make_regression', 'make_regression', ([], {'n_samples': '(30)', 'n_features': '(20)', 'n_informative': '(5)', 'random_state': '(0)'}), '(n_samples=30, n_features=20, n_informative=5, random_state=0)\n', (34719, 34781), False, 'from sklearn.datasets import make_regression\n'), ((34819, 34826), 'sklearn.linear_model.Ridge', 'Ridge', ([], {}), '()\n', (34824, 34826), False, 'from sklearn.linear_model import Ridge\n'), ((34895, 34932), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['reg', 'X', 'y'], {'cv': '(5)'}), '(reg, X, y, cv=5)\n', (34915, 34932), 
True, 'from sklearn import cross_validation as cval\n'), ((34937, 35005), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['scores', '[0.94, 0.97, 0.97, 0.99, 0.92]', '(2)'], {}), '(scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)\n', (34962, 35005), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((35129, 35180), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['reg', 'X', 'y'], {'scoring': '"""r2"""', 'cv': '(5)'}), "(reg, X, y, scoring='r2', cv=5)\n", (35149, 35180), True, 'from sklearn import cross_validation as cval\n'), ((35185, 35256), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['r2_scores', '[0.94, 0.97, 0.97, 0.99, 0.92]', '(2)'], {}), '(r2_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)\n', (35210, 35256), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((35355, 35426), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['reg', 'X', 'y'], {'cv': '(5)', 'scoring': '"""neg_mean_squared_error"""'}), "(reg, X, y, cv=5, scoring='neg_mean_squared_error')\n", (35375, 35426), True, 'from sklearn import cross_validation as cval\n'), ((35492, 35548), 'numpy.array', 'np.array', (['[-763.07, -553.16, -274.38, -273.26, -1681.99]'], {}), '([-763.07, -553.16, -274.38, -273.26, -1681.99])\n', (35500, 35548), True, 'import numpy as np\n'), ((35553, 35615), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['neg_mse_scores', 'expected_neg_mse', '(2)'], {}), '(neg_mse_scores, expected_neg_mse, 2)\n', (35578, 35615), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((35656, 35693), 'sklearn.metrics.make_scorer', 'make_scorer', (['explained_variance_score'], {}), '(explained_variance_score)\n', (35667, 35693), False, 'from sklearn.metrics import make_scorer\n'), ((35710, 35764), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['reg', 'X', 'y'], {'cv': 
'(5)', 'scoring': 'scoring'}), '(reg, X, y, cv=5, scoring=scoring)\n', (35730, 35764), True, 'from sklearn import cross_validation as cval\n'), ((35769, 35840), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['ev_scores', '[0.94, 0.97, 0.97, 0.99, 0.92]', '(2)'], {}), '(ev_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)\n', (35794, 35840), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((35884, 35895), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (35893, 35895), False, 'from sklearn.datasets import load_iris\n'), ((35929, 35942), 'scipy.sparse.coo_matrix', 'coo_matrix', (['X'], {}), '(X)\n', (35939, 35942), False, 'from scipy.sparse import coo_matrix\n'), ((35973, 35993), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (35976, 35993), False, 'from sklearn.svm import SVC\n'), ((36003, 36029), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['y', '(2)'], {}), '(y, 2)\n', (36023, 36029), True, 'from sklearn import cross_validation as cval\n'), ((36059, 36148), 'sklearn.cross_validation.permutation_test_score', 'cval.permutation_test_score', (['svm', 'X', 'y'], {'n_permutations': '(30)', 'cv': 'cv', 'scoring': '"""accuracy"""'}), "(svm, X, y, n_permutations=30, cv=cv, scoring=\n 'accuracy')\n", (36086, 36148), True, 'from sklearn import cross_validation as cval\n'), ((36157, 36183), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['score', '(0.9)'], {}), '(score, 0.9)\n', (36171, 36183), False, 'from sklearn.utils.testing import assert_greater\n'), ((36188, 36223), 'sklearn.utils.testing.assert_almost_equal', 'assert_almost_equal', (['pvalue', '(0.0)', '(1)'], {}), '(pvalue, 0.0, 1)\n', (36207, 36223), False, 'from sklearn.utils.testing import assert_almost_equal\n'), ((36406, 36439), 'sklearn.utils.testing.assert_true', 'assert_true', (['(score_label == score)'], {}), '(score_label == score)\n', (36417, 36439), False, 'from 
sklearn.utils.testing import assert_true\n'), ((36444, 36479), 'sklearn.utils.testing.assert_true', 'assert_true', (['(pvalue_label == pvalue)'], {}), '(pvalue_label == pvalue)\n', (36455, 36479), False, 'from sklearn.utils.testing import assert_true\n'), ((36571, 36591), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (36574, 36591), False, 'from sklearn.svm import SVC\n'), ((36608, 36634), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['y', '(2)'], {}), '(y, 2)\n', (36628, 36634), True, 'from sklearn import cross_validation as cval\n'), ((36838, 36871), 'sklearn.utils.testing.assert_true', 'assert_true', (['(score_label == score)'], {}), '(score_label == score)\n', (36849, 36871), False, 'from sklearn.utils.testing import assert_true\n'), ((36876, 36911), 'sklearn.utils.testing.assert_true', 'assert_true', (['(pvalue_label == pvalue)'], {}), '(pvalue_label == pvalue)\n', (36887, 36911), False, 'from sklearn.utils.testing import assert_true\n'), ((37108, 37133), 'sklearn.metrics.make_scorer', 'make_scorer', (['custom_score'], {}), '(custom_score)\n', (37119, 37133), False, 'from sklearn.metrics import make_scorer\n'), ((37157, 37258), 'sklearn.cross_validation.permutation_test_score', 'cval.permutation_test_score', (['svm', 'X', 'y'], {'n_permutations': '(100)', 'scoring': 'scorer', 'cv': 'cv', 'random_state': '(0)'}), '(svm, X, y, n_permutations=100, scoring=scorer,\n cv=cv, random_state=0)\n', (37184, 37258), True, 'from sklearn import cross_validation as cval\n'), ((37268, 37303), 'sklearn.utils.testing.assert_almost_equal', 'assert_almost_equal', (['score', '(0.93)', '(2)'], {}), '(score, 0.93, 2)\n', (37287, 37303), False, 'from sklearn.utils.testing import assert_almost_equal\n'), ((37307, 37343), 'sklearn.utils.testing.assert_almost_equal', 'assert_almost_equal', (['pvalue', '(0.01)', '(3)'], {}), '(pvalue, 0.01, 3)\n', (37326, 37343), False, 'from sklearn.utils.testing import 
assert_almost_equal\n'), ((37430, 37519), 'sklearn.cross_validation.permutation_test_score', 'cval.permutation_test_score', (['svm', 'X', 'y'], {'n_permutations': '(30)', 'cv': 'cv', 'scoring': '"""accuracy"""'}), "(svm, X, y, n_permutations=30, cv=cv, scoring=\n 'accuracy')\n", (37457, 37519), True, 'from sklearn import cross_validation as cval\n'), ((37529, 37552), 'sklearn.utils.testing.assert_less', 'assert_less', (['score', '(0.5)'], {}), '(score, 0.5)\n', (37540, 37552), False, 'from sklearn.utils.testing import assert_less\n'), ((37557, 37584), 'sklearn.utils.testing.assert_greater', 'assert_greater', (['pvalue', '(0.2)'], {}), '(pvalue, 0.2)\n', (37571, 37584), False, 'from sklearn.utils.testing import assert_greater\n'), ((37640, 37682), 'numpy.array', 'np.array', (['[[1, 2], [3, 4], [5, 6], [7, 8]]'], {}), '([[1, 2], [3, 4], [5, 6], [7, 8]])\n', (37648, 37682), True, 'import numpy as np\n'), ((37691, 37713), 'numpy.array', 'np.array', (['[1, 1, 2, 2]'], {}), '([1, 1, 2, 2])\n', (37699, 37713), True, 'import numpy as np\n'), ((37727, 37749), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (37735, 37749), True, 'import numpy as np\n'), ((37813, 37832), 'sklearn.cross_validation.LeaveOneOut', 'cval.LeaveOneOut', (['(4)'], {}), '(4)\n', (37829, 37832), True, 'from sklearn import cross_validation as cval\n'), ((37843, 37863), 'sklearn.cross_validation.LeavePOut', 'cval.LeavePOut', (['(4)', '(2)'], {}), '(4, 2)\n', (37857, 37863), True, 'from sklearn import cross_validation as cval\n'), ((37873, 37889), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(4)', '(2)'], {}), '(4, 2)\n', (37883, 37889), True, 'from sklearn import cross_validation as cval\n'), ((37900, 37926), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['y', '(2)'], {}), '(y, 2)\n', (37920, 37926), True, 'from sklearn import cross_validation as cval\n'), ((37938, 37967), 'sklearn.cross_validation.LeaveOneLabelOut', 'cval.LeaveOneLabelOut', 
(['labels'], {}), '(labels)\n', (37959, 37967), True, 'from sklearn import cross_validation as cval\n'), ((37979, 38009), 'sklearn.cross_validation.LeavePLabelOut', 'cval.LeavePLabelOut', (['labels', '(2)'], {}), '(labels, 2)\n', (37998, 38009), True, 'from sklearn import cross_validation as cval\n'), ((38019, 38053), 'sklearn.cross_validation.PredefinedSplit', 'cval.PredefinedSplit', (['[1, 1, 2, 2]'], {}), '([1, 1, 2, 2])\n', (38039, 38053), True, 'from sklearn import cross_validation as cval\n'), ((38063, 38083), 'sklearn.cross_validation.ShuffleSplit', 'cval.ShuffleSplit', (['(2)'], {}), '(2)\n', (38080, 38083), True, 'from sklearn import cross_validation as cval\n'), ((38438, 38480), 'numpy.array', 'np.array', (['[[1, 2], [3, 4], [5, 6], [7, 8]]'], {}), '([[1, 2], [3, 4], [5, 6], [7, 8]])\n', (38446, 38480), True, 'import numpy as np\n'), ((38489, 38511), 'numpy.array', 'np.array', (['[1, 1, 2, 2]'], {}), '([1, 1, 2, 2])\n', (38497, 38511), True, 'import numpy as np\n'), ((38525, 38547), 'numpy.array', 'np.array', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (38533, 38547), True, 'import numpy as np\n'), ((38558, 38577), 'sklearn.cross_validation.LeaveOneOut', 'cval.LeaveOneOut', (['(4)'], {}), '(4)\n', (38574, 38577), True, 'from sklearn import cross_validation as cval\n'), ((38588, 38608), 'sklearn.cross_validation.LeavePOut', 'cval.LeavePOut', (['(4)', '(2)'], {}), '(4, 2)\n', (38602, 38608), True, 'from sklearn import cross_validation as cval\n'), ((38618, 38634), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(4)', '(2)'], {}), '(4, 2)\n', (38628, 38634), True, 'from sklearn import cross_validation as cval\n'), ((38645, 38671), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['y', '(2)'], {}), '(y, 2)\n', (38665, 38671), True, 'from sklearn import cross_validation as cval\n'), ((38683, 38712), 'sklearn.cross_validation.LeaveOneLabelOut', 'cval.LeaveOneLabelOut', (['labels'], {}), '(labels)\n', (38704, 38712), True, 'from sklearn 
import cross_validation as cval\n'), ((38724, 38754), 'sklearn.cross_validation.LeavePLabelOut', 'cval.LeavePLabelOut', (['labels', '(2)'], {}), '(labels, 2)\n', (38743, 38754), True, 'from sklearn import cross_validation as cval\n'), ((38764, 38784), 'sklearn.cross_validation.ShuffleSplit', 'cval.ShuffleSplit', (['(2)'], {}), '(2)\n', (38781, 38784), True, 'from sklearn import cross_validation as cval\n'), ((38794, 38828), 'sklearn.cross_validation.PredefinedSplit', 'cval.PredefinedSplit', (['[1, 1, 2, 2]'], {}), '([1, 1, 2, 2])\n', (38814, 38828), True, 'from sklearn import cross_validation as cval\n'), ((39141, 39204), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'test_size': '(2.0)'}), '(ValueError, cval.ShuffleSplit, 10, test_size=2.0)\n', (39154, 39204), False, 'from sklearn.utils.testing import assert_raises\n'), ((39209, 39272), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'test_size': '(1.0)'}), '(ValueError, cval.ShuffleSplit, 10, test_size=1.0)\n', (39222, 39272), False, 'from sklearn.utils.testing import assert_raises\n'), ((39277, 39362), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'test_size': '(0.1)', 'train_size': '(0.95)'}), '(ValueError, cval.ShuffleSplit, 10, test_size=0.1, train_size=0.95\n )\n', (39290, 39362), False, 'from sklearn.utils.testing import assert_raises\n'), ((39380, 39442), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'test_size': '(11)'}), '(ValueError, cval.ShuffleSplit, 10, test_size=11)\n', (39393, 39442), False, 'from sklearn.utils.testing import assert_raises\n'), ((39447, 39509), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'test_size': '(10)'}), '(ValueError, cval.ShuffleSplit, 10, test_size=10)\n', (39460, 39509), False, 'from 
sklearn.utils.testing import assert_raises\n'), ((39514, 39589), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'test_size': '(8)', 'train_size': '(3)'}), '(ValueError, cval.ShuffleSplit, 10, test_size=8, train_size=3)\n', (39527, 39589), False, 'from sklearn.utils.testing import assert_raises\n'), ((39594, 39659), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'train_size': '(1.0j)'}), '(ValueError, cval.ShuffleSplit, 10, train_size=1.0j)\n', (39607, 39659), False, 'from sklearn.utils.testing import assert_raises\n'), ((39662, 39748), 'sklearn.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'cval.ShuffleSplit', '(10)'], {'test_size': 'None', 'train_size': 'None'}), '(ValueError, cval.ShuffleSplit, 10, test_size=None, train_size\n =None)\n', (39675, 39748), False, 'from sklearn.utils.testing import assert_raises\n'), ((39939, 39977), 'sklearn.cross_validation.ShuffleSplit', 'cval.ShuffleSplit', (['(10)'], {'random_state': '(21)'}), '(10, random_state=21)\n', (39956, 39977), True, 'from sklearn import cross_validation as cval\n'), ((40108, 40126), 'sklearn.svm.SVC', 'SVC', ([], {'gamma': '"""scale"""'}), "(gamma='scale')\n", (40111, 40126), False, 'from sklearn.svm import SVC\n'), ((40138, 40163), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""precomputed"""'}), "(kernel='precomputed')\n", (40141, 40163), False, 'from sklearn.svm import SVC\n'), ((40176, 40187), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (40185, 40187), False, 'from sklearn.datasets import load_iris\n'), ((40230, 40244), 'numpy.dot', 'np.dot', (['X', 'X.T'], {}), '(X, X.T)\n', (40236, 40244), True, 'import numpy as np\n'), ((40255, 40316), 'sklearn.cross_validation.ShuffleSplit', 'cval.ShuffleSplit', (['X.shape[0]'], {'test_size': '(0.25)', 'random_state': '(0)'}), '(X.shape[0], test_size=0.25, random_state=0)\n', (40272, 40316), True, 'from sklearn import 
cross_validation as cval\n'), ((40360, 40391), 'sklearn.cross_validation._safe_split', 'cval._safe_split', (['clf', 'X', 'y', 'tr'], {}), '(clf, X, y, tr)\n', (40376, 40391), True, 'from sklearn import cross_validation as cval\n'), ((40410, 40442), 'sklearn.cross_validation._safe_split', 'cval._safe_split', (['clfp', 'K', 'y', 'tr'], {}), '(clfp, K, y, tr)\n', (40426, 40442), True, 'from sklearn import cross_validation as cval\n'), ((40519, 40554), 'sklearn.cross_validation._safe_split', 'cval._safe_split', (['clf', 'X', 'y', 'te', 'tr'], {}), '(clf, X, y, te, tr)\n', (40535, 40554), True, 'from sklearn import cross_validation as cval\n'), ((40573, 40609), 'sklearn.cross_validation._safe_split', 'cval._safe_split', (['clfp', 'K', 'y', 'te', 'tr'], {}), '(clfp, K, y, te, tr)\n', (40589, 40609), True, 'from sklearn import cross_validation as cval\n'), ((40856, 40889), 'numpy.repeat', 'np.repeat', (['[0, 1]', '(X.shape[0] / 2)'], {}), '([0, 1], X.shape[0] / 2)\n', (40865, 40889), True, 'import numpy as np\n'), ((41031, 41066), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['p', 'X', 'y'], {'cv': '(5)'}), '(p, X, y, cv=5)\n', (41051, 41066), True, 'from sklearn import cross_validation as cval\n'), ((41257, 41290), 'numpy.repeat', 'np.repeat', (['[0, 1]', '(X.shape[0] / 2)'], {}), '([0, 1], X.shape[0] / 2)\n', (41266, 41290), True, 'import numpy as np\n'), ((41295, 41354), 'sklearn.cross_validation.train_test_split', 'cval.train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': '(42)'}), '(X, y, test_size=0.2, random_state=42)\n', (41316, 41354), True, 'from sklearn import cross_validation as cval\n'), ((41557, 41590), 'numpy.repeat', 'np.repeat', (['[0, 1]', '(X.shape[0] / 2)'], {}), '([0, 1], X.shape[0] / 2)\n', (41566, 41590), True, 'import numpy as np\n'), ((41732, 41774), 'sklearn.cross_validation.permutation_test_score', 'cval.permutation_test_score', (['p', 'X', 'y'], {'cv': '(5)'}), '(p, X, y, cv=5)\n', (41759, 41774), True, 
'from sklearn import cross_validation as cval\n'), ((41819, 41834), 'numpy.ones', 'np.ones', (['(9, 2)'], {}), '((9, 2))\n', (41826, 41834), True, 'import numpy as np\n'), ((41844, 41881), 'sklearn.cross_validation.check_cv', 'cval.check_cv', (['(3)', 'X'], {'classifier': '(False)'}), '(3, X, classifier=False)\n', (41857, 41881), True, 'from sklearn import cross_validation as cval\n'), ((41942, 41979), 'numpy.array', 'np.array', (['[0, 1, 0, 1, 0, 0, 1, 1, 1]'], {}), '([0, 1, 0, 1, 0, 0, 1, 1, 1])\n', (41950, 41979), True, 'import numpy as np\n'), ((41989, 42035), 'sklearn.cross_validation.check_cv', 'cval.check_cv', (['(3)', 'X', 'y_binary'], {'classifier': '(True)'}), '(3, X, y_binary, classifier=True)\n', (42002, 42035), True, 'from sklearn import cross_validation as cval\n'), ((42110, 42147), 'numpy.array', 'np.array', (['[0, 1, 0, 1, 2, 1, 2, 0, 2]'], {}), '([0, 1, 0, 1, 2, 1, 2, 0, 2])\n', (42118, 42147), True, 'import numpy as np\n'), ((42157, 42207), 'sklearn.cross_validation.check_cv', 'cval.check_cv', (['(3)', 'X', 'y_multiclass'], {'classifier': '(True)'}), '(3, X, y_multiclass, classifier=True)\n', (42170, 42207), True, 'from sklearn import cross_validation as cval\n'), ((42271, 42286), 'numpy.ones', 'np.ones', (['(5, 2)'], {}), '((5, 2))\n', (42278, 42286), True, 'import numpy as np\n'), ((42371, 42421), 'sklearn.cross_validation.check_cv', 'cval.check_cv', (['(3)', 'X', 'y_multilabel'], {'classifier': '(True)'}), '(3, X, y_multilabel, classifier=True)\n', (42384, 42421), True, 'from sklearn import cross_validation as cval\n'), ((42487, 42537), 'numpy.array', 'np.array', (['[[1, 2], [0, 3], [0, 0], [3, 1], [2, 0]]'], {}), '([[1, 2], [0, 3], [0, 0], [3, 1], [2, 0]])\n', (42495, 42537), True, 'import numpy as np\n'), ((42547, 42598), 'sklearn.cross_validation.check_cv', 'cval.check_cv', (['(3)', 'X', 'y_multioutput'], {'classifier': '(True)'}), '(3, X, y_multioutput, classifier=True)\n', (42560, 42598), True, 'from sklearn import cross_validation as 
cval\n'), ((42692, 42795), 'numpy.array', 'np.array', (['[[-3, 4], [2, 4], [3, 3], [0, 2], [-3, 1], [-2, 1], [0, 0], [-2, -1], [-1, \n -2], [1, -2]]'], {}), '([[-3, 4], [2, 4], [3, 3], [0, 2], [-3, 1], [-2, 1], [0, 0], [-2, -\n 1], [-1, -2], [1, -2]])\n', (42700, 42795), True, 'import numpy as np\n'), ((42817, 42912), 'numpy.array', 'np.array', (['[[1, 1], [0, 1], [0, 1], [0, 1], [1, 1], [0, 1], [1, 0], [1, 1], [1, 0], [0, 0]\n ]'], {}), '([[1, 1], [0, 1], [0, 1], [0, 1], [1, 1], [0, 1], [1, 0], [1, 1], [\n 1, 0], [0, 0]])\n', (42825, 42912), True, 'import numpy as np\n'), ((42936, 42971), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {'n_neighbors': '(1)'}), '(n_neighbors=1)\n', (42956, 42971), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((42992, 43037), 'sklearn.metrics.make_scorer', 'make_scorer', (['precision_score'], {'average': '"""micro"""'}), "(precision_score, average='micro')\n", (43003, 43037), False, 'from sklearn.metrics import make_scorer\n'), ((43058, 43103), 'sklearn.metrics.make_scorer', 'make_scorer', (['precision_score'], {'average': '"""macro"""'}), "(precision_score, average='macro')\n", (43069, 43103), False, 'from sklearn.metrics import make_scorer\n'), ((43126, 43173), 'sklearn.metrics.make_scorer', 'make_scorer', (['precision_score'], {'average': '"""samples"""'}), "(precision_score, average='samples')\n", (43137, 43173), False, 'from sklearn.metrics import make_scorer\n'), ((43192, 43252), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X', 'y'], {'scoring': 'scoring_micro', 'cv': '(5)'}), '(clf, X, y, scoring=scoring_micro, cv=5)\n', (43212, 43252), True, 'from sklearn import cross_validation as cval\n'), ((43271, 43331), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X', 'y'], {'scoring': 'scoring_macro', 'cv': '(5)'}), '(clf, X, y, scoring=scoring_macro, cv=5)\n', (43291, 43331), True, 'from sklearn import cross_validation as 
cval\n'), ((43352, 43414), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X', 'y'], {'scoring': 'scoring_samples', 'cv': '(5)'}), '(clf, X, y, scoring=scoring_samples, cv=5)\n', (43372, 43414), True, 'from sklearn import cross_validation as cval\n'), ((43460, 43525), 'sklearn.utils.testing.assert_almost_equal', 'assert_almost_equal', (['score_micro', '[1, 1 / 2, 3 / 4, 1 / 2, 1 / 3]'], {}), '(score_micro, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 3])\n', (43479, 43525), False, 'from sklearn.utils.testing import assert_almost_equal\n'), ((43530, 43595), 'sklearn.utils.testing.assert_almost_equal', 'assert_almost_equal', (['score_macro', '[1, 1 / 2, 3 / 4, 1 / 2, 1 / 4]'], {}), '(score_macro, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4])\n', (43549, 43595), False, 'from sklearn.utils.testing import assert_almost_equal\n'), ((43600, 43667), 'sklearn.utils.testing.assert_almost_equal', 'assert_almost_equal', (['score_samples', '[1, 1 / 2, 3 / 4, 1 / 2, 1 / 4]'], {}), '(score_samples, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4])\n', (43619, 43667), False, 'from sklearn.utils.testing import assert_almost_equal\n'), ((43713, 43726), 'sklearn.datasets.load_boston', 'load_boston', ([], {}), '()\n', (43724, 43726), False, 'from sklearn.datasets import load_boston\n'), ((43816, 43823), 'sklearn.linear_model.Ridge', 'Ridge', ([], {}), '()\n', (43821, 43823), False, 'from sklearn.linear_model import Ridge\n'), ((43894, 43910), 'numpy.zeros_like', 'np.zeros_like', (['y'], {}), '(y)\n', (43907, 43910), True, 'import numpy as np\n'), ((44031, 44071), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['est', 'X', 'y'], {'cv': 'cv'}), '(est, X, y, cv=cv)\n', (44053, 44071), True, 'from sklearn import cross_validation as cval\n'), ((44076, 44116), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['preds', 'preds2'], {}), '(preds, preds2)\n', (44101, 44116), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), 
((44130, 44163), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['est', 'X', 'y'], {}), '(est, X, y)\n', (44152, 44163), True, 'from sklearn import cross_validation as cval\n'), ((44248, 44288), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['est', 'X', 'y'], {'cv': 'cv'}), '(est, X, y, cv=cv)\n', (44270, 44288), True, 'from sklearn import cross_validation as cval\n'), ((44390, 44405), 'scipy.sparse.coo_matrix', 'coo_matrix', (['Xsp'], {}), '(Xsp)\n', (44400, 44405), False, 'from scipy.sparse import coo_matrix\n'), ((44418, 44453), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['est', 'Xsp', 'y'], {}), '(est, Xsp, y)\n', (44440, 44453), True, 'from sklearn import cross_validation as cval\n'), ((44837, 44844), 'sklearn.linear_model.Ridge', 'Ridge', ([], {}), '()\n', (44842, 44844), False, 'from sklearn.linear_model import Ridge\n'), ((44880, 44913), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['clf', 'X', 'y'], {}), '(clf, X, y)\n', (44902, 44913), True, 'from sklearn import cross_validation as cval\n'), ((44918, 44956), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['predictions.shape', '(10,)'], {}), '(predictions.shape, (10,))\n', (44930, 44956), False, 'from sklearn.utils.testing import assert_equal\n'), ((45110, 45150), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['predictions.shape', '(10, 2)'], {}), '(predictions.shape, (10, 2))\n', (45122, 45150), False, 'from sklearn.utils.testing import assert_equal\n'), ((45170, 45210), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['clf', 'X_sparse', 'y'], {}), '(clf, X_sparse, y)\n', (45192, 45210), True, 'from sklearn import cross_validation as cval\n'), ((45215, 45259), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['predictions.shape', '(10,)'], {}), '(predictions.shape, (10,))\n', (45233, 45259), False, 'from sklearn.utils.testing import 
assert_array_equal\n'), ((45413, 45459), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['predictions.shape', '(10, 2)'], {}), '(predictions.shape, (10, 2))\n', (45431, 45459), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((45550, 45588), 'sklearn.utils.mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_X': 'list_check'}), '(check_X=list_check)\n', (45568, 45588), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((45670, 45708), 'sklearn.utils.mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_y': 'list_check'}), '(check_y=list_check)\n', (45688, 45708), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((45874, 45910), 'sklearn.utils.mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_X': 'check_3d'}), '(check_X=check_3d)\n', (45892, 45910), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((45929, 45965), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['clf', 'X_3d', 'y'], {}), '(clf, X_3d, y)\n', (45951, 45965), True, 'from sklearn import cross_validation as cval\n'), ((45970, 46014), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['predictions.shape', '(10,)'], {}), '(predictions.shape, (10,))\n', (45988, 46014), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((46714, 46725), 'sklearn.datasets.load_iris', 'load_iris', ([], {}), '()\n', (46723, 46725), False, 'from sklearn.datasets import load_iris\n'), ((46869, 46923), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X', 'y'], {'fit_params': 'fit_params'}), '(clf, X, y, fit_params=fit_params)\n', (46889, 46923), True, 'from sklearn import cross_validation as cval\n'), ((47003, 47017), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (47012, 47017), True, 'import numpy as np\n'), ((47341, 47463), 
'sklearn.datasets.make_multilabel_classification', 'make_multilabel_classification', ([], {'n_classes': '(2)', 'n_labels': '(1)', 'allow_unlabeled': '(False)', 'return_indicator': '(True)', 'random_state': '(1)'}), '(n_classes=2, n_labels=1, allow_unlabeled=\n False, return_indicator=True, random_state=1)\n', (47371, 47463), False, 'from sklearn.datasets import make_multilabel_classification\n'), ((47600, 47613), 'scipy.sparse.csr_matrix', 'csr_matrix', (['X'], {}), '(X)\n', (47610, 47613), False, 'from scipy.sparse import csr_matrix\n'), ((47629, 47642), 'scipy.sparse.csr_matrix', 'csr_matrix', (['y'], {}), '(y)\n', (47639, 47642), False, 'from scipy.sparse import csr_matrix\n'), ((47711, 47755), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['classif', 'X', 'y'], {'cv': '(10)'}), '(classif, X, y, cv=10)\n', (47733, 47755), True, 'from sklearn import cross_validation as cval\n'), ((47775, 47833), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['classif', 'X_sparse', 'y_sparse'], {'cv': '(10)'}), '(classif, X_sparse, y_sparse, cv=10)\n', (47797, 47833), True, 'from sklearn import cross_validation as cval\n'), ((47880, 47926), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['preds_sparse', 'preds'], {}), '(preds_sparse, preds)\n', (47905, 47926), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((4637, 4650), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (4645, 4650), True, 'import numpy as np\n'), ((8056, 8072), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(4)', '(2)'], {}), '(4, 2)\n', (8066, 8072), True, 'from sklearn import cross_validation as cval\n'), ((8306, 8322), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(5)', '(2)'], {}), '(5, 2)\n', (8316, 8322), True, 'from sklearn import cross_validation as cval\n'), ((8780, 8817), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['[1, 1, 0, 0]', '(2)'], {}), '([1, 1, 
0, 0], 2)\n', (8800, 8817), True, 'from sklearn import cross_validation as cval\n'), ((9051, 9097), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['[1, 1, 1, 0, 0, 0, 0]', '(2)'], {}), '([1, 1, 1, 0, 0, 0, 0], 2)\n', (9071, 9097), True, 'from sklearn import cross_validation as cval\n'), ((9729, 9777), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['labels', '(5)'], {'shuffle': 'shuffle'}), '(labels, 5, shuffle=shuffle)\n', (9749, 9777), True, 'from sklearn import cross_validation as cval\n'), ((10461, 10477), 'sklearn.cross_validation.KFold', 'cval.KFold', (['i', '(5)'], {}), '(i, 5)\n', (10471, 10477), True, 'from sklearn import cross_validation as cval\n'), ((12106, 12167), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['labels', '(5)'], {'shuffle': '(True)', 'random_state': '(0)'}), '(labels, 5, shuffle=True, random_state=0)\n', (12126, 12167), True, 'from sklearn import cross_validation as cval\n'), ((12184, 12245), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['labels', '(5)'], {'shuffle': '(True)', 'random_state': '(1)'}), '(labels, 5, shuffle=True, random_state=1)\n', (12204, 12245), True, 'from sklearn import cross_validation as cval\n'), ((14652, 14692), 'sklearn.cross_validation.LabelKFold', 'cval.LabelKFold', (['labels'], {'n_folds': 'n_folds'}), '(labels, n_folds=n_folds)\n', (14667, 14692), True, 'from sklearn import cross_validation as cval\n'), ((15943, 15960), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (15952, 15960), True, 'import numpy as np\n'), ((16078, 16118), 'sklearn.cross_validation.LabelKFold', 'cval.LabelKFold', (['labels'], {'n_folds': 'n_folds'}), '(labels, n_folds=n_folds)\n', (16093, 16118), True, 'from sklearn import cross_validation as cval\n'), ((17316, 17348), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['t1[0]', 't2[0]'], {}), '(t1[0], t2[0])\n', (17334, 17348), False, 'from sklearn.utils.testing 
import assert_array_equal\n'), ((17357, 17389), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['t2[0]', 't3[0]'], {}), '(t2[0], t3[0])\n', (17375, 17389), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((17398, 17430), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['t3[0]', 't4[0]'], {}), '(t3[0], t4[0])\n', (17416, 17430), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((17439, 17471), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['t1[1]', 't2[1]'], {}), '(t1[1], t2[1])\n', (17457, 17471), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((17480, 17512), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['t2[1]', 't3[1]'], {}), '(t2[1], t3[1])\n', (17498, 17512), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((17521, 17553), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['t3[1]', 't4[1]'], {}), '(t3[1], t4[1])\n', (17539, 17553), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((18673, 18719), 'numpy.array', 'np.array', (['[1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]'], {}), '([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3])\n', (18681, 18719), True, 'import numpy as np\n'), ((18731, 18777), 'numpy.array', 'np.array', (['[0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]'], {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3])\n', (18739, 18777), True, 'import numpy as np\n'), ((18789, 18848), 'numpy.array', 'np.array', (['([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2] * 2)'], {}), '([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2] * 2)\n', (18797, 18848), True, 'import numpy as np\n'), ((18860, 18918), 'numpy.array', 'np.array', (['[1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]'], {}), '([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4])\n', (18868, 18918), True, 'import numpy as np\n'), ((18930, 18961), 'numpy.array', 'np.array', (['([-1] * 800 + [1] * 50)'], {}), '([-1] * 800 + [1] * 50)\n', (18938, 18961), True, 
'import numpy as np\n'), ((19006, 19071), 'sklearn.cross_validation.StratifiedShuffleSplit', 'cval.StratifiedShuffleSplit', (['y', '(6)'], {'test_size': '(0.33)', 'random_state': '(0)'}), '(y, 6, test_size=0.33, random_state=0)\n', (19033, 19071), True, 'from sklearn import cross_validation as cval\n'), ((20331, 20355), 'scipy.stats.binom', 'stats.binom', (['n_splits', 'p'], {}), '(n_splits, p)\n', (20342, 20355), False, 'from scipy import stats\n'), ((20622, 20655), 'numpy.array', 'np.array', (['(n_samples // 2 * [0, 1])'], {}), '(n_samples // 2 * [0, 1])\n', (20630, 20655), True, 'import numpy as np\n'), ((20675, 20770), 'sklearn.cross_validation.StratifiedShuffleSplit', 'cval.StratifiedShuffleSplit', (['labels'], {'n_iter': 'n_iter', 'test_size': '(1.0 / n_folds)', 'random_state': '(0)'}), '(labels, n_iter=n_iter, test_size=1.0 / n_folds,\n random_state=0)\n', (20702, 20770), True, 'from sklearn import cross_validation as cval\n'), ((21170, 21200), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['n_splits', 'n_iter'], {}), '(n_splits, n_iter)\n', (21182, 21200), False, 'from sklearn.utils.testing import assert_equal\n'), ((21382, 21399), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (21391, 21399), True, 'import numpy as np\n'), ((21408, 21453), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['splits.test_size', '(1.0 / n_folds)'], {}), '(splits.test_size, 1.0 / n_folds)\n', (21420, 21453), False, 'from sklearn.utils.testing import assert_equal\n'), ((22179, 22206), 'numpy.intersect1d', 'np.intersect1d', (['train', 'test'], {}), '(train, test)\n', (22193, 22206), True, 'import numpy as np\n'), ((22351, 22362), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (22358, 22362), True, 'import numpy as np\n'), ((22444, 22475), 'sklearn.cross_validation.KFold', 'cval.KFold', (['(10)', '(5)'], {'shuffle': '(True)'}), '(10, 5, shuffle=True)\n', (22454, 22475), True, 'from sklearn import cross_validation as cval\n'), ((22877, 22923), 
'numpy.array', 'np.array', (['[1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]'], {}), '([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3])\n', (22885, 22923), True, 'import numpy as np\n'), ((22935, 22981), 'numpy.array', 'np.array', (['[0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]'], {}), '([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3])\n', (22943, 22981), True, 'import numpy as np\n'), ((22993, 23048), 'numpy.array', 'np.array', (['[0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]'], {}), '([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2])\n', (23001, 23048), True, 'import numpy as np\n'), ((23060, 23118), 'numpy.array', 'np.array', (['[1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]'], {}), '([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4])\n', (23068, 23118), True, 'import numpy as np\n'), ((23210, 23280), 'sklearn.cross_validation.LabelShuffleSplit', 'cval.LabelShuffleSplit', (['y', 'n_iter'], {'test_size': 'test_size', 'random_state': '(0)'}), '(y, n_iter, test_size=test_size, random_state=0)\n', (23232, 23280), True, 'from sklearn import cross_validation as cval\n'), ((23474, 23486), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (23483, 23486), True, 'import numpy as np\n'), ((25078, 25100), 'sklearn.externals.six.moves.zip', 'zip', (['llo', 'llo_changing'], {}), '(llo, llo_changing)\n', (25081, 25100), False, 'from sklearn.externals.six.moves import zip\n'), ((25342, 25373), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X', 'y'], {}), '(clf, X, y)\n', (25362, 25373), True, 'from sklearn import cross_validation as cval\n'), ((25478, 25516), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X_sparse', 'X'], {}), '(clf, X_sparse, X)\n', (25498, 25516), True, 'from sklearn import cross_validation as cval\n'), ((25594, 25632), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X_sparse', 'y'], {}), '(clf, X_sparse, y)\n', (25614, 25632), True, 'from sklearn import cross_validation as cval\n'), ((25744, 25782), 
'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X_sparse', 'X'], {}), '(clf, X_sparse, X)\n', (25764, 25782), True, 'from sklearn import cross_validation as cval\n'), ((27038, 27096), 'sklearn.utils.mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_X': 'check_df', 'check_y': 'check_series'}), '(check_X=check_df, check_y=check_series)\n', (27056, 27096), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((27105, 27143), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['clf', 'X_df', 'y_ser'], {}), '(clf, X_df, y_ser)\n', (27125, 27143), True, 'from sklearn import cross_validation as cval\n'), ((28704, 28716), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (28713, 28716), True, 'import numpy as np\n'), ((28947, 28985), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['clf.dummy_int', 'DUMMY_INT'], {}), '(clf.dummy_int, DUMMY_INT)\n', (28959, 28985), False, 'from sklearn.utils.testing import assert_equal\n'), ((28994, 29032), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['clf.dummy_str', 'DUMMY_STR'], {}), '(clf.dummy_str, DUMMY_STR)\n', (29006, 29032), False, 'from sklearn.utils.testing import assert_equal\n'), ((29041, 29079), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['clf.dummy_obj', 'DUMMY_OBJ'], {}), '(clf.dummy_obj, DUMMY_OBJ)\n', (29053, 29079), False, 'from sklearn.utils.testing import assert_equal\n'), ((29116, 29134), 'numpy.ones', 'np.ones', (['n_samples'], {}), '(n_samples)\n', (29123, 29134), True, 'import numpy as np\n'), ((29747, 29783), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (29770, 29783), False, 'import warnings\n'), ((29803, 29826), 'sklearn.metrics.make_scorer', 'make_scorer', (['score_func'], {}), '(score_func)\n', (29814, 29826), False, 'from sklearn.metrics import make_scorer\n'), ((29843, 29891), 'sklearn.cross_validation.cross_val_score', 
'cval.cross_val_score', (['clf', 'X', 'y'], {'scoring': 'scoring'}), '(clf, X, y, scoring=scoring)\n', (29863, 29891), True, 'from sklearn import cross_validation as cval\n'), ((31428, 31464), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (31451, 31464), False, 'import warnings\n'), ((32540, 32613), 'sklearn.cross_validation.train_test_split', 'cval.train_test_split', (['y'], {'test_size': 'test_size', 'stratify': 'y', 'random_state': '(0)'}), '(y, test_size=test_size, stratify=y, random_state=0)\n', (32561, 32613), True, 'from sklearn import cross_validation as cval\n'), ((33334, 33361), 'sklearn.cross_validation.train_test_split', 'cval.train_test_split', (['X_df'], {}), '(X_df)\n', (33355, 33361), True, 'from sklearn import cross_validation as cval\n'), ((40479, 40499), 'numpy.dot', 'np.dot', (['X_tr', 'X_tr.T'], {}), '(X_tr, X_tr.T)\n', (40485, 40499), True, 'import numpy as np\n'), ((40646, 40666), 'numpy.dot', 'np.dot', (['X_te', 'X_tr.T'], {}), '(X_te, X_tr.T)\n', (40652, 40666), True, 'import numpy as np\n'), ((44364, 44378), 'numpy.median', 'np.median', (['Xsp'], {}), '(Xsp)\n', (44373, 44378), True, 'import numpy as np\n'), ((44540, 44548), 'sklearn.cluster.KMeans', 'KMeans', ([], {}), '()\n', (44546, 44548), False, 'from sklearn.cluster import KMeans\n'), ((44997, 45041), 'sklearn.utils.testing.ignore_warnings', 'ignore_warnings', ([], {'category': 'ConvergenceWarning'}), '(category=ConvergenceWarning)\n', (45012, 45041), False, 'from sklearn.utils.testing import ignore_warnings\n'), ((45065, 45105), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['clf', 'X_sparse', 'X'], {}), '(clf, X_sparse, X)\n', (45087, 45105), True, 'from sklearn import cross_validation as cval\n'), ((45300, 45344), 'sklearn.utils.testing.ignore_warnings', 'ignore_warnings', ([], {'category': 'ConvergenceWarning'}), '(category=ConvergenceWarning)\n', (45315, 45344), False, 'from sklearn.utils.testing 
import ignore_warnings\n'), ((45368, 45408), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['clf', 'X_sparse', 'X'], {}), '(clf, X_sparse, X)\n', (45390, 45408), True, 'from sklearn import cross_validation as cval\n'), ((46563, 46621), 'sklearn.utils.mocking.CheckingClassifier', 'CheckingClassifier', ([], {'check_X': 'check_df', 'check_y': 'check_series'}), '(check_X=check_df, check_y=check_series)\n', (46581, 46621), False, 'from sklearn.utils.mocking import CheckingClassifier, MockDataFrame\n'), ((46630, 46670), 'sklearn.cross_validation.cross_val_predict', 'cval.cross_val_predict', (['clf', 'X_df', 'y_ser'], {}), '(clf, X_df, y_ser)\n', (46652, 46670), True, 'from sklearn import cross_validation as cval\n'), ((46950, 46960), 'numpy.ones', 'np.ones', (['(3)'], {}), '(3)\n', (46957, 46960), True, 'import numpy as np\n'), ((47034, 47066), 'sklearn.cross_validation._check_is_partition', 'cval._check_is_partition', (['p', '(100)'], {}), '(p, 100)\n', (47058, 47066), True, 'from sklearn import cross_validation as cval\n'), ((47166, 47198), 'sklearn.cross_validation._check_is_partition', 'cval._check_is_partition', (['p', '(100)'], {}), '(p, 100)\n', (47190, 47198), True, 'from sklearn import cross_validation as cval\n'), ((47677, 47697), 'sklearn.svm.SVC', 'SVC', ([], {'kernel': '"""linear"""'}), "(kernel='linear')\n", (47680, 47697), False, 'from sklearn.svm import SVC\n'), ((4653, 4666), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (4661, 4666), True, 'import numpy as np\n'), ((4668, 4681), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (4676, 4681), True, 'import numpy as np\n'), ((10665, 10678), 'numpy.sum', 'np.sum', (['sizes'], {}), '(sizes)\n', (10671, 10678), True, 'import numpy as np\n'), ((10958, 11010), 'sklearn.cross_validation.StratifiedKFold', 'cval.StratifiedKFold', (['labels[:i]', '(3)'], {'shuffle': 'shuffle'}), '(labels[:i], 3, shuffle=shuffle)\n', (10978, 11010), True, 'from sklearn import cross_validation as 
cval\n'), ((11827, 11865), 'numpy.concatenate', 'np.concatenate', (['(all_folds, ind[test])'], {}), '((all_folds, ind[test]))\n', (11841, 11865), True, 'import numpy as np\n'), ((13152, 13192), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['model', 'X', 'y'], {'cv': 'cv'}), '(model, X, y, cv=cv)\n', (13172, 13192), True, 'from sklearn import cross_validation as cval\n'), ((13554, 13594), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['model', 'X', 'y'], {'cv': 'cv'}), '(model, X, y, cv=cv)\n', (13574, 13594), True, 'from sklearn import cross_validation as cval\n'), ((13713, 13753), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['model', 'X', 'y'], {'cv': 'cv'}), '(model, X, y, cv=cv)\n', (13733, 13753), True, 'from sklearn import cross_validation as cval\n'), ((14232, 14272), 'sklearn.cross_validation.cross_val_score', 'cval.cross_val_score', (['model', 'X', 'y'], {'cv': 'cv'}), '(model, X, y, cv=cv)\n', (14252, 14272), True, 'from sklearn import cross_validation as cval\n'), ((17124, 17135), 'numpy.int32', 'np.int32', (['(2)'], {}), '(2)\n', (17132, 17135), True, 'import numpy as np\n'), ((19687, 19732), 'sklearn.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['p_train', 'p_test', '(1)'], {}), '(p_train, p_test, 1)\n', (19712, 19732), False, 'from sklearn.utils.testing import assert_array_almost_equal\n'), ((20431, 20526), 'sklearn.utils.testing.assert_true', 'assert_true', (['(p > threshold)', '"""An index is not drawn with chance corresponding to even draws"""'], {}), "(p > threshold,\n 'An index is not drawn with chance corresponding to even draws')\n", (20442, 20526), False, 'from sklearn.utils.testing import assert_true\n'), ((23624, 23643), 'numpy.unique', 'np.unique', (['y[train]'], {}), '(y[train])\n', (23633, 23643), True, 'import numpy as np\n'), ((23672, 23690), 'numpy.unique', 'np.unique', (['y[test]'], {}), '(y[test])\n', (23681, 23690), True, 'import numpy 
as np\n'), ((23903, 23953), 'sklearn.utils.testing.assert_equal', 'assert_equal', (['(y[train].size + y[test].size)', 'y.size'], {}), '(y[train].size + y[test].size, y.size)\n', (23915, 23953), False, 'from sklearn.utils.testing import assert_equal\n'), ((25114, 25151), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['train', 'train_chan'], {}), '(train, train_chan)\n', (25132, 25151), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((25164, 25199), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['test', 'test_chan'], {}), '(test, test_chan)\n', (25182, 25199), False, 'from sklearn.utils.testing import assert_array_equal\n'), ((29169, 29187), 'numpy.ones', 'np.ones', (['n_classes'], {}), '(n_classes)\n', (29176, 29187), True, 'import numpy as np\n'), ((30501, 30516), 'numpy.float32', 'np.float32', (['(0.6)'], {}), '(0.6)\n', (30511, 30516), True, 'import numpy as np\n'), ((30529, 30544), 'numpy.float32', 'np.float32', (['(0.6)'], {}), '(0.6)\n', (30539, 30544), True, 'import numpy as np\n'), ((30971, 30985), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (30980, 30985), True, 'import numpy as np\n'), ((31988, 32013), 'numpy.arange', 'np.arange', (['(10 * 5 * 3 * 2)'], {}), '(10 * 5 * 3 * 2)\n', (31997, 32013), True, 'import numpy as np\n'), ((32046, 32068), 'numpy.arange', 'np.arange', (['(10 * 7 * 11)'], {}), '(10 * 7 * 11)\n', (32055, 32068), True, 'import numpy as np\n'), ((32939, 32957), 'numpy.sum', 'np.sum', (['(train == 1)'], {}), '(train == 1)\n', (32945, 32957), True, 'import numpy as np\n'), ((32959, 32977), 'numpy.sum', 'np.sum', (['(train == 2)'], {}), '(train == 2)\n', (32965, 32977), True, 'import numpy as np\n'), ((36369, 36384), 'numpy.ones', 'np.ones', (['y.size'], {}), '(y.size)\n', (36376, 36384), True, 'import numpy as np\n'), ((36800, 36815), 'numpy.ones', 'np.ones', (['y.size'], {}), '(y.size)\n', (36807, 36815), True, 'import numpy as np\n'), ((40778, 40810), 'numpy.arange', 
'np.arange', (['(200)'], {'dtype': 'np.float64'}), '(200, dtype=np.float64)\n', (40787, 40810), True, 'import numpy as np\n'), ((41179, 41211), 'numpy.arange', 'np.arange', (['(200)'], {'dtype': 'np.float64'}), '(200, dtype=np.float64)\n', (41188, 41211), True, 'import numpy as np\n'), ((41479, 41511), 'numpy.arange', 'np.arange', (['(200)'], {'dtype': 'np.float64'}), '(200, dtype=np.float64)\n', (41488, 41511), True, 'import numpy as np\n'), ((46840, 46858), 'numpy.eye', 'np.eye', (['X.shape[0]'], {}), '(X.shape[0])\n', (46846, 46858), True, 'import numpy as np\n'), ((47110, 47126), 'numpy.delete', 'np.delete', (['p', '(23)'], {}), '(p, 23)\n', (47119, 47126), True, 'import numpy as np\n'), ((4456, 4470), 'numpy.abs', 'np.abs', (['self.a'], {}), '(self.a)\n', (4462, 4470), True, 'import numpy as np\n'), ((11239, 11252), 'numpy.sum', 'np.sum', (['sizes'], {}), '(sizes)\n', (11245, 11252), True, 'import numpy as np\n'), ((15109, 15142), 'numpy.unique', 'np.unique', (['folds[labels == label]'], {}), '(folds[labels == label])\n', (15118, 15142), True, 'import numpy as np\n'), ((15341, 15384), 'numpy.intersect1d', 'np.intersect1d', (['labels[train]', 'labels[test]'], {}), '(labels[train], labels[test])\n', (15355, 15384), True, 'import numpy as np\n'), ((16535, 16568), 'numpy.unique', 'np.unique', (['folds[labels == label]'], {}), '(folds[labels == label])\n', (16544, 16568), True, 'import numpy as np\n'), ((16721, 16764), 'numpy.intersect1d', 'np.intersect1d', (['labels[train]', 'labels[test]'], {}), '(labels[train], labels[test])\n', (16735, 16764), True, 'import numpy as np\n'), ((19260, 19279), 'numpy.unique', 'np.unique', (['y[train]'], {}), '(y[train])\n', (19269, 19279), True, 'import numpy as np\n'), ((19281, 19299), 'numpy.unique', 'np.unique', (['y[test]'], {}), '(y[test])\n', (19290, 19299), True, 'import numpy as np\n'), ((19917, 19960), 'numpy.lib.arraysetops.intersect1d', 'np.lib.arraysetops.intersect1d', (['train', 'test'], {}), '(train, test)\n', 
(19947, 19960), True, 'import numpy as np\n'), ((24040, 24067), 'numpy.intersect1d', 'np.intersect1d', (['train', 'test'], {}), '(train, test)\n', (24054, 24067), True, 'import numpy as np\n'), ((40929, 40975), 'sklearn.preprocessing.Imputer', 'Imputer', ([], {'strategy': '"""mean"""', 'missing_values': '"""NaN"""'}), "(strategy='mean', missing_values='NaN')\n", (40936, 40975), False, 'from sklearn.preprocessing import Imputer\n'), ((41630, 41676), 'sklearn.preprocessing.Imputer', 'Imputer', ([], {'strategy': '"""mean"""', 'missing_values': '"""NaN"""'}), "(strategy='mean', missing_values='NaN')\n", (41637, 41676), False, 'from sklearn.preprocessing import Imputer\n'), ((9811, 9837), 'numpy.sum', 'np.sum', (['(labels[train] == 4)'], {}), '(labels[train] == 4)\n', (9817, 9837), True, 'import numpy as np\n'), ((9925, 9951), 'numpy.sum', 'np.sum', (['(labels[train] == 0)'], {}), '(labels[train] == 0)\n', (9931, 9951), True, 'import numpy as np\n'), ((10039, 10065), 'numpy.sum', 'np.sum', (['(labels[train] == 1)'], {}), '(labels[train] == 1)\n', (10045, 10065), True, 'import numpy as np\n'), ((10153, 10178), 'numpy.sum', 'np.sum', (['(labels[test] == 4)'], {}), '(labels[test] == 4)\n', (10159, 10178), True, 'import numpy as np\n'), ((10233, 10258), 'numpy.sum', 'np.sum', (['(labels[test] == 0)'], {}), '(labels[test] == 0)\n', (10239, 10258), True, 'import numpy as np\n'), ((10313, 10338), 'numpy.sum', 'np.sum', (['(labels[test] == 1)'], {}), '(labels[test] == 1)\n', (10319, 10338), True, 'import numpy as np\n'), ((10607, 10620), 'numpy.max', 'np.max', (['sizes'], {}), '(sizes)\n', (10613, 10620), True, 'import numpy as np\n'), ((10623, 10636), 'numpy.min', 'np.min', (['sizes'], {}), '(sizes)\n', (10629, 10636), True, 'import numpy as np\n'), ((11563, 11577), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (11572, 11577), True, 'import numpy as np\n'), ((11620, 11639), 'numpy.arange', 'np.arange', (['(100)', '(200)'], {}), '(100, 200)\n', (11629, 11639), True, 
'import numpy as np\n'), ((11682, 11701), 'numpy.arange', 'np.arange', (['(200)', '(300)'], {}), '(200, 300)\n', (11691, 11701), True, 'import numpy as np\n'), ((23723, 23755), 'numpy.in1d', 'np.in1d', (['y[train]', 'y_test_unique'], {}), '(y[train], y_test_unique)\n', (23730, 23755), True, 'import numpy as np\n'), ((23790, 23822), 'numpy.in1d', 'np.in1d', (['y[test]', 'y_train_unique'], {}), '(y[test], y_train_unique)\n', (23797, 23822), True, 'import numpy as np\n'), ((44654, 44676), 'numpy.array', 'np.array', (['[0, 1, 2, 3]'], {}), '([0, 1, 2, 3])\n', (44662, 44676), True, 'import numpy as np\n'), ((44678, 44703), 'numpy.array', 'np.array', (['[4, 5, 6, 7, 8]'], {}), '([4, 5, 6, 7, 8])\n', (44686, 44703), True, 'import numpy as np\n'), ((3273, 3285), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (3282, 3285), True, 'import numpy as np\n'), ((3504, 3516), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (3513, 3516), True, 'import numpy as np\n'), ((11177, 11190), 'numpy.max', 'np.max', (['sizes'], {}), '(sizes)\n', (11183, 11190), True, 'import numpy as np\n'), ((11193, 11206), 'numpy.min', 'np.min', (['sizes'], {}), '(sizes)\n', (11199, 11206), True, 'import numpy as np\n'), ((19391, 19431), 'numpy.unique', 'np.unique', (['y[train]'], {'return_inverse': '(True)'}), '(y[train], return_inverse=True)\n', (19400, 19431), True, 'import numpy as np\n'), ((19552, 19591), 'numpy.unique', 'np.unique', (['y[test]'], {'return_inverse': '(True)'}), '(y[test], return_inverse=True)\n', (19561, 19591), True, 'import numpy as np\n'), ((38199, 38216), 'numpy.asarray', 'np.asarray', (['train'], {}), '(train)\n', (38209, 38216), True, 'import numpy as np\n'), ((38263, 38280), 'numpy.asarray', 'np.asarray', (['train'], {}), '(train)\n', (38273, 38280), True, 'import numpy as np\n'), ((38944, 38961), 'numpy.asarray', 'np.asarray', (['train'], {}), '(train)\n', (38954, 38961), True, 'import numpy as np\n'), ((39008, 39025), 'numpy.asarray', 'np.asarray', (['train'], {}), 
'(train)\n', (39018, 39025), True, 'import numpy as np\n')]
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from setuptools import setup, find_packages
# Third-party packages the Tosca UI needs at runtime.
REQUIREMENTS = [
    "Flask",
    "gunicorn",
    "gevent",
    "requests",
    "Flask-SQLAlchemy",
    "Flask-WTF",
    "Flask-DebugToolbar",
    "Flask-Login",
    "simpleldap",
    "simplekml",
    "future>=0.17.1",
    "icalendar",
]

# Package metadata and build configuration consumed by setuptools.
setup(
    name="tosca",
    version="0.3.4",
    long_description="Advanced FacetView User Interface",
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    install_requires=REQUIREMENTS,
)
|
[
"setuptools.find_packages"
] |
[((311, 326), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (324, 326), False, 'from setuptools import setup, find_packages\n')]
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import json
import unittest
import mock
from google.appengine.ext import ndb
from dashboard.common import testing_common
from dashboard.common import utils
from dashboard.models import graph_data
class UtilsTest(testing_common.TestCase):
  """Unit tests for the helper functions in dashboard.common.utils."""

  def setUp(self):
    super(UtilsTest, self).setUp()
    # Register one internal and one external user so internal_only
    # entity filtering can be exercised from both privilege levels.
    testing_common.SetIsInternalUser('<EMAIL>', True)
    testing_common.SetIsInternalUser('<EMAIL>', False)

  def _AssertMatches(self, test_path, pattern):
    """Asserts that a test path matches a pattern with MatchesPattern."""
    test_key = utils.TestKey(test_path)
    self.assertTrue(utils.TestMatchesPattern(test_key, pattern))

  def _AssertDoesntMatch(self, test_path, pattern):
    """Asserts that a test path doesn't match a pattern with MatchesPattern."""
    test_key = utils.TestKey(test_path)
    self.assertFalse(utils.TestMatchesPattern(test_key, pattern))

  def testMatchesPattern_AllWildcards(self):
    self._AssertMatches(
        'ChromiumPerf/cros-one/dromaeo.top25/Total', '*/*/*/*')
    self._AssertDoesntMatch(
        'ChromiumPerf/cros-one/dromaeo.top25/Total', '*/*/*')

  def testMatchesPattern_SomeWildcards(self):
    self._AssertMatches(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'ChromiumPerf/*/dromaeo.top25/*')
    self._AssertDoesntMatch(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'ChromiumPerf/*/dromaeo.another_page_set/*')

  def testMatchesPattern_SomePartialWildcards(self):
    self._AssertMatches(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'ChromiumPerf/cros-*/dromaeo.*/Total')
    self._AssertDoesntMatch(
        'ChromiumPerf/cros-one/dromaeoXtop25/Total',
        'ChromiumPerf/cros-*/dromaeo.*/Total')
    self._AssertDoesntMatch(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'OtherMaster/cros-*/dromaeo.*/Total')

  def testMatchesPattern_MorePartialWildcards(self):
    # Note that the wildcard matches zero or more characters.
    self._AssertMatches(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'Chromium*/cros-one*/*.*/To*al')
    self._AssertDoesntMatch(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'Chromium*/linux-*/*.*/To*al')

  def testMatchesPattern_RequiresFullMatchAtEnd(self):
    # If there is no wildcard at the beginning or end of the
    # test path part, then a part will only match if it matches
    # right up to the beginning or end.
    self._AssertDoesntMatch(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'ChromiumPerf/cros-one/dromaeo.top25/*Tot')
    self._AssertDoesntMatch(
        'ChromiumPerf/cros-one/dromaeo.top25/Total',
        'ChromiumPerf/cros-one/dromaeo.top25/otal*')

  # The MostSpecificMatchingPattern tests each pass several (pattern, value)
  # pairs and assert which pattern's value wins for the given test path.
  def testMostSpecificMatchingPattern_SpecificVsGeneral(self):
    test_key = utils.TestKey('M/B/S/Total')
    result = utils.MostSpecificMatchingPattern(
        test_key,
        [('*/*/*/*', 1), ('*/*/*/Total', 2), ('*/*/*/Foo', 3)])
    self.assertEqual(2, result)

  def testMostSpecificMatchingPattern_PartialVsGeneral(self):
    test_key = utils.TestKey('M/B/S/Total')
    result = utils.MostSpecificMatchingPattern(
        test_key,
        [('*/*/*/*', 1), ('*/*/*/To*al', 2), ('*/*/*/Foo', 3)])
    self.assertEqual(2, result)

  def testMostSpecificMatchingPattern_2ndLevel(self):
    test_key = utils.TestKey('M/B/S/Total')
    result = utils.MostSpecificMatchingPattern(
        test_key,
        [('*/*/*/*', 1), ('*/*/S/*', 2), ('*/*/*/Foo', 3)])
    self.assertEqual(2, result)

  def testMostSpecificMatchingPattern_TopLevelSpecificOverLowerSpecific(self):
    test_key = utils.TestKey('M/B/S/Total')
    result = utils.MostSpecificMatchingPattern(
        test_key,
        [('*/*/S/*', 1), ('*/*/*/Total', 2), ('*/*/*/Foo', 3)])
    self.assertEqual(2, result)

  def testMostSpecificMatchingPattern_TopLevelPartialOverLowerSpecific(self):
    test_key = utils.TestKey('M/B/S/Total')
    result = utils.MostSpecificMatchingPattern(
        test_key,
        [('*/*/S/*', 1), ('*/*/*/To*al', 2), ('*/*/*/Foo', 3)])
    self.assertEqual(2, result)

  def _PutEntitiesAllExternal(self):
    """Puts entities (none internal-only) and returns the keys."""
    master = graph_data.Master(id='M').put()
    graph_data.Bot(parent=master, id='b').put()
    keys = [
        graph_data.TestMetadata(id='M/b/a', internal_only=False).put(),
        graph_data.TestMetadata(id='M/b/b', internal_only=False).put(),
        graph_data.TestMetadata(id='M/b/c', internal_only=False).put(),
        graph_data.TestMetadata(id='M/b/d', internal_only=False).put(),
    ]
    return keys

  def _PutEntitiesHalfInternal(self):
    """Puts entities (half internal-only) and returns the keys."""
    master = graph_data.Master(id='M').put()
    graph_data.Bot(parent=master, id='b').put()
    keys = [
        graph_data.TestMetadata(id='M/b/ax', internal_only=True).put(),
        graph_data.TestMetadata(id='M/b/a', internal_only=False).put(),
        graph_data.TestMetadata(id='M/b/b', internal_only=False).put(),
        graph_data.TestMetadata(id='M/b/bx', internal_only=True).put(),
        graph_data.TestMetadata(id='M/b/c', internal_only=False).put(),
        graph_data.TestMetadata(id='M/b/cx', internal_only=True).put(),
        graph_data.TestMetadata(id='M/b/d', internal_only=False).put(),
        graph_data.TestMetadata(id='M/b/dx', internal_only=True).put(),
    ]
    return keys

  def testGetMulti_ExternalUser_ReturnsSomeEntities(self):
    keys = self._PutEntitiesHalfInternal()
    self.SetCurrentUser('<EMAIL>')
    # An external user should only see the non-internal half of the keys.
    self.assertEqual(len(keys) / 2, len(utils.GetMulti(keys)))

  def testGetMulti_InternalUser_ReturnsAllEntities(self):
    keys = self._PutEntitiesHalfInternal()
    self.SetCurrentUser('<EMAIL>')
    self.assertEqual(len(keys), len(utils.GetMulti(keys)))

  def testGetMulti_AllExternalEntities_ReturnsAllEntities(self):
    keys = self._PutEntitiesAllExternal()
    self.SetCurrentUser('<EMAIL>')
    self.assertEqual(len(keys), len(utils.GetMulti(keys)))

  def testTestPath_Test(self):
    key = ndb.Key('Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric')
    self.assertEqual('m/b/suite/metric', utils.TestPath(key))

  def testTestPath_TestMetadata(self):
    key = ndb.Key('TestMetadata', 'm/b/suite/metric')
    self.assertEqual('m/b/suite/metric', utils.TestPath(key))

  def testTestPath_Container(self):
    key = ndb.Key('TestContainer', 'm/b/suite/metric')
    self.assertEqual('m/b/suite/metric', utils.TestPath(key))

  def testTestMetadataKey_None(self):
    key = utils.TestMetadataKey(None)
    self.assertIsNone(key)

  def testTestMetadataKey_Test(self):
    key = utils.TestMetadataKey(
        ndb.Key('Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric'))
    self.assertEqual('TestMetadata', key.kind())
    self.assertEqual('m/b/suite/metric', key.id())
    self.assertEqual(('TestMetadata', 'm/b/suite/metric'), key.flat())

  def testTestMetadataKey_TestMetadata(self):
    original_key = ndb.Key('TestMetadata', 'm/b/suite/metric')
    key = utils.TestMetadataKey(original_key)
    self.assertEqual(original_key, key)

  def testTestMetadataKey_String(self):
    key = utils.TestMetadataKey('m/b/suite/metric/page')
    self.assertEqual('TestMetadata', key.kind())
    self.assertEqual('m/b/suite/metric/page', key.id())
    self.assertEqual(('TestMetadata', 'm/b/suite/metric/page'), key.flat())

  def testOldStyleTestKey_None(self):
    key = utils.OldStyleTestKey(None)
    self.assertIsNone(key)

  def testOldStyleTestKey_Test(self):
    original_key = ndb.Key(
        'Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric')
    key = utils.OldStyleTestKey(original_key)
    self.assertEqual(original_key, key)

  def testOldStyleTestKey_TestMetadata(self):
    key = utils.OldStyleTestKey(ndb.Key('TestMetadata', 'm/b/suite/metric'))
    self.assertEqual('Test', key.kind())
    self.assertEqual('metric', key.id())
    self.assertEqual(
        ('Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric'),
        key.flat())

  def testOldStyleTestKey_String(self):
    key = utils.OldStyleTestKey('m/b/suite/metric')
    self.assertEqual('Test', key.kind())
    self.assertEqual('metric', key.id())
    self.assertEqual(
        ('Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric'),
        key.flat())

  def testTestSuiteName_Basic(self):
    key = utils.TestKey('Master/bot/suite-foo/sub/x/y/z')
    self.assertEqual('suite-foo', utils.TestSuiteName(key))

  def testMinimumRange_Empty_ReturnsNone(self):
    self.assertIsNone(utils.MinimumRange([]))

  def testMinimumRange_NotOverlapping_ReturnsNone(self):
    self.assertIsNone(utils.MinimumRange([(5, 10), (15, 20)]))

  def testMinimumRange_OneRange_ReturnsSameRange(self):
    self.assertEqual((5, 10), utils.MinimumRange([(5, 10)]))

  def testMinimumRange_OverlapsForOneNumber_ReturnsRangeWithOneNumber(self):
    self.assertEqual((5, 5), utils.MinimumRange([(2, 5), (5, 10)]))

  def testMinimumRange_MoreThanTwoRanges_ReturnsIntersection(self):
    self.assertEqual((6, 14), utils.MinimumRange(
        [(3, 20), (5, 15), (6, 25), (3, 14)]))

  def testValidate_StringNotInOptionList_Fails(self):
    with self.assertRaises(ValueError):
      utils.Validate(
          ['completed', 'pending', 'failed'], 'running')

  def testValidate_InvalidType_Fails(self):
    with self.assertRaises(ValueError):
      utils.Validate(int, 'a string')

  def testValidate_MissingProperty_Fails(self):
    with self.assertRaises(ValueError):
      utils.Validate(
          {'status': str, 'try_job_id': int, 'required_property': int},
          {'status': 'completed', 'try_job_id': 1234})

  def testValidate_InvalidTypeInDict_Fails(self):
    with self.assertRaises(ValueError):
      utils.Validate(
          {'status': int, 'try_job_id': int},
          {'status': 'completed', 'try_job_id': 1234})

  def testValidate_StringNotInNestedOptionList_Fails(self):
    with self.assertRaises(ValueError):
      utils.Validate(
          {'values': {'nested_values': ['orange', 'banana']}},
          {'values': {'nested_values': 'apple'}})

  def testValidate_MissingPropertyInNestedDict_Fails(self):
    with self.assertRaises(ValueError):
      utils.Validate(
          {'values': {'nested_values': ['orange', 'banana']}},
          {'values': {}})

  def testValidate_ExpectedValueIsNone_Passes(self):
    utils.Validate(None, 'running')

  def testValidate_StringInOptionList_Passes(self):
    utils.Validate(str, 'a string')

  def testValidate_HasExpectedProperties_Passes(self):
    utils.Validate(
        {'status': str, 'try_job_id': int},
        {'status': 'completed', 'try_job_id': 1234})

  def testValidate_StringInNestedOptionList_Passes(self):
    utils.Validate(
        {'values': {'nested_values': ['orange', 'banana']}},
        {'values': {'nested_values': 'orange'}})

  def testValidate_TypeConversion_Passes(self):
    utils.Validate([1], '1')

  def testGetBuildDetailsFromStdioLink_InvalidLink(self):
    # A link that is not a recognized buildbot stdio URL yields all Nones.
    base_url, master, bot, number, step = utils.GetBuildDetailsFromStdioLink(
        '[Buildbot stdio](http://notquite/builders/whatever/234)')
    self.assertIsNone(base_url)
    self.assertIsNone(master)
    self.assertIsNone(bot)
    self.assertIsNone(number)
    self.assertIsNone(step)

  def testGetBuildDetailsFromStdioLink(self):
    base_url, master, bot, number, step = utils.GetBuildDetailsFromStdioLink((
        '[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/'
        'Android%20One%20Perf%20%282%29/builds/5365/steps/'
        'blink_style.top_25/logs/stdio)'))
    self.assertEqual('https://build.chromium.org/p/chromium.perf/builders/',
                     base_url)
    self.assertEqual('chromium.perf', master)
    # URL-escaped characters in the bot name are expected to be unescaped.
    self.assertEqual('Android One Perf (2)', bot)
    self.assertEqual('5365', number)
    self.assertEqual('blink_style.top_25', step)

  def testGetBuildDetailsFromStdioLink_DifferentBaseUrl(self):
    base_url, master, bot, number, step = utils.GetBuildDetailsFromStdioLink((
        '[Buildbot stdio]('
        'https://uberchromegw.corp.google.com/i/new.master/builders/Builder/'
        'builds/3486/steps/new_test/logs/stdio)'))
    self.assertEqual(
        'https://uberchromegw.corp.google.com/i/new.master/builders/',
        base_url)
    self.assertEqual('new.master', master)
    self.assertEqual('Builder', bot)
    self.assertEqual('3486', number)
    self.assertEqual('new_test', step)

  def testGetBuildbotStatusPageUriFromStdioLink(self):
    buildbot_status_page = utils.GetBuildbotStatusPageUriFromStdioLink((
        '[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/'
        'Android%20One%20Perf%20%282%29/builds/5365/steps/'
        'blink_style.top_25/logs/stdio)'))
    self.assertEqual((
        'https://build.chromium.org/p/chromium.perf/builders/'
        'Android%20One%20Perf%20%282%29/builds/5365'), buildbot_status_page)

  def testGetLogdogLogUriFromStdioLink(self):
    logdog_uri = utils.GetLogdogLogUriFromStdioLink((
        '[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/'
        'Android%20One%20Perf%20%282%29/builds/5365/steps/'
        'blink_style.top_25/logs/stdio)'))
    self.assertEqual((
        'https://luci-logdog.appspot.com/v/?s='
        'chrome%2Fbb%2Fchromium.perf%2FAndroid_One_Perf__2_%2F5365%2F%2B%2F'
        'recipes%2Fsteps%2Fblink_style.top_25%2F0%2Fstdout'), logdog_uri)

  # ServiceAccountHttp is patched out so no real credentials are needed;
  # the discovery client is mocked to return a canned membership response.
  @mock.patch.object(utils, 'ServiceAccountHttp', mock.MagicMock())
  @mock.patch('common.utils.discovery.build')
  def testIsGroupMember_PositiveCase(self, mock_discovery_build):
    mock_request = mock.MagicMock()
    mock_request.execute = mock.MagicMock(return_value={'is_member': True})
    mock_service = mock.MagicMock()
    mock_service.membership = mock.MagicMock(
        return_value=mock_request)
    mock_discovery_build.return_value = mock_service
    self.assertTrue(utils.IsGroupMember('<EMAIL>', 'group'))
    mock_service.membership.assert_called_once_with(
        identity='<EMAIL>', group='group')

  @mock.patch.object(utils, 'ServiceAccountHttp', mock.MagicMock())
  @mock.patch('logging.error')
  @mock.patch('common.utils.discovery.build')
  def testIsGroupMember_RequestFails_LogsErrorAndReturnsFalse(
      self, mock_discovery_build, mock_logging_error):
    # membership() returning a plain dict (not a request object) makes the
    # downstream call fail, which should be logged and reported as False.
    mock_service = mock.MagicMock()
    mock_service.membership = mock.MagicMock(
        return_value={'error': 'Some error'})
    mock_discovery_build.return_value = mock_service
    self.assertFalse(utils.IsGroupMember('<EMAIL>', 'group'))
    self.assertEqual(1, mock_logging_error.call_count)

  def testGetSheriffForAutorollCommit_InvalidCommit_ReturnsNone(self):
    self.assertIsNone(utils.GetSheriffForAutorollCommit(None))
    self.assertIsNone(utils.GetSheriffForAutorollCommit({}))
    self.assertIsNone(utils.GetSheriffForAutorollCommit({'author': {}}))

  def testGetSheriffForAutorollCommit_NotAutoroll_ReturnsNone(self):
    self.assertIsNone(utils.GetSheriffForAutorollCommit({
        'author': {'email': '<EMAIL>'},
        'message': 'TBR=<EMAIL>',
    }))
    self.assertIsNone(utils.GetSheriffForAutorollCommit({
        'author': {'email': '<EMAIL>'},
        'message': 'TBR=<EMAIL>',
    }))

  def testGetSheriffForAutorollCommit_AutoRoll_ReturnsSheriff(self):
    self.assertEqual(
        '<EMAIL>',
        utils.GetSheriffForAutorollCommit({
            'author': {
                'email': '<EMAIL>',
            },
            'message': 'This is a roll.\n\nTBR=<EMAIL>,<EMAIL>\n\n',
        }))
    self.assertEqual(
        '<EMAIL>',
        utils.GetSheriffForAutorollCommit({
            'author': {
                'email': '<EMAIL>',
            },
            'message': 'TBR=<EMAIL>',
        }))
    self.assertEqual(
        '<EMAIL>',
        utils.GetSheriffForAutorollCommit({'tbr': '<EMAIL>'}))
def _MakeMockFetch(base64_encoded=True, status=200):
  """Returns a mock fetch object that returns a canned response.

  Args:
    base64_encoded: Whether the canned body should be base64-encoded.
    status: HTTP status code to report on the fake response.

  Returns:
    A callable that ignores its URL argument and always yields the same
    canned JSON payload wrapped in a FakeResponseObject.
  """
  # The payload is deterministic, so it can be built once up front
  # instead of on every simulated fetch.
  payload = json.dumps({'key': 'this is well-formed JSON.'})
  if base64_encoded:
    payload = base64.b64encode(payload)
  def _MockFetch(_):
    return testing_common.FakeResponseObject(status, payload)
  return _MockFetch
# Standard entry point so this test module can be run directly.
if __name__ == '__main__':
  unittest.main()
|
[
"dashboard.common.testing_common.SetIsInternalUser",
"dashboard.common.utils.OldStyleTestKey",
"json.dumps",
"dashboard.common.utils.GetMulti",
"unittest.main",
"dashboard.common.utils.TestPath",
"dashboard.common.utils.TestSuiteName",
"dashboard.common.utils.MinimumRange",
"dashboard.common.utils.IsGroupMember",
"dashboard.common.testing_common.FakeResponseObject",
"dashboard.models.graph_data.TestMetadata",
"google.appengine.ext.ndb.Key",
"dashboard.common.utils.MostSpecificMatchingPattern",
"mock.patch",
"dashboard.models.graph_data.Bot",
"dashboard.common.utils.Validate",
"dashboard.common.utils.GetSheriffForAutorollCommit",
"dashboard.common.utils.GetBuildDetailsFromStdioLink",
"dashboard.common.utils.GetLogdogLogUriFromStdioLink",
"dashboard.common.utils.GetBuildbotStatusPageUriFromStdioLink",
"dashboard.models.graph_data.Master",
"dashboard.common.utils.TestMatchesPattern",
"base64.b64encode",
"dashboard.common.utils.TestKey",
"mock.MagicMock",
"dashboard.common.utils.TestMetadataKey"
] |
[((13672, 13714), 'mock.patch', 'mock.patch', (['"""common.utils.discovery.build"""'], {}), "('common.utils.discovery.build')\n", (13682, 13714), False, 'import mock\n'), ((14292, 14319), 'mock.patch', 'mock.patch', (['"""logging.error"""'], {}), "('logging.error')\n", (14302, 14319), False, 'import mock\n'), ((14323, 14365), 'mock.patch', 'mock.patch', (['"""common.utils.discovery.build"""'], {}), "('common.utils.discovery.build')\n", (14333, 14365), False, 'import mock\n'), ((16434, 16449), 'unittest.main', 'unittest.main', ([], {}), '()\n', (16447, 16449), False, 'import unittest\n'), ((479, 528), 'dashboard.common.testing_common.SetIsInternalUser', 'testing_common.SetIsInternalUser', (['"""<EMAIL>"""', '(True)'], {}), "('<EMAIL>', True)\n", (511, 528), False, 'from dashboard.common import testing_common\n'), ((533, 583), 'dashboard.common.testing_common.SetIsInternalUser', 'testing_common.SetIsInternalUser', (['"""<EMAIL>"""', '(False)'], {}), "('<EMAIL>', False)\n", (565, 583), False, 'from dashboard.common import testing_common\n'), ((722, 746), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['test_path'], {}), '(test_path)\n', (735, 746), False, 'from dashboard.common import utils\n'), ((960, 984), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['test_path'], {}), '(test_path)\n', (973, 984), False, 'from dashboard.common import utils\n'), ((2940, 2968), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['"""M/B/S/Total"""'], {}), "('M/B/S/Total')\n", (2953, 2968), False, 'from dashboard.common import utils\n'), ((2983, 3086), 'dashboard.common.utils.MostSpecificMatchingPattern', 'utils.MostSpecificMatchingPattern', (['test_key', "[('*/*/*/*', 1), ('*/*/*/Total', 2), ('*/*/*/Foo', 3)]"], {}), "(test_key, [('*/*/*/*', 1), ('*/*/*/Total',\n 2), ('*/*/*/Foo', 3)])\n", (3016, 3086), False, 'from dashboard.common import utils\n'), ((3210, 3238), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['"""M/B/S/Total"""'], {}), "('M/B/S/Total')\n", (3223, 
3238), False, 'from dashboard.common import utils\n'), ((3253, 3356), 'dashboard.common.utils.MostSpecificMatchingPattern', 'utils.MostSpecificMatchingPattern', (['test_key', "[('*/*/*/*', 1), ('*/*/*/To*al', 2), ('*/*/*/Foo', 3)]"], {}), "(test_key, [('*/*/*/*', 1), ('*/*/*/To*al',\n 2), ('*/*/*/Foo', 3)])\n", (3286, 3356), False, 'from dashboard.common import utils\n'), ((3472, 3500), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['"""M/B/S/Total"""'], {}), "('M/B/S/Total')\n", (3485, 3500), False, 'from dashboard.common import utils\n'), ((3515, 3614), 'dashboard.common.utils.MostSpecificMatchingPattern', 'utils.MostSpecificMatchingPattern', (['test_key', "[('*/*/*/*', 1), ('*/*/S/*', 2), ('*/*/*/Foo', 3)]"], {}), "(test_key, [('*/*/*/*', 1), ('*/*/S/*', 2),\n ('*/*/*/Foo', 3)])\n", (3548, 3614), False, 'from dashboard.common import utils\n'), ((3755, 3783), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['"""M/B/S/Total"""'], {}), "('M/B/S/Total')\n", (3768, 3783), False, 'from dashboard.common import utils\n'), ((3798, 3901), 'dashboard.common.utils.MostSpecificMatchingPattern', 'utils.MostSpecificMatchingPattern', (['test_key', "[('*/*/S/*', 1), ('*/*/*/Total', 2), ('*/*/*/Foo', 3)]"], {}), "(test_key, [('*/*/S/*', 1), ('*/*/*/Total',\n 2), ('*/*/*/Foo', 3)])\n", (3831, 3901), False, 'from dashboard.common import utils\n'), ((4041, 4069), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['"""M/B/S/Total"""'], {}), "('M/B/S/Total')\n", (4054, 4069), False, 'from dashboard.common import utils\n'), ((4084, 4187), 'dashboard.common.utils.MostSpecificMatchingPattern', 'utils.MostSpecificMatchingPattern', (['test_key', "[('*/*/S/*', 1), ('*/*/*/To*al', 2), ('*/*/*/Foo', 3)]"], {}), "(test_key, [('*/*/S/*', 1), ('*/*/*/To*al',\n 2), ('*/*/*/Foo', 3)])\n", (4117, 4187), False, 'from dashboard.common import utils\n'), ((6205, 6274), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""Master"""', '"""m"""', '"""Bot"""', '"""b"""', '"""Test"""', 
'"""suite"""', '"""Test"""', '"""metric"""'], {}), "('Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric')\n", (6212, 6274), False, 'from google.appengine.ext import ndb\n'), ((6387, 6430), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""TestMetadata"""', '"""m/b/suite/metric"""'], {}), "('TestMetadata', 'm/b/suite/metric')\n", (6394, 6430), False, 'from google.appengine.ext import ndb\n'), ((6540, 6584), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""TestContainer"""', '"""m/b/suite/metric"""'], {}), "('TestContainer', 'm/b/suite/metric')\n", (6547, 6584), False, 'from google.appengine.ext import ndb\n'), ((6696, 6723), 'dashboard.common.utils.TestMetadataKey', 'utils.TestMetadataKey', (['None'], {}), '(None)\n', (6717, 6723), False, 'from dashboard.common import utils\n'), ((7139, 7182), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""TestMetadata"""', '"""m/b/suite/metric"""'], {}), "('TestMetadata', 'm/b/suite/metric')\n", (7146, 7182), False, 'from google.appengine.ext import ndb\n'), ((7193, 7228), 'dashboard.common.utils.TestMetadataKey', 'utils.TestMetadataKey', (['original_key'], {}), '(original_key)\n', (7214, 7228), False, 'from dashboard.common import utils\n'), ((7320, 7366), 'dashboard.common.utils.TestMetadataKey', 'utils.TestMetadataKey', (['"""m/b/suite/metric/page"""'], {}), "('m/b/suite/metric/page')\n", (7341, 7366), False, 'from dashboard.common import utils\n'), ((7597, 7624), 'dashboard.common.utils.OldStyleTestKey', 'utils.OldStyleTestKey', (['None'], {}), '(None)\n', (7618, 7624), False, 'from dashboard.common import utils\n'), ((7710, 7779), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""Master"""', '"""m"""', '"""Bot"""', '"""b"""', '"""Test"""', '"""suite"""', '"""Test"""', '"""metric"""'], {}), "('Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric')\n", (7717, 7779), False, 'from google.appengine.ext import ndb\n'), ((7799, 7834), 'dashboard.common.utils.OldStyleTestKey', 'utils.OldStyleTestKey', (['original_key'], 
{}), '(original_key)\n', (7820, 7834), False, 'from dashboard.common import utils\n'), ((8246, 8287), 'dashboard.common.utils.OldStyleTestKey', 'utils.OldStyleTestKey', (['"""m/b/suite/metric"""'], {}), "('m/b/suite/metric')\n", (8267, 8287), False, 'from dashboard.common import utils\n'), ((8532, 8579), 'dashboard.common.utils.TestKey', 'utils.TestKey', (['"""Master/bot/suite-foo/sub/x/y/z"""'], {}), "('Master/bot/suite-foo/sub/x/y/z')\n", (8545, 8579), False, 'from dashboard.common import utils\n'), ((10541, 10572), 'dashboard.common.utils.Validate', 'utils.Validate', (['None', '"""running"""'], {}), "(None, 'running')\n", (10555, 10572), False, 'from dashboard.common import utils\n'), ((10630, 10661), 'dashboard.common.utils.Validate', 'utils.Validate', (['str', '"""a string"""'], {}), "(str, 'a string')\n", (10644, 10661), False, 'from dashboard.common import utils\n'), ((10722, 10821), 'dashboard.common.utils.Validate', 'utils.Validate', (["{'status': str, 'try_job_id': int}", "{'status': 'completed', 'try_job_id': 1234}"], {}), "({'status': str, 'try_job_id': int}, {'status': 'completed',\n 'try_job_id': 1234})\n", (10736, 10821), False, 'from dashboard.common import utils\n'), ((10898, 11011), 'dashboard.common.utils.Validate', 'utils.Validate', (["{'values': {'nested_values': ['orange', 'banana']}}", "{'values': {'nested_values': 'orange'}}"], {}), "({'values': {'nested_values': ['orange', 'banana']}}, {\n 'values': {'nested_values': 'orange'}})\n", (10912, 11011), False, 'from dashboard.common import utils\n'), ((11077, 11101), 'dashboard.common.utils.Validate', 'utils.Validate', (['[1]', '"""1"""'], {}), "([1], '1')\n", (11091, 11101), False, 'from dashboard.common import utils\n'), ((11203, 11301), 'dashboard.common.utils.GetBuildDetailsFromStdioLink', 'utils.GetBuildDetailsFromStdioLink', (['"""[Buildbot stdio](http://notquite/builders/whatever/234)"""'], {}), "(\n '[Buildbot stdio](http://notquite/builders/whatever/234)')\n", (11237, 11301), False, 
'from dashboard.common import utils\n'), ((11542, 11738), 'dashboard.common.utils.GetBuildDetailsFromStdioLink', 'utils.GetBuildDetailsFromStdioLink', (['"""[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/Android%20One%20Perf%20%282%29/builds/5365/steps/blink_style.top_25/logs/stdio)"""'], {}), "(\n '[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/Android%20One%20Perf%20%282%29/builds/5365/steps/blink_style.top_25/logs/stdio)'\n )\n", (11576, 11738), False, 'from dashboard.common import utils\n'), ((12158, 12328), 'dashboard.common.utils.GetBuildDetailsFromStdioLink', 'utils.GetBuildDetailsFromStdioLink', (['"""[Buildbot stdio](https://uberchromegw.corp.google.com/i/new.master/builders/Builder/builds/3486/steps/new_test/logs/stdio)"""'], {}), "(\n '[Buildbot stdio](https://uberchromegw.corp.google.com/i/new.master/builders/Builder/builds/3486/steps/new_test/logs/stdio)'\n )\n", (12192, 12328), False, 'from dashboard.common import utils\n'), ((12702, 12907), 'dashboard.common.utils.GetBuildbotStatusPageUriFromStdioLink', 'utils.GetBuildbotStatusPageUriFromStdioLink', (['"""[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/Android%20One%20Perf%20%282%29/builds/5365/steps/blink_style.top_25/logs/stdio)"""'], {}), "(\n '[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/Android%20One%20Perf%20%282%29/builds/5365/steps/blink_style.top_25/logs/stdio)'\n )\n", (12745, 12907), False, 'from dashboard.common import utils\n'), ((13158, 13354), 'dashboard.common.utils.GetLogdogLogUriFromStdioLink', 'utils.GetLogdogLogUriFromStdioLink', (['"""[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/Android%20One%20Perf%20%282%29/builds/5365/steps/blink_style.top_25/logs/stdio)"""'], {}), "(\n '[Buildbot stdio](https://build.chromium.org/p/chromium.perf/builders/Android%20One%20Perf%20%282%29/builds/5365/steps/blink_style.top_25/logs/stdio)'\n )\n", (13192, 13354), False, 'from 
dashboard.common import utils\n'), ((13800, 13816), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (13814, 13816), False, 'import mock\n'), ((13844, 13892), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': "{'is_member': True}"}), "(return_value={'is_member': True})\n", (13858, 13892), False, 'import mock\n'), ((13912, 13928), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (13926, 13928), False, 'import mock\n'), ((13959, 14000), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': 'mock_request'}), '(return_value=mock_request)\n', (13973, 14000), False, 'import mock\n'), ((13651, 13667), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (13665, 13667), False, 'import mock\n'), ((14503, 14519), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (14517, 14519), False, 'import mock\n'), ((14550, 14602), 'mock.MagicMock', 'mock.MagicMock', ([], {'return_value': "{'error': 'Some error'}"}), "(return_value={'error': 'Some error'})\n", (14564, 14602), False, 'import mock\n'), ((14271, 14287), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (14285, 14287), False, 'import mock\n'), ((16190, 16238), 'json.dumps', 'json.dumps', (["{'key': 'this is well-formed JSON.'}"], {}), "({'key': 'this is well-formed JSON.'})\n", (16200, 16238), False, 'import json\n'), ((16327, 16383), 'dashboard.common.testing_common.FakeResponseObject', 'testing_common.FakeResponseObject', (['status', 'response_text'], {}), '(status, response_text)\n', (16360, 16383), False, 'from dashboard.common import testing_common\n'), ((767, 810), 'dashboard.common.utils.TestMatchesPattern', 'utils.TestMatchesPattern', (['test_key', 'pattern'], {}), '(test_key, pattern)\n', (791, 810), False, 'from dashboard.common import utils\n'), ((1006, 1049), 'dashboard.common.utils.TestMatchesPattern', 'utils.TestMatchesPattern', (['test_key', 'pattern'], {}), '(test_key, pattern)\n', (1030, 1049), False, 'from dashboard.common import utils\n'), ((6316, 6335), 
'dashboard.common.utils.TestPath', 'utils.TestPath', (['key'], {}), '(key)\n', (6330, 6335), False, 'from dashboard.common import utils\n'), ((6472, 6491), 'dashboard.common.utils.TestPath', 'utils.TestPath', (['key'], {}), '(key)\n', (6486, 6491), False, 'from dashboard.common import utils\n'), ((6626, 6645), 'dashboard.common.utils.TestPath', 'utils.TestPath', (['key'], {}), '(key)\n', (6640, 6645), False, 'from dashboard.common import utils\n'), ((6831, 6900), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""Master"""', '"""m"""', '"""Bot"""', '"""b"""', '"""Test"""', '"""suite"""', '"""Test"""', '"""metric"""'], {}), "('Master', 'm', 'Bot', 'b', 'Test', 'suite', 'Test', 'metric')\n", (6838, 6900), False, 'from google.appengine.ext import ndb\n'), ((7954, 7997), 'google.appengine.ext.ndb.Key', 'ndb.Key', (['"""TestMetadata"""', '"""m/b/suite/metric"""'], {}), "('TestMetadata', 'm/b/suite/metric')\n", (7961, 7997), False, 'from google.appengine.ext import ndb\n'), ((8614, 8638), 'dashboard.common.utils.TestSuiteName', 'utils.TestSuiteName', (['key'], {}), '(key)\n', (8633, 8638), False, 'from dashboard.common import utils\n'), ((8711, 8733), 'dashboard.common.utils.MinimumRange', 'utils.MinimumRange', (['[]'], {}), '([])\n', (8729, 8733), False, 'from dashboard.common import utils\n'), ((8815, 8854), 'dashboard.common.utils.MinimumRange', 'utils.MinimumRange', (['[(5, 10), (15, 20)]'], {}), '([(5, 10), (15, 20)])\n', (8833, 8854), False, 'from dashboard.common import utils\n'), ((8943, 8972), 'dashboard.common.utils.MinimumRange', 'utils.MinimumRange', (['[(5, 10)]'], {}), '([(5, 10)])\n', (8961, 8972), False, 'from dashboard.common import utils\n'), ((9081, 9118), 'dashboard.common.utils.MinimumRange', 'utils.MinimumRange', (['[(2, 5), (5, 10)]'], {}), '([(2, 5), (5, 10)])\n', (9099, 9118), False, 'from dashboard.common import utils\n'), ((9219, 9275), 'dashboard.common.utils.MinimumRange', 'utils.MinimumRange', (['[(3, 20), (5, 15), (6, 25), (3, 14)]'], {}), 
'([(3, 20), (5, 15), (6, 25), (3, 14)])\n', (9237, 9275), False, 'from dashboard.common import utils\n'), ((9387, 9448), 'dashboard.common.utils.Validate', 'utils.Validate', (["['completed', 'pending', 'failed']", '"""running"""'], {}), "(['completed', 'pending', 'failed'], 'running')\n", (9401, 9448), False, 'from dashboard.common import utils\n'), ((9551, 9582), 'dashboard.common.utils.Validate', 'utils.Validate', (['int', '"""a string"""'], {}), "(int, 'a string')\n", (9565, 9582), False, 'from dashboard.common import utils\n'), ((9678, 9803), 'dashboard.common.utils.Validate', 'utils.Validate', (["{'status': str, 'try_job_id': int, 'required_property': int}", "{'status': 'completed', 'try_job_id': 1234}"], {}), "({'status': str, 'try_job_id': int, 'required_property': int},\n {'status': 'completed', 'try_job_id': 1234})\n", (9692, 9803), False, 'from dashboard.common import utils\n'), ((9918, 10017), 'dashboard.common.utils.Validate', 'utils.Validate', (["{'status': int, 'try_job_id': int}", "{'status': 'completed', 'try_job_id': 1234}"], {}), "({'status': int, 'try_job_id': int}, {'status': 'completed',\n 'try_job_id': 1234})\n", (9932, 10017), False, 'from dashboard.common import utils\n'), ((10142, 10254), 'dashboard.common.utils.Validate', 'utils.Validate', (["{'values': {'nested_values': ['orange', 'banana']}}", "{'values': {'nested_values': 'apple'}}"], {}), "({'values': {'nested_values': ['orange', 'banana']}}, {\n 'values': {'nested_values': 'apple'}})\n", (10156, 10254), False, 'from dashboard.common import utils\n'), ((10378, 10466), 'dashboard.common.utils.Validate', 'utils.Validate', (["{'values': {'nested_values': ['orange', 'banana']}}", "{'values': {}}"], {}), "({'values': {'nested_values': ['orange', 'banana']}}, {\n 'values': {}})\n", (10392, 10466), False, 'from dashboard.common import utils\n'), ((14083, 14122), 'dashboard.common.utils.IsGroupMember', 'utils.IsGroupMember', (['"""<EMAIL>"""', '"""group"""'], {}), "('<EMAIL>', 'group')\n", 
(14102, 14122), False, 'from dashboard.common import utils\n'), ((14686, 14725), 'dashboard.common.utils.IsGroupMember', 'utils.IsGroupMember', (['"""<EMAIL>"""', '"""group"""'], {}), "('<EMAIL>', 'group')\n", (14705, 14725), False, 'from dashboard.common import utils\n'), ((14876, 14915), 'dashboard.common.utils.GetSheriffForAutorollCommit', 'utils.GetSheriffForAutorollCommit', (['None'], {}), '(None)\n', (14909, 14915), False, 'from dashboard.common import utils\n'), ((14939, 14976), 'dashboard.common.utils.GetSheriffForAutorollCommit', 'utils.GetSheriffForAutorollCommit', (['{}'], {}), '({})\n', (14972, 14976), False, 'from dashboard.common import utils\n'), ((15000, 15049), 'dashboard.common.utils.GetSheriffForAutorollCommit', 'utils.GetSheriffForAutorollCommit', (["{'author': {}}"], {}), "({'author': {}})\n", (15033, 15049), False, 'from dashboard.common import utils\n'), ((15143, 15240), 'dashboard.common.utils.GetSheriffForAutorollCommit', 'utils.GetSheriffForAutorollCommit', (["{'author': {'email': '<EMAIL>'}, 'message': 'TBR=<EMAIL>'}"], {}), "({'author': {'email': '<EMAIL>'},\n 'message': 'TBR=<EMAIL>'})\n", (15176, 15240), False, 'from dashboard.common import utils\n'), ((15283, 15380), 'dashboard.common.utils.GetSheriffForAutorollCommit', 'utils.GetSheriffForAutorollCommit', (["{'author': {'email': '<EMAIL>'}, 'message': 'TBR=<EMAIL>'}"], {}), "({'author': {'email': '<EMAIL>'},\n 'message': 'TBR=<EMAIL>'})\n", (15316, 15380), False, 'from dashboard.common import utils\n'), ((15520, 15648), 'dashboard.common.utils.GetSheriffForAutorollCommit', 'utils.GetSheriffForAutorollCommit', (['{\'author\': {\'email\': \'<EMAIL>\'}, \'message\':\n """This is a roll.\n\nTBR=<EMAIL>,<EMAIL>\n\n"""}'], {}), '({\'author\': {\'email\': \'<EMAIL>\'},\n \'message\': """This is a roll.\n\nTBR=<EMAIL>,<EMAIL>\n\n"""})\n', (15553, 15648), False, 'from dashboard.common import utils\n'), ((15761, 15858), 'dashboard.common.utils.GetSheriffForAutorollCommit', 
'utils.GetSheriffForAutorollCommit', (["{'author': {'email': '<EMAIL>'}, 'message': 'TBR=<EMAIL>'}"], {}), "({'author': {'email': '<EMAIL>'},\n 'message': 'TBR=<EMAIL>'})\n", (15794, 15858), False, 'from dashboard.common import utils\n'), ((15971, 16024), 'dashboard.common.utils.GetSheriffForAutorollCommit', 'utils.GetSheriffForAutorollCommit', (["{'tbr': '<EMAIL>'}"], {}), "({'tbr': '<EMAIL>'})\n", (16004, 16024), False, 'from dashboard.common import utils\n'), ((16284, 16315), 'base64.b64encode', 'base64.b64encode', (['response_text'], {}), '(response_text)\n', (16300, 16315), False, 'import base64\n'), ((4351, 4376), 'dashboard.models.graph_data.Master', 'graph_data.Master', ([], {'id': '"""M"""'}), "(id='M')\n", (4368, 4376), False, 'from dashboard.models import graph_data\n'), ((4387, 4424), 'dashboard.models.graph_data.Bot', 'graph_data.Bot', ([], {'parent': 'master', 'id': '"""b"""'}), "(parent=master, id='b')\n", (4401, 4424), False, 'from dashboard.models import graph_data\n'), ((4873, 4898), 'dashboard.models.graph_data.Master', 'graph_data.Master', ([], {'id': '"""M"""'}), "(id='M')\n", (4890, 4898), False, 'from dashboard.models import graph_data\n'), ((4909, 4946), 'dashboard.models.graph_data.Bot', 'graph_data.Bot', ([], {'parent': 'master', 'id': '"""b"""'}), "(parent=master, id='b')\n", (4923, 4946), False, 'from dashboard.models import graph_data\n'), ((5742, 5762), 'dashboard.common.utils.GetMulti', 'utils.GetMulti', (['keys'], {}), '(keys)\n', (5756, 5762), False, 'from dashboard.common import utils\n'), ((5938, 5958), 'dashboard.common.utils.GetMulti', 'utils.GetMulti', (['keys'], {}), '(keys)\n', (5952, 5958), False, 'from dashboard.common import utils\n'), ((6140, 6160), 'dashboard.common.utils.GetMulti', 'utils.GetMulti', (['keys'], {}), '(keys)\n', (6154, 6160), False, 'from dashboard.common import utils\n'), ((4452, 4508), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/a"""', 'internal_only': 
'(False)'}), "(id='M/b/a', internal_only=False)\n", (4475, 4508), False, 'from dashboard.models import graph_data\n'), ((4524, 4580), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/b"""', 'internal_only': '(False)'}), "(id='M/b/b', internal_only=False)\n", (4547, 4580), False, 'from dashboard.models import graph_data\n'), ((4596, 4652), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/c"""', 'internal_only': '(False)'}), "(id='M/b/c', internal_only=False)\n", (4619, 4652), False, 'from dashboard.models import graph_data\n'), ((4668, 4724), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/d"""', 'internal_only': '(False)'}), "(id='M/b/d', internal_only=False)\n", (4691, 4724), False, 'from dashboard.models import graph_data\n'), ((4974, 5030), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/ax"""', 'internal_only': '(True)'}), "(id='M/b/ax', internal_only=True)\n", (4997, 5030), False, 'from dashboard.models import graph_data\n'), ((5046, 5102), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/a"""', 'internal_only': '(False)'}), "(id='M/b/a', internal_only=False)\n", (5069, 5102), False, 'from dashboard.models import graph_data\n'), ((5118, 5174), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/b"""', 'internal_only': '(False)'}), "(id='M/b/b', internal_only=False)\n", (5141, 5174), False, 'from dashboard.models import graph_data\n'), ((5190, 5246), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/bx"""', 'internal_only': '(True)'}), "(id='M/b/bx', internal_only=True)\n", (5213, 5246), False, 'from dashboard.models import graph_data\n'), ((5262, 5318), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/c"""', 'internal_only': 
'(False)'}), "(id='M/b/c', internal_only=False)\n", (5285, 5318), False, 'from dashboard.models import graph_data\n'), ((5334, 5390), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/cx"""', 'internal_only': '(True)'}), "(id='M/b/cx', internal_only=True)\n", (5357, 5390), False, 'from dashboard.models import graph_data\n'), ((5406, 5462), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/d"""', 'internal_only': '(False)'}), "(id='M/b/d', internal_only=False)\n", (5429, 5462), False, 'from dashboard.models import graph_data\n'), ((5478, 5534), 'dashboard.models.graph_data.TestMetadata', 'graph_data.TestMetadata', ([], {'id': '"""M/b/dx"""', 'internal_only': '(True)'}), "(id='M/b/dx', internal_only=True)\n", (5501, 5534), False, 'from dashboard.models import graph_data\n')]
|
#!/usr/bin/env python
# Copyright 2019 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Population-annealing (PA) sampler demo built from dwave-hybrid blocks.

Run with a single command-line argument: the path to a binary quadratic
model stored in COOrdinate (COO) text format.  The script estimates a
beta (inverse-temperature) schedule, then alternates fixed-temperature
sampling with energy-weighted resampling for ``num_iter`` generations,
and finally prints execution counters and the best sample found.
"""
from __future__ import print_function
import sys
import neal  # NOTE(review): not referenced below -- possibly a leftover import; confirm
import dimod
import hybrid
from hybrid.reference.pt import FixedTemperatureSampler
from hybrid.reference.pa import (
    CalculateAnnealingBetaSchedule, ProgressBetaAlongSchedule, EnergyWeightedResampler)
# load a problem (path is the only CLI argument; file is in COO text format)
problem = sys.argv[1]
with open(problem) as fp:
    bqm = dimod.BinaryQuadraticModel.from_coo(fp)
print("BQM: {} nodes, {} edges, {:.2f} density".format(
    len(bqm), len(bqm.quadratic), hybrid.bqm_density(bqm)))
# sweeps per fixed-temperature sampling step
num_sweeps = 1000
# number of generations, or temperatures to progress through
num_iter = 20
# population size
num_samples = 20
# PA workflow: after initial beta schedule estimation, we do `num_iter` steps
# (one per beta/temperature) of fixed-temperature sampling / weighted resampling
workflow = CalculateAnnealingBetaSchedule(length=num_iter) | hybrid.Loop(
    ProgressBetaAlongSchedule() | FixedTemperatureSampler(num_sweeps=num_sweeps, num_reads=num_samples) | EnergyWeightedResampler(),
    max_iter=num_iter
)
# run the workflow (blocking: .result() waits for the final state)
state = hybrid.State.from_problem(bqm)
solution = workflow.run(state).result()
# show execution profile
hybrid.profiling.print_counters(workflow)
# show results
print("Solution: sample={0.samples.first}, energy={0.samples.first.energy}".format(solution))
|
[
"hybrid.bqm_density",
"dimod.BinaryQuadraticModel.from_coo",
"hybrid.profiling.print_counters",
"hybrid.reference.pa.EnergyWeightedResampler",
"hybrid.reference.pt.FixedTemperatureSampler",
"hybrid.State.from_problem",
"hybrid.reference.pa.ProgressBetaAlongSchedule",
"hybrid.reference.pa.CalculateAnnealingBetaSchedule"
] |
[((1704, 1734), 'hybrid.State.from_problem', 'hybrid.State.from_problem', (['bqm'], {}), '(bqm)\n', (1729, 1734), False, 'import hybrid\n'), ((1801, 1842), 'hybrid.profiling.print_counters', 'hybrid.profiling.print_counters', (['workflow'], {}), '(workflow)\n', (1832, 1842), False, 'import hybrid\n'), ((951, 990), 'dimod.BinaryQuadraticModel.from_coo', 'dimod.BinaryQuadraticModel.from_coo', (['fp'], {}), '(fp)\n', (986, 990), False, 'import dimod\n'), ((1456, 1503), 'hybrid.reference.pa.CalculateAnnealingBetaSchedule', 'CalculateAnnealingBetaSchedule', ([], {'length': 'num_iter'}), '(length=num_iter)\n', (1486, 1503), False, 'from hybrid.reference.pa import CalculateAnnealingBetaSchedule, ProgressBetaAlongSchedule, EnergyWeightedResampler\n'), ((1082, 1105), 'hybrid.bqm_density', 'hybrid.bqm_density', (['bqm'], {}), '(bqm)\n', (1100, 1105), False, 'import hybrid\n'), ((1625, 1650), 'hybrid.reference.pa.EnergyWeightedResampler', 'EnergyWeightedResampler', ([], {}), '()\n', (1648, 1650), False, 'from hybrid.reference.pa import CalculateAnnealingBetaSchedule, ProgressBetaAlongSchedule, EnergyWeightedResampler\n'), ((1523, 1550), 'hybrid.reference.pa.ProgressBetaAlongSchedule', 'ProgressBetaAlongSchedule', ([], {}), '()\n', (1548, 1550), False, 'from hybrid.reference.pa import CalculateAnnealingBetaSchedule, ProgressBetaAlongSchedule, EnergyWeightedResampler\n'), ((1553, 1622), 'hybrid.reference.pt.FixedTemperatureSampler', 'FixedTemperatureSampler', ([], {'num_sweeps': 'num_sweeps', 'num_reads': 'num_samples'}), '(num_sweeps=num_sweeps, num_reads=num_samples)\n', (1576, 1622), False, 'from hybrid.reference.pt import FixedTemperatureSampler\n')]
|
import numpy as np
from example import algs
def test_pointless_sort():
    """pointless_sort must ignore its input and always yield [1, 2, 3]."""
    expected = np.array([1, 2, 3])
    # two independent random inputs: the output must not depend on either
    for _ in range(2):
        vec = np.random.rand(10)
        assert np.array_equal(algs.pointless_sort(vec), expected)
def test_bubblesort():
    """bubblesort's sorted output must agree with Python's sorted().

    Cases cover duplicates, the empty array, a single element, and a
    strictly decreasing run with negatives.
    """
    cases = (
        np.array([1, 2, 4, 0, 1]),    # duplicates, unsorted
        np.array([]),                 # zero-element edge case
        np.array([0]),                # single element
        np.array([2, 1, 0, -1, -2]),  # reverse order, negatives
    )
    for case in cases:
        assert np.array_equal(algs.bubblesort(case)[0], sorted(case))
def test_quicksort():
    """quicksort over the full index range [0, len-1] must agree with sorted()."""
    cases = (
        np.array([1, 2, 4, 0, 1]),    # duplicates, unsorted
        np.array([]),                 # zero-element edge case
        np.array([0]),                # single element
        np.array([2, 1, 0, -1, -2]),  # reverse order, negatives
    )
    for case in cases:
        result = algs.quicksort(case, 0, len(case) - 1, 0, 0)[0]
        assert np.array_equal(result, sorted(case))
|
[
"numpy.random.rand",
"example.algs.bubblesort",
"numpy.array",
"example.algs.pointless_sort"
] |
[((122, 140), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (136, 140), True, 'import numpy as np\n'), ((323, 341), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (337, 341), True, 'import numpy as np\n'), ((223, 245), 'example.algs.pointless_sort', 'algs.pointless_sort', (['x'], {}), '(x)\n', (242, 245), False, 'from example import algs\n'), ((247, 266), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (255, 266), True, 'import numpy as np\n'), ((423, 445), 'example.algs.pointless_sort', 'algs.pointless_sort', (['x'], {}), '(x)\n', (442, 445), False, 'from example import algs\n'), ((447, 466), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (455, 466), True, 'import numpy as np\n'), ((775, 800), 'numpy.array', 'np.array', (['[1, 2, 4, 0, 1]'], {}), '([1, 2, 4, 0, 1])\n', (783, 800), True, 'import numpy as np\n'), ((798, 810), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (806, 810), True, 'import numpy as np\n'), ((812, 825), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (820, 825), True, 'import numpy as np\n'), ((827, 854), 'numpy.array', 'np.array', (['[2, 1, 0, -1, -2]'], {}), '([2, 1, 0, -1, -2])\n', (835, 854), True, 'import numpy as np\n'), ((1136, 1161), 'numpy.array', 'np.array', (['[1, 2, 4, 0, 1]'], {}), '([1, 2, 4, 0, 1])\n', (1144, 1161), True, 'import numpy as np\n'), ((1159, 1171), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1167, 1171), True, 'import numpy as np\n'), ((1173, 1186), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (1181, 1186), True, 'import numpy as np\n'), ((1188, 1215), 'numpy.array', 'np.array', (['[2, 1, 0, -1, -2]'], {}), '([2, 1, 0, -1, -2])\n', (1196, 1215), True, 'import numpy as np\n'), ((882, 900), 'example.algs.bubblesort', 'algs.bubblesort', (['w'], {}), '(w)\n', (897, 900), False, 'from example import algs\n'), ((942, 960), 'example.algs.bubblesort', 'algs.bubblesort', (['x'], {}), '(x)\n', (957, 960), False, 'from example 
import algs\n'), ((1002, 1020), 'example.algs.bubblesort', 'algs.bubblesort', (['y'], {}), '(y)\n', (1017, 1020), False, 'from example import algs\n'), ((1062, 1080), 'example.algs.bubblesort', 'algs.bubblesort', (['z'], {}), '(z)\n', (1077, 1080), False, 'from example import algs\n')]
|
import pytest
from ....account.models import Address
from ....checkout.fetch import CheckoutInfo, get_delivery_method_info
from ....shipping.models import ShippingMethodChannelListing
from ...models import PluginConfiguration
from ..plugin import AvataxPlugin
@pytest.fixture(scope="module")
def vcr_config():
    """VCR settings shared module-wide: scrub the auth header from cassettes."""
    # "Basic Og==" is the placeholder value recorded in place of real credentials
    scrubbed_headers = [("Authorization", "Basic Og==")]
    return {"filter_headers": scrubbed_headers}
@pytest.fixture
def plugin_configuration(db, channel_USD):
    """Factory fixture: persist an Avatax ``PluginConfiguration`` row.

    The returned callable accepts credential, channel, and ship-from
    overrides and creates the configuration for ``AvataxPlugin``.
    """
    def set_configuration(
        username="test",
        password="<PASSWORD>",
        sandbox=False,
        channel=None,
        active=True,
        from_street_address="Teczowa 7",
        from_city="Wroclaw",
        from_country="PL",
        from_country_area="",
        from_postal_code="53-601",
    ):
        # fall back to the session-wide USD channel when none is supplied
        channel = channel or channel_USD
        settings = [
            ("Username or account", username),
            ("Password or license", password),
            ("Use sandbox", sandbox),
            ("Company name", "DEFAULT"),
            ("Autocommit", False),
            ("from_street_address", from_street_address),
            ("from_city", from_city),
            ("from_country", from_country),
            ("from_country_area", from_country_area),
            ("from_postal_code", from_postal_code),
        ]
        return PluginConfiguration.objects.create(
            identifier=AvataxPlugin.PLUGIN_ID,
            active=active,
            name=AvataxPlugin.PLUGIN_NAME,
            channel=channel,
            configuration=[
                {"name": name, "value": value} for name, value in settings
            ],
        )
    return set_configuration
@pytest.fixture
def ship_to_pl_address(db):
    """A persisted Wroclaw (PL) address used as a shipping destination."""
    address_fields = {
        "first_name": "Eleanor",
        "last_name": "Smith",
        "street_address_1": "Oławska 10",
        "city": "WROCŁAW",
        "postal_code": "53-105",
        "country": "PL",
        "phone": "+48713988155",
    }
    return Address.objects.create(**address_fields)
@pytest.fixture
def checkout_with_items_and_shipping(checkout_with_items, address, shipping_method):
    """Extend ``checkout_with_items`` with addresses and a shipping method."""
    checkout = checkout_with_items
    # the same address serves as both shipping and billing destination
    checkout.shipping_address = address
    checkout.billing_address = address
    checkout.shipping_method = shipping_method
    checkout.save()
    return checkout
@pytest.fixture
def checkout_with_items_and_shipping_info(checkout_with_items_and_shipping):
    """Wrap the shipping-enabled checkout in a ``CheckoutInfo`` snapshot."""
    checkout = checkout_with_items_and_shipping
    destination = checkout.shipping_address
    method = checkout.shipping_method
    # first matching per-channel listing for the selected shipping method
    listing = ShippingMethodChannelListing.objects.filter(
        shipping_method=method, channel=checkout.channel
    ).first()
    return CheckoutInfo(
        checkout=checkout,
        user=checkout.user,
        channel=checkout.channel,
        billing_address=checkout.billing_address,
        shipping_address=destination,
        delivery_method_info=get_delivery_method_info(method, destination),
        shipping_method_channel_listings=listing,
        valid_shipping_methods=[],
        valid_pick_up_points=[],
    )
@pytest.fixture
def avalara_response_for_checkout_with_items_and_shipping():
return {
"id": 0,
"code": "8657e84b-c5ab-4c27-bcc2-c8d3ebbe771b",
"companyId": 242975,
"date": "2021-03-18",
"paymentDate": "2021-03-18",
"status": "Temporary",
"type": "SalesOrder",
"batchCode": "",
"currencyCode": "USD",
"exchangeRateCurrencyCode": "USD",
"customerUsageType": "",
"entityUseCode": "",
"customerVendorCode": "0",
"customerCode": "0",
"exemptNo": "",
"reconciled": False,
"locationCode": "",
"reportingLocationCode": "",
"purchaseOrderNo": "",
"referenceCode": "",
"salespersonCode": "",
"totalAmount": 12.2,
"totalExempt": 0.0,
"totalDiscount": 0.0,
"totalTax": 2.8,
"totalTaxable": 12.2,
"totalTaxCalculated": 2.8,
"adjustmentReason": "NotAdjusted",
"locked": False,
"version": 1,
"exchangeRateEffectiveDate": "2021-03-18",
"exchangeRate": 1.0,
"email": "",
"modifiedDate": "2021-03-18T13:23:21.7641305Z",
"modifiedUserId": 283192,
"taxDate": "2021-03-18T00:00:00Z",
"lines": [
{
"id": 0,
"transactionId": 0,
"lineNumber": "1",
"customerUsageType": "",
"entityUseCode": "",
"description": "Test product",
"discountAmount": 0.0,
"exemptAmount": 0.0,
"exemptCertId": 0,
"exemptNo": "",
"isItemTaxable": True,
"itemCode": "123",
"lineAmount": 4.07,
"quantity": 1.0,
"ref1": "",
"ref2": "",
"reportingDate": "2021-03-18",
"tax": 0.93,
"taxableAmount": 4.07,
"taxCalculated": 0.93,
"taxCode": "O9999999",
"taxCodeId": 5340,
"taxDate": "2021-03-18",
"taxIncluded": True,
"details": [
{
"id": 0,
"transactionLineId": 0,
"transactionId": 0,
"country": "PL",
"region": "PL",
"exemptAmount": 0.0,
"jurisCode": "PL",
"jurisName": "POLAND",
"stateAssignedNo": "",
"jurisType": "CNT",
"jurisdictionType": "Country",
"nonTaxableAmount": 0.0,
"rate": 0.23,
"tax": 0.93,
"taxableAmount": 4.07,
"taxType": "Output",
"taxSubTypeId": "O",
"taxName": "Standard Rate",
"taxAuthorityTypeId": 45,
"taxCalculated": 0.93,
"rateType": "Standard",
"rateTypeCode": "S",
"unitOfBasis": "PerCurrencyUnit",
"isNonPassThru": False,
"isFee": False,
"reportingTaxableUnits": 4.07,
"reportingNonTaxableUnits": 0.0,
"reportingExemptUnits": 0.0,
"reportingTax": 0.93,
"reportingTaxCalculated": 0.93,
"liabilityType": "Seller",
},
{
"id": 0,
"transactionLineId": 0,
"transactionId": 0,
"country": "PL",
"region": "PL",
"exemptAmount": 0.0,
"jurisCode": "EU",
"jurisName": "EUROPE",
"stateAssignedNo": "",
"jurisType": "CNT",
"jurisdictionType": "Country",
"nonTaxableAmount": 0.0,
"rate": 0.13,
"tax": 0.93,
"taxableAmount": 4.07,
"taxType": "Output",
"taxSubTypeId": "O",
"taxName": "Standard Rate",
"taxAuthorityTypeId": 45,
"taxCalculated": 0.93,
"rateType": "Standard",
"rateTypeCode": "S",
"unitOfBasis": "PerCurrencyUnit",
"isNonPassThru": False,
"isFee": False,
"reportingTaxableUnits": 4.07,
"reportingNonTaxableUnits": 0.0,
"reportingExemptUnits": 0.0,
"reportingTax": 0.93,
"reportingTaxCalculated": 0.93,
"liabilityType": "Seller",
},
],
"nonPassthroughDetails": [],
"hsCode": "",
"costInsuranceFreight": 0.0,
"vatCode": "PLS-230O--PL",
"vatNumberTypeId": 0,
},
{
"id": 0,
"transactionId": 0,
"lineNumber": "2",
"customerUsageType": "",
"entityUseCode": "",
"discountAmount": 0.0,
"exemptAmount": 0.0,
"exemptCertId": 0,
"exemptNo": "",
"isItemTaxable": True,
"itemCode": "Shipping",
"lineAmount": 8.13,
"quantity": 1.0,
"ref1": "",
"ref2": "",
"reportingDate": "2021-03-18",
"tax": 1.87,
"taxableAmount": 8.13,
"taxCalculated": 1.87,
"taxCode": "FR020100",
"taxCodeId": 4784,
"taxDate": "2021-03-18",
"taxIncluded": True,
"details": [
{
"id": 0,
"transactionLineId": 0,
"transactionId": 0,
"country": "PL",
"region": "PL",
"exemptAmount": 0.0,
"jurisCode": "PL",
"jurisName": "POLAND",
"stateAssignedNo": "",
"jurisType": "CNT",
"jurisdictionType": "Country",
"nonTaxableAmount": 0.0,
"rate": 0.23,
"tax": 1.87,
"taxableAmount": 8.13,
"taxType": "Output",
"taxSubTypeId": "O",
"taxName": "Standard Rate",
"taxAuthorityTypeId": 45,
"taxCalculated": 1.87,
"rateType": "Standard",
"rateTypeCode": "S",
"unitOfBasis": "PerCurrencyUnit",
"isNonPassThru": False,
"isFee": False,
"reportingTaxableUnits": 8.13,
"reportingNonTaxableUnits": 0.0,
"reportingExemptUnits": 0.0,
"reportingTax": 1.87,
"reportingTaxCalculated": 1.87,
"liabilityType": "Seller",
},
{
"id": 0,
"transactionLineId": 0,
"transactionId": 0,
"country": "PL",
"region": "PL",
"exemptAmount": 0.0,
"jurisCode": "EU",
"jurisName": "EUROPE",
"stateAssignedNo": "",
"jurisType": "CNT",
"jurisdictionType": "Country",
"nonTaxableAmount": 0.0,
"rate": 0.23,
"tax": 1.87,
"taxableAmount": 8.13,
"taxType": "Output",
"taxSubTypeId": "O",
"taxName": "Standard Rate",
"taxAuthorityTypeId": 45,
"taxCalculated": 1.87,
"rateType": "Standard",
"rateTypeCode": "S",
"unitOfBasis": "PerCurrencyUnit",
"isNonPassThru": False,
"isFee": False,
"reportingTaxableUnits": 8.13,
"reportingNonTaxableUnits": 0.0,
"reportingExemptUnits": 0.0,
"reportingTax": 1.87,
"reportingTaxCalculated": 1.87,
"liabilityType": "Seller",
},
],
"nonPassthroughDetails": [],
"hsCode": "",
"costInsuranceFreight": 0.0,
"vatCode": "PLS-230F--PL",
"vatNumberTypeId": 0,
},
],
"addresses": [
{
"id": 0,
"transactionId": 0,
"boundaryLevel": "Zip5",
"line1": "Teczowa 7",
"line2": "",
"line3": "",
"city": "WROCLAW",
"region": "",
"postalCode": "53-601",
"country": "PL",
"taxRegionId": 205102,
"latitude": "",
"longitude": "",
}
],
"summary": [
{
"country": "PL",
"region": "PL",
"jurisType": "Country",
"jurisCode": "PL",
"jurisName": "POLAND",
"taxAuthorityType": 45,
"stateAssignedNo": "",
"taxType": "Output",
"taxSubType": "O",
"taxName": "Standard Rate",
"rateType": "Standard",
"taxable": 12.2,
"rate": 0.23,
"tax": 2.8,
"taxCalculated": 2.8,
"nonTaxable": 0.0,
"exemption": 0.0,
}
],
}
|
[
"pytest.fixture"
] |
[((264, 294), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (278, 294), False, 'import pytest\n')]
|
import torch
import torch.nn as nn
class FocalLoss(nn.Module):
def __init__(self, gamma=2, alpha=0.5, weight=None, ignore_index=255):
super().__init__()
self.gamma = gamma
self.alpha = alpha
self.weight = weight
self.ignore_index = ignore_index
self.ce_fn = nn.CrossEntropyLoss(weight=self.weight, ignore_index=self.ignore_index)
def forward(self, preds, labels):
logpt = -self.ce_fn(preds, labels)
pt = torch.exp(logpt)
if self.alpha is not None:
logpt *= self.alpha
loss = -((1 - pt) ** self.gamma) * logpt
return loss
|
[
"torch.exp",
"torch.nn.CrossEntropyLoss"
] |
[((323, 394), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'weight': 'self.weight', 'ignore_index': 'self.ignore_index'}), '(weight=self.weight, ignore_index=self.ignore_index)\n', (342, 394), True, 'import torch.nn as nn\n'), ((494, 510), 'torch.exp', 'torch.exp', (['logpt'], {}), '(logpt)\n', (503, 510), False, 'import torch\n')]
|
from datetime import datetime
from django.core.urlresolvers import reverse
from sentry.models import Release
from sentry.testutils import APITestCase
class ProjectReleasesTest(APITestCase):
def test_simple(self):
self.login_as(user=self.user)
team = self.create_team(owner=self.user)
project1 = self.create_project(team=team, name='foo')
project2 = self.create_project(team=team, name='bar')
release1 = Release.objects.create(
project=project1,
version='1',
date_added=datetime(2013, 8, 13, 3, 8, 24, 880386),
)
release2 = Release.objects.create(
project=project1,
version='2',
date_added=datetime(2013, 8, 14, 3, 8, 24, 880386),
)
Release.objects.create(
project=project2,
version='1',
)
url = reverse('sentry-api-0-project-releases', kwargs={
'project_id': project1.id,
})
response = self.client.get(url, format='json')
assert response.status_code == 200, response.content
assert len(response.data) == 2
assert response.data[0]['id'] == str(release2.id)
assert response.data[1]['id'] == str(release1.id)
|
[
"sentry.models.Release.objects.create",
"django.core.urlresolvers.reverse",
"datetime.datetime"
] |
[((785, 838), 'sentry.models.Release.objects.create', 'Release.objects.create', ([], {'project': 'project2', 'version': '"""1"""'}), "(project=project2, version='1')\n", (807, 838), False, 'from sentry.models import Release\n'), ((889, 965), 'django.core.urlresolvers.reverse', 'reverse', (['"""sentry-api-0-project-releases"""'], {'kwargs': "{'project_id': project1.id}"}), "('sentry-api-0-project-releases', kwargs={'project_id': project1.id})\n", (896, 965), False, 'from django.core.urlresolvers import reverse\n'), ((554, 593), 'datetime.datetime', 'datetime', (['(2013)', '(8)', '(13)', '(3)', '(8)', '(24)', '(880386)'], {}), '(2013, 8, 13, 3, 8, 24, 880386)\n', (562, 593), False, 'from datetime import datetime\n'), ((726, 765), 'datetime.datetime', 'datetime', (['(2013)', '(8)', '(14)', '(3)', '(8)', '(24)', '(880386)'], {}), '(2013, 8, 14, 3, 8, 24, 880386)\n', (734, 765), False, 'from datetime import datetime\n')]
|
#!/usr/bin/python
import sys
import graph
def printUsage():
print('main.py <graph_file>')
if __name__ == '__main__':
if (len(sys.argv) != 2):
printUsage()
# Open input file
f = open(sys.argv[1], "r")
data = f.readlines()
# Fill graph with edges in file
g = graph.Graph()
for line in data:
src, dst, cost = line.split()
g.add_edge(int(src), int(dst), int(cost))
# Tests
'''
print(g)
print(g.get_vertices())
for u in g:
print('%s' % (g.vertices[u.get_id()]))
for u in g:
for v in u.get_neighbors():
print('(%s , %s , %s)' % (u.get_id(), v.get_id(), u.get_weight(v)))
print("find_all_paths = ", g.find_all_paths(1, 7))
print("find_path = ", g.find_path(1, 7))
print("find_shortest_path = ", g.find_shortest_path(1, 7))
print("is_connected = ", g.is_connected())
'''
print("shortest_path = ", g.shortest_path(g.vertices, 3, 2))
|
[
"graph.Graph"
] |
[((303, 316), 'graph.Graph', 'graph.Graph', ([], {}), '()\n', (314, 316), False, 'import graph\n')]
|
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
FileStudyTreeConfig,
)
from antarest.study.storage.rawstudy.model.filesystem.context import (
ContextServer,
)
from antarest.study.storage.rawstudy.model.filesystem.folder_node import (
FolderNode,
)
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.mcind.scn.links.item.item import (
OutputSimulationModeMcIndScnLinksItem as Item,
)
class _OutputSimulationModeMcIndScnLinksBis(FolderNode):
def __init__(
self, context: ContextServer, config: FileStudyTreeConfig, area: str
):
FolderNode.__init__(self, context, config)
self.area = area
def build(self) -> TREE:
children: TREE = {}
for link in self.config.get_links(self.area):
name = f"{self.area} - {link}"
children[link] = Item(
self.context, self.config.next_file(name), self.area, link
)
return children
class OutputSimulationModeMcIndScnLinks(FolderNode):
def build(self) -> TREE:
children: TREE = {}
for area in self.config.area_names():
children[area] = _OutputSimulationModeMcIndScnLinksBis(
self.context, self.config, area
)
return children
|
[
"antarest.study.storage.rawstudy.model.filesystem.folder_node.FolderNode.__init__"
] |
[((699, 741), 'antarest.study.storage.rawstudy.model.filesystem.folder_node.FolderNode.__init__', 'FolderNode.__init__', (['self', 'context', 'config'], {}), '(self, context, config)\n', (718, 741), False, 'from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode\n')]
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
__version__ = '0.1.0'
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(name='django-pint-unit-field',
version=__version__,
url='https://github.com/marcosalcazar/django-pint-unit-field',
author="<NAME>",
author_email="<EMAIL>",
description=("Unit Field for Django using pint library "
"for automated unit conversions"),
long_description=open(os.path.join(PACKAGE_DIR, 'README.md')).read(),
license='MIT',
packages=find_packages(exclude=["tests*"]),
include_package_data=True,
install_requires=[
'django>=3.0',
'pint>=0.12',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python']
)
|
[
"os.path.dirname",
"os.path.join",
"os.chdir",
"setuptools.find_packages"
] |
[((158, 179), 'os.chdir', 'os.chdir', (['PACKAGE_DIR'], {}), '(PACKAGE_DIR)\n', (166, 179), False, 'import os\n'), ((131, 156), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (146, 156), False, 'import os\n'), ((597, 630), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests*']"}), "(exclude=['tests*'])\n", (610, 630), False, 'from setuptools import setup, find_packages\n'), ((513, 551), 'os.path.join', 'os.path.join', (['PACKAGE_DIR', '"""README.md"""'], {}), "(PACKAGE_DIR, 'README.md')\n", (525, 551), False, 'import os\n')]
|
from sense_hat import SenseHat
import pygame
from pygame.locals import *
from time import sleep
pygame.init()
pygame.display.set_mode((1,1))
sense = SenseHat()
loop = True
i = 0
while i < 25:
i += 1
while loop == True:
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_DOWN:
loop = False
print("1")
sleep(1)
while loop == False:
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_UP:
loop = True
print("2")
sleep(1)
|
[
"pygame.event.get",
"pygame.display.set_mode",
"sense_hat.SenseHat",
"pygame.init",
"time.sleep"
] |
[((97, 110), 'pygame.init', 'pygame.init', ([], {}), '()\n', (108, 110), False, 'import pygame\n'), ((111, 142), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(1, 1)'], {}), '((1, 1))\n', (134, 142), False, 'import pygame\n'), ((150, 160), 'sense_hat.SenseHat', 'SenseHat', ([], {}), '()\n', (158, 160), False, 'from sense_hat import SenseHat\n'), ((256, 274), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (272, 274), False, 'import pygame\n'), ((414, 422), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (419, 422), False, 'from time import sleep\n'), ((478, 496), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (494, 496), False, 'import pygame\n'), ((633, 641), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (638, 641), False, 'from time import sleep\n')]
|
from setuptools import setup, find_packages
def readme():
with open('README.md', encoding='utf-8') as f:
README = f.read()
return README
setup(
name="mealpy",
version="1.0.3",
author="Thieu",
author_email="<EMAIL>",
description="A collection of the state-of-the-art MEta-heuristics ALgorithms in PYthon (mealpy)",
long_description=readme(),
long_description_content_type="text/markdown",
url="https://github.com/thieu1995/mealpy",
download_url="https://github.com/thieu1995/mealpy/archive/v1.0.3.zip",
packages=find_packages(),
include_package_data=True,
license="MIT",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: System :: Benchmark",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Scientific/Engineering :: Visualization",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities",
],
install_requires=["numpy", "opfunu", "matplotlib", "scipy"],
python_requires='>=3.6',
)
|
[
"setuptools.find_packages"
] |
[((569, 584), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (582, 584), False, 'from setuptools import setup, find_packages\n')]
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# -*- coding: utf-8 -*-
"""
# @Time : 2019/5/25
# @Author : Jiaqi&Zecheng
# @File : args.py
# @Software: PyCharm
"""
import random
# argparse帮助在命令行界面运行.py文件,帮助程序获取参数,并生成报错,用户手册等信息
import argparse
import torch
import numpy as np
def init_arg_parser():
# 使用 argparse 的第一步是创建一个 ArgumentParser 对象(解析器)
# 将命令行解析成 Python 数据类型所需的全部信息
arg_parser = argparse.ArgumentParser()
#add_argument指定 ArgumentParser 如何获取命令行字符串并将其转换为对象
arg_parser.add_argument('--seed', default=5783287, type=int, help='random seed')
arg_parser.add_argument('--cuda', action='store_true', help='use gpu')
arg_parser.add_argument('--lr_scheduler', action='store_true', help='use learning rate scheduler')
arg_parser.add_argument('--lr_scheduler_gammar', default=0.5, type=float, help='decay rate of learning rate scheduler')
arg_parser.add_argument('--column_pointer', action='store_true', help='use column pointer')
arg_parser.add_argument('--loss_epoch_threshold', default=20, type=int, help='loss epoch threshold')
arg_parser.add_argument('--sketch_loss_coefficient', default=0.2, type=float, help='sketch loss coefficient')
arg_parser.add_argument('--sentence_features', action='store_true', help='use sentence features')
arg_parser.add_argument('--model_name', choices=['transformer', 'rnn', 'table', 'sketch'], default='rnn',
help='model name')
arg_parser.add_argument('--lstm', choices=['lstm', 'lstm_with_dropout', 'parent_feed'], default='lstm')
arg_parser.add_argument('--load_model', default=None, type=str, help='load a pre-trained model')
arg_parser.add_argument('--glove_embed_path', default="glove.42B.300d.txt", type=str)
arg_parser.add_argument('--batch_size', default=64, type=int, help='batch size')
arg_parser.add_argument('--beam_size', default=5, type=int, help='beam size for beam search')
arg_parser.add_argument('--embed_size', default=300, type=int, help='size of word embeddings')
arg_parser.add_argument('--col_embed_size', default=300, type=int, help='size of word embeddings')
arg_parser.add_argument('--action_embed_size', default=128, type=int, help='size of word embeddings')
arg_parser.add_argument('--type_embed_size', default=128, type=int, help='size of word embeddings')
arg_parser.add_argument('--hidden_size', default=100, type=int, help='size of LSTM hidden states')
arg_parser.add_argument('--att_vec_size', default=100, type=int, help='size of attentional vector')
arg_parser.add_argument('--dropout', default=0.3, type=float, help='dropout rate')
arg_parser.add_argument('--word_dropout', default=0.2, type=float, help='word dropout rate')
# readout layer
arg_parser.add_argument('--no_query_vec_to_action_map', default=False, action='store_true')
arg_parser.add_argument('--readout', default='identity', choices=['identity', 'non_linear'])
arg_parser.add_argument('--query_vec_to_action_diff_map', default=False, action='store_true')
arg_parser.add_argument('--column_att', choices=['dot_prod', 'affine'], default='affine')
arg_parser.add_argument('--decode_max_time_step', default=40, type=int, help='maximum number of time steps used '
'in decoding and sampling')
arg_parser.add_argument('--save_to', default='model', type=str, help='save trained model to')
arg_parser.add_argument('--toy', action='store_true',
help='If set, use small data; used for fast debugging.')
arg_parser.add_argument('--clip_grad', default=5., type=float, help='clip gradients')
arg_parser.add_argument('--max_epoch', default=-1, type=int, help='maximum number of training epoches')
arg_parser.add_argument('--optimizer', default='Adam', type=str, help='optimizer')
arg_parser.add_argument('--lr', default=0.001, type=float, help='learning rate')
arg_parser.add_argument('--dataset', default="./data", type=str)
arg_parser.add_argument('--epoch', default=50, type=int, help='Maximum Epoch')
arg_parser.add_argument('--save', default='./', type=str,
help="Path to save the checkpoint and logs of epoch")
return arg_parser
def init_config(arg_parser):
# 从命令行转化的参数通过parse_args()获取
args = arg_parser.parse_args()
# manual_seed用来torch每次训练初始化网络的随机数种子固定
torch.manual_seed(args.seed)
if args.cuda:
torch.cuda.manual_seed(args.seed)
np.random.seed(int(args.seed * 13 / 7))
# 每次使用int(args.seed)作为参数时,生成的随机数相同
random.seed(int(args.seed))
return args
|
[
"torch.manual_seed",
"torch.cuda.manual_seed",
"argparse.ArgumentParser"
] |
[((455, 480), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (478, 480), False, 'import argparse\n'), ((4560, 4588), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (4577, 4588), False, 'import torch\n'), ((4617, 4650), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['args.seed'], {}), '(args.seed)\n', (4639, 4650), False, 'import torch\n')]
|
import logging
import os
import subprocess
import sys
import textwrap
import toml
from datetime import datetime
from typing import Any, Callable, Dict, Optional
try:
from secrets import token_hex
except ImportError:
# TODO: remove after dropping Python 2.7 and 3.5 support
import string
import random
def token_hex(nbytes=None): # type: (Optional[int]) -> str
size = nbytes * 2 if nbytes is not None else 64
return "".join(random.choice(string.hexdigits) for i in range(size)).lower()
log = logging.getLogger(__name__)
root = os.path.dirname(os.path.dirname(__file__))
def rand_str(): # type: () -> str
return token_hex()
def rand_full_sha(): # type: () -> str
return token_hex(40)
def rand_sha(): # type: () -> str
return token_hex(8)
def execute(cwd, cmd, **kwargs): # type: (str, str, **Any) -> str
log.info(cwd)
return subprocess.check_output(cmd, cwd=cwd, shell=True, universal_newlines=True, **kwargs) # nosec
def get_full_sha(cwd, **kwargs): # type: (str, **Any) -> str
return execute(cwd, "git rev-list -n 1 HEAD", **kwargs).strip()
def get_sha(cwd, **kwargs): # type: (str, **Any) -> str
return get_full_sha(cwd, **kwargs)[:8]
def create_commit(
cwd, # type: str
message, # type: str
dt=None, # type: Optional[datetime]
**kwargs # type: Any
): # type: (...) -> str
options = ""
if dt is not None:
options += "--date {timestamp}".format(timestamp=dt.isoformat())
return execute(cwd, 'git commit -m "{message}" {options}'.format(message=message, options=options), **kwargs)
def create_tag(
cwd, # type: str
tag, # type: str
message=None, # type: Optional[str]
commit=None, # type: Optional[str]
**kwargs # type: Any
): # type: (...) -> str
options = ""
if message:
options += ' -a -m "{message}"'.format(message=message)
if not commit:
commit = ""
return execute(cwd, "git tag {options} {tag} {commit}".format(options=options, tag=tag, commit=commit), **kwargs)
def checkout_branch(cwd, branch, new=True, **kwargs): # type: (str, str, bool, **Any) -> str
options = ""
if new:
options += " -b"
return execute(cwd, "git checkout {options} {branch}".format(options=options, branch=branch), **kwargs)
def create_file(
cwd, # type: str
name=None, # type: Optional[str]
content=None, # type: Optional[str]
add=True, # type: bool
commit=True, # type: bool
**kwargs # type: Any
): # type: (...) -> Optional[str]
result = None
if not name:
name = rand_str()
if content is None:
content = rand_str()
log.warning(content)
with open(os.path.join(cwd, name), "w") as f:
f.write(content)
if add:
execute(cwd, "git add {name}".format(name=name))
log.info(execute(cwd, "git status"))
log.info(execute(cwd, "git diff"))
if commit:
create_commit(cwd, "Add {name}".format(name=name))
result = get_sha(cwd)
return result
def create_pyproject_toml(
cwd, # type: str
config=None, # type: Optional[dict]
commit=True, # type: bool
**kwargs # type: Any
): # type: (...) -> Optional[str]
# well, using pyproject.toml+setup.cfg is more classic
# but it is not easy to check code coverage in such a case
# so we're using pyproject.toml+setup.py
create_file(
cwd,
"setup.py",
textwrap.dedent(
"""
from coverage.control import Coverage
coverage = Coverage()
coverage.start()
try:
import setuptools
setuptools.setup(
name="mypkg",
)
finally:
coverage.stop()
coverage.save()
"""
),
commit=False,
**kwargs,
)
cfg = {} # type: Dict[str, Any]
cfg["build-system"] = {
"requires": [
"setuptools>=41",
"wheel",
"setuptools-git-versioning",
],
# with default "setuptools.build_meta" it is not possible to build package
# which uses its own source code to get version number,
# e.g. `version_callback` or `branch_formatter`
# mote details: https://github.com/pypa/setuptools/issues/1642#issuecomment-457673563
"build-backend": "setuptools.build_meta:__legacy__",
}
if config is None:
config = {"enabled": True}
if config != NotImplemented:
cfg["tool"] = {"setuptools-git-versioning": config}
return create_file(cwd, "pyproject.toml", toml.dumps(cfg), commit=commit, **kwargs)
def create_setup_py(
cwd, # type: str
config=None, # type: Optional[dict]
option="setuptools_git_versioning", # # type: str
**kwargs # type: Any
): # type: (...) -> Optional[str]
if config is None:
config = {"enabled": True}
if config == NotImplemented:
cfg = ""
else:
cfg = "{option}={config},".format(option=option, config=config)
return create_file(
cwd,
"setup.py",
textwrap.dedent(
"""
from coverage.control import Coverage
coverage = Coverage()
coverage.start()
try:
import setuptools
setuptools.setup(
name="mypkg",
{cfg}
setup_requires=[
"setuptools>=41",
"wheel",
"setuptools-git-versioning",
]
)
finally:
coverage.stop()
coverage.save()
"""
).format(cfg=cfg),
**kwargs,
)
def typed_config(
repo, # type: str
config_creator, # type: Callable
config_type, # type: str
template=None, # type: Optional[str]
template_name=None, # type: Optional[str]
config=None, # type: Optional[dict]
):
if config_type == "tag":
cfg = {}
else:
cfg = {"version_file": "VERSION.txt", "count_commits_from_version_file": True}
if template_name is None:
if config_type == "tag":
template_name = "template"
else:
template_name = "dev_template"
if template:
cfg[template_name] = template
if config:
cfg.update(config)
config_creator(repo, cfg)
if config_type == "tag":
create_tag(repo, "1.2.3")
else:
create_file(repo, "VERSION.txt", "1.2.3")
def get_version_setup_py(cwd, **kwargs): # type: (str, **Any) -> str
return execute(cwd, "{python} setup.py --version".format(python=sys.executable), **kwargs).strip()
def get_version(cwd, isolated=False, **kwargs): # type: (str, bool, **Any) -> str
cmd = "{python} -m build -s".format(python=sys.executable)
if not isolated:
cmd += " --no-isolation"
execute(cwd, cmd, **kwargs)
with open(os.path.join(cwd, "mypkg.egg-info/PKG-INFO")) as f:
content = f.read().splitlines()
for line in content:
if line.startswith("Version: "):
return line.replace("Version: ", "").strip()
raise RuntimeError("Cannot get package version")
|
[
"textwrap.dedent",
"toml.dumps",
"os.path.dirname",
"subprocess.check_output",
"secrets.token_hex",
"random.choice",
"os.path.join",
"logging.getLogger"
] |
[((533, 560), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (550, 560), False, 'import logging\n'), ((584, 609), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (599, 609), False, 'import os\n'), ((659, 670), 'secrets.token_hex', 'token_hex', ([], {}), '()\n', (668, 670), False, 'from secrets import token_hex\n'), ((724, 737), 'secrets.token_hex', 'token_hex', (['(40)'], {}), '(40)\n', (733, 737), False, 'from secrets import token_hex\n'), ((786, 798), 'secrets.token_hex', 'token_hex', (['(8)'], {}), '(8)\n', (795, 798), False, 'from secrets import token_hex\n'), ((897, 985), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'cwd': 'cwd', 'shell': '(True)', 'universal_newlines': '(True)'}), '(cmd, cwd=cwd, shell=True, universal_newlines=True,\n **kwargs)\n', (920, 985), False, 'import subprocess\n'), ((3480, 3864), 'textwrap.dedent', 'textwrap.dedent', (['"""\n from coverage.control import Coverage\n\n coverage = Coverage()\n coverage.start()\n\n try:\n import setuptools\n\n setuptools.setup(\n name="mypkg",\n )\n finally:\n coverage.stop()\n coverage.save()\n """'], {}), '(\n """\n from coverage.control import Coverage\n\n coverage = Coverage()\n coverage.start()\n\n try:\n import setuptools\n\n setuptools.setup(\n name="mypkg",\n )\n finally:\n coverage.stop()\n coverage.save()\n """\n )\n', (3495, 3864), False, 'import textwrap\n'), ((4679, 4694), 'toml.dumps', 'toml.dumps', (['cfg'], {}), '(cfg)\n', (4689, 4694), False, 'import toml\n'), ((2716, 2739), 'os.path.join', 'os.path.join', (['cwd', 'name'], {}), '(cwd, name)\n', (2728, 2739), False, 'import os\n'), ((7057, 7101), 'os.path.join', 'os.path.join', (['cwd', '"""mypkg.egg-info/PKG-INFO"""'], {}), "(cwd, 'mypkg.egg-info/PKG-INFO')\n", (7069, 7101), False, 'import os\n'), ((5181, 5778), 'textwrap.dedent', 'textwrap.dedent', (['"""\n from coverage.control import Coverage\n\n coverage = Coverage()\n coverage.start()\n\n try:\n import 
setuptools\n\n setuptools.setup(\n name="mypkg",\n {cfg}\n setup_requires=[\n "setuptools>=41",\n "wheel",\n "setuptools-git-versioning",\n ]\n )\n finally:\n coverage.stop()\n coverage.save()\n """'], {}), '(\n """\n from coverage.control import Coverage\n\n coverage = Coverage()\n coverage.start()\n\n try:\n import setuptools\n\n setuptools.setup(\n name="mypkg",\n {cfg}\n setup_requires=[\n "setuptools>=41",\n "wheel",\n "setuptools-git-versioning",\n ]\n )\n finally:\n coverage.stop()\n coverage.save()\n """\n )\n', (5196, 5778), False, 'import textwrap\n'), ((463, 494), 'random.choice', 'random.choice', (['string.hexdigits'], {}), '(string.hexdigits)\n', (476, 494), False, 'import random\n')]
|
"""
997. Find the Town Judge
In a town, there are n people labeled from 1 to n.
There is a rumor that one of these people is secretly the town judge.
If the town judge exists, then:
1. The town judge trusts nobody.
2. Everybody (except for the town judge) trusts the town judge.
3. There is exactly one person that satisfies properties 1 and 2.
You are given an array trust where trust[i] = [ai, bi] representing that
the person labeled ai trusts the person labeled bi.
Return the label of the town judge if the town judge exists and can be identified,
or return -1 otherwise.
"""
testcases = [
{
'input': {
'n': 2,
'trust': [[1, 2]]
},
'output': 2
},
{
'input': {
'n': 3,
'trust': [[1, 3], [2, 3]]
},
'output': 3
},
{
'input': {
'n': 3,
'trust': [[1, 3], [2, 3], [3, 1]]
},
'output': -1
},
{
'input': {
'n': 4,
'trust': [[1, 3], [1, 4], [2, 3], [2, 4], [4, 3]]
},
'output': 3
},
{
'input': {
'n': 1,
'trust': [],
},
'output': 1,
}
]
from typing import List
from lib import run_tests
def main():
kls = Solution()
run_tests(
testcases=testcases,
function=kls.findJudge,
)
# 966ms 13.31% | 18.9MB 87.18%
class Solution1:
def findJudge(self, n: int, trust: List[List[int]]) -> int:
if n == 1:
return 1
chosen = None
candidate = {}
for arr in trust:
civilian, judge = arr
candidate[civilian] = -1
if candidate.get(judge, 0) != -1:
candidate[judge] = candidate.get(judge, 0) + 1
if chosen is None or candidate[judge] > candidate[chosen]:
chosen = judge
count = candidate.get(chosen, -1)
return chosen if count == n-1 else -1
# 744ms 50.36% | 19MB 23.32%
class Solution2:
def findJudge(self, n: int, trust: List[List[int]]) -> int:
chosen = 1
candidate = dict((i, 0) for i in range(1, n+1))
for civilian, judge in trust:
candidate[civilian] = -1
if candidate[judge] != -1:
candidate[judge] += 1
if candidate[judge] > candidate[chosen]:
chosen = judge
return chosen if candidate[chosen] == n-1 else -1
# 744ms 50.36% | 18.8MB 87.18%
class Solution:
def findJudge(self, n: int, trust: List[List[int]]) -> int:
cached = [0] * (n + 1)
for i, j in trust:
cached[i] -= 1
cached[j] += 1
for i in range(1, n+1):
if cached[i] == n-1:
return i
return -1
|
[
"lib.run_tests"
] |
[((1320, 1374), 'lib.run_tests', 'run_tests', ([], {'testcases': 'testcases', 'function': 'kls.findJudge'}), '(testcases=testcases, function=kls.findJudge)\n', (1329, 1374), False, 'from lib import run_tests\n')]
|
# -*- coding: utf-8 -*-
"""
Generating image window by weighted sampling map from input image
This can also be considered as a `weighted random cropping` layer of the
input image
"""
from __future__ import absolute_import, division, print_function
import numpy as np
import tensorflow as tf
from niftynet.engine.image_window import N_SPATIAL
from niftynet.engine.sampler_uniform import UniformSampler
class WeightedSampler(UniformSampler):
"""
This class generators samples from a user provided
frequency map for each input volume
The sampling likelihood of each voxel (and window around)
is proportional to its frequency
This is implemented in a closed form using cumulative histograms
for efficiency purposes i.e., the first three dims of image.
This layer can be considered as a `weighted random cropping` layer of the
input image.
"""
def __init__(self,
reader,
data_param,
batch_size,
windows_per_image,
queue_length=10):
UniformSampler.__init__(self,
reader=reader,
data_param=data_param,
batch_size=batch_size,
windows_per_image=windows_per_image,
queue_length=queue_length)
tf.logging.info('Initialised weighted sampler window instance')
self.spatial_coordinates_generator = weighted_spatial_coordinates
def weighted_spatial_coordinates(subject_id,
                                 data,
                                 img_sizes,
                                 win_sizes,
                                 n_samples=1):
    """
    Generate `n_samples` window coordinates by weighted sampling.

    This is the function that actually does the cumulative histogram
    and sampling: voxels are drawn with probability proportional to
    their value in ``data['sampler']``.

    Also note that win_sizes could be different, for example in a
    segmentation network the input image window size is 32x32x10 while
    the training label window is 16x16x10 (the network reduces x-y plane
    spatial resolution).  This function handles this situation by first
    finding the largest window across these window definitions and
    generating the coordinates for it.  These coordinates are then
    adjusted for each of the smaller window sizes (the output windows
    are concentric).

    Returns a dict mapping each modality name in ``win_sizes`` to an
    int32 array of shape (n_samples, 1 + 2 * N_SPATIAL):
    [subject_id, start coords..., end coords...].
    """
    # requiring a data['sampler'] as the frequency map.
    # the shape should be [x, y, z, 1, 1]
    if data is None or data.get('sampler', None) is None:
        tf.logging.fatal("input weight map not found. please check "
                         "the configuration file")
        raise RuntimeError
    n_samples = max(n_samples, 1)
    # all grouped inputs must share the same spatial extent
    uniq_spatial_size = set([img_size[:N_SPATIAL]
                             for img_size in list(img_sizes.values())])
    if len(uniq_spatial_size) > 1:
        tf.logging.fatal("Don't know how to generate sampling "
                         "locations: Spatial dimensions of the "
                         "grouped input sources are not "
                         "consistent. %s", uniq_spatial_size)
        raise NotImplementedError
    uniq_spatial_size = uniq_spatial_size.pop()
    # find spatial window location based on the largest spatial window
    spatial_win_sizes = [win_size[:N_SPATIAL]
                         for win_size in win_sizes.values()]
    spatial_win_sizes = np.asarray(spatial_win_sizes, dtype=np.int32)
    max_spatial_win = np.max(spatial_win_sizes, axis=0)
    # testing window size: the largest window must fit inside the image
    for i in range(0, N_SPATIAL):
        assert uniq_spatial_size[i] >= max_spatial_win[i], \
            "window size {} is larger than image size {}".format(
                max_spatial_win[i], uniq_spatial_size[i])
    # get cropped version of the input weight map where the centre of
    # the window might be. If the centre of the window was outside of
    # this crop area, the patch would be outside of the field of view
    half_win = np.floor(max_spatial_win / 2).astype(int)
    try:
        # the slice falls back to [0:1] on axes where the window is size 1
        cropped_map = data['sampler'][
            half_win[0]:-half_win[0] if max_spatial_win[0] > 1 else 1,
            half_win[1]:-half_win[1] if max_spatial_win[1] > 1 else 1,
            half_win[2]:-half_win[2] if max_spatial_win[2] > 1 else 1,
            0, 0]
        assert np.all(cropped_map.shape) > 0
    except (IndexError, KeyError):
        tf.logging.fatal("incompatible map: %s", data['sampler'].shape)
        raise
    except AssertionError:
        tf.logging.fatal(
            "incompatible window size for weighted sampler. "
            "Please use smaller (fully-specified) spatial window sizes")
        raise
    # Get the cumulative sum of the normalised sorted intensities
    # i.e. first sort the sampling frequencies, normalise them
    # to sum to one, and then accumulate them in order
    flatten_map = cropped_map.flatten()
    sorted_data = np.cumsum(np.divide(np.sort(flatten_map), flatten_map.sum()))
    # get the sorting indexes to that we can invert the sorting later on.
    sorted_indexes = np.argsort(flatten_map)
    middle_coords = np.zeros((n_samples, N_SPATIAL), dtype=np.int32)
    for sample in range(0, n_samples):
        # get n_sample from the cumulative histogram, spaced by 1/n_samples,
        # plus a random perturbation to give us a stochastic sampler
        sample_ratio = 1 - (np.random.random() + sample) / (n_samples + 1)
        # find the index where the cumulative it above the sample threshold
        # import pdb; pdb.set_trace()
        try:
            sample_index = np.argmax(sorted_data >= sample_ratio)
        except ValueError:
            tf.logging.fatal("unable to choose sampling window based on "
                             "the current frequency map.")
            raise
        # invert the sample index to the pre-sorted index
        inverted_sample_index = sorted_indexes[sample_index]
        # get the x,y,z coordinates on the cropped_map
        # (note: we need to re-shift it later due to the crop)
        middle_coords[sample, :N_SPATIAL] = np.unravel_index(
            inverted_sample_index, cropped_map.shape)[:N_SPATIAL]
    # adjust max spatial coordinates based on each mod spatial window size
    all_coordinates = {}
    for mod in list(win_sizes):
        win_size = win_sizes[mod][:N_SPATIAL]
        half_win_diff = np.floor((max_spatial_win - win_size) / 2.0)
        # shift starting coordinates of the window
        # Note that we did not shift the centre coordinates
        # above to the corner of the window
        # because the shift is the same as the cropping amount
        # Also, we need to add half_win_diff/2 so that smaller windows
        # are centred within the large windows
        spatial_coords = np.zeros((n_samples, N_SPATIAL * 2), dtype=np.int32)
        spatial_coords[:, :N_SPATIAL] = \
            middle_coords[:, :N_SPATIAL] + half_win_diff[:N_SPATIAL]
        # the opposite corner of the window is
        # just adding the mod specific window size
        spatial_coords[:, N_SPATIAL:] = \
            spatial_coords[:, :N_SPATIAL] + win_size[:N_SPATIAL]
        # include the subject id as the first column
        subject_id = np.ones((n_samples,), dtype=np.int32) * subject_id
        spatial_coords = np.append(subject_id[:, None], spatial_coords, axis=1)
        all_coordinates[mod] = spatial_coords
    return all_coordinates
|
[
"tensorflow.logging.info",
"tensorflow.logging.fatal",
"numpy.argmax",
"numpy.asarray",
"numpy.floor",
"numpy.zeros",
"numpy.unravel_index",
"numpy.ones",
"numpy.argsort",
"niftynet.engine.sampler_uniform.UniformSampler.__init__",
"numpy.max",
"numpy.append",
"numpy.sort",
"numpy.random.random",
"numpy.all"
] |
[((3368, 3413), 'numpy.asarray', 'np.asarray', (['spatial_win_sizes'], {'dtype': 'np.int32'}), '(spatial_win_sizes, dtype=np.int32)\n', (3378, 3413), True, 'import numpy as np\n'), ((3436, 3469), 'numpy.max', 'np.max', (['spatial_win_sizes'], {'axis': '(0)'}), '(spatial_win_sizes, axis=0)\n', (3442, 3469), True, 'import numpy as np\n'), ((5030, 5053), 'numpy.argsort', 'np.argsort', (['flatten_map'], {}), '(flatten_map)\n', (5040, 5053), True, 'import numpy as np\n'), ((5075, 5123), 'numpy.zeros', 'np.zeros', (['(n_samples, N_SPATIAL)'], {'dtype': 'np.int32'}), '((n_samples, N_SPATIAL), dtype=np.int32)\n', (5083, 5123), True, 'import numpy as np\n'), ((1071, 1233), 'niftynet.engine.sampler_uniform.UniformSampler.__init__', 'UniformSampler.__init__', (['self'], {'reader': 'reader', 'data_param': 'data_param', 'batch_size': 'batch_size', 'windows_per_image': 'windows_per_image', 'queue_length': 'queue_length'}), '(self, reader=reader, data_param=data_param,\n batch_size=batch_size, windows_per_image=windows_per_image,\n queue_length=queue_length)\n', (1094, 1233), False, 'from niftynet.engine.sampler_uniform import UniformSampler\n'), ((1394, 1457), 'tensorflow.logging.info', 'tf.logging.info', (['"""Initialised weighted sampler window instance"""'], {}), "('Initialised weighted sampler window instance')\n", (1409, 1457), True, 'import tensorflow as tf\n'), ((2504, 2592), 'tensorflow.logging.fatal', 'tf.logging.fatal', (['"""input weight map not found. please check the configuration file"""'], {}), "(\n 'input weight map not found. please check the configuration file')\n", (2520, 2592), True, 'import tensorflow as tf\n'), ((2842, 3008), 'tensorflow.logging.fatal', 'tf.logging.fatal', (['"""Don\'t know how to generate sampling locations: Spatial dimensions of the grouped input sources are not consistent. %s"""', 'uniq_spatial_size'], {}), '(\n "Don\'t know how to generate sampling locations: Spatial dimensions of the grouped input sources are not consistent. 
%s"\n , uniq_spatial_size)\n', (2858, 3008), True, 'import tensorflow as tf\n'), ((6327, 6371), 'numpy.floor', 'np.floor', (['((max_spatial_win - win_size) / 2.0)'], {}), '((max_spatial_win - win_size) / 2.0)\n', (6335, 6371), True, 'import numpy as np\n'), ((6734, 6786), 'numpy.zeros', 'np.zeros', (['(n_samples, N_SPATIAL * 2)'], {'dtype': 'np.int32'}), '((n_samples, N_SPATIAL * 2), dtype=np.int32)\n', (6742, 6786), True, 'import numpy as np\n'), ((7234, 7288), 'numpy.append', 'np.append', (['subject_id[:, None]', 'spatial_coords'], {'axis': '(1)'}), '(subject_id[:, None], spatial_coords, axis=1)\n', (7243, 7288), True, 'import numpy as np\n'), ((3942, 3971), 'numpy.floor', 'np.floor', (['(max_spatial_win / 2)'], {}), '(max_spatial_win / 2)\n', (3950, 3971), True, 'import numpy as np\n'), ((4278, 4303), 'numpy.all', 'np.all', (['cropped_map.shape'], {}), '(cropped_map.shape)\n', (4284, 4303), True, 'import numpy as np\n'), ((4351, 4414), 'tensorflow.logging.fatal', 'tf.logging.fatal', (['"""incompatible map: %s"""', "data['sampler'].shape"], {}), "('incompatible map: %s', data['sampler'].shape)\n", (4367, 4414), True, 'import tensorflow as tf\n'), ((4464, 4598), 'tensorflow.logging.fatal', 'tf.logging.fatal', (['"""incompatible window size for weighted sampler. Please use smaller (fully-specified) spatial window sizes"""'], {}), "(\n 'incompatible window size for weighted sampler. 
Please use smaller (fully-specified) spatial window sizes'\n )\n", (4480, 4598), True, 'import tensorflow as tf\n'), ((4893, 4913), 'numpy.sort', 'np.sort', (['flatten_map'], {}), '(flatten_map)\n', (4900, 4913), True, 'import numpy as np\n'), ((5542, 5580), 'numpy.argmax', 'np.argmax', (['(sorted_data >= sample_ratio)'], {}), '(sorted_data >= sample_ratio)\n', (5551, 5580), True, 'import numpy as np\n'), ((6040, 6098), 'numpy.unravel_index', 'np.unravel_index', (['inverted_sample_index', 'cropped_map.shape'], {}), '(inverted_sample_index, cropped_map.shape)\n', (6056, 6098), True, 'import numpy as np\n'), ((7158, 7195), 'numpy.ones', 'np.ones', (['(n_samples,)'], {'dtype': 'np.int32'}), '((n_samples,), dtype=np.int32)\n', (7165, 7195), True, 'import numpy as np\n'), ((5620, 5713), 'tensorflow.logging.fatal', 'tf.logging.fatal', (['"""unable to choose sampling window based on the current frequency map."""'], {}), "(\n 'unable to choose sampling window based on the current frequency map.')\n", (5636, 5713), True, 'import tensorflow as tf\n'), ((5337, 5355), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (5353, 5355), True, 'import numpy as np\n')]
|
"""update user table
Revision ID: 0a909e2b59a1
Revises: 0b026dea7cc6
Create Date: 2019-11-25 10:15:20.875486
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '0b026dea7cc6'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: add the nullable 'site' column to usertable."""
    # ### commands auto generated by Alembic - please adjust! ###
    site_column = sa.Column('site', sa.String(), nullable=True)
    op.add_column('usertable', site_column)
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the 'site' column from usertable."""
    # ### commands auto generated by Alembic - please adjust! ###
    table_name, column_name = 'usertable', 'site'
    op.drop_column(table_name, column_name)
    # ### end Alembic commands ###
|
[
"sqlalchemy.String",
"alembic.op.drop_column"
] |
[((579, 614), 'alembic.op.drop_column', 'op.drop_column', (['"""usertable"""', '"""site"""'], {}), "('usertable', 'site')\n", (593, 614), False, 'from alembic import op\n'), ((426, 437), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (435, 437), True, 'import sqlalchemy as sa\n')]
|
from tqdm import tqdm
from network import LatentModel
from tensorboardX import SummaryWriter
import torchvision
import torch as t
from torch.utils.data import DataLoader
import os
import sys
from tools import *
import time
import matplotlib.pyplot as plt
import matplotlib
os.environ["CUDA_VISIBLE_DEVICES"]="0"
def adjust_learning_rate(optimizer, step_num, warmup_step=4000):
    """Apply the Noam (Transformer) learning-rate schedule in place.

    lr = 0.001 * warmup_step**0.5 * min(step * warmup**-1.5, step**-0.5):
    linear warm-up for the first `warmup_step` steps, then inverse
    square-root decay.  Every parameter group receives the same rate.
    """
    scale = 0.001 * warmup_step ** 0.5
    lr = scale * min(step_num * warmup_step ** -1.5, step_num ** -0.5)
    for group in optimizer.param_groups:
        group['lr'] = lr
def main(num_train, diff, dim, y_dim, case, noise_level):
    """Load a trained latent neural process and plot its test predictions.

    num_train -- number of training points (rest of the data is test)
    dim / y_dim -- input and response dimensions of the model
    case -- dataset selector: 'spiral' or 'fea'
    noise_level -- index (0-2) into noise_levels below
    Returns the predicted standard deviations.

    NOTE(review): `torch`, `np`, `MaterialFEADataset` and `SpiralDataset`
    are not imported by name in this file; they presumably come from
    `from tools import *` -- verify before refactoring.
    """
    # NOTE(review): epochs, PLOT_AFTER, diff and folder_prefix are set but
    # never used in this function.
    epochs = 30001
    num_hidden = 128
    input_dim = dim
    PLOT_AFTER = 1
    noise_levels = [0.01,0.02,0.1]
    model_files = ['spiral_range4_0.01',
                   'spiral_range4_0.02',
                   'spiral_range4_0.1']
    if case == 'fea':
        dataset = MaterialFEADataset('../Data/data.txt', num_train)
        folder_prefix = 'fea_range4'
    if case == 'spiral':
        spiral_filename = '../Data/' + 'spiral_data_range4_' + str(noise_levels[noise_level]) + '.txt'
        dataset = SpiralDataset(spiral_filename, num_train)
        folder_prefix = 'spiral_range4_' + str(noise_levels[noise_level])
    path = './runs/' + model_files[noise_level] + '/'
    # restore the model weights from the best (RMSE) checkpoint
    model = LatentModel(num_hidden, input_dim, y_dim).cuda()
    checkpoint = torch.load(path + 'RMSE_checkpoint.pth.tar')
    model.load_state_dict(checkpoint['model'])
    context_x_test, context_y_test, target_x_test, target_y_test = dataset.test_data()
    context_x_test = torch.from_numpy(context_x_test).cuda()
    context_y_test = torch.from_numpy(context_y_test).cuda()
    target_x_test = torch.from_numpy(target_x_test).cuda()
    target_y_test = torch.from_numpy(target_y_test).cuda()
    # pass through the latent model
    y_pred, sigma, kl, loss = model(context_x_test, context_y_test, target_x_test)
    target_x = target_x_test.cpu().detach().numpy()
    target_y = target_y_test.cpu().detach().numpy()
    context_x = context_x_test.cpu().detach().numpy()
    context_y = context_y_test.cpu().detach().numpy()
    pred_y = y_pred.cpu().detach().numpy()
    std = sigma.cpu().detach().numpy()
    # report RMSE / MAPE on both the test and training portion of the batch
    test_RMSE = np.sqrt(np.mean((pred_y[0, num_train:] - target_y[0, num_train:])**2))
    test_MAPE = np.mean(np.sqrt(np.square((pred_y[0, num_train:] - target_y[0, num_train:])\
                                    /target_y[0, num_train:])))
    train_RMSE = np.sqrt(np.mean((pred_y[0, :num_train] - target_y[0, :num_train])**2))
    train_MAPE = np.mean(np.sqrt(np.square((pred_y[0, :num_train] - target_y[0, :num_train])\
                                    /target_y[0, :num_train])))
    print('============================')
    print('test_RMSE: ', test_RMSE, 'test_MAPE: ', test_MAPE)
    print('train_RMSE: ', train_RMSE, 'train_MAPE: ', train_MAPE)
    #print(std)
    print('============================')
    #filename = '../results/50_NP_ODE.npz'
    #np.savez(filename, pred = pred_y[0,num_train:], target = target_y[0, num_train:], std = std[0,num_train:,0])
    #print(pred_y[0,num_train:].shape, target_y[0, num_train:].shape, std[0,num_train:,0].shape)
    # plotting configuration
    labelfont = 50
    legendfont = 50
    markerline = 25
    markerstar=17
    linewidth = 5
    matplotlib.rc('xtick', labelsize=labelfont)
    matplotlib.rc('ytick', labelsize=labelfont)
    # noise-free reference spiral used as ground truth in the plots
    spiral_wonoise = np.loadtxt("../Data/spiral_data_range4.txt", dtype='float32',delimiter=',')
    if case == "spiral":
        f = plt.figure(figsize=(20,80))
        ax1 = f.add_subplot(411)
        ax2 = f.add_subplot(412)
        ax3 = f.add_subplot(413)
        ax4 = f.add_subplot(414)
        # columns: [x, y1, y2, std_y1, std_y2]; sort rows by x for clean lines
        result = np.concatenate((target_x,pred_y,std), axis = -1)[0]
        result = result[result[:,0].argsort()]
        ax1.plot(spiral_wonoise[:,1], spiral_wonoise[:,2], lw=linewidth, c = 'b', label = "spiral")
        ax1.scatter(target_y[0,:num_train,0], target_y[0,:num_train,1], c = 'g', s = 400, marker = '*', label = "training data")
        ax1.scatter(target_y[0,num_train:,0], target_y[0,num_train:,1], c = 'r', s = 300, marker = 's', label = "testing data")
        ax1.set_xlabel('y1', fontsize=labelfont)
        ax1.set_ylabel('y2', fontsize=labelfont)
        ax2.plot(spiral_wonoise[:,1], spiral_wonoise[:,2], lw=linewidth, c = 'b', label = "spiral")
        ax2.plot(result[:,1], result[:,2], lw=linewidth, c = 'r', label = "generation")
        ax2.fill_between(result[:,1], result[:,2] + result[:,4], result[:,2] - result[:,4],
                         facecolor='yellow', alpha=0.8)
        ax2.set_title('Generated spiral with UQ in y2', fontsize=labelfont)
        ax2.set_xlabel('y1', fontsize=labelfont)
        ax2.set_ylabel('y2', fontsize=labelfont)
        ax3.plot(spiral_wonoise[:,1], spiral_wonoise[:,2], lw=linewidth, c = 'b', label = "spiral")
        ax3.plot(result[:,1], result[:,2], lw=linewidth, c = 'r', label = "generation")
        ax3.fill_betweenx(result[:,2], result[:,1] + result[:,3], result[:,1] - result[:,3],
                          facecolor='cyan', alpha=0.8)
        ax3.set_title('Generated spiral with UQ in y1', fontsize=labelfont)
        ax3.set_xlabel('y1', fontsize=labelfont)
        ax3.set_ylabel('y2', fontsize=labelfont)
        ax4.plot(spiral_wonoise[:,1], spiral_wonoise[:,2], lw=linewidth, c = 'b', label = "spiral")
        ax4.plot(result[:,1], result[:,2], lw=linewidth, c = 'r', label = "generation")
        ax4.fill_between(result[:,1], result[:,2] + result[:,4], result[:,2] - result[:,4],
                         facecolor='yellow', alpha=0.8)
        ax4.fill_betweenx(result[:,2], result[:,1] + result[:,3], result[:,1] - result[:,3],
                          facecolor='cyan', alpha=0.8)
        ax4.set_title('Generated spiral with UQ in y1 and y2', fontsize=labelfont)
        ax4.set_xlabel('y1', fontsize=labelfont)
        ax4.set_ylabel('y2', fontsize=labelfont)
        # plt.scatter(result[:,1], result[:,1], c = 'b', marker = 'o')
        # plt.errorbar(pred_y[0,num_train:,0], pred_y[0,num_train:,1], std[0,num_train:,1], std[0,num_train:,0], linestyle='None', capsize=6, capthick=3, elinewidth=3, ecolor='r', label='Confidence Interval')
        ax1.legend(fontsize=legendfont, loc='lower right')
        ax2.legend(fontsize=legendfont, loc='lower right')
        ax3.legend(fontsize=legendfont, loc='lower right')
        ax4.legend(fontsize=legendfont, loc='lower right')
        plt.grid('off')
        plt.gca()
        # plt.show()
        plt.savefig('./results/spiral_result_NPs' + str(noise_levels[noise_level]) + '.png',bbox_inches='tight',pad_inches=0)
    if case == "fea":
        plt.figure(figsize=(28,20))
        plt.plot(pred_y[0,num_train:,0], 'r_', markersize=markerline, mew=4, label='Prediction')
        plt.plot(target_y[0, num_train:], 'b*', markersize=markerstar, label='Ground Truth')
        plt.errorbar(np.arange(pred_y.shape[1]-num_train), pred_y[0,num_train:,0], std[0,num_train:,0]*1.96, linestyle='None', capsize=12,capthick=5, elinewidth=5, ecolor='c', label='Confidence Interval')
        plt.grid('off')
        plt.legend(fontsize=labelfont)
        ax = plt.gca()
        plt.ylim(-2.5,2.5)
        plt.xlim(-1,21)
        plt.xlabel('index of data points',fontsize=labelfont)
        # plt.ylabel('value of y',fontsize=labelfont)
        plt.savefig(path + 'test.png')
    return std
if __name__ == '__main__':
    # experiment configuration (see main() for how each value is used)
    num_train = 150
    diff = 30
    dim = 3 ## input dimension
    y_dim = 2 ## response dimension
    case = 'spiral' ## two cases, spiral/fea
    # noise level chosen from the command line: index 0-2
    noise_level = int(sys.argv[1])
    seed = 1
    # NOTE(review): `torch` is imported here only as `t`; the bare name
    # presumably comes from `from tools import *` -- verify.
    torch.manual_seed(seed)
    std = main(num_train, diff, dim, y_dim, case, noise_level)
|
[
"matplotlib.pyplot.xlim",
"matplotlib.rc",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.gca",
"network.LatentModel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig"
] |
[((3304, 3347), 'matplotlib.rc', 'matplotlib.rc', (['"""xtick"""'], {'labelsize': 'labelfont'}), "('xtick', labelsize=labelfont)\n", (3317, 3347), False, 'import matplotlib\n'), ((3353, 3396), 'matplotlib.rc', 'matplotlib.rc', (['"""ytick"""'], {'labelsize': 'labelfont'}), "('ytick', labelsize=labelfont)\n", (3366, 3396), False, 'import matplotlib\n'), ((3537, 3565), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 80)'}), '(figsize=(20, 80))\n', (3547, 3565), True, 'import matplotlib.pyplot as plt\n'), ((6536, 6551), 'matplotlib.pyplot.grid', 'plt.grid', (['"""off"""'], {}), "('off')\n", (6544, 6551), True, 'import matplotlib.pyplot as plt\n'), ((6560, 6569), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6567, 6569), True, 'import matplotlib.pyplot as plt\n'), ((6771, 6799), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(28, 20)'}), '(figsize=(28, 20))\n', (6781, 6799), True, 'import matplotlib.pyplot as plt\n'), ((6807, 6901), 'matplotlib.pyplot.plot', 'plt.plot', (['pred_y[0, num_train:, 0]', '"""r_"""'], {'markersize': 'markerline', 'mew': '(4)', 'label': '"""Prediction"""'}), "(pred_y[0, num_train:, 0], 'r_', markersize=markerline, mew=4,\n label='Prediction')\n", (6815, 6901), True, 'import matplotlib.pyplot as plt\n'), ((6904, 6993), 'matplotlib.pyplot.plot', 'plt.plot', (['target_y[0, num_train:]', '"""b*"""'], {'markersize': 'markerstar', 'label': '"""Ground Truth"""'}), "(target_y[0, num_train:], 'b*', markersize=markerstar, label=\n 'Ground Truth')\n", (6912, 6993), True, 'import matplotlib.pyplot as plt\n'), ((7202, 7217), 'matplotlib.pyplot.grid', 'plt.grid', (['"""off"""'], {}), "('off')\n", (7210, 7217), True, 'import matplotlib.pyplot as plt\n'), ((7226, 7256), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': 'labelfont'}), '(fontsize=labelfont)\n', (7236, 7256), True, 'import matplotlib.pyplot as plt\n'), ((7270, 7279), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (7277, 7279), True, 
'import matplotlib.pyplot as plt\n'), ((7288, 7307), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-2.5)', '(2.5)'], {}), '(-2.5, 2.5)\n', (7296, 7307), True, 'import matplotlib.pyplot as plt\n'), ((7315, 7331), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-1)', '(21)'], {}), '(-1, 21)\n', (7323, 7331), True, 'import matplotlib.pyplot as plt\n'), ((7339, 7393), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""index of data points"""'], {'fontsize': 'labelfont'}), "('index of data points', fontsize=labelfont)\n", (7349, 7393), True, 'import matplotlib.pyplot as plt\n'), ((7455, 7485), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(path + 'test.png')"], {}), "(path + 'test.png')\n", (7466, 7485), True, 'import matplotlib.pyplot as plt\n'), ((1330, 1371), 'network.LatentModel', 'LatentModel', (['num_hidden', 'input_dim', 'y_dim'], {}), '(num_hidden, input_dim, y_dim)\n', (1341, 1371), False, 'from network import LatentModel\n')]
|
from skimage.io import imread
import numpy as np
import pickle
from pumapy.utilities.workspace import Workspace
from os import path
from glob import glob
from vtkmodules.util.numpy_support import vtk_to_numpy
from vtkmodules.vtkIOXML import vtkXMLImageDataReader, vtkXMLUnstructuredGridReader
from vtkmodules.vtkIOLegacy import vtkDataSetReader
def io_logs(ws, filename, input=True):
    """Record an import/export event in the workspace log.

    When ``input`` is True the event is logged as an import ("from"),
    otherwise as an export ("to").
    """
    verb, direction = ("Importing", "from ") if input else ("Exporting", "to ")
    ws.log.log_section(verb + " Domain " + direction + filename)
    ws.log.write_log()
def import_3Dtiff(filename, voxel_length=1e-6, import_ws=True):
    """ Import a 3D tiff file.

        :param filename: filepath and name
        :type filename: string
        :param voxel_length: size of a voxel side
        :type voxel_length: float
        :param import_ws: if True returns a puma.Workspace, otherwise a ndarray
        :type import_ws: bool
        :return: domain
        :rtype: pumapy.Workspace or np.ndarray
    """
    print("Importing " + filename + " ... ", end='')
    if not path.exists(filename):
        raise Exception("File " + filename + " not found.")
    volume = imread(filename).astype(np.uint16)
    # a 2D image becomes a single-slice volume; a 3D stack is reordered
    # from (z, y, x) into (x, y, z)
    if volume.ndim == 2:
        volume = volume[:, :, np.newaxis]
    else:
        volume = volume.transpose(2, 1, 0)
    print("Done")
    if not import_ws:
        return volume
    ws = Workspace.from_array(volume)
    ws.set_voxel_length(voxel_length)
    io_logs(ws, filename)
    return ws
def import_bin(filename):
    """ Import a pumapy.Workspace from binary (.pumapy extension)

        :param filename: filepath and name
        :type filename: string
        :return: imported workspace
        :rtype: pumapy.Workspace

        :Example:
        >>> import pumapy as puma
        >>> ws_binary = puma.import_bin(puma.path_to_example_file("fibers_with_orientation.pumapy"))
    """
    print("Importing " + filename + " ... ", end='')
    if not path.exists(filename):
        raise Exception("File " + filename + " not found.")
    # context manager guarantees the handle is closed even if unpickling
    # raises (the previous explicit open/close leaked it on error)
    with open(filename, 'rb') as pumapy_file:
        ws = pickle.load(pumapy_file)
    print("Done")
    io_logs(ws, filename)
    return ws
def import_vti(filename, voxel_length=None, import_ws=True):
    """ Function to import either legacy VTK file (.vtk) or vtkImageData (.vti)

        :param filename: filepath and name
        :type filename: string
        :param voxel_length: voxel_length. If None, voxel_length from the vtk file is used
        :type voxel_length: float
        :param import_ws: True returns a puma.Workspace, otherwise a list of ndarrays
        :type import_ws: bool
        :return: if import_ws is True, then it returns a Workspace.
            if import_ws is False, it returns a dictionary of ndarrays as {"name1": data1, "name2": data2 ...}
        :rtype: pumapy.Workspace or {str: np.ndarray}

        :Example:
        >>> import pumapy as puma
        >>> ws_vtk = puma.import_vti(puma.path_to_example_file("fibers_with_orientation.vti"))
    """
    print("Importing " + filename + " ... ", end='')
    if not path.exists(filename):
        raise Exception("File " + filename + " not found.")
    # choose the reader from the file extension
    if filename[-4:] == ".vti":
        reader = vtkXMLImageDataReader()
        reader.SetFileName(filename)
        reader.Update()
        vtkobject = reader.GetOutput()
    elif filename[-4:] == ".vtk":
        reader = vtkDataSetReader()
        reader.SetFileName(filename)
        reader.ReadAllScalarsOn()
        reader.ReadAllColorScalarsOn()
        reader.ReadAllNormalsOn()
        reader.ReadAllTCoordsOn()
        reader.ReadAllVectorsOn()
        reader.Update()  # reading
        vtkobject = reader.GetOutputDataObject(0)
    else:
        raise Exception("File of an unrecognized extension, only .vti and .vtk supported.")
    # point dimensions; the cell grid is one smaller along each axis
    shape = vtkobject.GetDimensions()
    orientation = None
    if vtkobject.GetPointData().GetNumberOfArrays() == 0 and vtkobject.GetCellData().GetNumberOfArrays() == 0:
        raise Exception("No CELL_DATA or POINT_DATA arrays detected in file.")
    if import_ws:
        if vtkobject.GetCellData().GetNumberOfArrays() + vtkobject.GetPointData().GetNumberOfArrays() > 2:
            raise Exception("More than two arrays in file detected: set the import_ws to False to import it.")
        # checking for CELL_DATA
        if vtkobject.GetCellData().GetNumberOfArrays() > 0:
            if vtkobject.GetCellData().GetNumberOfArrays() == 1:  # if one array, could either be orientation or matrix
                nparray = vtk_to_numpy(vtkobject.GetCellData().GetArray(0))
                if nparray.ndim == 2:  # if orientation
                    orientation = nparray.copy()
                    nparray = None
            else:
                nparray = vtk_to_numpy(vtkobject.GetCellData().GetArray(0))
                orientation = vtk_to_numpy(vtkobject.GetCellData().GetArray(1))
        # checking for POINT_DATA
        else:
            if vtkobject.GetPointData().GetNumberOfArrays() == 1:  # if one array, could either be orientation or matrix
                nparray = vtk_to_numpy(vtkobject.GetPointData().GetArray(0))
                if nparray.ndim == 2:  # if orientation
                    orientation = nparray.copy()
                    nparray = None
            else:
                nparray = vtk_to_numpy(vtkobject.GetPointData().GetArray(0))
                orientation = vtk_to_numpy(vtkobject.GetPointData().GetArray(1))
        # reshape flat arrays into the cell grid (Fortran / x-fastest order)
        if nparray is not None:
            nparray = nparray.reshape(shape[0] - 1, shape[1] - 1, shape[2] - 1, order="F")
        if orientation is not None:
            orientation = orientation.reshape(shape[0] - 1, shape[1] - 1, shape[2] - 1, 3, order="F")
        print("Done")
        if nparray is None:
            ws = Workspace.from_shape((shape[0] - 1, shape[1] - 1, shape[2] - 1))
        else:
            ws = Workspace.from_array(nparray)
            if orientation is not None:
                ws.set_orientation(orientation)
        if voxel_length is None:
            # fall back to the spacing stored in the vtk file
            ws.set_voxel_length(vtkobject.GetSpacing()[0])
        else:
            ws.set_voxel_length(voxel_length)
        io_logs(ws, filename)
        return ws
    else:
        # import every array, keyed by its name in the file
        nparray_list = dict()
        for i in range(vtkobject.GetCellData().GetNumberOfArrays()):
            tmp = vtk_to_numpy(vtkobject.GetCellData().GetArray(i))
            if tmp.ndim == 1:
                tmp = tmp.reshape(shape[0] - 1, shape[1] - 1, shape[2] - 1, order="F")
            else:
                tmp = tmp.reshape(shape[0] - 1, shape[1] - 1, shape[2] - 1, 3, order="F")
            nparray_list[vtkobject.GetCellData().GetArrayName(i)] = tmp
        for i in range(vtkobject.GetPointData().GetNumberOfArrays()):
            tmp = vtk_to_numpy(vtkobject.GetPointData().GetArray(i))
            if tmp.ndim == 1:
                tmp = tmp.reshape(shape[0] - 1, shape[1] - 1, shape[2] - 1, order="F")
            else:
                tmp = tmp.reshape(shape[0] - 1, shape[1] - 1, shape[2] - 1, 3, order="F")
            nparray_list[vtkobject.GetPointData().GetArrayName(i)] = tmp
        print("Done")
        return nparray_list
def import_weave_vtu(filename, from_texgen_gui=False):
    """ Import TexGen vtu weave in a Workspace

        :param filename: file path and name
        :type filename: string
        :param from_texgen_gui: voxel grid exported from the TexGen GUI (Windows) or from TexGen inside PuMA
        :type from_texgen_gui: bool
        :return: voxelized weave from TexGen
        :rtype: pumapy.Workspace
    """
    if not path.exists(filename):
        # fall back to globbing for a partial name; check the match list
        # BEFORE rebinding `filename` -- the previous code concatenated
        # the glob list into the error string, raising TypeError instead
        # of the intended "File ... not found." message
        matches = glob(filename + '*.vtu')
        if len(matches) == 0:
            raise Exception("File " + filename + " not found.")
        filename = matches[0]
    print("Importing " + filename + " ... ", end='')
    reader = vtkXMLUnstructuredGridReader()
    reader.SetFileName(filename)
    reader.Update()  # reading
    vtkobject = reader.GetOutputDataObject(0)
    # the voxel grid dimensions are encoded as the last three
    # underscore-separated tokens of the filename (without extension)
    dims = path.split(filename[:-4])[1].split('_')[-3:]
    # shift yarn indices by one so that 0 can mean "no yarn"
    yarn_index = vtk_to_numpy(vtkobject.GetCellData().GetArray(0)) + 1
    ws = Workspace.from_array(yarn_index.reshape(int(dims[0]), int(dims[1]), int(dims[2]), order="F"))
    if vtkobject.GetCellData().GetNumberOfArrays() > 2:
        if from_texgen_gui:
            # ORIGINAL TEXGEN (GUI in Windows)
            # Number Of Arrays: 6
            # Array 0 name = YarnIndex  <-- transferring to ws
            # Array 1 name = YarnTangent (unnecessary)
            # Array 2 name = Location (unnecessary)
            # Array 3 name = VolumeFraction (unnecessary)
            # Array 4 name = SurfaceDistance (unnecessary)
            # Array 5 name = Orientation  <-- transferring to ws
            orientation = vtk_to_numpy(vtkobject.GetCellData().GetArray(5))
        else:
            # MODIFIED TEXGEN wrapped in PuMA
            # Array 0 name = YarnIndex  <-- transferring to ws
            # Array 1 name = Orientation  <-- depends on export_orientation in export_weave_vtu
            orientation = vtk_to_numpy(vtkobject.GetCellData().GetArray(1))
        orientation = orientation.reshape(int(dims[0]), int(dims[1]), int(dims[2]), 3, order="F")
        ws.set_orientation(orientation)
    print("Done")
    io_logs(ws, filename)
    return ws
|
[
"pumapy.utilities.workspace.Workspace.from_shape",
"vtkmodules.vtkIOXML.vtkXMLImageDataReader",
"os.path.exists",
"pickle.load",
"vtkmodules.vtkIOXML.vtkXMLUnstructuredGridReader",
"pumapy.utilities.workspace.Workspace.from_array",
"glob.glob",
"os.path.split",
"skimage.io.imread",
"vtkmodules.vtkIOLegacy.vtkDataSetReader"
] |
[((2317, 2341), 'pickle.load', 'pickle.load', (['pumapy_file'], {}), '(pumapy_file)\n', (2328, 2341), False, 'import pickle\n'), ((8122, 8152), 'vtkmodules.vtkIOXML.vtkXMLUnstructuredGridReader', 'vtkXMLUnstructuredGridReader', ([], {}), '()\n', (8150, 8152), False, 'from vtkmodules.vtkIOXML import vtkXMLImageDataReader, vtkXMLUnstructuredGridReader\n'), ((1268, 1289), 'os.path.exists', 'path.exists', (['filename'], {}), '(filename)\n', (1279, 1289), False, 'from os import path\n'), ((1577, 1606), 'pumapy.utilities.workspace.Workspace.from_array', 'Workspace.from_array', (['nparray'], {}), '(nparray)\n', (1597, 1606), False, 'from pumapy.utilities.workspace import Workspace\n'), ((2185, 2206), 'os.path.exists', 'path.exists', (['filename'], {}), '(filename)\n', (2196, 2206), False, 'from os import path\n'), ((3337, 3358), 'os.path.exists', 'path.exists', (['filename'], {}), '(filename)\n', (3348, 3358), False, 'from os import path\n'), ((3470, 3493), 'vtkmodules.vtkIOXML.vtkXMLImageDataReader', 'vtkXMLImageDataReader', ([], {}), '()\n', (3491, 3493), False, 'from vtkmodules.vtkIOXML import vtkXMLImageDataReader, vtkXMLUnstructuredGridReader\n'), ((7844, 7865), 'os.path.exists', 'path.exists', (['filename'], {}), '(filename)\n', (7855, 7865), False, 'from os import path\n'), ((7886, 7910), 'glob.glob', 'glob', (["(filename + '*.vtu')"], {}), "(filename + '*.vtu')\n", (7890, 7910), False, 'from glob import glob\n'), ((1366, 1382), 'skimage.io.imread', 'imread', (['filename'], {}), '(filename)\n', (1372, 1382), False, 'from skimage.io import imread\n'), ((3646, 3664), 'vtkmodules.vtkIOLegacy.vtkDataSetReader', 'vtkDataSetReader', ([], {}), '()\n', (3662, 3664), False, 'from vtkmodules.vtkIOLegacy import vtkDataSetReader\n'), ((6053, 6117), 'pumapy.utilities.workspace.Workspace.from_shape', 'Workspace.from_shape', (['(shape[0] - 1, shape[1] - 1, shape[2] - 1)'], {}), '((shape[0] - 1, shape[1] - 1, shape[2] - 1))\n', (6073, 6117), False, 'from pumapy.utilities.workspace 
import Workspace\n'), ((6149, 6178), 'pumapy.utilities.workspace.Workspace.from_array', 'Workspace.from_array', (['nparray'], {}), '(nparray)\n', (6169, 6178), False, 'from pumapy.utilities.workspace import Workspace\n'), ((8275, 8300), 'os.path.split', 'path.split', (['filename[:-4]'], {}), '(filename[:-4])\n', (8285, 8300), False, 'from os import path\n')]
|
import argparse
import cv2 as cv
import numpy as np
from trainer.config import img_rows, img_cols
if __name__ == '__main__':
    # parse the crop rectangle corners from the command line
    parser = argparse.ArgumentParser()
    for flag in ("-x0", "-y0", "-x1", "-y1"):
        parser.add_argument(flag)
    args = vars(parser.parse_args())
    x0, x1 = int(args["x0"]), int(args["x1"])
    y0, y1 = int(args["y0"]), int(args["y1"])
    # single-channel trimap: 128 marks the "unknown" rectangle
    trimap = np.zeros((img_rows, img_cols, 1), dtype=np.uint8)
    trimap[x0:x1, y0:y1, 0] = 128
    cv.imshow('trimap', trimap)
    cv.imwrite('made-trimap.png', trimap)
|
[
"cv2.imwrite",
"cv2.imshow",
"numpy.zeros",
"argparse.ArgumentParser"
] |
[((136, 161), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (159, 161), False, 'import argparse\n'), ((417, 466), 'numpy.zeros', 'np.zeros', (['(img_rows, img_cols, 1)'], {'dtype': 'np.uint8'}), '((img_rows, img_cols, 1), dtype=np.uint8)\n', (425, 466), True, 'import numpy as np\n'), ((507, 534), 'cv2.imshow', 'cv.imshow', (['"""trimap"""', 'trimap'], {}), "('trimap', trimap)\n", (516, 534), True, 'import cv2 as cv\n'), ((539, 576), 'cv2.imwrite', 'cv.imwrite', (['"""made-trimap.png"""', 'trimap'], {}), "('made-trimap.png', trimap)\n", (549, 576), True, 'import cv2 as cv\n')]
|
##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Viewlet manager.
"""
from Acquisition import aq_base
from AccessControl.ZopeGuards import guarded_hasattr
import zope.interface
import zope.security
from zope.viewlet import interfaces
from zope.viewlet.manager import ViewletManagerBase as origManagerBase
from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile
class ViewletManagerBase(origManagerBase):
"""A base class for Viewlet managers to work in Zope2"""
template = None
def __getitem__(self, name):
"""See zope.interface.common.mapping.IReadMapping"""
# Find the viewlet
viewlet = zope.component.queryMultiAdapter(
(self.context, self.request, self.__parent__, self),
interfaces.IViewlet, name=name)
# If the viewlet was not found, then raise a lookup error
if viewlet is None:
raise zope.component.interfaces.ComponentLookupError(
'No provider with name `%s` found.' %name)
# If the viewlet cannot be accessed, then raise an
# unauthorized error
if not guarded_hasattr(viewlet, 'render'):
raise zope.security.interfaces.Unauthorized(
'You are not authorized to access the provider '
'called `%s`.' %name)
# Return the viewlet.
return viewlet
def filter(self, viewlets):
"""Sort out all content providers
``viewlets`` is a list of tuples of the form (name, viewlet).
"""
results = []
# Only return viewlets accessible to the principal
# We need to wrap each viewlet in its context to make sure that
# the object has a real context from which to determine owner
# security.
for name, viewlet in viewlets:
if guarded_hasattr(viewlet, 'render'):
results.append((name, viewlet))
return results
def sort(self, viewlets):
"""Sort the viewlets.
``viewlets`` is a list of tuples of the form (name, viewlet).
"""
# By default, use the standard Python way of doing sorting. Unwrap the
# objects first so that they are sorted as expected. This is dumb
# but it allows the tests to have deterministic results.
return sorted(viewlets, lambda x, y: cmp(aq_base(x[1]), aq_base(y[1])))
def ViewletManager(name, interface, template=None, bases=()):
attrDict = {'__name__': name}
if template is not None:
attrDict['template'] = ZopeTwoPageTemplateFile(template)
if ViewletManagerBase not in bases:
# Make sure that we do not get a default viewlet manager mixin, if the
# provided base is already a full viewlet manager implementation.
if not (len(bases) == 1 and
interfaces.IViewletManager.implementedBy(bases[0])):
bases = bases + (ViewletManagerBase,)
ViewletManager = type(
'<ViewletManager providing %s>' % interface.getName(), bases, attrDict)
zope.interface.classImplements(ViewletManager, interface)
return ViewletManager
|
[
"AccessControl.ZopeGuards.guarded_hasattr",
"Products.Five.browser.pagetemplatefile.ZopeTwoPageTemplateFile",
"zope.viewlet.interfaces.IViewletManager.implementedBy",
"Acquisition.aq_base"
] |
[((3125, 3158), 'Products.Five.browser.pagetemplatefile.ZopeTwoPageTemplateFile', 'ZopeTwoPageTemplateFile', (['template'], {}), '(template)\n', (3148, 3158), False, 'from Products.Five.browser.pagetemplatefile import ZopeTwoPageTemplateFile\n'), ((1709, 1743), 'AccessControl.ZopeGuards.guarded_hasattr', 'guarded_hasattr', (['viewlet', '"""render"""'], {}), "(viewlet, 'render')\n", (1724, 1743), False, 'from AccessControl.ZopeGuards import guarded_hasattr\n'), ((2413, 2447), 'AccessControl.ZopeGuards.guarded_hasattr', 'guarded_hasattr', (['viewlet', '"""render"""'], {}), "(viewlet, 'render')\n", (2428, 2447), False, 'from AccessControl.ZopeGuards import guarded_hasattr\n'), ((3405, 3455), 'zope.viewlet.interfaces.IViewletManager.implementedBy', 'interfaces.IViewletManager.implementedBy', (['bases[0]'], {}), '(bases[0])\n', (3445, 3455), False, 'from zope.viewlet import interfaces\n'), ((2932, 2945), 'Acquisition.aq_base', 'aq_base', (['x[1]'], {}), '(x[1])\n', (2939, 2945), False, 'from Acquisition import aq_base\n'), ((2947, 2960), 'Acquisition.aq_base', 'aq_base', (['y[1]'], {}), '(y[1])\n', (2954, 2960), False, 'from Acquisition import aq_base\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 VMware, Inc.
# SPDX-License-Identifier: BSD-2-Clause OR GPL-3.0-only
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nsxt_policy_ip_pool
short_description: Create or Delete a Policy IP Pool
description:
Creates or deletes a Policy IP Pool.
Required attributes include id and display_name.
version_added: "2.8"
author: <NAME>
extends_documentation_fragment:
- vmware.ansible_for_nsxt.vmware_nsxt
options:
id:
description: The id of the Policy IP Pool.
required: false
type: str
description:
description: Resource description.
type: str
pool_block_subnets:
type: list
element: dict
description: Specify the IP Pool Block Subnets that need to be created,
updated, or deleted as a list of dict in this section
suboptions:
auto_assign_gateway:
description:
- Indicate whether default gateway is to be reserved from
the range
- If this property is set to true, the first IP in the
range will be reserved for gateway.
type: bool
default: true
description:
description: Resource description.
type: str
display_name:
description:
- Display name.
- If resource ID is not specified, display_name will be
used as ID.
required: false
type: str
do_wait_till_create:
type: bool
default: false
description: Can be used to wait for the realization of
subresource before the request to create the next
resource is sent to the Manager
id:
description: The id of the Policy IP Pool Block Subnet.
required: false
type: str
ip_block_display_name:
description: Same as ip_block_id. Either one must be specified.
If both are specified, ip_block_id takes
precedence.
required: false
type: str
ip_block_id:
description: The ID of the IpAddressBlock from which the subnet
is to be created
type: str
size:
description:
- Represents the size or number of IP addresses in the
subnet
- The size parameter is required for subnet creation. It
must be specified during creation but cannot be changed
later.
type: int
state:
choices:
- present
- absent
description: "State can be either 'present' or 'absent'.
'present' is used to create or update resource.
'absent' is used to delete resource."
required: true
tags:
description: Opaque identifiers meaningful to the API user.
type: dict
suboptions:
scope:
description: Tag scope.
required: true
type: str
tag:
description: Tag value.
required: true
type: str
pool_static_subnets:
type: list
element: dict
description: Specify the IP Pool Static Subnets that need to be
created, updated, or deleted as a list of dict in
this section
suboptions:
allocation_ranges:
description: A collection of IPv4 or IPv6 IP Pool Ranges.
type: list
element: dict
suboptions:
start:
description: The start IP Address of the IP Range.
type: str
required: true
end:
description: The end IP Address of the IP Range.
type: str
required: true
cidr:
description: Subnet representation is a network address
and prefix length
type: str
required: true
description:
description: Resource description.
type: str
display_name:
description:
- Display name.
- If resource ID is not specified, display_name will be
used as ID.
required: false
type: str
dns_nameservers:
description: The collection of upto 3 DNS servers
for the subnet.
type: list
element: str
dns_suffix:
description: The DNS suffix for the DNS server.
type: str
do_wait_till_create:
type: bool
default: false
description: Can be used to wait for the realization of
subresource before the request to create the next
resource is sent to the Manager
gateway_ip:
description: The default gateway address on a
layer-3 router.
type: str
id:
description: The id of the Policy IP Pool Block Subnet.
required: false
type: str
state:
choices:
- present
- absent
description: "State can be either 'present' or 'absent'.
'present' is used to create or update resource.
'absent' is used to delete resource."
tags:
description: Opaque identifiers meaningful to the API user.
type: dict
suboptions:
scope:
description: Tag scope.
required: true
type: str
tag:
description: Tag value.
required: true
type: str
'''
EXAMPLES = '''
- name: create IP Pool
nsxt_policy_ip_pool:
hostname: "10.10.10.10"
nsx_cert_path: /root/com.vmware.nsx.ncp/nsx.crt
nsx_key_path: /root/com.vmware.nsx.ncp/nsx.key
validate_certs: False
id: test-ip-pool
display_name: test-ip-pool
state: "absent"
tags:
- tag: "a"
scope: "b"
pool_block_subnets:
- id: test-ip-subnet-1
state: present
ip_block_id: "test-ip-blk-1"
size: 16
- display_name: test-ip-subnet-2
state: present
ip_block_id: "test-ip-blk-1"
size: 16
- display_name: test-ip-subnet-3
state: present
ip_block_id: "test-ip-blk-1"
size: 8
pool_static_subnets:
- id: test-ip-static-subnet-1
state: present
allocation_ranges:
- start: '172.16.31.10'
end: '172.16.31.10'
- start: '172.16.31.10'
end: '172.16.58.3'
cidr: '172.16.17.32/26'
- display_name: test-ip-static-subnet-2
state: present
allocation_ranges:
- start: '172.16.17.32'
end: '172.16.58.3'
- start: '172.16.58.3'
end: '172.16.31.10'
cidr: '192.168.3.11/26'
'''
RETURN = '''# '''
import json
import time
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.nsxt_base_resource import NSXTBaseRealizableResource
from ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.nsxt_resource_urls import (
IP_ADDRESS_POOL_SUBNET_URL, IP_BLOCK_URL, IP_POOL_URL)
from ansible.module_utils._text import to_native
class NSXTIpPool(NSXTBaseRealizableResource):
@staticmethod
def get_resource_spec():
ip_pool_arg_spec = {}
return ip_pool_arg_spec
@staticmethod
def get_resource_base_url(baseline_args=None):
return IP_POOL_URL
def update_parent_info(self, parent_info):
parent_info["ip_pool_id"] = self.id
class NSXTIpAddressPoolBlockSubnet(NSXTBaseRealizableResource):
def get_spec_identifier(self):
return (NSXTIpPool.NSXTIpAddressPoolBlockSubnet.
get_spec_identifier())
@classmethod
def get_spec_identifier(cls):
return "pool_block_subnets"
@staticmethod
def get_resource_spec():
ip_addr_pool_blk_subnet_arg_spec = {}
ip_addr_pool_blk_subnet_arg_spec.update(
ip_block_id=dict(
required=False,
type='str'
),
ip_block_display_name=dict(
required=False,
type='str'
),
auto_assign_gateway=dict(
required=False,
type='bool'
),
size=dict(
required=True,
type='int'
),
start_ip=dict(
required=False,
type='str'
),
)
return ip_addr_pool_blk_subnet_arg_spec
@staticmethod
def get_resource_base_url(parent_info):
return IP_ADDRESS_POOL_SUBNET_URL.format(
parent_info["ip_pool_id"]
)
def update_resource_params(self, nsx_resource_params):
# ip_block is a required attr
ip_block_id = self.get_id_using_attr_name_else_fail(
"ip_block", nsx_resource_params,
IP_BLOCK_URL, "IP Block")
nsx_resource_params["ip_block_path"] = (
IP_BLOCK_URL + "/" + ip_block_id)
nsx_resource_params["resource_type"] = "IpAddressPoolBlockSubnet"
class NSXTIpAddressPoolStaticSubnet(NSXTBaseRealizableResource):
def get_spec_identifier(self):
return (NSXTIpPool.NSXTIpAddressPoolStaticSubnet.
get_spec_identifier())
@classmethod
def get_spec_identifier(cls):
return "pool_static_subnets"
@staticmethod
def get_resource_spec():
ip_addr_pool_static_subnet_arg_spec = {}
ip_addr_pool_static_subnet_arg_spec.update(
auto_assign_gateway=dict(
required=False,
type='bool'
),
allocation_ranges=dict(
required=True,
elements='dict',
type='list',
options=dict(
start=dict(
required=True,
type='str'
),
end=dict(
required=True,
type='str'
),
)
),
cidr=dict(
required=True,
type='str'
),
dns_nameservers=dict(
required=False,
elements='str',
type='list'
),
dns_suffix=dict(
required=False,
type='str'
),
gateway_ip=dict(
required=False,
type='str'
),
)
return ip_addr_pool_static_subnet_arg_spec
@staticmethod
def get_resource_base_url(parent_info):
return IP_ADDRESS_POOL_SUBNET_URL.format(
parent_info["ip_pool_id"]
)
def update_resource_params(self, nsx_resource_params):
nsx_resource_params["resource_type"] = "IpAddressPoolStaticSubnet"
if __name__ == '__main__':
ip_pool = NSXTIpPool()
ip_pool.realize()
|
[
"ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.nsxt_resource_urls.IP_ADDRESS_POOL_SUBNET_URL.format"
] |
[((10940, 11000), 'ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.nsxt_resource_urls.IP_ADDRESS_POOL_SUBNET_URL.format', 'IP_ADDRESS_POOL_SUBNET_URL.format', (["parent_info['ip_pool_id']"], {}), "(parent_info['ip_pool_id'])\n", (10973, 11000), False, 'from ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.nsxt_resource_urls import IP_ADDRESS_POOL_SUBNET_URL, IP_BLOCK_URL, IP_POOL_URL\n'), ((13262, 13322), 'ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.nsxt_resource_urls.IP_ADDRESS_POOL_SUBNET_URL.format', 'IP_ADDRESS_POOL_SUBNET_URL.format', (["parent_info['ip_pool_id']"], {}), "(parent_info['ip_pool_id'])\n", (13295, 13322), False, 'from ansible_collections.vmware.ansible_for_nsxt.plugins.module_utils.nsxt_resource_urls import IP_ADDRESS_POOL_SUBNET_URL, IP_BLOCK_URL, IP_POOL_URL\n')]
|
import matplotlib.pyplot as plt
def plot_train_data_errors(images, predictions, labels, show_predictions = True):
'''
Function that plots images, and overlays model predictions with true labels
The function takes a list of 25 images
'''
fig = plt.figure(figsize = (15,15))
for i,image in enumerate(images):
ax = plt.subplot(5,5,i+1)
ax.imshow(image, cmap = 'gray')
#If show predictions, then overlay the scatter on top of the labels
if show_predictions:
ax.scatter(*predictions[i].reshape(15,2).T, color = 'r', label = 'predictions - arg1')
ax.scatter(*labels[i].reshape(15,2).T, color = 'g', label = 'labels - arg2')
plt.legend()
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.legend"
] |
[((271, 299), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 15)'}), '(figsize=(15, 15))\n', (281, 299), True, 'import matplotlib.pyplot as plt\n'), ((708, 720), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (718, 720), True, 'import matplotlib.pyplot as plt\n'), ((353, 377), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(5)', '(5)', '(i + 1)'], {}), '(5, 5, i + 1)\n', (364, 377), True, 'import matplotlib.pyplot as plt\n')]
|
import os
from api_gateway_service.api import app
def main():
port = int(os.environ.get('PORT', 5000))
login_url = 'http://localhost:{}/login'.format(port)
print('If you run locally, browse to', login_url)
host = '0.0.0.0'
app.run(host=host, port=port)
if __name__ == "__main__":
main()
|
[
"os.environ.get",
"api_gateway_service.api.app.run"
] |
[((244, 273), 'api_gateway_service.api.app.run', 'app.run', ([], {'host': 'host', 'port': 'port'}), '(host=host, port=port)\n', (251, 273), False, 'from api_gateway_service.api import app\n'), ((78, 106), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(5000)'], {}), "('PORT', 5000)\n", (92, 106), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-10 12:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('questions', '0020_data_migration'),
]
operations = [
migrations.AddField(
model_name='questionentity',
name='is_collection',
field=models.BooleanField(default=False, help_text='Designates whether this question/questionset is a collection.', verbose_name='is collection'),
),
]
|
[
"django.db.models.BooleanField"
] |
[((413, 561), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Designates whether this question/questionset is a collection."""', 'verbose_name': '"""is collection"""'}), "(default=False, help_text=\n 'Designates whether this question/questionset is a collection.',\n verbose_name='is collection')\n", (432, 561), False, 'from django.db import migrations, models\n')]
|
import torch
from typing import Dict
# @torch.jit.script
# def myfn(x, mask: Dict[int, int]):
# if x.dim() == 1:
# return torch.ones(10)
# else:
# return torch.zeros(10)
# inp1 = torch.randn(1)
# inp2 = torch.randn(())
# mask: Dict[int, int] = {}
# mask[0] = 1
# mask[1] = 2
# print(myfn(inp1, mask))
# print(myfn(inp2, mask))
# traced_fn = torch.jit.trace(myfn, (inp1, mask))
# # traced_fn = torch.jit.trace(myfn, inp1)
# print(traced_fn.graph)
# print(traced_fn.code)
# print(traced_fn(inp1))
# print(traced_fn(inp2))
class MyScriptModule(torch.nn.Module):
def __init__(self):
super.__init__()
def myfn(self, x):
if x.dim() == 1:
return torch.ones(10)
else:
return torch.zeros(10)
def foward(self, x):
return self.my(x)
class MyModule(torch.nn.Module):
def __init__(self):
super.__init__()
self.net = torch.nn.Sequential(torch.nn.Linear(10, 256))
self.scriptModule = MyScriptModule()
def foward(self, x):
x = self.scriptModule(x)
return self.net(x)
# x1 = torch.randn(1)
# x2 = torch.randn(())
# model = MyModule()
# y1 = model(x1)
# y2 = model(x2)
class MyModule2(torch.nn.Module):
def __init__(self):
super().__init__()
self.net = torch.nn.Sequential(torch.nn.Linear(512, 256),
torch.nn.PReLU(),
torch.nn.Linear(256, 128),
torch.nn.LogSoftmax(dim=1))
def forward(self, x):
return self.net(x)
model2 = MyModule2()
x = torch.randn([1, 512])
y = model2(x)
print(y)
t1 = torch.jit.script(model2)
print(type(t1))
print(t1.graph)
print(t1.code)
t2 = torch.jit.trace(model2, x)
print(type(t2))
print(t2.graph)
print(t2.code)
|
[
"torch.ones",
"torch.nn.PReLU",
"torch.jit.trace",
"torch.jit.script",
"torch.nn.LogSoftmax",
"torch.randn",
"torch.nn.Linear",
"torch.zeros"
] |
[((1629, 1650), 'torch.randn', 'torch.randn', (['[1, 512]'], {}), '([1, 512])\n', (1640, 1650), False, 'import torch\n'), ((1679, 1703), 'torch.jit.script', 'torch.jit.script', (['model2'], {}), '(model2)\n', (1695, 1703), False, 'import torch\n'), ((1756, 1782), 'torch.jit.trace', 'torch.jit.trace', (['model2', 'x'], {}), '(model2, x)\n', (1771, 1782), False, 'import torch\n'), ((706, 720), 'torch.ones', 'torch.ones', (['(10)'], {}), '(10)\n', (716, 720), False, 'import torch\n'), ((754, 769), 'torch.zeros', 'torch.zeros', (['(10)'], {}), '(10)\n', (765, 769), False, 'import torch\n'), ((945, 969), 'torch.nn.Linear', 'torch.nn.Linear', (['(10)', '(256)'], {}), '(10, 256)\n', (960, 969), False, 'import torch\n'), ((1331, 1356), 'torch.nn.Linear', 'torch.nn.Linear', (['(512)', '(256)'], {}), '(512, 256)\n', (1346, 1356), False, 'import torch\n'), ((1397, 1413), 'torch.nn.PReLU', 'torch.nn.PReLU', ([], {}), '()\n', (1411, 1413), False, 'import torch\n'), ((1454, 1479), 'torch.nn.Linear', 'torch.nn.Linear', (['(256)', '(128)'], {}), '(256, 128)\n', (1469, 1479), False, 'import torch\n'), ((1520, 1546), 'torch.nn.LogSoftmax', 'torch.nn.LogSoftmax', ([], {'dim': '(1)'}), '(dim=1)\n', (1539, 1546), False, 'import torch\n')]
|
from django.db import models
# Create your models here.
class Order(models.Model):
category = (('Confirmed', 'Confirmed'),
('Preparing', 'Preparing'),
('Out for Delivery', 'Out for Delivery'),
('Delivered', 'Delivered'),
('Cancelled', 'Cancelled'),
)
restaurant = models.ForeignKey(
"restaurants.Restaurant", null=True, on_delete=models.SET_NULL)
customer = models.ForeignKey(
"customers.Customer", null=True, on_delete=models.SET_NULL)
status = models.CharField(
max_length=60, choices=category, default=category[0][0])
order_activity = models.BooleanField(default=True)
class OrderList(models.Model):
order = models.ForeignKey(
"Order", null=True, on_delete=models.CASCADE)
dish = models.ForeignKey(
"restaurants.Menu", null=True, on_delete=models.SET_NULL)
quantity = models.IntegerField(default=1)
|
[
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.CharField",
"django.db.models.BooleanField"
] |
[((356, 442), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""restaurants.Restaurant"""'], {'null': '(True)', 'on_delete': 'models.SET_NULL'}), "('restaurants.Restaurant', null=True, on_delete=models.\n SET_NULL)\n", (373, 442), False, 'from django.db import models\n'), ((462, 539), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""customers.Customer"""'], {'null': '(True)', 'on_delete': 'models.SET_NULL'}), "('customers.Customer', null=True, on_delete=models.SET_NULL)\n", (479, 539), False, 'from django.db import models\n'), ((562, 635), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)', 'choices': 'category', 'default': 'category[0][0]'}), '(max_length=60, choices=category, default=category[0][0])\n', (578, 635), False, 'from django.db import models\n'), ((666, 699), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (685, 699), False, 'from django.db import models\n'), ((745, 808), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Order"""'], {'null': '(True)', 'on_delete': 'models.CASCADE'}), "('Order', null=True, on_delete=models.CASCADE)\n", (762, 808), False, 'from django.db import models\n'), ((829, 904), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""restaurants.Menu"""'], {'null': '(True)', 'on_delete': 'models.SET_NULL'}), "('restaurants.Menu', null=True, on_delete=models.SET_NULL)\n", (846, 904), False, 'from django.db import models\n'), ((929, 959), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (948, 959), False, 'from django.db import models\n')]
|
#!/usr/bin/env python
# Copyright 2019 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START storage_generate_encryption_key]
import base64
import os
def generate_encryption_key():
"""Generates a 256 bit (32 byte) AES encryption key and prints the
base64 representation.
This is included for demonstration purposes. You should generate your own
key. Please remember that encryption keys should be handled with a
comprehensive security policy.
"""
key = os.urandom(32)
encoded_key = base64.b64encode(key).decode("utf-8")
print(f"Base 64 encoded encryption key: {encoded_key}")
# [END storage_generate_encryption_key]
if __name__ == "__main__":
generate_encryption_key()
|
[
"base64.b64encode",
"os.urandom"
] |
[((999, 1013), 'os.urandom', 'os.urandom', (['(32)'], {}), '(32)\n', (1009, 1013), False, 'import os\n'), ((1032, 1053), 'base64.b64encode', 'base64.b64encode', (['key'], {}), '(key)\n', (1048, 1053), False, 'import base64\n')]
|
from SPARQLWrapper import SPARQLWrapper, JSON, POST
import json
import requests
def test_wrapper_select(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files)
sparql = SPARQLWrapper(endpoint=repo_uri)
query = "select distinct ?class where { [] a ?class } order by ?class"
sparql.setQuery(query)
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
expected = json.loads(endpoint.graph.query(query).serialize(format='json'))
assert results == expected
def test_wrapper_ask(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files)
sparql = SPARQLWrapper(endpoint=repo_uri)
query = "ASK { ?instance a ?class }"
sparql.setQuery(query)
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
expected = json.loads(endpoint.graph.query(query).serialize(format='json'))
assert results == expected
def test_wrapper_update(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files) # noqa: F841
sparql = SPARQLWrapper(endpoint=repo_uri)
sparql.setReturnFormat(JSON)
sparql.setMethod(POST)
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
sparql.setQuery(query)
results = sparql.query().convert()
assert results['results']['bindings'][0]['num']['value'] == '1'
update = "insert { ?instance a ?super } " \
"where { ?instance a/<http://www.w3.org/2000/01/rdf-schema#subClassOf> ?super }"
sparql.setQuery(update)
results = sparql.query()
assert results.info()['status'] == '200'
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
sparql.setQuery(query)
results = sparql.query().convert()
assert results['results']['bindings'][0]['num']['value'] == '3'
def test_request_get(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files)
query = "select distinct ?class where { [] a ?class } order by ?class"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
results = response.text
expected = endpoint.graph.query(query).serialize(format='json').decode('utf-8')
assert results == expected
query = 'construct { <http://example.com/_t1> ?p ?o } where { <http://example.com/_t1> ?p ?o }'
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'text/turtle'})
results = response.text
expected = endpoint.graph.query(query).serialize(format='turtle').decode('utf-8')
assert results == expected
def test_multiple_graphs(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = [{'http://example.com/graph/upper': 'tests/upper_ontology.ttl',
'http://example.com/graph/domain': 'tests/domain_ontology.ttl',
'http://example.com/graph/instance': 'tests/instance_data.ttl'}]
endpoint = sparql_endpoint(repo_uri, rdf_files) # noqa: F841
query = "select ?graph (count(?s) as ?size) where { graph ?graph { ?s ?p ?o } } group by ?graph"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
results = dict(
(row['graph']['value'], row['size']['value'])
for row in response.json()['results']['bindings'])
expected = {'http://example.com/graph/upper': '18',
'http://example.com/graph/domain': '21',
'http://example.com/graph/instance': '15'}
assert results == expected
def test_multiple_graphs_inline(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = [{'http://example.com/graph/upper': open('tests/upper_ontology.ttl', 'r').read(),
'http://example.com/graph/domain': open('tests/domain_ontology.ttl', 'r').read(),
'http://example.com/graph/instance': open('tests/instance_data.ttl', 'r').read()}]
endpoint = sparql_endpoint(repo_uri, rdf_files) # noqa: F841
query = "select ?graph (count(?s) as ?size) where { graph ?graph { ?s ?p ?o } } group by ?graph"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
results = dict(
(row['graph']['value'], row['size']['value'])
for row in response.json()['results']['bindings'])
expected = {'http://example.com/graph/upper': '18',
'http://example.com/graph/domain': '21',
'http://example.com/graph/instance': '15'}
assert results == expected
def test_request_update_get(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files) # noqa: F841
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
assert response.json()['results']['bindings'][0]['num']['value'] == '1'
update = "insert { ?instance a ?super } " \
"where { ?instance a/<http://www.w3.org/2000/01/rdf-schema#subClassOf> ?super }"
response = requests.get(url=repo_uri, params={'update': update})
assert response.status_code == 200
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
assert response.json()['results']['bindings'][0]['num']['value'] == '3'
def test_request_update_post(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files) # noqa: F841
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
response = requests.post(url=repo_uri, data={'query': query}, headers={'Accept': 'application/json'})
assert response.json()['results']['bindings'][0]['num']['value'] == '1'
update = "insert { ?instance a ?super } " \
"where { ?instance a/<http://www.w3.org/2000/01/rdf-schema#subClassOf> ?super }"
response = requests.post(url=repo_uri, data={'update': update})
assert response.status_code == 200
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
assert response.json()['results']['bindings'][0]['num']['value'] == '3'
def test_request_update_post_raw(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files) # noqa: F841
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
assert response.json()['results']['bindings'][0]['num']['value'] == '1'
update = "insert { ?instance a ?super } " \
"where { ?instance a/<http://www.w3.org/2000/01/rdf-schema#subClassOf> ?super }"
response = requests.post(url=repo_uri, data=update.encode('utf-8'),
headers={'Content-Type': 'application/sparql-update'})
assert response.status_code == 200
query = "select (count(?person) as ?num) where { ?person a <http://example.com/Person> }"
response = requests.get(url=repo_uri, params={'query': query}, headers={'Accept': 'application/json'})
assert response.json()['results']['bindings'][0]['num']['value'] == '3'
def test_request_post(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files)
query = "select distinct ?class where { [] a ?class } order by ?class"
response = requests.post(url=repo_uri, data={'query': query}, headers={'Accept': 'application/json'})
results = response.text
expected = endpoint.graph.query(query).serialize(format='json').decode('utf-8')
assert results == expected
def test_request_post_raw(sparql_endpoint):
repo_uri = 'https://my.rdfdb.com/repo/sparql'
rdf_files = ['tests/upper_ontology.ttl',
'tests/domain_ontology.ttl',
'tests/instance_data.ttl']
endpoint = sparql_endpoint(repo_uri, rdf_files)
query = "select distinct ?class where { [] a ?class } order by ?class"
expected = endpoint.graph.query(query).serialize(format='json').decode('utf-8')
response = requests.post(url=repo_uri, data=query.encode('utf-8'),
headers={'Content-Type': 'application/sparql-query',
'Accept': 'application/json'})
results = response.text
assert results == expected
|
[
"requests.post",
"SPARQLWrapper.SPARQLWrapper",
"requests.get"
] |
[((374, 406), 'SPARQLWrapper.SPARQLWrapper', 'SPARQLWrapper', ([], {'endpoint': 'repo_uri'}), '(endpoint=repo_uri)\n', (387, 406), False, 'from SPARQLWrapper import SPARQLWrapper, JSON, POST\n'), ((984, 1016), 'SPARQLWrapper.SPARQLWrapper', 'SPARQLWrapper', ([], {'endpoint': 'repo_uri'}), '(endpoint=repo_uri)\n', (997, 1016), False, 'from SPARQLWrapper import SPARQLWrapper, JSON, POST\n'), ((1578, 1610), 'SPARQLWrapper.SPARQLWrapper', 'SPARQLWrapper', ([], {'endpoint': 'repo_uri'}), '(endpoint=repo_uri)\n', (1591, 1610), False, 'from SPARQLWrapper import SPARQLWrapper, JSON, POST\n'), ((2742, 2837), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (2754, 2837), False, 'import requests\n'), ((3094, 3184), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'text/turtle'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'text/turtle'})\n", (3106, 3184), False, 'import requests\n'), ((3849, 3944), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (3861, 3944), False, 'import requests\n'), ((4861, 4956), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (4873, 4956), False, 'import requests\n'), ((5698, 5793), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (5710, 5793), False, 'import requests\n'), ((6024, 6077), 
'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'update': update}"}), "(url=repo_uri, params={'update': update})\n", (6036, 6077), False, 'import requests\n'), ((6227, 6322), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (6239, 6322), False, 'import requests\n'), ((6804, 6898), 'requests.post', 'requests.post', ([], {'url': 'repo_uri', 'data': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, data={'query': query}, headers={'Accept':\n 'application/json'})\n", (6817, 6898), False, 'import requests\n'), ((7129, 7181), 'requests.post', 'requests.post', ([], {'url': 'repo_uri', 'data': "{'update': update}"}), "(url=repo_uri, data={'update': update})\n", (7142, 7181), False, 'import requests\n'), ((7331, 7426), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (7343, 7426), False, 'import requests\n'), ((7912, 8007), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (7924, 8007), False, 'import requests\n'), ((8528, 8623), 'requests.get', 'requests.get', ([], {'url': 'repo_uri', 'params': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, params={'query': query}, headers={'Accept':\n 'application/json'})\n", (8540, 8623), False, 'import requests\n'), ((9065, 9159), 'requests.post', 'requests.post', ([], {'url': 'repo_uri', 'data': "{'query': query}", 'headers': "{'Accept': 'application/json'}"}), "(url=repo_uri, data={'query': query}, headers={'Accept':\n 
'application/json'})\n", (9078, 9159), False, 'import requests\n')]
|
import os
import sys
import numpy as np
from math import floor
def splitset(dataset, parts):
"""Partition data into "parts" partitions"""
n = dataset.shape[0]
local_n = floor(n/parts)
result = []
for i in range(parts):
result.append(dataset[i*local_n: (i+1)*local_n])
return np.array(result)
if __name__ == '__main__':
if len(sys.argv) < 2:
nr_of_datasets = 10
else:
nr_of_datasets = int(sys.argv[1])
package = np.load("data/mnist.npz")
data = {}
for key, val in package.items():
data[key] = splitset(val, nr_of_datasets)
print("CREATING {} PARTITIONS INSIDE {}/data/clients".format(nr_of_datasets, os.getcwd()))
if not os.path.exists('data/clients'):
os.mkdir('data/clients')
for i in range(nr_of_datasets):
if not os.path.exists('data/clients/{}'.format(str(i))):
os.mkdir('data/clients/{}'.format(str(i)))
np.savez('data/clients/{}'.format(str(i)) + '/mnist.npz',
x_train=data['x_train'][i],
y_train=data['y_train'][i],
x_test=data['x_test'][i],
y_test=data['y_test'][i])
print("DONE")
|
[
"os.mkdir",
"numpy.load",
"os.getcwd",
"math.floor",
"os.path.exists",
"numpy.array"
] |
[((182, 198), 'math.floor', 'floor', (['(n / parts)'], {}), '(n / parts)\n', (187, 198), False, 'from math import floor\n'), ((308, 324), 'numpy.array', 'np.array', (['result'], {}), '(result)\n', (316, 324), True, 'import numpy as np\n'), ((476, 501), 'numpy.load', 'np.load', (['"""data/mnist.npz"""'], {}), "('data/mnist.npz')\n", (483, 501), True, 'import numpy as np\n'), ((710, 740), 'os.path.exists', 'os.path.exists', (['"""data/clients"""'], {}), "('data/clients')\n", (724, 740), False, 'import os\n'), ((750, 774), 'os.mkdir', 'os.mkdir', (['"""data/clients"""'], {}), "('data/clients')\n", (758, 774), False, 'import os\n'), ((685, 696), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (694, 696), False, 'import os\n')]
|
import numpy as np
def approximate_error(motif):
"""Calculate approximate error"""
pwm = motif.pwm
bases = list(pwm.keys())
n = sum(motif.counts[bases[0]])
approx_error = (len(bases)-1)/(2 * np.log(2) * n)
return approx_error
def exact_error(motif):
"""Calculate exact error, using multinomial(na,nc,ng,nt)"""
## Super Slow. O(n^3)
pwm = motif.pwm
bases = pwm.keys()
na = sum(motif.counts['A'])
n = na
nc = 0
ng = 0
nt = 0
done = False
exact_error = 0
while not done:
print (na,nc,ng,nt)
exact_error += sum([-p*np.log2(p) for p in [na/n, nc/n, ng/n, nt/n]])
if nt<=0:
## iterate inner loop
if ng > 0:
## g => t
ng = ng - 1
nt = nt + 1
elif nc > 0:
## c -> g
nc = nc - 1;
ng = ng + 1;
else:
## a->c
na = na - 1
nc = nc + 1
else:
if ng > 0:
## g => t
ng = ng - 1
nt = nt + 1
elif nc>0:
## c => g; all t -> g
nc = nc - 1
ng = nt + 1
nt = 0
elif na>0:
## a => c; all g,t -> c
nc = nt + 1
na = na - 1
nt = 0
else:
done = True
return exact_error
def calc_info_matrix(motif, correction_type='approx'):
"""Calculate information matrix with small sample correction"""
pwm = motif.pwm
bases = pwm.keys()
if correction_type=='approx':
error = approximate_error(motif)
else:
error = exact_error(motif)
info_matrix = [2-error+sum([pwm[b][l]*np.nan_to_num(np.log2(pwm[b][l])) for b in bases]) for l in range(0, len(motif))]
return info_matrix
def calc_relative_information(motif, correction_type='approx'):
"""Calculate relative information matrix"""
pwm = motif.pwm
bases = pwm.keys()
if correction_type=='approx':
info_matrix = calc_info_matrix(motif)
else:
info_matrix = calc_info_matrix(motif, 'exact')
relative_info = {base: [prob*info for prob,info in zip(pwm[base], info_matrix)] for base in bases}
return relative_info
|
[
"numpy.log2",
"numpy.log"
] |
[((213, 222), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (219, 222), True, 'import numpy as np\n'), ((603, 613), 'numpy.log2', 'np.log2', (['p'], {}), '(p)\n', (610, 613), True, 'import numpy as np\n'), ((1828, 1846), 'numpy.log2', 'np.log2', (['pwm[b][l]'], {}), '(pwm[b][l])\n', (1835, 1846), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
"""
Threshold Graphs
================
"""
import pytest
import networkx as nx
import networkx.algorithms.threshold as nxt
from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic
from networkx.testing import almost_equal
cnlti = nx.convert_node_labels_to_integers
class TestGeneratorThreshold():
def test_threshold_sequence_graph_test(self):
G = nx.star_graph(10)
assert nxt.is_threshold_graph(G)
assert nxt.is_threshold_sequence(list(d for n, d in G.degree()))
G = nx.complete_graph(10)
assert nxt.is_threshold_graph(G)
assert nxt.is_threshold_sequence(list(d for n, d in G.degree()))
deg = [3, 2, 2, 1, 1, 1]
assert not nxt.is_threshold_sequence(deg)
deg = [3, 2, 2, 1]
assert nxt.is_threshold_sequence(deg)
G = nx.generators.havel_hakimi_graph(deg)
assert nxt.is_threshold_graph(G)
def test_creation_sequences(self):
deg = [3, 2, 2, 1]
G = nx.generators.havel_hakimi_graph(deg)
with pytest.raises(ValueError):
nxt.creation_sequence(deg, with_labels=True, compact=True)
cs0 = nxt.creation_sequence(deg)
H0 = nxt.threshold_graph(cs0)
assert ''.join(cs0) == 'ddid'
cs1 = nxt.creation_sequence(deg, with_labels=True)
H1 = nxt.threshold_graph(cs1)
assert cs1 == [(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')]
cs2 = nxt.creation_sequence(deg, compact=True)
H2 = nxt.threshold_graph(cs2)
assert cs2 == [2, 1, 1]
assert ''.join(nxt.uncompact(cs2)) == 'ddid'
assert graph_could_be_isomorphic(H0, G)
assert graph_could_be_isomorphic(H0, H1)
assert graph_could_be_isomorphic(H0, H2)
def test_make_compact(self):
assert nxt.make_compact(['d', 'd', 'd', 'i', 'd', 'd']) == [3, 1, 2]
assert nxt.make_compact([3, 1, 2]) == [3, 1, 2]
assert pytest.raises(TypeError, nxt.make_compact, [3., 1., 2.])
def test_uncompact(self):
assert nxt.uncompact([3, 1, 2]) == ['d', 'd', 'd', 'i', 'd', 'd']
assert nxt.uncompact(['d', 'd', 'i', 'd']) == ['d', 'd', 'i', 'd']
assert (nxt.uncompact(nxt.uncompact([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')])) ==
nxt.uncompact([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')]))
assert pytest.raises(TypeError, nxt.uncompact, [3., 1., 2.])
def test_creation_sequence_to_weights(self):
assert nxt.creation_sequence_to_weights([3, 1, 2]) == [0.5, 0.5, 0.5, 0.25, 0.75, 0.75]
assert pytest.raises(TypeError, nxt.creation_sequence_to_weights, [3., 1., 2.])
def test_weights_to_creation_sequence(self):
deg = [3, 2, 2, 1]
with pytest.raises(ValueError):
nxt.weights_to_creation_sequence(deg, with_labels=True, compact=True)
assert (nxt.weights_to_creation_sequence(deg, with_labels=True) ==
[(3, 'd'), (1, 'd'), (2, 'd'), (0, 'd')])
assert nxt.weights_to_creation_sequence(deg, compact=True) == [4]
def test_find_alternating_4_cycle(self):
G = nx.Graph()
G.add_edge(1, 2)
assert not nxt.find_alternating_4_cycle(G)
def test_shortest_path(self):
deg = [3, 2, 2, 1]
G = nx.generators.havel_hakimi_graph(deg)
cs1 = nxt.creation_sequence(deg, with_labels=True)
for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3),
(3, 1), (1, 2), (2, 3)]:
assert (nxt.shortest_path(cs1, n, m) ==
nx.shortest_path(G, n, m))
spl = nxt.shortest_path_length(cs1, 3)
spl2 = nxt.shortest_path_length([t for v, t in cs1], 2)
assert spl == spl2
spld = {}
for j, pl in enumerate(spl):
n = cs1[j][0]
spld[n] = pl
assert spld == nx.single_source_shortest_path_length(G, 3)
assert nxt.shortest_path(['d', 'd', 'd', 'i', 'd', 'd'], 1, 2) == [1, 2]
assert nxt.shortest_path([3, 1, 2], 1, 2) == [1, 2]
assert pytest.raises(TypeError, nxt.shortest_path, [3., 1., 2.], 1, 2)
assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 'a', 2)
assert pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, 'b')
assert nxt.shortest_path([3, 1, 2], 1, 1) == [1]
def test_shortest_path_length(self):
assert nxt.shortest_path_length([3, 1, 2], 1) == [1, 0, 1, 2, 1, 1]
assert (nxt.shortest_path_length(['d', 'd', 'd', 'i', 'd', 'd'], 1) ==
[1, 0, 1, 2, 1, 1])
assert (nxt.shortest_path_length(('d', 'd', 'd', 'i', 'd', 'd'), 1) ==
[1, 0, 1, 2, 1, 1])
assert pytest.raises(TypeError, nxt.shortest_path, [3., 1., 2.], 1)
def random_threshold_sequence(self):
assert len(nxt.random_threshold_sequence(10, 0.5)) == 10
assert (nxt.random_threshold_sequence(10, 0.5, seed=42) ==
['d', 'i', 'd', 'd', 'd', 'i', 'i', 'i', 'd', 'd'])
assert pytest.raises(ValueError, nxt.random_threshold_sequence, 10, 1.5)
def test_right_d_threshold_sequence(self):
assert nxt.right_d_threshold_sequence(3, 2) == ['d', 'i', 'd']
assert pytest.raises(ValueError, nxt.right_d_threshold_sequence, 2, 3)
def test_left_d_threshold_sequence(self):
assert nxt.left_d_threshold_sequence(3, 2) == ['d', 'i', 'd']
assert pytest.raises(ValueError, nxt.left_d_threshold_sequence, 2, 3)
def test_weights_thresholds(self):
wseq = [3, 4, 3, 3, 5, 6, 5, 4, 5, 6]
cs = nxt.weights_to_creation_sequence(wseq, threshold=10)
wseq = nxt.creation_sequence_to_weights(cs)
cs2 = nxt.weights_to_creation_sequence(wseq)
assert cs == cs2
wseq = nxt.creation_sequence_to_weights(nxt.uncompact([3, 1, 2, 3, 3, 2, 3]))
assert (wseq ==
[s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]])
wseq = nxt.creation_sequence_to_weights([3, 1, 2, 3, 3, 2, 3])
assert (wseq ==
[s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7]])
wseq = nxt.creation_sequence_to_weights(list(enumerate('ddidiiidididi')))
assert (wseq ==
[s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]])
wseq = nxt.creation_sequence_to_weights('ddidiiidididi')
assert (wseq ==
[s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]])
wseq = nxt.creation_sequence_to_weights('ddidiiidididid')
ws = [s / float(12) for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]]
assert sum([abs(c - d) for c, d in zip(wseq, ws)]) < 1e-14
def test_finding_routines(self):
G = nx.Graph({1: [2], 2: [3], 3: [4], 4: [5], 5: [6]})
G.add_edge(2, 4)
G.add_edge(2, 5)
G.add_edge(2, 7)
G.add_edge(3, 6)
G.add_edge(4, 6)
# Alternating 4 cycle
assert nxt.find_alternating_4_cycle(G) == [1, 2, 3, 6]
# Threshold graph
TG = nxt.find_threshold_graph(G)
assert nxt.is_threshold_graph(TG)
assert sorted(TG.nodes()) == [1, 2, 3, 4, 5, 7]
cs = nxt.creation_sequence(dict(TG.degree()), with_labels=True)
assert nxt.find_creation_sequence(G) == cs
def test_fast_versions_properties_threshold_graphs(self):
cs = 'ddiiddid'
G = nxt.threshold_graph(cs)
assert nxt.density('ddiiddid') == nx.density(G)
assert (sorted(nxt.degree_sequence(cs)) ==
sorted(d for n, d in G.degree()))
ts = nxt.triangle_sequence(cs)
assert ts == list(nx.triangles(G).values())
assert sum(ts) // 3 == nxt.triangles(cs)
c1 = nxt.cluster_sequence(cs)
c2 = list(nx.clustering(G).values())
assert almost_equal(sum([abs(c - d) for c, d in zip(c1, c2)]), 0)
b1 = nx.betweenness_centrality(G).values()
b2 = nxt.betweenness_sequence(cs)
assert sum([abs(c - d) for c, d in zip(b1, b2)]) < 1e-14
assert nxt.eigenvalues(cs) == [0, 1, 3, 3, 5, 7, 7, 8]
# Degree Correlation
assert abs(nxt.degree_correlation(cs) + 0.593038821954) < 1e-12
assert nxt.degree_correlation('diiiddi') == -0.8
assert nxt.degree_correlation('did') == -1.0
assert nxt.degree_correlation('ddd') == 1.0
assert nxt.eigenvalues('dddiii') == [0, 0, 0, 0, 3, 3]
assert nxt.eigenvalues('dddiiid') == [0, 1, 1, 1, 4, 4, 7]
def test_tg_creation_routines(self):
s = nxt.left_d_threshold_sequence(5, 7)
s = nxt.right_d_threshold_sequence(5, 7)
s1 = nxt.swap_d(s, 1.0, 1.0)
s1 = nxt.swap_d(s, 1.0, 1.0, seed=1)
def test_eigenvectors(self):
np = pytest.importorskip('numpy')
eigenval = np.linalg.eigvals
scipy = pytest.importorskip('scipy')
cs = 'ddiiddid'
G = nxt.threshold_graph(cs)
(tgeval, tgevec) = nxt.eigenvectors(cs)
dot = np.dot
assert [abs(dot(lv, lv) - 1.0) < 1e-9 for lv in tgevec] == [True] * 8
lapl = nx.laplacian_matrix(G)
# tgev=[ dot(lv,dot(lapl,lv)) for lv in tgevec ]
# assert_true(sum([abs(c-d) for c,d in zip(tgev,tgeval)]) < 1e-9)
# tgev.sort()
# lev=list(eigenval(lapl))
# lev.sort()
# assert_true(sum([abs(c-d) for c,d in zip(tgev,lev)]) < 1e-9)
def test_create_using(self):
cs = 'ddiiddid'
G = nxt.threshold_graph(cs)
assert pytest.raises(nx.exception.NetworkXError,
nxt.threshold_graph, cs, create_using=nx.DiGraph())
MG = nxt.threshold_graph(cs, create_using=nx.MultiGraph())
assert sorted(MG.edges()) == sorted(G.edges())
|
[
"networkx.algorithms.threshold.find_alternating_4_cycle",
"networkx.algorithms.threshold.creation_sequence",
"networkx.algorithms.threshold.shortest_path_length",
"networkx.MultiGraph",
"networkx.algorithms.threshold.threshold_graph",
"networkx.laplacian_matrix",
"networkx.algorithms.threshold.find_threshold_graph",
"networkx.algorithms.threshold.is_threshold_graph",
"networkx.algorithms.threshold.make_compact",
"networkx.algorithms.threshold.uncompact",
"networkx.algorithms.threshold.triangles",
"networkx.algorithms.isomorphism.isomorph.graph_could_be_isomorphic",
"networkx.algorithms.threshold.right_d_threshold_sequence",
"networkx.betweenness_centrality",
"networkx.algorithms.threshold.eigenvectors",
"networkx.shortest_path",
"networkx.algorithms.threshold.weights_to_creation_sequence",
"pytest.raises",
"networkx.algorithms.threshold.eigenvalues",
"networkx.star_graph",
"networkx.algorithms.threshold.creation_sequence_to_weights",
"networkx.clustering",
"networkx.generators.havel_hakimi_graph",
"networkx.algorithms.threshold.degree_correlation",
"networkx.algorithms.threshold.cluster_sequence",
"networkx.algorithms.threshold.find_creation_sequence",
"networkx.triangles",
"networkx.single_source_shortest_path_length",
"networkx.algorithms.threshold.density",
"networkx.DiGraph",
"networkx.algorithms.threshold.is_threshold_sequence",
"networkx.algorithms.threshold.left_d_threshold_sequence",
"networkx.algorithms.threshold.triangle_sequence",
"pytest.importorskip",
"networkx.algorithms.threshold.degree_sequence",
"networkx.algorithms.threshold.betweenness_sequence",
"networkx.algorithms.threshold.random_threshold_sequence",
"networkx.Graph",
"networkx.density",
"networkx.complete_graph",
"networkx.algorithms.threshold.shortest_path",
"networkx.algorithms.threshold.swap_d"
] |
[((407, 424), 'networkx.star_graph', 'nx.star_graph', (['(10)'], {}), '(10)\n', (420, 424), True, 'import networkx as nx\n'), ((440, 465), 'networkx.algorithms.threshold.is_threshold_graph', 'nxt.is_threshold_graph', (['G'], {}), '(G)\n', (462, 465), True, 'import networkx.algorithms.threshold as nxt\n'), ((552, 573), 'networkx.complete_graph', 'nx.complete_graph', (['(10)'], {}), '(10)\n', (569, 573), True, 'import networkx as nx\n'), ((589, 614), 'networkx.algorithms.threshold.is_threshold_graph', 'nxt.is_threshold_graph', (['G'], {}), '(G)\n', (611, 614), True, 'import networkx.algorithms.threshold as nxt\n'), ((815, 845), 'networkx.algorithms.threshold.is_threshold_sequence', 'nxt.is_threshold_sequence', (['deg'], {}), '(deg)\n', (840, 845), True, 'import networkx.algorithms.threshold as nxt\n'), ((859, 896), 'networkx.generators.havel_hakimi_graph', 'nx.generators.havel_hakimi_graph', (['deg'], {}), '(deg)\n', (891, 896), True, 'import networkx as nx\n'), ((912, 937), 'networkx.algorithms.threshold.is_threshold_graph', 'nxt.is_threshold_graph', (['G'], {}), '(G)\n', (934, 937), True, 'import networkx.algorithms.threshold as nxt\n'), ((1017, 1054), 'networkx.generators.havel_hakimi_graph', 'nx.generators.havel_hakimi_graph', (['deg'], {}), '(deg)\n', (1049, 1054), True, 'import networkx as nx\n'), ((1182, 1208), 'networkx.algorithms.threshold.creation_sequence', 'nxt.creation_sequence', (['deg'], {}), '(deg)\n', (1203, 1208), True, 'import networkx.algorithms.threshold as nxt\n'), ((1222, 1246), 'networkx.algorithms.threshold.threshold_graph', 'nxt.threshold_graph', (['cs0'], {}), '(cs0)\n', (1241, 1246), True, 'import networkx.algorithms.threshold as nxt\n'), ((1300, 1344), 'networkx.algorithms.threshold.creation_sequence', 'nxt.creation_sequence', (['deg'], {'with_labels': '(True)'}), '(deg, with_labels=True)\n', (1321, 1344), True, 'import networkx.algorithms.threshold as nxt\n'), ((1358, 1382), 'networkx.algorithms.threshold.threshold_graph', 
'nxt.threshold_graph', (['cs1'], {}), '(cs1)\n', (1377, 1382), True, 'import networkx.algorithms.threshold as nxt\n'), ((1461, 1501), 'networkx.algorithms.threshold.creation_sequence', 'nxt.creation_sequence', (['deg'], {'compact': '(True)'}), '(deg, compact=True)\n', (1482, 1501), True, 'import networkx.algorithms.threshold as nxt\n'), ((1515, 1539), 'networkx.algorithms.threshold.threshold_graph', 'nxt.threshold_graph', (['cs2'], {}), '(cs2)\n', (1534, 1539), True, 'import networkx.algorithms.threshold as nxt\n'), ((1640, 1672), 'networkx.algorithms.isomorphism.isomorph.graph_could_be_isomorphic', 'graph_could_be_isomorphic', (['H0', 'G'], {}), '(H0, G)\n', (1665, 1672), False, 'from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic\n'), ((1688, 1721), 'networkx.algorithms.isomorphism.isomorph.graph_could_be_isomorphic', 'graph_could_be_isomorphic', (['H0', 'H1'], {}), '(H0, H1)\n', (1713, 1721), False, 'from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic\n'), ((1737, 1770), 'networkx.algorithms.isomorphism.isomorph.graph_could_be_isomorphic', 'graph_could_be_isomorphic', (['H0', 'H2'], {}), '(H0, H2)\n', (1762, 1770), False, 'from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic\n'), ((1953, 2012), 'pytest.raises', 'pytest.raises', (['TypeError', 'nxt.make_compact', '[3.0, 1.0, 2.0]'], {}), '(TypeError, nxt.make_compact, [3.0, 1.0, 2.0])\n', (1966, 2012), False, 'import pytest\n'), ((2373, 2429), 'pytest.raises', 'pytest.raises', (['TypeError', 'nxt.uncompact', '[3.0, 1.0, 2.0]'], {}), '(TypeError, nxt.uncompact, [3.0, 1.0, 2.0])\n', (2386, 2429), False, 'import pytest\n'), ((2588, 2663), 'pytest.raises', 'pytest.raises', (['TypeError', 'nxt.creation_sequence_to_weights', '[3.0, 1.0, 2.0]'], {}), '(TypeError, nxt.creation_sequence_to_weights, [3.0, 1.0, 2.0])\n', (2601, 2663), False, 'import pytest\n'), ((3130, 3140), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (3138, 3140), True, 
'import networkx as nx\n'), ((3291, 3328), 'networkx.generators.havel_hakimi_graph', 'nx.generators.havel_hakimi_graph', (['deg'], {}), '(deg)\n', (3323, 3328), True, 'import networkx as nx\n'), ((3343, 3387), 'networkx.algorithms.threshold.creation_sequence', 'nxt.creation_sequence', (['deg'], {'with_labels': '(True)'}), '(deg, with_labels=True)\n', (3364, 3387), True, 'import networkx.algorithms.threshold as nxt\n'), ((3614, 3646), 'networkx.algorithms.threshold.shortest_path_length', 'nxt.shortest_path_length', (['cs1', '(3)'], {}), '(cs1, 3)\n', (3638, 3646), True, 'import networkx.algorithms.threshold as nxt\n'), ((3662, 3710), 'networkx.algorithms.threshold.shortest_path_length', 'nxt.shortest_path_length', (['[t for v, t in cs1]', '(2)'], {}), '([t for v, t in cs1], 2)\n', (3686, 3710), True, 'import networkx.algorithms.threshold as nxt\n'), ((4069, 4135), 'pytest.raises', 'pytest.raises', (['TypeError', 'nxt.shortest_path', '[3.0, 1.0, 2.0]', '(1)', '(2)'], {}), '(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1, 2)\n', (4082, 4135), False, 'import pytest\n'), ((4148, 4211), 'pytest.raises', 'pytest.raises', (['ValueError', 'nxt.shortest_path', '[3, 1, 2]', '"""a"""', '(2)'], {}), "(ValueError, nxt.shortest_path, [3, 1, 2], 'a', 2)\n", (4161, 4211), False, 'import pytest\n'), ((4227, 4290), 'pytest.raises', 'pytest.raises', (['ValueError', 'nxt.shortest_path', '[3, 1, 2]', '(1)', '"""b"""'], {}), "(ValueError, nxt.shortest_path, [3, 1, 2], 1, 'b')\n", (4240, 4290), False, 'import pytest\n'), ((4721, 4784), 'pytest.raises', 'pytest.raises', (['TypeError', 'nxt.shortest_path', '[3.0, 1.0, 2.0]', '(1)'], {}), '(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1)\n', (4734, 4784), False, 'import pytest\n'), ((5044, 5109), 'pytest.raises', 'pytest.raises', (['ValueError', 'nxt.random_threshold_sequence', '(10)', '(1.5)'], {}), '(ValueError, nxt.random_threshold_sequence, 10, 1.5)\n', (5057, 5109), False, 'import pytest\n'), ((5244, 5307), 'pytest.raises', 
'pytest.raises', (['ValueError', 'nxt.right_d_threshold_sequence', '(2)', '(3)'], {}), '(ValueError, nxt.right_d_threshold_sequence, 2, 3)\n', (5257, 5307), False, 'import pytest\n'), ((5440, 5502), 'pytest.raises', 'pytest.raises', (['ValueError', 'nxt.left_d_threshold_sequence', '(2)', '(3)'], {}), '(ValueError, nxt.left_d_threshold_sequence, 2, 3)\n', (5453, 5502), False, 'import pytest\n'), ((5602, 5654), 'networkx.algorithms.threshold.weights_to_creation_sequence', 'nxt.weights_to_creation_sequence', (['wseq'], {'threshold': '(10)'}), '(wseq, threshold=10)\n', (5634, 5654), True, 'import networkx.algorithms.threshold as nxt\n'), ((5670, 5706), 'networkx.algorithms.threshold.creation_sequence_to_weights', 'nxt.creation_sequence_to_weights', (['cs'], {}), '(cs)\n', (5702, 5706), True, 'import networkx.algorithms.threshold as nxt\n'), ((5721, 5759), 'networkx.algorithms.threshold.weights_to_creation_sequence', 'nxt.weights_to_creation_sequence', (['wseq'], {}), '(wseq)\n', (5753, 5759), True, 'import networkx.algorithms.threshold as nxt\n'), ((6007, 6062), 'networkx.algorithms.threshold.creation_sequence_to_weights', 'nxt.creation_sequence_to_weights', (['[3, 1, 2, 3, 3, 2, 3]'], {}), '([3, 1, 2, 3, 3, 2, 3])\n', (6039, 6062), True, 'import networkx.algorithms.threshold as nxt\n'), ((6386, 6435), 'networkx.algorithms.threshold.creation_sequence_to_weights', 'nxt.creation_sequence_to_weights', (['"""ddidiiidididi"""'], {}), "('ddidiiidididi')\n", (6418, 6435), True, 'import networkx.algorithms.threshold as nxt\n'), ((6557, 6607), 'networkx.algorithms.threshold.creation_sequence_to_weights', 'nxt.creation_sequence_to_weights', (['"""ddidiiidididid"""'], {}), "('ddidiiidididid')\n", (6589, 6607), True, 'import networkx.algorithms.threshold as nxt\n'), ((6808, 6868), 'networkx.Graph', 'nx.Graph', (['{(1): [2], (2): [3], (3): [4], (4): [5], (5): [6]}'], {}), '({(1): [2], (2): [3], (3): [4], (4): [5], (5): [6]})\n', (6816, 6868), True, 'import networkx as nx\n'), 
((7118, 7145), 'networkx.algorithms.threshold.find_threshold_graph', 'nxt.find_threshold_graph', (['G'], {}), '(G)\n', (7142, 7145), True, 'import networkx.algorithms.threshold as nxt\n'), ((7161, 7187), 'networkx.algorithms.threshold.is_threshold_graph', 'nxt.is_threshold_graph', (['TG'], {}), '(TG)\n', (7183, 7187), True, 'import networkx.algorithms.threshold as nxt\n'), ((7467, 7490), 'networkx.algorithms.threshold.threshold_graph', 'nxt.threshold_graph', (['cs'], {}), '(cs)\n', (7486, 7490), True, 'import networkx.algorithms.threshold as nxt\n'), ((7667, 7692), 'networkx.algorithms.threshold.triangle_sequence', 'nxt.triangle_sequence', (['cs'], {}), '(cs)\n', (7688, 7692), True, 'import networkx.algorithms.threshold as nxt\n'), ((7808, 7832), 'networkx.algorithms.threshold.cluster_sequence', 'nxt.cluster_sequence', (['cs'], {}), '(cs)\n', (7828, 7832), True, 'import networkx.algorithms.threshold as nxt\n'), ((8017, 8045), 'networkx.algorithms.threshold.betweenness_sequence', 'nxt.betweenness_sequence', (['cs'], {}), '(cs)\n', (8041, 8045), True, 'import networkx.algorithms.threshold as nxt\n'), ((8623, 8658), 'networkx.algorithms.threshold.left_d_threshold_sequence', 'nxt.left_d_threshold_sequence', (['(5)', '(7)'], {}), '(5, 7)\n', (8652, 8658), True, 'import networkx.algorithms.threshold as nxt\n'), ((8671, 8707), 'networkx.algorithms.threshold.right_d_threshold_sequence', 'nxt.right_d_threshold_sequence', (['(5)', '(7)'], {}), '(5, 7)\n', (8701, 8707), True, 'import networkx.algorithms.threshold as nxt\n'), ((8721, 8744), 'networkx.algorithms.threshold.swap_d', 'nxt.swap_d', (['s', '(1.0)', '(1.0)'], {}), '(s, 1.0, 1.0)\n', (8731, 8744), True, 'import networkx.algorithms.threshold as nxt\n'), ((8758, 8789), 'networkx.algorithms.threshold.swap_d', 'nxt.swap_d', (['s', '(1.0)', '(1.0)'], {'seed': '(1)'}), '(s, 1.0, 1.0, seed=1)\n', (8768, 8789), True, 'import networkx.algorithms.threshold as nxt\n'), ((8837, 8865), 'pytest.importorskip', 'pytest.importorskip', 
(['"""numpy"""'], {}), "('numpy')\n", (8856, 8865), False, 'import pytest\n'), ((8919, 8947), 'pytest.importorskip', 'pytest.importorskip', (['"""scipy"""'], {}), "('scipy')\n", (8938, 8947), False, 'import pytest\n'), ((8985, 9008), 'networkx.algorithms.threshold.threshold_graph', 'nxt.threshold_graph', (['cs'], {}), '(cs)\n', (9004, 9008), True, 'import networkx.algorithms.threshold as nxt\n'), ((9036, 9056), 'networkx.algorithms.threshold.eigenvectors', 'nxt.eigenvectors', (['cs'], {}), '(cs)\n', (9052, 9056), True, 'import networkx.algorithms.threshold as nxt\n'), ((9171, 9193), 'networkx.laplacian_matrix', 'nx.laplacian_matrix', (['G'], {}), '(G)\n', (9190, 9193), True, 'import networkx as nx\n'), ((9538, 9561), 'networkx.algorithms.threshold.threshold_graph', 'nxt.threshold_graph', (['cs'], {}), '(cs)\n', (9557, 9561), True, 'import networkx.algorithms.threshold as nxt\n'), ((741, 771), 'networkx.algorithms.threshold.is_threshold_sequence', 'nxt.is_threshold_sequence', (['deg'], {}), '(deg)\n', (766, 771), True, 'import networkx.algorithms.threshold as nxt\n'), ((1069, 1094), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1082, 1094), False, 'import pytest\n'), ((1108, 1166), 'networkx.algorithms.threshold.creation_sequence', 'nxt.creation_sequence', (['deg'], {'with_labels': '(True)', 'compact': '(True)'}), '(deg, with_labels=True, compact=True)\n', (1129, 1166), True, 'import networkx.algorithms.threshold as nxt\n'), ((1820, 1868), 'networkx.algorithms.threshold.make_compact', 'nxt.make_compact', (["['d', 'd', 'd', 'i', 'd', 'd']"], {}), "(['d', 'd', 'd', 'i', 'd', 'd'])\n", (1836, 1868), True, 'import networkx.algorithms.threshold as nxt\n'), ((1897, 1924), 'networkx.algorithms.threshold.make_compact', 'nxt.make_compact', (['[3, 1, 2]'], {}), '([3, 1, 2])\n', (1913, 1924), True, 'import networkx.algorithms.threshold as nxt\n'), ((2056, 2080), 'networkx.algorithms.threshold.uncompact', 'nxt.uncompact', (['[3, 1, 2]'], {}), '([3, 
1, 2])\n', (2069, 2080), True, 'import networkx.algorithms.threshold as nxt\n'), ((2130, 2165), 'networkx.algorithms.threshold.uncompact', 'nxt.uncompact', (["['d', 'd', 'i', 'd']"], {}), "(['d', 'd', 'i', 'd'])\n", (2143, 2165), True, 'import networkx.algorithms.threshold as nxt\n'), ((2301, 2356), 'networkx.algorithms.threshold.uncompact', 'nxt.uncompact', (["[(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')]"], {}), "([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')])\n", (2314, 2356), True, 'import networkx.algorithms.threshold as nxt\n'), ((2492, 2535), 'networkx.algorithms.threshold.creation_sequence_to_weights', 'nxt.creation_sequence_to_weights', (['[3, 1, 2]'], {}), '([3, 1, 2])\n', (2524, 2535), True, 'import networkx.algorithms.threshold as nxt\n'), ((2751, 2776), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2764, 2776), False, 'import pytest\n'), ((2790, 2859), 'networkx.algorithms.threshold.weights_to_creation_sequence', 'nxt.weights_to_creation_sequence', (['deg'], {'with_labels': '(True)', 'compact': '(True)'}), '(deg, with_labels=True, compact=True)\n', (2822, 2859), True, 'import networkx.algorithms.threshold as nxt\n'), ((2876, 2931), 'networkx.algorithms.threshold.weights_to_creation_sequence', 'nxt.weights_to_creation_sequence', (['deg'], {'with_labels': '(True)'}), '(deg, with_labels=True)\n', (2908, 2931), True, 'import networkx.algorithms.threshold as nxt\n'), ((3013, 3064), 'networkx.algorithms.threshold.weights_to_creation_sequence', 'nxt.weights_to_creation_sequence', (['deg'], {'compact': '(True)'}), '(deg, compact=True)\n', (3045, 3064), True, 'import networkx.algorithms.threshold as nxt\n'), ((3185, 3216), 'networkx.algorithms.threshold.find_alternating_4_cycle', 'nxt.find_alternating_4_cycle', (['G'], {}), '(G)\n', (3213, 3216), True, 'import networkx.algorithms.threshold as nxt\n'), ((3868, 3911), 'networkx.single_source_shortest_path_length', 'nx.single_source_shortest_path_length', (['G', '(3)'], {}), '(G, 3)\n', (3905, 
3911), True, 'import networkx as nx\n'), ((3928, 3983), 'networkx.algorithms.threshold.shortest_path', 'nxt.shortest_path', (["['d', 'd', 'd', 'i', 'd', 'd']", '(1)', '(2)'], {}), "(['d', 'd', 'd', 'i', 'd', 'd'], 1, 2)\n", (3945, 3983), True, 'import networkx.algorithms.threshold as nxt\n'), ((4009, 4043), 'networkx.algorithms.threshold.shortest_path', 'nxt.shortest_path', (['[3, 1, 2]', '(1)', '(2)'], {}), '([3, 1, 2], 1, 2)\n', (4026, 4043), True, 'import networkx.algorithms.threshold as nxt\n'), ((4306, 4340), 'networkx.algorithms.threshold.shortest_path', 'nxt.shortest_path', (['[3, 1, 2]', '(1)', '(1)'], {}), '([3, 1, 2], 1, 1)\n', (4323, 4340), True, 'import networkx.algorithms.threshold as nxt\n'), ((4405, 4443), 'networkx.algorithms.threshold.shortest_path_length', 'nxt.shortest_path_length', (['[3, 1, 2]', '(1)'], {}), '([3, 1, 2], 1)\n', (4429, 4443), True, 'import networkx.algorithms.threshold as nxt\n'), ((4482, 4541), 'networkx.algorithms.threshold.shortest_path_length', 'nxt.shortest_path_length', (["['d', 'd', 'd', 'i', 'd', 'd']", '(1)'], {}), "(['d', 'd', 'd', 'i', 'd', 'd'], 1)\n", (4506, 4541), True, 'import networkx.algorithms.threshold as nxt\n'), ((4602, 4661), 'networkx.algorithms.threshold.shortest_path_length', 'nxt.shortest_path_length', (["('d', 'd', 'd', 'i', 'd', 'd')", '(1)'], {}), "(('d', 'd', 'd', 'i', 'd', 'd'), 1)\n", (4626, 4661), True, 'import networkx.algorithms.threshold as nxt\n'), ((4905, 4952), 'networkx.algorithms.threshold.random_threshold_sequence', 'nxt.random_threshold_sequence', (['(10)', '(0.5)'], {'seed': '(42)'}), '(10, 0.5, seed=42)\n', (4934, 4952), True, 'import networkx.algorithms.threshold as nxt\n'), ((5173, 5209), 'networkx.algorithms.threshold.right_d_threshold_sequence', 'nxt.right_d_threshold_sequence', (['(3)', '(2)'], {}), '(3, 2)\n', (5203, 5209), True, 'import networkx.algorithms.threshold as nxt\n'), ((5370, 5405), 'networkx.algorithms.threshold.left_d_threshold_sequence', 
'nxt.left_d_threshold_sequence', (['(3)', '(2)'], {}), '(3, 2)\n', (5399, 5405), True, 'import networkx.algorithms.threshold as nxt\n'), ((5834, 5870), 'networkx.algorithms.threshold.uncompact', 'nxt.uncompact', (['[3, 1, 2, 3, 3, 2, 3]'], {}), '([3, 1, 2, 3, 3, 2, 3])\n', (5847, 5870), True, 'import networkx.algorithms.threshold as nxt\n'), ((7030, 7061), 'networkx.algorithms.threshold.find_alternating_4_cycle', 'nxt.find_alternating_4_cycle', (['G'], {}), '(G)\n', (7058, 7061), True, 'import networkx.algorithms.threshold as nxt\n'), ((7332, 7361), 'networkx.algorithms.threshold.find_creation_sequence', 'nxt.find_creation_sequence', (['G'], {}), '(G)\n', (7358, 7361), True, 'import networkx.algorithms.threshold as nxt\n'), ((7506, 7529), 'networkx.algorithms.threshold.density', 'nxt.density', (['"""ddiiddid"""'], {}), "('ddiiddid')\n", (7517, 7529), True, 'import networkx.algorithms.threshold as nxt\n'), ((7533, 7546), 'networkx.density', 'nx.density', (['G'], {}), '(G)\n', (7543, 7546), True, 'import networkx as nx\n'), ((7776, 7793), 'networkx.algorithms.threshold.triangles', 'nxt.triangles', (['cs'], {}), '(cs)\n', (7789, 7793), True, 'import networkx.algorithms.threshold as nxt\n'), ((8127, 8146), 'networkx.algorithms.threshold.eigenvalues', 'nxt.eigenvalues', (['cs'], {}), '(cs)\n', (8142, 8146), True, 'import networkx.algorithms.threshold as nxt\n'), ((8292, 8325), 'networkx.algorithms.threshold.degree_correlation', 'nxt.degree_correlation', (['"""diiiddi"""'], {}), "('diiiddi')\n", (8314, 8325), True, 'import networkx.algorithms.threshold as nxt\n'), ((8349, 8378), 'networkx.algorithms.threshold.degree_correlation', 'nxt.degree_correlation', (['"""did"""'], {}), "('did')\n", (8371, 8378), True, 'import networkx.algorithms.threshold as nxt\n'), ((8402, 8431), 'networkx.algorithms.threshold.degree_correlation', 'nxt.degree_correlation', (['"""ddd"""'], {}), "('ddd')\n", (8424, 8431), True, 'import networkx.algorithms.threshold as nxt\n'), ((8454, 8479), 
'networkx.algorithms.threshold.eigenvalues', 'nxt.eigenvalues', (['"""dddiii"""'], {}), "('dddiii')\n", (8469, 8479), True, 'import networkx.algorithms.threshold as nxt\n'), ((8517, 8543), 'networkx.algorithms.threshold.eigenvalues', 'nxt.eigenvalues', (['"""dddiiid"""'], {}), "('dddiiid')\n", (8532, 8543), True, 'import networkx.algorithms.threshold as nxt\n'), ((1595, 1613), 'networkx.algorithms.threshold.uncompact', 'nxt.uncompact', (['cs2'], {}), '(cs2)\n', (1608, 1613), True, 'import networkx.algorithms.threshold as nxt\n'), ((2220, 2275), 'networkx.algorithms.threshold.uncompact', 'nxt.uncompact', (["[(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')]"], {}), "([(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')])\n", (2233, 2275), True, 'import networkx.algorithms.threshold as nxt\n'), ((3515, 3543), 'networkx.algorithms.threshold.shortest_path', 'nxt.shortest_path', (['cs1', 'n', 'm'], {}), '(cs1, n, m)\n', (3532, 3543), True, 'import networkx.algorithms.threshold as nxt\n'), ((3572, 3597), 'networkx.shortest_path', 'nx.shortest_path', (['G', 'n', 'm'], {}), '(G, n, m)\n', (3588, 3597), True, 'import networkx as nx\n'), ((4843, 4881), 'networkx.algorithms.threshold.random_threshold_sequence', 'nxt.random_threshold_sequence', (['(10)', '(0.5)'], {}), '(10, 0.5)\n', (4872, 4881), True, 'import networkx.algorithms.threshold as nxt\n'), ((7570, 7593), 'networkx.algorithms.threshold.degree_sequence', 'nxt.degree_sequence', (['cs'], {}), '(cs)\n', (7589, 7593), True, 'import networkx.algorithms.threshold as nxt\n'), ((7966, 7994), 'networkx.betweenness_centrality', 'nx.betweenness_centrality', (['G'], {}), '(G)\n', (7991, 7994), True, 'import networkx as nx\n'), ((9679, 9691), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (9689, 9691), True, 'import networkx as nx\n'), ((9743, 9758), 'networkx.MultiGraph', 'nx.MultiGraph', ([], {}), '()\n', (9756, 9758), True, 'import networkx as nx\n'), ((7851, 7867), 'networkx.clustering', 'nx.clustering', (['G'], {}), '(G)\n', (7864, 7867), 
True, 'import networkx as nx\n'), ((8224, 8250), 'networkx.algorithms.threshold.degree_correlation', 'nxt.degree_correlation', (['cs'], {}), '(cs)\n', (8246, 8250), True, 'import networkx.algorithms.threshold as nxt\n'), ((7719, 7734), 'networkx.triangles', 'nx.triangles', (['G'], {}), '(G)\n', (7731, 7734), True, 'import networkx as nx\n')]
|
"""
MH-Z19 sensorin datasheetti https://www.winsen-sensor.com/d/files/MH-Z19B.pdf
Oletuksena on käyttää 0 - 5000 ppm sensoria. Tarkkuus ± (50ppm+5%)
Toimintajännite: 4.5 ~ 5.5 V DC, keskimääräinen kulutus < 20mA, maksimi 150mA
Toimintalämpötila -10 ~ 50 °C ja kosteus 0~ 90% RH.
Signaalin lähtö 3.3V sopiva. Vasteaika T90 < 120 s.
Sensorin elinikä > 5 vuotta.
Sensoria voi lukea myös PWM-muodossa, mutta tässä käytetään sarjamuotoista liikennettä (UART).
Komennot:
- 0x86 = lue CO2 konsentraatio, 8 bittiä:
- Lähetyksessä: 0 = 0xFF (starttibitti), 1 = varattu, 2 = komento 0x86, 3-7 = 0x00, 8 = tarkistussumma 0x79
- Vastaanototssa: 0 = 0xFF, 1 = komento 0x86, 2 = konsentraatio high-tavu, 3 = konsentraatio low-tavu, 4-7=0x00, 8 crc
CO2-konsentraatio on tavu 2 (HIGH) * 256 + tavu 3 (LOW)
Esimerkki: konvertoi hexa 02 desimaaliksi 2, hexa 20 desimaaliksi 32, jolloin 2 x 256 + 32 = 554 ppm CO2
- 0x87 = kalibroi nollapiste
- 0 = 0xFF, 1 = 0x01 varattu, 2 = komento 0x87, 3-7 = 0x00, 8 = 0x78 crc
Aseta sensori ulkoilmaan, jossa CO2 on 400 ppm ja anna komento: FF 01 87 00 00 00 00 00 78 nollapisteen kalibrointiin
- 0x88 = kalibroi span-piste
- 0 = 0xFF, 1 = 0x01 varattu, 2 = komento 0x88, 3 = HIGH-arvo, 4 = LOW-arvo, 5-7 = 0x00, 8 = crc
Nollapisteen kalibrointi tulee olla ensin valmis!
Aseta sensori 2000 ppm CO2 kaasuun vähintään 20 minuutiksi.
Mikäli span-arvo on 2000 ppm, HIGH-arvo on 2000/256, LOW-arvo on 2000 % 256
Lähetä komento FF 01 88 07 D0 00 00 00 A0 span-pisteen kalbroimiseksi.
- 0x79 = on/off itsekalibrointi nollapisteelle (ABC-logiikka)
- 0 = 0xFF, 1 = 0x01 varattu, 2 = komento 0x79, 3 = 0xA0/0x00, 4-7 = 0x00. 8 = crc
- ON komento: FF 01 79 A0 00 00 00 00 E6
- OFF komento: FF 01 79 00 00 00 00 00 86
- Oletus on ON ja oletus CO2 ppm on 400. Kalibrointi tehdään 24 tunnin välein, mutta se ei sovellu esimerkiksi
maatiloille, jääkaappiin tai muihin poikkeaviin tiloihin, jolloin automaattinen asetus tulee kytkeä pois ja
kalibrointi tulee tehdä manuaalisesti.
- 0x99 = mittausvälin asetus
- 0 = 0xFF, 1 = 0x01 varattu, 2 = komento 0x99, 3 = 0x00 varattu, 4 = mittausalue 24 ~ 32 bit, 5 = 16 ~ 23 bit,
6 = 8 ~15 bit, 7 = 0~7 bit, 8 = crc
Mittausalueen tulee olla 0~2000, 0~5000, tai 0~10000ppm
0~2000ppm alue komennolla: FF 01 99 00 00 00 07 D0 8F
0~10000ppm alue komennolla: FF 01 99 00 00 00 27 10 2F
Kytkentä:
- ESP32 UART0 on varattu USB serialille, eli käytä UART1 tai 2, esim. pinnit 16(rx) ja 17(tx).
- sensorin tx (transmit) menee ESP:n rx (receive)
Lainattu peruslukua https://github.com/dr-mod/co2-monitoring-station/blob/master/mhz19b.py
3.12.2020 <NAME>
4.12.2020 UART:ssa read ja write metodit pysäyttävät toiminnan luvun tai kirjoituksen ajaksi. Muutettu
käyttämään stremia 6.3 mukaisesti:
https://github.com/peterhinch/micropython-async/blob/master/v3/docs/TUTORIAL.md#63-using-the-stream-mechanism
Luokassa MHZ19bCO2 on mm. muutettu self.sensori.write(self.LUKU_KOMENTO) -> await self.kirjoittaja(self.LUKU_KOMENTO)
"""
import machine
import utime
import uasyncio as asyncio
class MHZ19bCO2:
def __init__(self, rxpin=16, txpin=17):
self.sensori = machine.UART(1)
self.sensori.init(baudrate=9600, bits=8, parity=None, stop=1, rx=rxpin, tx=txpin)
self.nollapiste_kalibroitu = False
self.co2_arvo = 0
self.co2_keskiarvot = []
self.co2_keskiarvoja = 20
self.co2_keskiarvo = 0
self.sensori_aktivoitu_klo = utime.time()
self.arvo_luettu_klo = utime.time()
self.mittausvali = '0_5000'
self.esilammitysaika = 10 # tulee olla 180
self.lukuvali = 10 # tulee olla 120
self.LUKU_KOMENTO = bytearray(b'\xFF\x01\x86\x00\x00\x00\x00\x00\x79')
self.KALIBROI_NOLLAPISTE = bytearray(b'\xFF\x01\x87\x00\x00\x00\x00\x00\x78')
self.KALIBROI_SPAN = bytearray(b'\xFF\x01\x88\x07\xD0\x00\x00\x00\xA0')
self.ITSEKALIBROINTI_ON = bytearray(b'\xFF\x01\x79\xA0\x00\x00\x00\x00\xE6')
self.ITSEKALIBTOINTI_OFF = bytearray(b'\xFF\x01\x79\x00\x00\x00\x00\x00\x86')
self.MITTAUSVALI_0_2000PPM = bytearray(b'\xFF\x01\x99\x00\x00\x00\x07\xD0\x8F')
self.MITTAUSVALI_0_5000PPM = bytearray(b'\xFF\x01\x99\x00\x00\x00\x13\x88\xCB')
self.MITTAUSVALI_0_10000PPM = bytearray(b'\xFF\x01\x99\x00\x00\x00\x27\x10\x2F')
async def kirjoittaja(self, data):
portti = asyncio.StreamWriter(self.sensori, {})
portti.write(data)
await portti.drain() # Lähetys alkaa
await asyncio.sleep(2)
async def lukija(self, merkkia):
portti = asyncio.StreamReader(self.sensori)
data = await portti.readexactly(merkkia)
return data
async def lue_co2_looppi(self):
while True:
if (utime.time() - self.sensori_aktivoitu_klo) < self.esilammitysaika:
# Esilämmitysaika on 3 minuuttia
print("Esilämmitysajalla ... odottele")
await asyncio.sleep(1)
elif (utime.time() - self.arvo_luettu_klo) > self.lukuvali:
# Luetaan arvoja korkeintaan 2 min välein
print("Luetaan arvo, hetki...")
try:
# self.sensori.write(self.LUKU_KOMENTO)
await self.kirjoittaja(self.LUKU_KOMENTO)
lukukehys = bytearray(await self.lukija(9))
if lukukehys[0] == 0xff and self._laske_crc(lukukehys) == lukukehys[8]:
self.co2_arvo = self._data_to_co2_level(lukukehys)
# print(self.co2_arvo)
self.laske_keskiarvo(self.co2_arvo)
self.arvo_luettu_klo = utime.time()
except TypeError as e:
print("Virhe %s", e)
pass
await asyncio.sleep(self.lukuvali)
def laske_keskiarvo(self, co2):
self.co2_keskiarvot.append(co2)
self.co2_keskiarvo = (sum(self.co2_keskiarvot) / len(self.co2_keskiarvot))
# 20 arvoa ja poistetaan vanhin
if len(self.co2_keskiarvot) == self.co2_keskiarvoja:
self.co2_keskiarvot.pop(0)
def kalibroi_nollapiste(self):
if utime.time() - self.sensori_aktivoitu_klo > (20 * 60):
self.kirjoittaja(self.KALIBROI_NOLLAPISTE)
self.nollapiste_kalibroitu = True
else:
print("Ennen kalibrointia sensorin tulee olla lämmennyt 20 minuuttia!")
def kalibroi_span(self):
if self.nollapiste_kalibroitu is True:
self.kirjoittaja(self.KALIBROI_SPAN)
else:
print("Nollapistee tulee olla ensin kablinroituna!")
def itsekalibrointi_on(self):
self.kirjoittaja(self.ITSEKALIBROINTI_ON)
def itsekalibrointi_off(self):
self.kirjoittaja(self.ITSEKALIBTOINTI_OFF)
def mittausvali_0_2000_ppm(self):
self.kirjoittaja(self.MITTAUSVALI_0_2000PPM)
self.mittausvali = '0_2000'
def mittausvali_0_5000_ppm(self):
self.kirjoittaja(self.MITTAUSVALI_0_5000PPM)
self.mittausvali = '0_5000'
def mittausvali_0_10000_ppm(self):
self.kirjoittaja(self.MITTAUSVALI_0_10000PPM)
self.mittausvali = '0_10000'
@staticmethod
def _laske_crc(lukukehys):
if len(lukukehys) != 9:
return None
crc = sum(lukukehys[1:8])
return (~(crc & 0xff) & 0xff) + 1
@staticmethod
def _data_to_co2_level(data):
return data[2] << 8 | data[3]
|
[
"utime.time",
"uasyncio.StreamReader",
"uasyncio.sleep",
"uasyncio.StreamWriter",
"machine.UART"
] |
[((3199, 3214), 'machine.UART', 'machine.UART', (['(1)'], {}), '(1)\n', (3211, 3214), False, 'import machine\n'), ((3509, 3521), 'utime.time', 'utime.time', ([], {}), '()\n', (3519, 3521), False, 'import utime\n'), ((3553, 3565), 'utime.time', 'utime.time', ([], {}), '()\n', (3563, 3565), False, 'import utime\n'), ((4438, 4476), 'uasyncio.StreamWriter', 'asyncio.StreamWriter', (['self.sensori', '{}'], {}), '(self.sensori, {})\n', (4458, 4476), True, 'import uasyncio as asyncio\n'), ((4638, 4672), 'uasyncio.StreamReader', 'asyncio.StreamReader', (['self.sensori'], {}), '(self.sensori)\n', (4658, 4672), True, 'import uasyncio as asyncio\n'), ((4566, 4582), 'uasyncio.sleep', 'asyncio.sleep', (['(2)'], {}), '(2)\n', (4579, 4582), True, 'import uasyncio as asyncio\n'), ((5870, 5898), 'uasyncio.sleep', 'asyncio.sleep', (['self.lukuvali'], {}), '(self.lukuvali)\n', (5883, 5898), True, 'import uasyncio as asyncio\n'), ((6247, 6259), 'utime.time', 'utime.time', ([], {}), '()\n', (6257, 6259), False, 'import utime\n'), ((4815, 4827), 'utime.time', 'utime.time', ([], {}), '()\n', (4825, 4827), False, 'import utime\n'), ((5010, 5026), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (5023, 5026), True, 'import uasyncio as asyncio\n'), ((5045, 5057), 'utime.time', 'utime.time', ([], {}), '()\n', (5055, 5057), False, 'import utime\n'), ((5734, 5746), 'utime.time', 'utime.time', ([], {}), '()\n', (5744, 5746), False, 'import utime\n')]
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is an example of using convolutional networks over characters
for DBpedia dataset to predict class from description of an entity.
This model is similar to one described in this paper:
"Character-level Convolutional Networks for Text Classification"
http://arxiv.org/abs/1509.01626
and is somewhat alternative to the Lua code from here:
https://github.com/zhangxiangxiao/Crepe
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
import pandas
from sklearn import metrics
import tensorflow as tf
from tensorflow.contrib import learn
FLAGS = None
MAX_DOCUMENT_LENGTH = 100
N_FILTERS = 10
FILTER_SHAPE1 = [20, 256]
FILTER_SHAPE2 = [20, N_FILTERS]
POOLING_WINDOW = 4
POOLING_STRIDE = 2
def char_cnn_model(x, y):
"""Character level convolutional neural network model to predict classes."""
y = tf.one_hot(y, 15, 1, 0)
byte_list = tf.reshape(learn.ops.one_hot_matrix(x, 256),
[-1, MAX_DOCUMENT_LENGTH, 256, 1])
with tf.variable_scope('CNN_Layer1'):
# Apply Convolution filtering on input sequence.
conv1 = tf.contrib.layers.convolution2d(byte_list, N_FILTERS,
FILTER_SHAPE1, padding='VALID')
# Add a RELU for non linearity.
conv1 = tf.nn.relu(conv1)
# Max pooling across output of Convolution+Relu.
pool1 = tf.nn.max_pool(conv1, ksize=[1, POOLING_WINDOW, 1, 1],
strides=[1, POOLING_STRIDE, 1, 1], padding='SAME')
# Transpose matrix so that n_filters from convolution becomes width.
pool1 = tf.transpose(pool1, [0, 1, 3, 2])
with tf.variable_scope('CNN_Layer2'):
# Second level of convolution filtering.
conv2 = tf.contrib.layers.convolution2d(pool1, N_FILTERS,
FILTER_SHAPE2,
padding='VALID')
# Max across each filter to get useful features for classification.
pool2 = tf.squeeze(tf.reduce_max(conv2, 1), squeeze_dims=[1])
# Apply regular WX + B and classification.
prediction, loss = learn.models.logistic_regression(pool2, y)
train_op = tf.contrib.layers.optimize_loss(
loss, tf.contrib.framework.get_global_step(),
optimizer='Adam', learning_rate=0.01)
return {'class': tf.argmax(prediction, 1), 'prob': prediction}, loss, train_op
def main(unused_argv):
# Prepare training and testing data
dbpedia = learn.datasets.load_dataset(
'dbpedia', test_with_fake_data=FLAGS.test_with_fake_data, size='large')
x_train = pandas.DataFrame(dbpedia.train.data)[1]
y_train = pandas.Series(dbpedia.train.target)
x_test = pandas.DataFrame(dbpedia.test.data)[1]
y_test = pandas.Series(dbpedia.test.target)
# Process vocabulary
char_processor = learn.preprocessing.ByteProcessor(MAX_DOCUMENT_LENGTH)
x_train = np.array(list(char_processor.fit_transform(x_train)))
x_test = np.array(list(char_processor.transform(x_test)))
# Build model
classifier = learn.Estimator(model_fn=char_cnn_model)
# Train and predict
classifier.fit(x_train, y_train, steps=100)
y_predicted = [
p['class'] for p in classifier.predict(x_test, as_iterable=True)]
score = metrics.accuracy_score(y_test, y_predicted)
print('Accuracy: {0:f}'.format(score))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--test_with_fake_data',
default=False,
help='Test the example code with fake data.',
action='store_true'
)
FLAGS = parser.parse_args()
tf.app.run()
|
[
"argparse.ArgumentParser",
"sklearn.metrics.accuracy_score",
"tensorflow.contrib.layers.convolution2d",
"tensorflow.contrib.learn.models.logistic_regression",
"tensorflow.reduce_max",
"pandas.DataFrame",
"tensorflow.one_hot",
"tensorflow.nn.relu",
"tensorflow.contrib.learn.preprocessing.ByteProcessor",
"tensorflow.variable_scope",
"tensorflow.app.run",
"tensorflow.transpose",
"tensorflow.contrib.framework.get_global_step",
"tensorflow.nn.max_pool",
"pandas.Series",
"tensorflow.argmax",
"tensorflow.contrib.learn.datasets.load_dataset",
"tensorflow.contrib.learn.ops.one_hot_matrix",
"tensorflow.contrib.learn.Estimator"
] |
[((1532, 1555), 'tensorflow.one_hot', 'tf.one_hot', (['y', '(15)', '(1)', '(0)'], {}), '(y, 15, 1, 0)\n', (1542, 1555), True, 'import tensorflow as tf\n'), ((2750, 2792), 'tensorflow.contrib.learn.models.logistic_regression', 'learn.models.logistic_regression', (['pool2', 'y'], {}), '(pool2, y)\n', (2782, 2792), False, 'from tensorflow.contrib import learn\n'), ((3093, 3197), 'tensorflow.contrib.learn.datasets.load_dataset', 'learn.datasets.load_dataset', (['"""dbpedia"""'], {'test_with_fake_data': 'FLAGS.test_with_fake_data', 'size': '"""large"""'}), "('dbpedia', test_with_fake_data=FLAGS.\n test_with_fake_data, size='large')\n", (3120, 3197), False, 'from tensorflow.contrib import learn\n'), ((3264, 3299), 'pandas.Series', 'pandas.Series', (['dbpedia.train.target'], {}), '(dbpedia.train.target)\n', (3277, 3299), False, 'import pandas\n'), ((3361, 3395), 'pandas.Series', 'pandas.Series', (['dbpedia.test.target'], {}), '(dbpedia.test.target)\n', (3374, 3395), False, 'import pandas\n'), ((3439, 3493), 'tensorflow.contrib.learn.preprocessing.ByteProcessor', 'learn.preprocessing.ByteProcessor', (['MAX_DOCUMENT_LENGTH'], {}), '(MAX_DOCUMENT_LENGTH)\n', (3472, 3493), False, 'from tensorflow.contrib import learn\n'), ((3652, 3692), 'tensorflow.contrib.learn.Estimator', 'learn.Estimator', ([], {'model_fn': 'char_cnn_model'}), '(model_fn=char_cnn_model)\n', (3667, 3692), False, 'from tensorflow.contrib import learn\n'), ((3862, 3905), 'sklearn.metrics.accuracy_score', 'metrics.accuracy_score', (['y_test', 'y_predicted'], {}), '(y_test, y_predicted)\n', (3884, 3905), False, 'from sklearn import metrics\n'), ((3987, 4012), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4010, 4012), False, 'import argparse\n'), ((4203, 4215), 'tensorflow.app.run', 'tf.app.run', ([], {}), '()\n', (4213, 4215), True, 'import tensorflow as tf\n'), ((1581, 1613), 'tensorflow.contrib.learn.ops.one_hot_matrix', 'learn.ops.one_hot_matrix', (['x', '(256)'], {}), '(x, 
256)\n', (1605, 1613), False, 'from tensorflow.contrib import learn\n'), ((1682, 1713), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""CNN_Layer1"""'], {}), "('CNN_Layer1')\n", (1699, 1713), True, 'import tensorflow as tf\n'), ((1780, 1869), 'tensorflow.contrib.layers.convolution2d', 'tf.contrib.layers.convolution2d', (['byte_list', 'N_FILTERS', 'FILTER_SHAPE1'], {'padding': '"""VALID"""'}), "(byte_list, N_FILTERS, FILTER_SHAPE1,\n padding='VALID')\n", (1811, 1869), True, 'import tensorflow as tf\n'), ((1943, 1960), 'tensorflow.nn.relu', 'tf.nn.relu', (['conv1'], {}), '(conv1)\n', (1953, 1960), True, 'import tensorflow as tf\n'), ((2026, 2135), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['conv1'], {'ksize': '[1, POOLING_WINDOW, 1, 1]', 'strides': '[1, POOLING_STRIDE, 1, 1]', 'padding': '"""SAME"""'}), "(conv1, ksize=[1, POOLING_WINDOW, 1, 1], strides=[1,\n POOLING_STRIDE, 1, 1], padding='SAME')\n", (2040, 2135), True, 'import tensorflow as tf\n'), ((2244, 2277), 'tensorflow.transpose', 'tf.transpose', (['pool1', '[0, 1, 3, 2]'], {}), '(pool1, [0, 1, 3, 2])\n', (2256, 2277), True, 'import tensorflow as tf\n'), ((2285, 2316), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""CNN_Layer2"""'], {}), "('CNN_Layer2')\n", (2302, 2316), True, 'import tensorflow as tf\n'), ((2375, 2461), 'tensorflow.contrib.layers.convolution2d', 'tf.contrib.layers.convolution2d', (['pool1', 'N_FILTERS', 'FILTER_SHAPE2'], {'padding': '"""VALID"""'}), "(pool1, N_FILTERS, FILTER_SHAPE2, padding=\n 'VALID')\n", (2406, 2461), True, 'import tensorflow as tf\n'), ((2852, 2890), 'tensorflow.contrib.framework.get_global_step', 'tf.contrib.framework.get_global_step', ([], {}), '()\n', (2888, 2890), True, 'import tensorflow as tf\n'), ((3212, 3248), 'pandas.DataFrame', 'pandas.DataFrame', (['dbpedia.train.data'], {}), '(dbpedia.train.data)\n', (3228, 3248), False, 'import pandas\n'), ((3311, 3346), 'pandas.DataFrame', 'pandas.DataFrame', (['dbpedia.test.data'], {}), 
'(dbpedia.test.data)\n', (3327, 3346), False, 'import pandas\n'), ((2640, 2663), 'tensorflow.reduce_max', 'tf.reduce_max', (['conv2', '(1)'], {}), '(conv2, 1)\n', (2653, 2663), True, 'import tensorflow as tf\n'), ((2956, 2980), 'tensorflow.argmax', 'tf.argmax', (['prediction', '(1)'], {}), '(prediction, 1)\n', (2965, 2980), True, 'import tensorflow as tf\n')]
|
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
# MIT License
#
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
from typing import Any
from typing import Callable
from typing import Dict
from typing import NoReturn
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TypeVar
from disent.util.function import wrapped_partial
from disent.util.imports import import_obj_partial
from disent.util.imports import _check_and_split_path
# ========================================================================= #
# Basic Cached Item #
# ========================================================================= #
T = TypeVar('T')
class LazyValue(object):
def __init__(self, generate_fn: Callable[[], T]):
assert callable(generate_fn)
self._is_generated = False
self._generate_fn = generate_fn
self._value = None
def generate(self) -> T:
# replace value -- we don't actually need caching of the
# values since the registry replaces these items automatically,
# but LazyValue is exposed so it might be used unexpectedly.
if not self._is_generated:
self._is_generated = True
self._value = self._generate_fn()
self._generate_fn = None
# return value
return self._value
def __repr__(self):
return f'{self.__class__.__name__}({repr(self._generate_fn)})'
# ========================================================================= #
# Import Helper #
# ========================================================================= #
class LazyImport(LazyValue):
def __init__(self, import_path: str, *partial_args, **partial_kwargs):
super().__init__(
generate_fn=lambda: import_obj_partial(import_path, *partial_args, **partial_kwargs),
)
# ========================================================================= #
# Registry #
# ========================================================================= #
_NONE = object()
class Registry(object):
def __init__(
self,
name: str,
assert_valid_key: Optional[Callable[[str], NoReturn]] = None,
assert_valid_value: Optional[Callable[[T], NoReturn]] = None,
):
# checks!
assert str.isidentifier(name), f'The given name for the registry is not a valid identifier: {repr(name)}'
self._name = name
assert (assert_valid_key is None) or callable(assert_valid_key), f'assert_valid_key must be None or callable'
assert (assert_valid_value is None) or callable(assert_valid_value), f'assert_valid_value must be None or callable'
self._assert_valid_key = assert_valid_key
self._assert_valid_value = assert_valid_value
# storage
self._keys_to_values: Dict[str, Any] = {}
@property
def name(self) -> str:
return self._name
def _get_aliases(self, name, aliases, auto_alias: bool):
if auto_alias:
if name not in self:
aliases = (name, *aliases)
elif not aliases:
raise RuntimeError(f'automatic alias: {repr(name)} already exists but no alternative aliases were specified.')
return aliases
def register(
self,
fn=_NONE,
aliases: Sequence[str] = (),
auto_alias: bool = True,
partial_args: Tuple[Any, ...] = None,
partial_kwargs: Dict[str, Any] = None,
) -> Callable[[T], T]:
def _decorator(orig_fn):
# try add the name of the object
keys = self._get_aliases(orig_fn.__name__, aliases=aliases, auto_alias=auto_alias)
# wrap function
new_fn = orig_fn
if (partial_args is not None) or (partial_kwargs is not None):
new_fn = wrapped_partial(
orig_fn,
*(() if partial_args is None else partial_args),
**({} if partial_kwargs is None else partial_kwargs),
)
# register the function
self.register_value(value=new_fn, aliases=keys)
return orig_fn
# handle case
if fn is _NONE:
return _decorator
else:
return _decorator(fn)
def register_import(
self,
import_path: str,
aliases: Sequence[str] = (),
auto_alias: bool = True,
*partial_args,
**partial_kwargs,
) -> 'Registry':
(*_, name) = _check_and_split_path(import_path)
return self.register_value(
value=LazyImport(import_path=import_path, *partial_args, **partial_kwargs),
aliases=self._get_aliases(name, aliases=aliases, auto_alias=auto_alias),
)
def register_value(self, value: T, aliases: Sequence[str]) -> 'Registry':
# check keys
if len(aliases) < 1:
raise ValueError(f'aliases must be specified, got an empty sequence')
for k in aliases:
if not str.isidentifier(k):
raise ValueError(f'alias is not a valid identifier: {repr(k)}')
if k in self._keys_to_values:
raise RuntimeError(f'registry already contains key: {repr(k)}')
self.assert_valid_key(k)
# handle lazy values -- defer checking if a lazy value
if not isinstance(value, LazyValue):
self.assert_valid_value(value)
# register keys
for k in aliases:
self._keys_to_values[k] = value
return self
def __setitem__(self, aliases: str, value: T):
if isinstance(aliases, str):
aliases = (aliases,)
assert isinstance(aliases, tuple), f'multiple aliases must be provided as a Tuple[str], got: {repr(aliases)}'
self.register_value(value=value, aliases=aliases)
def __contains__(self, key: str):
return key in self._keys_to_values
def __getitem__(self, key: str):
if key not in self._keys_to_values:
raise KeyError(f'registry does not contain the key: {repr(key)}, valid keys include: {sorted(self._keys_to_values.keys())}')
# get the value
value = self._keys_to_values[key]
# replace lazy value
if isinstance(value, LazyValue):
value = value.generate()
# check value & run deferred checks
if isinstance(value, LazyValue):
raise RuntimeError(f'{LazyValue.__name__} should never return other lazy values.')
self.assert_valid_value(value)
# update the value
self._keys_to_values[key] = value
# return the value
return value
def __iter__(self):
yield from self._keys_to_values.keys()
def assert_valid_value(self, value: T) -> T:
if self._assert_valid_value is not None:
self._assert_valid_value(value)
return value
def assert_valid_key(self, key: str) -> str:
if self._assert_valid_key is not None:
self._assert_valid_key(key)
return key
def __repr__(self):
return f'{self.__class__.__name__}({repr(self._name)}, ...)'
# ========================================================================= #
# END #
# ========================================================================= #
|
[
"typing.TypeVar",
"disent.util.imports._check_and_split_path",
"disent.util.function.wrapped_partial",
"disent.util.imports.import_obj_partial"
] |
[((1888, 1900), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (1895, 1900), False, 'from typing import TypeVar\n'), ((5849, 5883), 'disent.util.imports._check_and_split_path', '_check_and_split_path', (['import_path'], {}), '(import_path)\n', (5870, 5883), False, 'from disent.util.imports import _check_and_split_path\n'), ((5168, 5297), 'disent.util.function.wrapped_partial', 'wrapped_partial', (['orig_fn', '*(() if partial_args is None else partial_args)'], {}), '(orig_fn, *(() if partial_args is None else partial_args),\n **{} if partial_kwargs is None else partial_kwargs)\n', (5183, 5297), False, 'from disent.util.function import wrapped_partial\n'), ((3060, 3124), 'disent.util.imports.import_obj_partial', 'import_obj_partial', (['import_path', '*partial_args'], {}), '(import_path, *partial_args, **partial_kwargs)\n', (3078, 3124), False, 'from disent.util.imports import import_obj_partial\n')]
|
import subprocess
# install dependencies
print("installing dependencies...")
subprocess.call(["pip3 install --user performance"], shell=True)
subprocess.call(["python3 -m pip install perf"], shell=True)
print("\n\n__________________________")
print("dependencies now satisfied")
import sys
import performance
from random import randint
from UIAddons import *
def main():
    """Interactive entry point for the auto-benchmarker.

    Verifies the interpreter is Python 3, asks the user for confirmation
    (via UIAddons.uInput), then runs the full `performance` benchmark
    suite, writing results to log.log.
    """
    # Benchmarks must run under Python 3; bail out early otherwise.
    if sys.version_info[0] < 3:
        print("please run with python3")
        sys.exit(1)
    cls()
    print("welcome to easy auto benchmarker")
    print("load program? (benchmark times will vary per enviroment)")
    print("[y/n]")
    chc = uInput(["y", "Y", "n", "N"])
    # uInput restricts the answer to y/Y/n/N, so any "n" variant means quit.
    if chc in ("n", "N"):
        sys.exit(0)
    # Pass the command as a single string: with shell=True a list would only
    # forward its first element to the shell on POSIX, so a plain string is
    # the correct, unambiguous form.
    subprocess.call(
        "python3 -m performance run --python=python3 -r -b all -o log.log",
        shell=True,
    )
if __name__ == "__main__":
    main()
|
[
"subprocess.call",
"sys.exit"
] |
[((78, 142), 'subprocess.call', 'subprocess.call', (["['pip3 install --user performance']"], {'shell': '(True)'}), "(['pip3 install --user performance'], shell=True)\n", (93, 142), False, 'import subprocess\n'), ((143, 203), 'subprocess.call', 'subprocess.call', (["['python3 -m pip install perf']"], {'shell': '(True)'}), "(['python3 -m pip install perf'], shell=True)\n", (158, 203), False, 'import subprocess\n'), ((761, 867), 'subprocess.call', 'subprocess.call', (["['python3 -m performance run --python=python3 -r -b all -o log.log']"], {'shell': '(True)'}), "([\n 'python3 -m performance run --python=python3 -r -b all -o log.log'],\n shell=True)\n", (776, 867), False, 'import subprocess\n'), ((493, 504), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (501, 504), False, 'import sys\n'), ((736, 747), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (744, 747), False, 'import sys\n')]
|
from common.sagemaker_component import SageMakerComponent, SageMakerJobStatus
from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec
from deploy.src.sagemaker_deploy_component import (
EndpointRequests,
SageMakerDeployComponent,
)
from tests.unit_tests.tests.deploy.test_deploy_spec import DeploySpecTestCase
import unittest
from unittest.mock import patch, MagicMock, ANY
class DeployComponentTestCase(unittest.TestCase):
    """Unit tests for SageMakerDeployComponent.

    Each test builds the component directly (bypassing Do()'s normal setup
    where noted) and checks endpoint/endpoint-config naming, the boto3
    request payloads, status polling, and the update-endpoint path.
    """
    REQUIRED_ARGS = DeploySpecTestCase.REQUIRED_ARGS
    # NOTE(review): setUp as a @classmethod is unusual for unittest (it is
    # normally an instance method); it works here because the component is
    # stored on the class -- confirm this sharing across tests is intended.
    @classmethod
    def setUp(cls):
        cls.component = SageMakerDeployComponent()
        # Instantiate without calling Do()
        cls.component._endpoint_config_name = "endpoint-config"
        cls.component._endpoint_name = "endpoint"
        cls.component._should_update_existing = False
    # Do() should honor user-provided names and generate the missing ones.
    @patch("deploy.src.sagemaker_deploy_component.super", MagicMock())
    def test_do_sets_name(self):
        given_endpoint_name = SageMakerDeploySpec(
            self.REQUIRED_ARGS + ["--endpoint_name", "my-endpoint"]
        )
        given_endpoint_config_name = SageMakerDeploySpec(
            self.REQUIRED_ARGS + ["--endpoint_config_name", "my-endpoint-config"]
        )
        unnamed_spec = SageMakerDeploySpec(self.REQUIRED_ARGS)
        with patch(
            "deploy.src.sagemaker_deploy_component.SageMakerComponent._generate_unique_timestamped_id",
            MagicMock(return_value="-generated"),
        ):
            self.component.Do(given_endpoint_name)
            self.assertEqual(
                "EndpointConfig-generated", self.component._endpoint_config_name
            )
            self.assertEqual("my-endpoint", self.component._endpoint_name)
            self.component.Do(given_endpoint_config_name)
            self.assertEqual("my-endpoint-config", self.component._endpoint_config_name)
            self.assertEqual("Endpoint-generated", self.component._endpoint_name)
            self.component.Do(unnamed_spec)
            self.assertEqual(
                "EndpointConfig-generated", self.component._endpoint_config_name
            )
            self.assertEqual("Endpoint-generated", self.component._endpoint_name)
    # Same naming rules when --update_endpoint is set: a user-supplied
    # endpoint-config name is ignored and a fresh one is generated.
    @patch("deploy.src.sagemaker_deploy_component.super", MagicMock())
    def test_update_endpoint_do_sets_name(self):
        given_endpoint_name = SageMakerDeploySpec(
            self.REQUIRED_ARGS
            + ["--endpoint_name", "my-endpoint", "--update_endpoint", "True"]
        )
        given_endpoint_config_name = SageMakerDeploySpec(
            self.REQUIRED_ARGS
            + [
                "--endpoint_config_name",
                "my-endpoint-config",
                "--update_endpoint",
                "True",
            ]
        )
        unnamed_spec = SageMakerDeploySpec(self.REQUIRED_ARGS)
        # NOTE(review): this assigns a MagicMock onto the class itself and is
        # never undone, so it leaks into later tests; the `with patch(...)`
        # below already covers the same attribute -- consider removing.
        SageMakerDeployComponent._generate_unique_timestamped_id = MagicMock(
            return_value="-generated-update"
        )
        self.component._endpoint_name_exists = MagicMock(return_value=True)
        self.component._get_endpoint_config = MagicMock(return_value="existing-config")
        with patch(
            "deploy.src.sagemaker_deploy_component.SageMakerComponent._generate_unique_timestamped_id",
            MagicMock(return_value="-generated-update"),
        ):
            self.component.Do(given_endpoint_name)
            self.assertEqual(
                "EndpointConfig-generated-update", self.component._endpoint_config_name
            )
            self.assertEqual("my-endpoint", self.component._endpoint_name)
            self.assertTrue(self.component._should_update_existing)
            # Ignore given endpoint config name for update
            self.component.Do(given_endpoint_config_name)
            self.assertEqual(
                "EndpointConfig-generated-update", self.component._endpoint_config_name
            )
            self.assertEqual("Endpoint-generated-update", self.component._endpoint_name)
            self.assertTrue(self.component._should_update_existing)
            self.component.Do(unnamed_spec)
            self.assertEqual(
                "EndpointConfig-generated-update", self.component._endpoint_config_name
            )
            self.assertEqual("Endpoint-generated-update", self.component._endpoint_name)
            self.assertFalse(self.component._should_update_existing)
    # The generated request pair (config + endpoint) for a single variant.
    def test_create_deploy_job_requests(self):
        spec = SageMakerDeploySpec(self.REQUIRED_ARGS)
        request = self.component._create_job_request(spec.inputs, spec.outputs)
        self.assertEqual(
            request,
            EndpointRequests(
                config_request={
                    "EndpointConfigName": "endpoint-config",
                    "ProductionVariants": [
                        {
                            "VariantName": "variant-name-1",
                            "ModelName": "model-test",
                            "InitialInstanceCount": 1,
                            "InstanceType": "ml.m4.xlarge",
                            "InitialVariantWeight": 1.0,
                        }
                    ],
                    "Tags": [],
                },
                endpoint_request={
                    "EndpointName": "endpoint",
                    "EndpointConfigName": "endpoint-config",
                },
            ),
        )
    # The request payload is identical in the update-existing-endpoint path.
    def test_create_update_deploy_job_requests(self):
        spec = SageMakerDeploySpec(self.REQUIRED_ARGS)
        self.component._should_update_existing = True
        request = self.component._create_job_request(spec.inputs, spec.outputs)
        self.assertEqual(
            request,
            EndpointRequests(
                config_request={
                    "EndpointConfigName": "endpoint-config",
                    "ProductionVariants": [
                        {
                            "VariantName": "variant-name-1",
                            "ModelName": "model-test",
                            "InitialInstanceCount": 1,
                            "InstanceType": "ml.m4.xlarge",
                            "InitialVariantWeight": 1.0,
                        }
                    ],
                    "Tags": [],
                },
                endpoint_request={
                    "EndpointName": "endpoint",
                    "EndpointConfigName": "endpoint-config",
                },
            ),
        )
    # Two production variants (with accelerators) map to two entries in
    # ProductionVariants, in argument order.
    def test_create_deploy_job_multiple_variants(self):
        spec = SageMakerDeploySpec(
            self.REQUIRED_ARGS
            + [
                "--variant_name_1",
                "variant-test-1",
                "--initial_instance_count_1",
                "1",
                "--instance_type_1",
                "t1",
                "--initial_variant_weight_1",
                "0.1",
                "--accelerator_type_1",
                "ml.eia1.medium",
                "--model_name_2",
                "model-test-2",
                "--variant_name_2",
                "variant-test-2",
                "--initial_instance_count_2",
                "2",
                "--instance_type_2",
                "t2",
                "--initial_variant_weight_2",
                "0.2",
                "--accelerator_type_2",
                "ml.eia1.large",
            ]
        )
        request = self.component._create_job_request(spec.inputs, spec.outputs)
        self.assertEqual(
            request,
            EndpointRequests(
                config_request={
                    "EndpointConfigName": "endpoint-config",
                    "ProductionVariants": [
                        {
                            "VariantName": "variant-test-1",
                            "ModelName": "model-test",
                            "InitialInstanceCount": 1,
                            "InstanceType": "t1",
                            "InitialVariantWeight": 0.1,
                            "AcceleratorType": "ml.eia1.medium",
                        },
                        {
                            "VariantName": "variant-test-2",
                            "ModelName": "model-test-2",
                            "InitialInstanceCount": 2,
                            "InstanceType": "t2",
                            "InitialVariantWeight": 0.2,
                            "AcceleratorType": "ml.eia1.large",
                        },
                    ],
                    "Tags": [],
                },
                endpoint_request={
                    "EndpointName": "endpoint",
                    "EndpointConfigName": "endpoint-config",
                },
            ),
        )
    # describe_endpoint status strings map onto SageMakerJobStatus:
    # Creating/Updating -> not completed; InService -> completed;
    # Failed -> completed with error + FailureReason.
    def test_get_job_status(self):
        self.component._sm_client = mock_client = MagicMock()
        self.component._sm_client.describe_endpoint.return_value = {
            "EndpointStatus": "Creating"
        }
        self.assertEqual(
            self.component._get_job_status(),
            SageMakerJobStatus(is_completed=False, raw_status="Creating"),
        )
        self.component._sm_client.describe_endpoint.return_value = {
            "EndpointStatus": "Updating"
        }
        self.assertEqual(
            self.component._get_job_status(),
            SageMakerJobStatus(is_completed=False, raw_status="Updating"),
        )
        self.component._sm_client.describe_endpoint.return_value = {
            "EndpointStatus": "InService"
        }
        self.assertEqual(
            self.component._get_job_status(),
            SageMakerJobStatus(is_completed=True, raw_status="InService"),
        )
        self.component._sm_client.describe_endpoint.return_value = {
            "EndpointStatus": "Failed",
            "FailureReason": "lolidk",
        }
        self.assertEqual(
            self.component._get_job_status(),
            SageMakerJobStatus(
                is_completed=True,
                raw_status="Failed",
                has_error=True,
                error_message="lolidk",
            ),
        )
    # After completion the output field carries the final endpoint name.
    def test_after_job_completed(self):
        spec = SageMakerDeploySpec(self.REQUIRED_ARGS)
        self.component._after_job_complete({}, {}, spec.inputs, spec.outputs)
        self.assertEqual(spec.outputs.endpoint_name, "endpoint")
    # In the update path the component calls update_endpoint (not create)
    # and deletes the previously-active endpoint config.
    def test_submit_update_job_request(self):
        self.component._should_update_existing = True
        self.component._existing_endpoint_config_name = "old-config"
        self.component._delete_endpoint_config = MagicMock(return_value=True)
        self.component._sm_client = MagicMock()
        requests = EndpointRequests(
            config_request={
                "EndpointConfigName": "endpoint-config",
                "ProductionVariants": [
                    {
                        "VariantName": "variant-test-1",
                        "ModelName": "model-test",
                        "InitialInstanceCount": 1,
                        "InstanceType": "t1",
                        "InitialVariantWeight": 0.1,
                        "AcceleratorType": "ml.eia1.medium",
                    },
                    {
                        "VariantName": "variant-test-2",
                        "ModelName": "model-test-2",
                        "InitialInstanceCount": 2,
                        "InstanceType": "t2",
                        "InitialVariantWeight": 0.2,
                        "AcceleratorType": "ml.eia1.large",
                    },
                ],
                "Tags": [],
            },
            endpoint_request={
                "EndpointName": "endpoint",
                "EndpointConfigName": "endpoint-config",
            },
        )
        self.component._submit_job_request(requests)
        self.component._sm_client.update_endpoint.assert_called_once_with(
            **{"EndpointName": "endpoint", "EndpointConfigName": "endpoint-config",}
        )
        self.component._delete_endpoint_config.assert_called_once_with("old-config")
|
[
"deploy.src.sagemaker_deploy_component.SageMakerDeployComponent",
"common.sagemaker_component.SageMakerJobStatus",
"unittest.mock.MagicMock",
"deploy.src.sagemaker_deploy_component.EndpointRequests",
"deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec"
] |
[((559, 585), 'deploy.src.sagemaker_deploy_component.SageMakerDeployComponent', 'SageMakerDeployComponent', ([], {}), '()\n', (583, 585), False, 'from deploy.src.sagemaker_deploy_component import EndpointRequests, SageMakerDeployComponent\n'), ((932, 1008), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (["(self.REQUIRED_ARGS + ['--endpoint_name', 'my-endpoint'])"], {}), "(self.REQUIRED_ARGS + ['--endpoint_name', 'my-endpoint'])\n", (951, 1008), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((1068, 1162), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (["(self.REQUIRED_ARGS + ['--endpoint_config_name', 'my-endpoint-config'])"], {}), "(self.REQUIRED_ARGS + ['--endpoint_config_name',\n 'my-endpoint-config'])\n", (1087, 1162), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((1204, 1243), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (['self.REQUIRED_ARGS'], {}), '(self.REQUIRED_ARGS)\n', (1223, 1243), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((856, 867), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (865, 867), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((2314, 2423), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (["(self.REQUIRED_ARGS + ['--endpoint_name', 'my-endpoint',\n '--update_endpoint', 'True'])"], {}), "(self.REQUIRED_ARGS + ['--endpoint_name', 'my-endpoint',\n '--update_endpoint', 'True'])\n", (2333, 2423), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((2491, 2614), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (["(self.REQUIRED_ARGS + ['--endpoint_config_name', 'my-endpoint-config',\n '--update_endpoint', 'True'])"], {}), "(self.REQUIRED_ARGS + ['--endpoint_config_name',\n 'my-endpoint-config', '--update_endpoint', 'True'])\n", (2510, 
2614), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((2747, 2786), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (['self.REQUIRED_ARGS'], {}), '(self.REQUIRED_ARGS)\n', (2766, 2786), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((2854, 2897), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '"""-generated-update"""'}), "(return_value='-generated-update')\n", (2863, 2897), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((2967, 2995), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (2976, 2995), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((3042, 3083), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '"""existing-config"""'}), "(return_value='existing-config')\n", (3051, 3083), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((2222, 2233), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2231, 2233), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((4408, 4447), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (['self.REQUIRED_ARGS'], {}), '(self.REQUIRED_ARGS)\n', (4427, 4447), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((5416, 5455), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (['self.REQUIRED_ARGS'], {}), '(self.REQUIRED_ARGS)\n', (5435, 5455), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((6480, 6941), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (["(self.REQUIRED_ARGS + ['--variant_name_1', 'variant-test-1',\n '--initial_instance_count_1', '1', '--instance_type_1', 't1',\n '--initial_variant_weight_1', '0.1', '--accelerator_type_1',\n 'ml.eia1.medium', '--model_name_2', 'model-test-2', '--variant_name_2',\n 'variant-test-2', '--initial_instance_count_2', 
'2',\n '--instance_type_2', 't2', '--initial_variant_weight_2', '0.2',\n '--accelerator_type_2', 'ml.eia1.large'])"], {}), "(self.REQUIRED_ARGS + ['--variant_name_1',\n 'variant-test-1', '--initial_instance_count_1', '1',\n '--instance_type_1', 't1', '--initial_variant_weight_1', '0.1',\n '--accelerator_type_1', 'ml.eia1.medium', '--model_name_2',\n 'model-test-2', '--variant_name_2', 'variant-test-2',\n '--initial_instance_count_2', '2', '--instance_type_2', 't2',\n '--initial_variant_weight_2', '0.2', '--accelerator_type_2',\n 'ml.eia1.large'])\n", (6499, 6941), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((8753, 8764), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (8762, 8764), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((10088, 10127), 'deploy.src.sagemaker_deploy_spec.SageMakerDeploySpec', 'SageMakerDeploySpec', (['self.REQUIRED_ARGS'], {}), '(self.REQUIRED_ARGS)\n', (10107, 10127), False, 'from deploy.src.sagemaker_deploy_spec import SageMakerDeploySpec\n'), ((10492, 10520), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (10501, 10520), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((10557, 10568), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (10566, 10568), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((10589, 11175), 'deploy.src.sagemaker_deploy_component.EndpointRequests', 'EndpointRequests', ([], {'config_request': "{'EndpointConfigName': 'endpoint-config', 'ProductionVariants': [{\n 'VariantName': 'variant-test-1', 'ModelName': 'model-test',\n 'InitialInstanceCount': 1, 'InstanceType': 't1', 'InitialVariantWeight':\n 0.1, 'AcceleratorType': 'ml.eia1.medium'}, {'VariantName':\n 'variant-test-2', 'ModelName': 'model-test-2', 'InitialInstanceCount': \n 2, 'InstanceType': 't2', 'InitialVariantWeight': 0.2, 'AcceleratorType':\n 'ml.eia1.large'}], 'Tags': []}", 'endpoint_request': "{'EndpointName': 
'endpoint', 'EndpointConfigName': 'endpoint-config'}"}), "(config_request={'EndpointConfigName': 'endpoint-config',\n 'ProductionVariants': [{'VariantName': 'variant-test-1', 'ModelName':\n 'model-test', 'InitialInstanceCount': 1, 'InstanceType': 't1',\n 'InitialVariantWeight': 0.1, 'AcceleratorType': 'ml.eia1.medium'}, {\n 'VariantName': 'variant-test-2', 'ModelName': 'model-test-2',\n 'InitialInstanceCount': 2, 'InstanceType': 't2', 'InitialVariantWeight':\n 0.2, 'AcceleratorType': 'ml.eia1.large'}], 'Tags': []},\n endpoint_request={'EndpointName': 'endpoint', 'EndpointConfigName':\n 'endpoint-config'})\n", (10605, 11175), False, 'from deploy.src.sagemaker_deploy_component import EndpointRequests, SageMakerDeployComponent\n'), ((4588, 4953), 'deploy.src.sagemaker_deploy_component.EndpointRequests', 'EndpointRequests', ([], {'config_request': "{'EndpointConfigName': 'endpoint-config', 'ProductionVariants': [{\n 'VariantName': 'variant-name-1', 'ModelName': 'model-test',\n 'InitialInstanceCount': 1, 'InstanceType': 'ml.m4.xlarge',\n 'InitialVariantWeight': 1.0}], 'Tags': []}", 'endpoint_request': "{'EndpointName': 'endpoint', 'EndpointConfigName': 'endpoint-config'}"}), "(config_request={'EndpointConfigName': 'endpoint-config',\n 'ProductionVariants': [{'VariantName': 'variant-name-1', 'ModelName':\n 'model-test', 'InitialInstanceCount': 1, 'InstanceType': 'ml.m4.xlarge',\n 'InitialVariantWeight': 1.0}], 'Tags': []}, endpoint_request={\n 'EndpointName': 'endpoint', 'EndpointConfigName': 'endpoint-config'})\n", (4604, 4953), False, 'from deploy.src.sagemaker_deploy_component import EndpointRequests, SageMakerDeployComponent\n'), ((5650, 6015), 'deploy.src.sagemaker_deploy_component.EndpointRequests', 'EndpointRequests', ([], {'config_request': "{'EndpointConfigName': 'endpoint-config', 'ProductionVariants': [{\n 'VariantName': 'variant-name-1', 'ModelName': 'model-test',\n 'InitialInstanceCount': 1, 'InstanceType': 'ml.m4.xlarge',\n 'InitialVariantWeight': 1.0}], 
'Tags': []}", 'endpoint_request': "{'EndpointName': 'endpoint', 'EndpointConfigName': 'endpoint-config'}"}), "(config_request={'EndpointConfigName': 'endpoint-config',\n 'ProductionVariants': [{'VariantName': 'variant-name-1', 'ModelName':\n 'model-test', 'InitialInstanceCount': 1, 'InstanceType': 'ml.m4.xlarge',\n 'InitialVariantWeight': 1.0}], 'Tags': []}, endpoint_request={\n 'EndpointName': 'endpoint', 'EndpointConfigName': 'endpoint-config'})\n", (5666, 6015), False, 'from deploy.src.sagemaker_deploy_component import EndpointRequests, SageMakerDeployComponent\n'), ((7456, 8042), 'deploy.src.sagemaker_deploy_component.EndpointRequests', 'EndpointRequests', ([], {'config_request': "{'EndpointConfigName': 'endpoint-config', 'ProductionVariants': [{\n 'VariantName': 'variant-test-1', 'ModelName': 'model-test',\n 'InitialInstanceCount': 1, 'InstanceType': 't1', 'InitialVariantWeight':\n 0.1, 'AcceleratorType': 'ml.eia1.medium'}, {'VariantName':\n 'variant-test-2', 'ModelName': 'model-test-2', 'InitialInstanceCount': \n 2, 'InstanceType': 't2', 'InitialVariantWeight': 0.2, 'AcceleratorType':\n 'ml.eia1.large'}], 'Tags': []}", 'endpoint_request': "{'EndpointName': 'endpoint', 'EndpointConfigName': 'endpoint-config'}"}), "(config_request={'EndpointConfigName': 'endpoint-config',\n 'ProductionVariants': [{'VariantName': 'variant-test-1', 'ModelName':\n 'model-test', 'InitialInstanceCount': 1, 'InstanceType': 't1',\n 'InitialVariantWeight': 0.1, 'AcceleratorType': 'ml.eia1.medium'}, {\n 'VariantName': 'variant-test-2', 'ModelName': 'model-test-2',\n 'InitialInstanceCount': 2, 'InstanceType': 't2', 'InitialVariantWeight':\n 0.2, 'AcceleratorType': 'ml.eia1.large'}], 'Tags': []},\n endpoint_request={'EndpointName': 'endpoint', 'EndpointConfigName':\n 'endpoint-config'})\n", (7472, 8042), False, 'from deploy.src.sagemaker_deploy_component import EndpointRequests, SageMakerDeployComponent\n'), ((8970, 9031), 'common.sagemaker_component.SageMakerJobStatus', 
'SageMakerJobStatus', ([], {'is_completed': '(False)', 'raw_status': '"""Creating"""'}), "(is_completed=False, raw_status='Creating')\n", (8988, 9031), False, 'from common.sagemaker_component import SageMakerComponent, SageMakerJobStatus\n'), ((9248, 9309), 'common.sagemaker_component.SageMakerJobStatus', 'SageMakerJobStatus', ([], {'is_completed': '(False)', 'raw_status': '"""Updating"""'}), "(is_completed=False, raw_status='Updating')\n", (9266, 9309), False, 'from common.sagemaker_component import SageMakerComponent, SageMakerJobStatus\n'), ((9527, 9588), 'common.sagemaker_component.SageMakerJobStatus', 'SageMakerJobStatus', ([], {'is_completed': '(True)', 'raw_status': '"""InService"""'}), "(is_completed=True, raw_status='InService')\n", (9545, 9588), False, 'from common.sagemaker_component import SageMakerComponent, SageMakerJobStatus\n'), ((9843, 9945), 'common.sagemaker_component.SageMakerJobStatus', 'SageMakerJobStatus', ([], {'is_completed': '(True)', 'raw_status': '"""Failed"""', 'has_error': '(True)', 'error_message': '"""lolidk"""'}), "(is_completed=True, raw_status='Failed', has_error=True,\n error_message='lolidk')\n", (9861, 9945), False, 'from common.sagemaker_component import SageMakerComponent, SageMakerJobStatus\n'), ((1381, 1417), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '"""-generated"""'}), "(return_value='-generated')\n", (1390, 1417), False, 'from unittest.mock import patch, MagicMock, ANY\n'), ((3221, 3264), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '"""-generated-update"""'}), "(return_value='-generated-update')\n", (3230, 3264), False, 'from unittest.mock import patch, MagicMock, ANY\n')]
|
"""
:py:class:`Utils` - a set of generic utilities
==============================================
Usage::
# assuming that $PYTHONPATH=.../lcls2/psana
# Run test: python lcls2/psana/psana/pyalgos/generic/Utils.py 1
# Import
from psana.pyalgos.generic.Utils import input_single_char
import psana.pyalgos.generic.Utils as gu
# Methods
#resp = gu.<method(pars)>
ts = gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_sec=None)
tsec, ts = gu.time_and_stamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_sec=None)
tsec = gu.time_sec_from_stamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_stamp='1970-01-01T00:00:00-0800')
usr = gu.get_enviroment(env='USER')
usr = gu.get_login()
host = gu.get_hostname()
cwd = gu.get_cwd()
pid = gu.get_pid()
stat = gu.shell_command_is_available(cmd='mongorestore', verb=True)
rec = gu.log_rec_on_start()
fmode = gu.file_mode(fname)
gu.create_directory(dir, mode=0o777)
exists = gu.create_path(path, depth=6, mode=0o777)
flist = gu.get_list_of_files_in_dir(dirname)
flist = gu.get_list_of_files_in_dir_for_ext(dir, ext='.xtc')
flist = gu.get_list_of_files_in_dir_for_pattern(dir, pattern='-r0022')
owner = gu.get_path_owner(path)
mode = gu.get_path_mode(path)
tmpf = gu.get_tempfile(mode='r+b',suffix='.txt')
gu.print_parsed_path(path)
arr = gu.load_textfile(path)
gu.save_textfile(text, path, mode='w') # mode: 'w'-write, 'a'-append
gu.set_file_access_mode(fname, mode=0o777)
jo = gu.load_json(fname)
gu.save_json(jo, fname)
o = gu.load_pickle(fname)
gu.save_pickle(o, fname)
# Save image in file
# ==================
gu.save_image_tiff(image, fname='image.tiff', verb=True) # 16-bit tiff
gu.save_image_file(image, fname='image.png', verb=True) # gif, pdf, eps, png, jpg, jpeg, tiff (8-bit only)
list_int = gu.list_of_int_from_list_of_str(list_str)
list_str = gu.list_of_str_from_list_of_int(list_int, fmt='%04d')
resp = gu.has_kerberos_ticket()
resp = gu.check_token(do_print=False)
resp = gu.get_afs_token(do_print=False)
hlst = gu.list_of_hosts_from_lshosts(filter='ps')
resp = gu.text_sataus_of_lsf_hosts(farm='psnehfarm')
resp = gu.ext_status_of_queues(lst_of_queues=['psanaq', 'psnehq', 'psfehq', 'psnehprioq', 'psfehprioq'])
gu.str_kwargs(kwargs, title='Input parameters:', fmt='\n%20s : %s'):
gu.print_kwargs(kwargs)
gu.print_parser(parser) # from optparse import OptionParser
s = gu.do_print(nev) # returns true for sparcified event numbers.
ch = gu.input_single_char('Next event? [y/n]')
os_system(cmd)
os_command(cmd)
See:
- :py:class:`Utils`
- :py:class:`PSUtils`
- :py:class:`NDArrUtils`
- :py:class:`Graphics`
This software was developed for the LCLS2 project.
If you use all or part of it, please give an appropriate acknowledgment.
Created: 2018-01-25 by <NAME>
Adopted for LCLS2 on 2018-02-02
"""
import os
import sys
import getpass
import socket
from time import localtime, strftime, time, strptime, mktime
import numpy as np
import tty, termios
#import subprocess
from subprocess import call
if sys.version_info.major == 2:
from commands import getoutput
else:
from subprocess import getoutput
# init_logger etc is moved to logger.py
# from psana.pyalgos.generic.logger import init_logger, STR_LEVEL_NAMES, DICT_NAME_TO_LEVEL, TSFORMAT
import logging
# Use the module's import path as the logger name. The original passed the
# string literal '__name__', which registers a logger literally called
# "__name__" instead of this module's dotted path.
logger = logging.getLogger(__name__)
def str_tstamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_sec=None):
    """Return a local-time timestamp string.

    Parameters
    ----------
    fmt : str - strftime format specification.
    time_sec : float or None - epoch seconds; current time when None.
    """
    when = localtime(time_sec)  # localtime(None) means "now"
    return strftime(fmt, when)
def str_tstamp_v1(fmt='%Y-%m-%dT%H:%M:%S.%f%z', time_sec=None):
    """Like str_tstamp, but built on datetime so '%f' (microseconds) works."""
    from datetime import datetime
    stamp_time = time_sec if time_sec is not None else time()
    return datetime.fromtimestamp(stamp_time).strftime(fmt)
def time_and_stamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_sec=None):
    """Return (epoch_seconds, formatted stamp); uses the current time when
    time_sec is None."""
    if time_sec is None:
        time_sec = time()
    return time_sec, str_tstamp(fmt, time_sec)
def time_sec_from_stamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_stamp='1970-01-01T00:00:00-0800'):
    """Convert a local-time stamp string to integer epoch seconds.

    On a parse failure the error is logged and the process exits, matching
    the module's fail-fast convention.
    """
    try:
        parsed = strptime(time_stamp, fmt)
    except ValueError as err:
        logger.exception(err)
        sys.exit()
    return int(mktime(parsed))
def get_enviroment(env='USER'):
    """Return the value of environment variable *env*, or None when unset."""
    return os.environ.get(env)
def get_hostname():
    """Return this machine's host name (via socket, not os.uname)."""
    host = socket.gethostname()
    return host
def get_cwd():
    """Return the current working directory."""
    cwd = os.getcwd()
    return cwd
def get_pid():
    """Return the id of the current process."""
    pid = os.getpid()
    return pid
def get_login():
    """Return the current user's login name (getpass, not os.getlogin,
    so it also works without a controlling terminal)."""
    user = getpass.getuser()
    return user
def shell_command_is_available(cmd='mongorestore', verb=True):
    """Return True if *cmd* resolves to an executable on PATH, else False.

    The original implementation returned None on every path, so callers
    (e.g. ``stat = gu.shell_command_is_available(...)`` in the module
    docstring) never received the availability status. Now the boolean is
    returned; the warning on an unavailable command is unchanged.
    """
    import shutil
    available = shutil.which(cmd) is not None
    if not available and verb:
        logger.warning('shell command "%s" is unavailable.' % cmd)
    return available
def file_mode(fname):
    """Return the st_mode field of *fname*'s stat record, e.g. 0o100644
    (file type bits plus permission bits)."""
    import stat as _stat
    return os.stat(fname)[_stat.ST_MODE]
def log_rec_on_start(tsfmt='%Y-%m-%dT%H:%M:%S%z'):
    """Build a one-line session record: timestamp, user@host, cwd and the
    full command line -- suitable for appending to a log on startup."""
    cmd = ' '.join(sys.argv)
    return '\n%s user:%s@%s cwd:%s command:%s' % (
        str_tstamp(fmt=tsfmt), get_login(), get_hostname(), get_cwd(), cmd)
def create_directory(dir, mode=0o777):
    """Create directory *dir* (including parents) and chmod it to *mode*.
    Does nothing but log when the directory already exists."""
    if not os.path.exists(dir):
        os.makedirs(dir)
        os.chmod(dir, mode)
        logger.debug('Directory created: %s, mode(oct)=%s' % (dir, oct(mode)))
    else:
        logger.debug('Directory exists: %s' % dir)
def create_path(path, depth=6, mode=0o777):
    """Create the missing directories of *path*, starting past *depth*
    components from the beginning.

    e.g. for '/reg/g/psdm/logs/calibman/2016/07/log-file-name.txt'
    or '/reg/d/psdm/cxi/.../pedestals/9-end.data' the first *depth*
    components are assumed to exist already.
    Returns True if the directory part of *path* exists, False otherwise.
    """
    logger.debug('create_path: %s' % path)
    parts = path.split('/')
    cpath = parts[0]
    # The last component is the file name and is never created.
    for i, part in enumerate(parts[:-1]):
        if i > 0:
            cpath += '/%s' % part
        # Skip the leading components assumed to exist, and the empty
        # component produced by a leading '/'.
        if i < depth or cpath == '':
            continue
        create_directory(cpath, mode)
    return os.path.exists(cpath)
def get_list_of_files_in_dir(dirname):
    """Return the (unsorted) entry names of directory *dirname*."""
    return list(os.listdir(dirname))
def get_list_of_files_in_dir_for_ext(dir, ext='.xtc'):
    """Return the sorted file names in *dir* whose extension equals *ext*.
    Returns [] when *dir* is None or does not exist."""
    if dir is None or not os.path.exists(dir):
        return []
    return sorted(
        name for name in os.listdir(dir) if os.path.splitext(name)[1] == ext
    )
def get_list_of_files_in_dir_for_part_fname(dir, pattern='-r0022'):
    """Return sorted full paths of files in *dir* whose names contain
    *pattern*. Returns [] when *dir* is None or does not exist."""
    if dir is None or not os.path.exists(dir):
        return []
    matches = [
        os.path.join(dir, name) for name in os.listdir(dir) if pattern in name
    ]
    return sorted(matches)
def get_path_owner(path):
    """Return the login name of *path*'s owner (Unix only, via pwd)."""
    import pwd
    owner_uid = os.stat(path).st_uid
    return pwd.getpwuid(owner_uid).pw_name
def get_path_mode(path):
    """Return the st_mode of *path* (file type and permission bits)."""
    stat_result = os.stat(path)
    return stat_result.st_mode
def get_tempfile(mode='r+b', suffix='.txt'):
    """Return an open NamedTemporaryFile with the given *mode* and *suffix*.
    The caller is responsible for closing it."""
    import tempfile
    return tempfile.NamedTemporaryFile(mode=mode, suffix=suffix)
def print_parsed_path(path): # Output for path:
    """Print how os.path decomposes *path*: existence, splitext, basename,
    dirname, lexists, isfile, isdir and split -- one line each."""
    print('print_parsed_path(path): path:',) # path/reg/d/psdm/XCS/xcsi0112/xtc/e167-r0015-s00-c00.xtc
    print('exists(path) =', os.path.exists(path)) # True
    print('splitext(path)=', os.path.splitext(path))# ('/reg/d/psdm/XCS/xcsi0112/xtc/e167-r0015-s00-c00', '.xtc')
    print('basename(path)=', os.path.basename(path))# e167-r0015-s00-c00.xtc
    print('dirname(path) =', os.path.dirname(path)) # /reg/d/psdm/XCS/xcsi0112/xtc
    print('lexists(path) =', os.path.lexists(path)) # True
    print('isfile(path) =', os.path.isfile(path)) # True
    print('isdir(path) =', os.path.isdir(path)) # False
    print('split(path) =', os.path.split(path)) # ('/reg/d/psdm/XCS/xcsi0112/xtc', 'e167-r0015-s00-c00.xtc')
def set_file_access_mode(fname, mode=0o777):
    """chmod file *fname* to access mode *mode*."""
    return os.chmod(fname, mode)
def save_textfile(text, path, mode='w', verb=False):
    """Saves text in file specified by path. mode: 'w'-write, 'a'-append.

    Uses a context manager so the file handle is closed even if the write
    raises (the original open/close pair leaked the handle on error).
    """
    msg = 'save_textfile %s' % path
    if verb: print(msg)
    logger.debug(msg)
    with open(path, mode) as f:
        f.write(text)
def load_textfile(path, verb=False):
    """Returns the content of text file *path* as a single str object.

    Uses a context manager so the file handle is closed even if the read
    raises (the original open/close pair leaked the handle on error).
    """
    msg = 'load_textfile %s' % path
    if verb: print(msg)
    logger.debug(msg)
    with open(path, 'r') as f:
        return f.read()
def load_json(fname):
    """Load json object from file.

    The original ``json.load(open(fname,'rb'))`` never closed the file
    handle; a context manager fixes the leak.
    """
    logger.debug('load_json %s' % fname)
    import json
    with open(fname, 'rb') as f:
        return json.load(f)
def save_json(jo, fname, mode='w'):
    """Serialize json object *jo* into file *fname* (mode 'w' or 'a')."""
    logger.debug('save_json %s' % fname)
    import json
    with open(fname, mode) as f:
        json.dump(jo, f)
def load_pickle(fname, mode='rb'):
    """Returns object unpickled from file *fname*.

    The original ``pickle.load(open(fname, mode))`` never closed the file
    handle; a context manager fixes the leak. NOTE: pickle.load must only
    be used on trusted data.
    """
    logger.debug('load_pickle %s' % fname)
    import pickle
    with open(fname, mode) as f:
        return pickle.load(f)
def save_pickle(o, fname, mode='wb'):
    """Pickle object *o* into file *fname*."""
    logger.debug('save_pickle %s' % fname)
    import pickle
    with open(fname, mode) as f:
        pickle.dump(o, f)
def save_image_tiff(image, fname='image.tiff', verb=False):
    """Saves image in 16-bit tiff file

    The array is cast to int16 before writing.
    """
    # NOTE(review): bare `import Image` is the legacy PIL form; modern
    # Pillow requires `from PIL import Image` -- confirm the target
    # environment still provides the old module.
    import Image
    msg = 'save_image_tiff %s' % fname
    if verb: print(msg)
    logger.debug(msg)
    img = Image.fromarray(image.astype(np.int16))
    img.save(fname)
def save_image_file(image, fname='image.png', verb=False):
    """Saves files with type by extension gif, pdf, eps, png, jpg, jpeg, tiff (8-bit only),
    or txt for any other type
    """
    # NOTE(review): scipy.misc.imsave was removed in SciPy >= 1.2; this
    # import/call only works with old SciPy (or needs imageio) -- confirm.
    import scipy.misc as scim
    msg = 'save_image_file %s' % fname
    # splitext always returns a 2-tuple; fields[1] is '' for no extension.
    fields = os.path.splitext(fname)
    if len(fields)>1 and fields[1] in ['.gif', '.pdf', '.eps', '.png', '.jpg', '.jpeg', '.tiff']:
        scim.imsave(fname, image)
    else:
        # Unsupported extension: fall back to a plain-text dump alongside.
        fnametxt = '%s.txt' % fname
        msg = 'save_image_file: non-supported file extension. Save image in text file %s' % fnametxt
        np.savetxt(fnametxt, image, fmt='%8.1f', delimiter=' ', newline='\n')
        #raise IOError('Unknown file type in extension %s' % fname)
    if verb: print(msg)
    logger.debug(msg)
def replace(template, pattern, subst):
    """Return *template* with its first occurrence of *pattern* replaced
    by *subst*; the template is returned unchanged when *pattern* is
    absent. Raises ValueError for an empty *pattern* (empty separator)."""
    head, found, tail = template.partition(pattern)
    if found:
        return '%s%s%s' % (head, subst, tail)
    return template
def print_command_line_parameters(parser):
    """Print the full command line, positional arguments, and each option's
    current value next to its default (expects an optparse-style parser)."""
    popts, pargs = parser.parse_args()
    option_vals = vars(popts)                      # dict of option values
    option_defs = vars(parser.get_default_values())  # dict of option defaults
    summary = ' '.join(sys.argv)\
        + '\nArgument list: %s\nOptional parameters:\n' % str(pargs)\
        + '  <key>      <value>              <default>'
    print('Command:\n ', summary)
    for key in option_vals:
        print('  %s %s %s' % (key.ljust(10), str(option_vals[key]).ljust(20), str(option_defs[key]).ljust(20)))
def list_of_int_from_list_of_str(list_str):
    """Converts ['0001', '0202', '0203', '0204',...] to [1, 202, 203, 204,...]
    """
    # map(int, ...) parses each zero-padded decimal string.
    return list(map(int, list_str))
def list_of_str_from_list_of_int(list_int, fmt='%04d'):
    """Converts [1, 202, 203, 204,...] to ['0001', '0202', '0203', '0204',...]
    """
    # Apply the %-format to every integer, preserving order.
    return list(map(lambda number: fmt % number, list_int))
def has_kerberos_ticket():
    """Return True if the user currently holds a valid Kerberos ticket."""
    # `klist -s` exits 0 when a valid ticket exists; compare the return code
    # directly instead of the redundant `True if ... else False`.
    return call(["klist", "-s"]) == 0
def _parse_token(token):
""" from string like: User's (AFS ID 5269) tokens for <EMAIL> [Expires Feb 28 19:16] 54 75 Expires Feb 28 19:16
returns date/time: Feb 28 19:16
"""
timestamp = ''
for line in token.split('\n'):
pos_beg = line.find('[Expire')
if pos_beg == -1: continue
pos_end = line.find(']', pos_beg)
#print(line)
timestamp = line[pos_beg+9:pos_end]
#date_object = datetime.strptime('Jun 1 2005 1:33PM', '%b %d %Y %I:%M%p')
#date_object = datetime.strptime(timestamp, '%b %d %H:%M')
#print('date_object', str(date_object))
return timestamp
def check_token(do_print=False):
    """Query the AFS `tokens` command and report whether a valid token exists.

    Returns (status, msg): status is True when a token is present, msg is a
    human-readable summary including the expiration timestamp.
    """
    token = getoutput('tokens')
    status = 'Expire' in token
    timestamp = _parse_token(token) if status else ''
    state_txt = {True: 'IS valid until', False: 'IS NOT valid'}[status]
    msg = 'Your AFS token %s %s' % (state_txt, timestamp)
    if do_print:
        print(msg)
    return status, msg
def get_afs_token(do_print=False):
    """Run `aklog` to obtain an AFS token; returns the command's output."""
    response = getoutput('aklog')
    if do_print:
        print(str(response))
    return response
def list_of_hosts(filter='psana'):
    """Return host names reported by `lshosts`, filtered by substring *filter*.

    Runs `lshosts | grep <filter>` and takes the first whitespace-separated
    field of each output line.
    """
    cmd = 'lshosts | grep %s' % filter
    lines = getoutput(cmd).split('\n')
    # Skip empty/blank lines: when grep matches nothing the output is '',
    # and the original `line.split()[0]` raised IndexError on it.
    hosts = [line.split()[0] for line in lines if line.strip()]
    return hosts
def text_sataus_of_lsf_hosts(farm='psnehfarm'):
    """Returns text output of the command: bhosts farm"""
    # NOTE: the function name contains a typo ('sataus'); it is kept as-is
    # for backward compatibility with existing callers.
    cmd = 'bhosts %s' % farm
    # Returns both the command string and its captured shell output.
    return cmd, getoutput(cmd)
def text_status_of_queues(lst_of_queues=('psanaq', 'psnehq', 'psfehq', 'psnehprioq', 'psfehprioq')):
    """Return (command, output) for `bqueues <queues>`.

    lst_of_queues : iterable of LSF queue names to query.
    """
    # Tuple default avoids the shared-mutable-default-argument pitfall of
    # the original list default; any iterable of str still works.
    cmd = 'bqueues %s' % (' '.join(lst_of_queues))
    return cmd, getoutput(cmd)
def str_kwargs(kwargs, title='Input parameters:', fmt='\n%20s: %s'):
    """Format dict *kwargs* as a multi-line string headed by *title*."""
    parts = [title]
    for key, val in kwargs.items():
        parts.append(fmt % (key, str(val)))
    return ''.join(parts)
def print_kwargs(kwargs, cmt='%s\n kwargs:' % (40*'_')):
    """Pretty-print dict *kwargs*, one 'key: value' pair per line,
    closed with an underscore rule."""
    print(cmt)
    for key, val in kwargs.items():
        print('  %10s: %10s' % (key, val))
    print(40*'_')
def str_attributes(o, cmt='\nattributes:', fmt='\n %s'):
    """Return *cmt* followed by every attribute name of object *o*,
    each rendered with *fmt*."""
    lines = [fmt % str(name) for name in dir(o)]
    return cmt + ''.join(lines)
#def str_attributes(o, cmt='\nattributes:', fmt='\n%20s: %s'):
# return str(dir(o))
#return cmt + ''.join([fmt % (k,str(v)) for k,v in dir(o) if len(k)>2 and k[:2] != '__'])
def print_parser(parser):
    """Print an (optparse) parser's positional arguments and every option's
    current value alongside its default."""
    options, positional = parser.parse_args()
    current = vars(options)
    defaults = vars(parser.get_default_values())
    print('Arguments: %s\nOptional parameters:\n' % str(positional)
          + '<key> <value> <default>')
    for name in current:
        print('%s %s %s' % (name.ljust(10), str(current[name]).ljust(16), str(defaults[name]).ljust(16)))
def is_in_command_line(ptrn1=None, ptrn2=None):
    """Return True if either pattern appears on the command line.

    ptrn1 is matched against the first two characters of each argument
    (short options like '-x'); ptrn2 is matched anywhere in the argument
    (long options like '--xxx').
    """
    if len(sys.argv) < 2:
        return False
    for arg in sys.argv[1:]:
        short_hit = ptrn1 is not None and ptrn1 in arg[:2]
        long_hit = ptrn2 is not None and ptrn2 in arg
        if short_hit or long_hit:
            return True
    return False
def do_print(nev):
    """Return True for sparsified event numbers: every event below 10,
    every 10th below 50, every 100th below 500, then every 1000th.
    """
    if nev < 10:
        return True
    if nev < 50:
        return nev % 10 == 0
    if nev < 500:
        return nev % 100 == 0
    return nev % 1000 == 0
def input_single_char(prompt='input? >'):
    """Read a single character from the keyboard without waiting for <CR>.

    Unix-only: relies on the termios/tty modules. Returns the character read.
    """
    # Re-draw the prompt at the start of the current line.
    sys.stdout.write('\r'+prompt)
    sys.stdout.flush()
    # Switch the terminal to raw mode so one keypress is delivered
    # immediately, then restore the saved settings afterwards.
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    tty.setraw(fd)
    ch = sys.stdin.read(1)
    termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch
#def get_grpnames(user='root'):
# """Returns tuple of group names"""
# from grp import getgrnam
# return getgrnam(user)
def os_system(cmd):
    """Run shell command *cmd* through os.system and log it.

    Raises TypeError when *cmd* is not a str.
    """
    # Explicit raise instead of `assert`: asserts are stripped under
    # `python -O`, silently dropping the validation.
    if not isinstance(cmd, str):
        raise TypeError('command should be str')
    os.system(cmd)
    logger.debug('os_system command: %s' % cmd)
def os_command(cmd):
    """Run shell command *cmd* via os.popen, read its output, and log the
    command together with any response."""
    assert isinstance(cmd,str), 'command should be str'
    resp = os.popen(cmd).read()
    # Log the command alone when there was no output, else command + output.
    msg = '%s\n%s' % (cmd, resp) if resp else cmd
    logger.debug('os_command resp: %s' % msg)
#----------- TEST -------------
if __name__ == "__main__":

    def test_10():
        """Exercise the image-saving helpers on one random image."""
        from psana.pyalgos.generic.NDArrGenerators import random_standard
        image = random_standard()
        verbosity = True
        save_image_tiff(image, fname='image.tiff', verb=verbosity)
        save_image_file(image, fname='image.png', verb=verbosity)
        save_image_file(image, fname='image.xyz', verb=verbosity)

    def test_datetime():
        """Show the various time/timestamp conversions."""
        from datetime import datetime
        t_sec = time()
        print('t_sec:', t_sec)
        t = datetime.fromtimestamp(t_sec)
        print('t:', t)
        tnow = datetime.now()
        print('datetime.now:', tnow)
        tstamp = t.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]
        zone = strftime('%z', localtime(t_sec))
        print(tstamp)
        print('zone', zone)
        tsz = '%s%s' % (tstamp, zone)
        print('tsz', tsz)

    def test_input_single_char():
        """Read single keypresses until something other than 'y' arrives."""
        for n in range(20):
            ch = input_single_char('Event:%03d Next event? [y/n]' %n)
            if ch != 'y':
                sys.exit('\nExit by key %s' % ch)

    def test_01():
        """Smoke-test the small environment/file helpers."""
        print('get_enviroment("PWD"): %s' % get_enviroment(env='PWD'))
        print('get_hostname() : %s' % get_hostname())
        print('get_cwd() : %s' % get_cwd())
        print('get_login() : %s' % get_login())
        print('str_tstamp() : %s' % str_tstamp(fmt='%Y-%m-%dT%H:%M'))
        print('str_tstamp() : %s' % str_tstamp(fmt='%Y-%m-%dT%H:%M:%S%z'))
        create_directory('./work', mode=0o377)
        print('file_mode("work") : %s' % oct(file_mode('work')))
        print('log_rec_on_start() :%s' % log_rec_on_start())
        #print('get_grpnames() :%s' % str(get_grpnames('root')))
        print('list_of_hosts :%s' % list_of_hosts())

    # The original wrapped the calls below in a second, redundant
    # `if __name__ == "__main__":` guard; one guard runs the same
    # statements in the same order.
    logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s: %(message)s',
                        datefmt='%Y-%m-%dT%H:%M:%S',
                        level=logging.DEBUG)
    #filename='example.log', filemode='w'
    test_01()
    test_datetime()
    test_input_single_char()
    sys.exit('\nEnd of test')
# EOF
|
[
"sys.stdout.write",
"time.strptime",
"pickle.dump",
"getpass.getuser",
"os.popen",
"termios.tcsetattr",
"time.mktime",
"os.path.isfile",
"sys.stdout.flush",
"scipy.misc.imsave",
"subprocess.getoutput",
"os.path.join",
"os.path.lexists",
"os.path.dirname",
"numpy.savetxt",
"os.path.exists",
"socket.gethostname",
"datetime.datetime.now",
"time.localtime",
"json.dump",
"os.chmod",
"sys.stdin.read",
"os.stat",
"os.path.basename",
"os.system",
"shutil.which",
"subprocess.call",
"sys.stdin.fileno",
"datetime.datetime.fromtimestamp",
"os.listdir",
"sys.exit",
"tempfile.NamedTemporaryFile",
"os.getpid",
"os.makedirs",
"termios.tcgetattr",
"logging.basicConfig",
"os.getcwd",
"os.path.isdir",
"time.time",
"os.environ.get",
"psana.pyalgos.generic.NDArrGenerators.random_standard",
"os.path.splitext",
"tty.setraw",
"os.path.split",
"pwd.getpwuid",
"logging.getLogger"
] |
[((3460, 3489), 'logging.getLogger', 'logging.getLogger', (['"""__name__"""'], {}), "('__name__')\n", (3477, 3489), False, 'import logging\n'), ((4586, 4611), 'os.environ.get', 'os.environ.get', (['env', 'None'], {}), '(env, None)\n', (4600, 4611), False, 'import os\n'), ((4705, 4725), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (4723, 4725), False, 'import socket\n'), ((4803, 4814), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4812, 4814), False, 'import os\n'), ((4883, 4894), 'os.getpid', 'os.getpid', ([], {}), '()\n', (4892, 4894), False, 'import os\n'), ((4985, 5002), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (5000, 5002), False, 'import getpass\n'), ((5735, 5754), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (5749, 5754), False, 'import os\n'), ((6634, 6655), 'os.path.exists', 'os.path.exists', (['cpath'], {}), '(cpath)\n', (6648, 6655), False, 'import os\n'), ((6708, 6727), 'os.listdir', 'os.listdir', (['dirname'], {}), '(dirname)\n', (6718, 6727), False, 'import os\n'), ((6992, 7007), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (7002, 7007), False, 'import os\n'), ((7464, 7479), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (7474, 7479), False, 'import os\n'), ((7742, 7755), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (7749, 7755), False, 'import os\n'), ((7796, 7821), 'pwd.getpwuid', 'pwd.getpwuid', (['stat.st_uid'], {}), '(stat.st_uid)\n', (7808, 7821), False, 'import pwd\n'), ((8100, 8153), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'mode': 'mode', 'suffix': 'suffix'}), '(mode=mode, suffix=suffix)\n', (8127, 8153), False, 'import tempfile\n'), ((9032, 9053), 'os.chmod', 'os.chmod', (['fname', 'mode'], {}), '(fname, mode)\n', (9040, 9053), False, 'import os\n'), ((10957, 10980), 'os.path.splitext', 'os.path.splitext', (['fname'], {}), '(fname)\n', (10973, 10980), False, 'import os\n'), ((13720, 13739), 'subprocess.getoutput', 'getoutput', (['"""tokens"""'], 
{}), "('tokens')\n", (13729, 13739), False, 'from subprocess import getoutput\n'), ((14079, 14097), 'subprocess.getoutput', 'getoutput', (['"""aklog"""'], {}), "('aklog')\n", (14088, 14097), False, 'from subprocess import getoutput\n'), ((16613, 16644), 'sys.stdout.write', 'sys.stdout.write', (["('\\r' + prompt)"], {}), "('\\r' + prompt)\n", (16629, 16644), False, 'import sys\n'), ((16647, 16665), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (16663, 16665), False, 'import sys\n'), ((16675, 16693), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (16691, 16693), False, 'import sys\n'), ((16713, 16734), 'termios.tcgetattr', 'termios.tcgetattr', (['fd'], {}), '(fd)\n', (16730, 16734), False, 'import tty, termios\n'), ((16739, 16753), 'tty.setraw', 'tty.setraw', (['fd'], {}), '(fd)\n', (16749, 16753), False, 'import tty, termios\n'), ((16763, 16780), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (16777, 16780), False, 'import sys\n'), ((16785, 16839), 'termios.tcsetattr', 'termios.tcsetattr', (['fd', 'termios.TCSADRAIN', 'old_settings'], {}), '(fd, termios.TCSADRAIN, old_settings)\n', (16802, 16839), False, 'import tty, termios\n'), ((17067, 17081), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (17076, 17081), False, 'import os\n'), ((17293, 17307), 'os.popen', 'os.popen', (['_cmd'], {}), '(_cmd)\n', (17301, 17307), False, 'import os\n'), ((19337, 19474), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(name)s %(levelname)s: %(message)s"""', 'datefmt': '"""%Y-%m-%dT%H:%M:%S"""', 'level': 'logging.DEBUG'}), "(format=\n '%(asctime)s %(name)s %(levelname)s: %(message)s', datefmt=\n '%Y-%m-%dT%H:%M:%S', level=logging.DEBUG)\n", (19356, 19474), False, 'import logging\n'), ((19644, 19669), 'sys.exit', 'sys.exit', (['"""\nEnd of test"""'], {}), "('\\nEnd of test')\n", (19652, 19669), False, 'import sys\n'), ((3677, 3696), 'time.localtime', 'localtime', (['time_sec'], {}), '(time_sec)\n', (3686, 3696), 
False, 'from time import localtime, strftime, time, strptime, mktime\n'), ((4134, 4140), 'time.time', 'time', ([], {}), '()\n', (4138, 4140), False, 'from time import localtime, strftime, time, strptime, mktime\n'), ((4324, 4349), 'time.strptime', 'strptime', (['time_stamp', 'fmt'], {}), '(time_stamp, fmt)\n', (4332, 4349), False, 'from time import localtime, strftime, time, strptime, mktime\n'), ((4444, 4457), 'time.mktime', 'mktime', (['struc'], {}), '(struc)\n', (4450, 4457), False, 'from time import localtime, strftime, time, strptime, mktime\n'), ((5093, 5110), 'shutil.which', 'shutil.which', (['cmd'], {}), '(cmd)\n', (5105, 5110), False, 'import shutil\n'), ((5322, 5336), 'os.stat', 'os.stat', (['fname'], {}), '(fname)\n', (5329, 5336), False, 'import os\n'), ((5825, 5841), 'os.makedirs', 'os.makedirs', (['dir'], {}), '(dir)\n', (5836, 5841), False, 'import os\n'), ((5850, 5869), 'os.chmod', 'os.chmod', (['dir', 'mode'], {}), '(dir, mode)\n', (5858, 5869), False, 'import os\n'), ((6933, 6952), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (6947, 6952), False, 'import os\n'), ((7405, 7424), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (7419, 7424), False, 'import os\n'), ((8003, 8016), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (8010, 8016), False, 'import os\n'), ((8386, 8406), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (8400, 8406), False, 'import os\n'), ((8445, 8467), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (8461, 8467), False, 'import os\n'), ((8559, 8581), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (8575, 8581), False, 'import os\n'), ((8636, 8657), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (8651, 8657), False, 'import os\n'), ((8719, 8740), 'os.path.lexists', 'os.path.lexists', (['path'], {}), '(path)\n', (8734, 8740), False, 'import os\n'), ((8778, 8798), 'os.path.isfile', 'os.path.isfile', (['path'], {}), 
'(path)\n', (8792, 8798), False, 'import os\n'), ((8837, 8856), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (8850, 8856), False, 'import os\n'), ((8897, 8916), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (8910, 8916), False, 'import os\n'), ((9990, 10006), 'json.dump', 'json.dump', (['jo', 'f'], {}), '(jo, f)\n', (9999, 10006), False, 'import json\n'), ((10380, 10397), 'pickle.dump', 'pickle.dump', (['o', 'f'], {}), '(o, f)\n', (10391, 10397), False, 'import pickle\n'), ((11087, 11112), 'scipy.misc.imsave', 'scim.imsave', (['fname', 'image'], {}), '(fname, image)\n', (11098, 11112), True, 'import scipy.misc as scim\n'), ((11268, 11337), 'numpy.savetxt', 'np.savetxt', (['fnametxt', 'image'], {'fmt': '"""%8.1f"""', 'delimiter': '""" """', 'newline': '"""\n"""'}), "(fnametxt, image, fmt='%8.1f', delimiter=' ', newline='\\n')\n", (11278, 11337), True, 'import numpy as np\n'), ((14529, 14543), 'subprocess.getoutput', 'getoutput', (['cmd'], {}), '(cmd)\n', (14538, 14543), False, 'from subprocess import getoutput\n'), ((14748, 14762), 'subprocess.getoutput', 'getoutput', (['cmd'], {}), '(cmd)\n', (14757, 14762), False, 'from subprocess import getoutput\n'), ((17594, 17611), 'psana.pyalgos.generic.NDArrGenerators.random_standard', 'random_standard', ([], {}), '()\n', (17609, 17611), False, 'from psana.pyalgos.generic.NDArrGenerators import random_standard\n'), ((17891, 17897), 'time.time', 'time', ([], {}), '()\n', (17895, 17897), False, 'from time import localtime, strftime, time, strptime, mktime\n'), ((17933, 17962), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['t_sec'], {}), '(t_sec)\n', (17955, 17962), False, 'from datetime import datetime\n'), ((17993, 18007), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18005, 18007), False, 'from datetime import datetime\n'), ((3989, 3995), 'time.time', 'time', ([], {}), '()\n', (3993, 3995), False, 'from time import localtime, strftime, time, strptime, 
mktime\n'), ((4418, 4428), 'sys.exit', 'sys.exit', ([], {}), '()\n', (4426, 4428), False, 'import sys\n'), ((7591, 7615), 'os.path.join', 'os.path.join', (['dir', 'fname'], {}), '(dir, fname)\n', (7603, 7615), False, 'import os\n'), ((12987, 13008), 'subprocess.call', 'call', (["['klist', '-s']"], {}), "(['klist', '-s'])\n", (12991, 13008), False, 'from subprocess import call\n'), ((14284, 14298), 'subprocess.getoutput', 'getoutput', (['cmd'], {}), '(cmd)\n', (14293, 14298), False, 'from subprocess import getoutput\n'), ((18120, 18136), 'time.localtime', 'localtime', (['t_sec'], {}), '(t_sec)\n', (18129, 18136), False, 'from time import localtime, strftime, time, strptime, mktime\n'), ((7081, 7104), 'os.path.splitext', 'os.path.splitext', (['fname'], {}), '(fname)\n', (7097, 7104), False, 'import os\n'), ((18377, 18410), 'sys.exit', 'sys.exit', (["('\\nExit by key %s' % ch)"], {}), "('\\nExit by key %s' % ch)\n", (18385, 18410), False, 'import sys\n')]
|
"""Compute how far the Moon travels along its orbit over a 12-degree arc."""
from math import radians

# Orbital radius.
r = 192500
# Arc angle of 12 degrees, converted to radians.
phi = radians(12)
# Arc length: s = r * phi.
dist = r * phi
# Report the travel distance.
print(dist)
|
[
"math.radians"
] |
[((167, 178), 'math.radians', 'radians', (['(12)'], {}), '(12)\n', (174, 178), False, 'from math import radians\n')]
|
""" Tests for admin endpoints """
from mock import MagicMock
from pyramid.httpexceptions import HTTPBadRequest
from pypicloud.views.admin import AdminEndpoints
from . import MockServerTest
class TestAdmin(MockServerTest):
    """Tests for admin endpoints.

    Each test replaces the request's access backend with a MagicMock and
    asserts that the AdminEndpoints view delegates to the expected backend
    method with the expected arguments.
    """
    def setUp(self):
        super(TestAdmin, self).setUp()
        # Mock access backend shared by all tests in this class.
        self.access = self.request.access = MagicMock()
    def test_rebuild(self):
        """Rebuild endpoint refreshes db cache"""
        self.request.db = MagicMock()
        AdminEndpoints(self.request).rebuild_package_list()
        self.assertTrue(self.request.db.reload_from_storage.called)
    def test_get_pending_users(self):
        """Retrieve pending users from access backend"""
        ret = AdminEndpoints(self.request).get_pending_users()
        self.assertEqual(ret, self.access.pending_users())
    def test_get_users(self):
        """Retrieve all users from access backend"""
        ret = AdminEndpoints(self.request).get_users()
        self.assertEqual(ret, self.access.user_data())
    def test_get_user(self):
        """Retrieve data for a single user"""
        # named_subpaths simulates the URL path parameters.
        self.request.named_subpaths = {"username": "a"}
        ret = AdminEndpoints(self.request).get_user()
        self.access.user_data.assert_called_with("a")
        self.assertEqual(ret, self.access.user_data())
    def test_delete_user(self):
        """Delete user from access backend"""
        self.request.named_subpaths = {"username": "a"}
        AdminEndpoints(self.request).delete_user()
        self.access.delete_user.assert_called_with("a")
    def test_approve_user(self):
        """Approve a pending user"""
        self.request.named_subpaths = {"username": "a"}
        AdminEndpoints(self.request).approve_user()
        self.access.approve_user.assert_called_with("a")
    def test_set_admin_status(self):
        """Set the admin flag for a user"""
        self.request.named_subpaths = {"username": "a"}
        AdminEndpoints(self.request).set_admin_status(True)
        self.access.set_user_admin.assert_called_with("a", True)
    def test_add_group_member(self):
        """Add a user to a group"""
        # The HTTP method selects add (PUT) vs remove (DELETE).
        self.request.named_subpaths = {"username": "a", "group": "b"}
        self.request.method = "PUT"
        AdminEndpoints(self.request).mutate_group_member()
        self.access.edit_user_group.assert_called_with("a", "b", True)
    def test_remove_group_member(self):
        """Remove a user from a group"""
        self.request.named_subpaths = {"username": "a", "group": "b"}
        self.request.method = "DELETE"
        AdminEndpoints(self.request).mutate_group_member()
        self.access.edit_user_group.assert_called_with("a", "b", False)
    def test_get_groups(self):
        """Retrieve list of all groups"""
        ret = AdminEndpoints(self.request).get_groups()
        self.assertEqual(ret, self.access.groups())
    def test_delete_group(self):
        """Delete a group"""
        self.request.named_subpaths = {"group": "a"}
        AdminEndpoints(self.request).delete_group()
        self.access.delete_group.assert_called_with("a")
    def test_get_user_permissions(self):
        """Get a user's permissions from the access backend"""
        self.request.named_subpaths = {"username": "a"}
        ret = AdminEndpoints(self.request).get_user_permissions()
        self.access.user_package_permissions.assert_called_with("a")
        self.assertEqual(ret, self.access.user_package_permissions())
    def test_get_group(self):
        """Get a group's members and permissions"""
        self.request.named_subpaths = {"group": "a"}
        ret = AdminEndpoints(self.request).get_group()
        self.access.group_members.assert_called_with("a")
        self.access.group_package_permissions.assert_called_with("a")
        self.assertEqual(
            ret,
            {
                "members": self.access.group_members(),
                "packages": self.access.group_package_permissions(),
            },
        )
    def test_get_package_permissions(self):
        """Get user and group permissions for a package"""
        self.request.named_subpaths = {"package": "a"}
        # Concrete return values so the endpoint's reshaping can be checked.
        self.access.user_permissions.return_value = {"u1": ["read"]}
        self.access.group_permissions.return_value = {"g1": ["read", "write"]}
        ret = AdminEndpoints(self.request).get_package_permissions()
        self.assertEqual(
            ret,
            {
                "user": [{"username": "u1", "permissions": ["read"]}],
                "group": [{"group": "g1", "permissions": ["read", "write"]}],
            },
        )
    def test_create_group(self):
        """Create a group"""
        self.request.named_subpaths = {"group": "a"}
        AdminEndpoints(self.request).create_group()
        self.access.create_group.assert_called_with("a")
    def test_no_create_everyone_group(self):
        """Cannot create the 'everyone' group"""
        self.request.named_subpaths = {"group": "everyone"}
        ret = AdminEndpoints(self.request).create_group()
        self.assertTrue(isinstance(ret, HTTPBadRequest))
    def test_no_create_authenticated_group(self):
        """Cannot create the 'authenticated' group"""
        self.request.named_subpaths = {"group": "authenticated"}
        ret = AdminEndpoints(self.request).create_group()
        self.assertTrue(isinstance(ret, HTTPBadRequest))
    def test_add_user_permission(self):
        """Add a user permission to a package"""
        self.request.named_subpaths = {
            "type": "user",
            "package": "p",
            "name": "u",
            "permission": "read",
        }
        self.request.method = "PUT"
        AdminEndpoints(self.request).edit_permission()
        self.access.edit_user_permission.assert_called_with("p", "u", "read", True)
    def test_remove_user_permission(self):
        """Remove a user permission from a package"""
        self.request.named_subpaths = {
            "type": "user",
            "package": "p",
            "name": "u",
            "permission": "read",
        }
        self.request.method = "DELETE"
        AdminEndpoints(self.request).edit_permission()
        self.access.edit_user_permission.assert_called_with("p", "u", "read", False)
    def test_add_group_permission(self):
        """Add a group permission to a package"""
        self.request.named_subpaths = {
            "type": "group",
            "package": "p",
            "name": "g",
            "permission": "read",
        }
        self.request.method = "PUT"
        AdminEndpoints(self.request).edit_permission()
        self.access.edit_group_permission.assert_called_with("p", "g", "read", True)
    def test_remove_group_permission(self):
        """Remove a group permission from a package"""
        self.request.named_subpaths = {
            "type": "group",
            "package": "p",
            "name": "g",
            "permission": "read",
        }
        self.request.method = "DELETE"
        AdminEndpoints(self.request).edit_permission()
        self.access.edit_group_permission.assert_called_with("p", "g", "read", False)
    def test_toggle_allow_register(self):
        """Toggle registration enabled"""
        AdminEndpoints(self.request).toggle_allow_register(True)
        self.access.set_allow_register.assert_called_with(True)
|
[
"pypicloud.views.admin.AdminEndpoints",
"mock.MagicMock"
] |
[((368, 379), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (377, 379), False, 'from mock import MagicMock\n'), ((485, 496), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (494, 496), False, 'from mock import MagicMock\n'), ((505, 533), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (519, 533), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((735, 763), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (749, 763), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((941, 969), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (955, 969), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((1183, 1211), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (1197, 1211), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((1475, 1503), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (1489, 1503), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((1709, 1737), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (1723, 1737), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((1956, 1984), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (1970, 1984), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((2261, 2289), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (2275, 2289), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((2582, 2610), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (2596, 2610), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((2793, 2821), 
'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (2807, 2821), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((3011, 3039), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (3025, 3039), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((3287, 3315), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (3301, 3315), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((3628, 3656), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (3642, 3656), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((4325, 4353), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (4339, 4353), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((4735, 4763), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (4749, 4763), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((5005, 5033), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (5019, 5033), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((5290, 5318), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (5304, 5318), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((5690, 5718), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (5704, 5718), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((6131, 6159), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (6145, 6159), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((6565, 6593), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', 
(['self.request'], {}), '(self.request)\n', (6579, 6593), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((7010, 7038), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (7024, 7038), False, 'from pypicloud.views.admin import AdminEndpoints\n'), ((7236, 7264), 'pypicloud.views.admin.AdminEndpoints', 'AdminEndpoints', (['self.request'], {}), '(self.request)\n', (7250, 7264), False, 'from pypicloud.views.admin import AdminEndpoints\n')]
|
#
# OtterTune - views.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import logging
import datetime
import re
from collections import OrderedDict
from django.contrib.auth import login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.contrib.auth.forms import PasswordChangeForm
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, QueryDict
from django.shortcuts import redirect, render, get_object_or_404
from django.template.context_processors import csrf
from django.template.defaultfilters import register
from django.urls import reverse, reverse_lazy
from django.utils.datetime_safe import datetime
from django.utils.timezone import now
from django.views.decorators.csrf import csrf_exempt
from django.forms.models import model_to_dict
from pytz import timezone
from .forms import NewResultForm, ProjectForm, SessionForm, SessionKnobForm
from .models import (BackupData, DBMSCatalog, KnobCatalog, KnobData, MetricCatalog,
MetricData, MetricManager, Project, Result, Session, Workload,
SessionKnob)
from .parser import Parser
from .tasks import (aggregate_target_results, map_workload,
configuration_recommendation)
from .types import (DBMSType, KnobUnitType, MetricType,
TaskType, VarType, WorkloadStatusType)
from .utils import JSONUtil, LabelUtil, MediaUtil, TaskUtil
from .settings import TIME_ZONE
from .set_default_knobs import set_default_knobs
LOG = logging.getLogger(__name__)
# For the html template to access dict object
@register.filter
def get_item(dictionary, key):
    """Template filter: return dictionary[key] (None when absent) so Django
    templates can index a dict with a variable key."""
    return dictionary.get(key)
def signup_view(request):
    """Render/process the user registration form.

    GET: show an empty UserCreationForm. POST: validate it, create the user,
    then log the new user in by delegating to login_view. Authenticated
    users are redirected straight to their projects page.
    """
    if request.user.is_authenticated():
        return redirect(reverse('home_projects'))
    if request.method == 'POST':
        post = request.POST
        form = UserCreationForm(post)
        if form.is_valid():
            form.save()
            # Re-shape the POST payload so login_view's AuthenticationForm
            # finds the plain 'password' field it expects.
            new_post = QueryDict(mutable=True)
            new_post.update(post)
            # UserCreationForm posts 'password1'/'password2'; copy the chosen
            # password into the key AuthenticationForm reads. (The original
            # '<PASSWORD>' placeholder was a redaction artifact and raised
            # KeyError/MultiValueDictKeyError at runtime.)
            new_post['password'] = post['password1']
            request.POST = new_post
            return login_view(request)
        else:
            LOG.warning("Signup form is not valid: %s", str(form.errors))
    else:
        form = UserCreationForm()
    token = {}
    token.update(csrf(request))
    token['form'] = form
    return render(request, 'signup.html', token)
def change_password_view(request):
    """Render/process the password-change form for the logged-in user.

    Anonymous users are redirected to the projects page; on a valid POST the
    new password is saved and the session auth hash refreshed so the user
    stays logged in.
    """
    if not request.user.is_authenticated():
        # Every other view in this module reverses 'home_projects'; the
        # original 'home_project' name does not exist and would raise
        # NoReverseMatch on this code path.
        return redirect(reverse('home_projects'))
    if request.method == 'POST':
        form = PasswordChangeForm(request.user, request.POST)
        if form.is_valid():
            user = form.save()
            # Keep the current session valid after the password change.
            update_session_auth_hash(request, user)
            return redirect(reverse('home_projects'))
    else:
        form = PasswordChangeForm(request.user)
    token = {}
    token.update(csrf(request))
    token['form'] = form
    return render(request, 'change_password.html', token)
def login_view(request):
    """Render/process the login form; already-authenticated users are sent
    straight to their projects page."""
    if request.user.is_authenticated():
        return redirect(reverse('home_projects'))
    form = AuthenticationForm()
    if request.method == 'POST':
        form = AuthenticationForm(None, request.POST)
        if form.is_valid():
            login(request, form.get_user())
            return redirect(reverse('home_projects'))
        # Fall through and re-render the bound form with its errors.
        LOG.warning("Login form is not valid: %s", str(form.errors))
    context = {'form': form}
    context.update(csrf(request))
    return render(request, 'login.html', context)
@login_required(login_url=reverse_lazy('login'))
def logout_view(request):
    """Log the current user out and send them to the login page."""
    logout(request)
    return redirect(reverse('login'))
@login_required(login_url=reverse_lazy('login'))
def redirect_home(request):  # pylint: disable=unused-argument
    """Catch-all redirect to the user's projects listing."""
    return redirect(reverse('home_projects'))
@login_required(login_url=reverse_lazy('login'))
def home_projects_view(request):
    """List the requesting user's projects."""
    form_labels = Project.get_labels()
    form_labels.update(LabelUtil.style_labels({
        'button_create': 'create a new project',
        'button_delete': 'delete selected projects',
    }))
    form_labels['title'] = 'Your Projects'
    projects = Project.objects.filter(user=request.user)
    # Generator expression instead of an intermediate list: any() can
    # short-circuit at the first project with a description.
    show_descriptions = any(proj.description for proj in projects)
    context = {
        "projects": projects,
        "labels": form_labels,
        "show_descriptions": show_descriptions
    }
    context.update(csrf(request))
    return render(request, 'home_projects.html', context)
@login_required(login_url=reverse_lazy('login'))
def create_or_edit_project(request, project_id=''):
    """Create a new project (empty project_id) or edit an existing one.

    POST validates the ProjectForm and saves, then redirects to the
    project's session list; GET renders the form, pre-populated when
    editing.
    """
    if request.method == 'POST':
        if project_id == '':
            # Create path: build a new Project owned by the requesting user.
            form = ProjectForm(request.POST)
            if not form.is_valid():
                return render(request, 'edit_project.html', {'form': form})
            project = form.save(commit=False)
            project.user = request.user
            ts = now()
            project.creation_time = ts
            project.last_update = ts
            project.save()
        else:
            # Edit path: 404 unless the project exists AND belongs to the user.
            project = get_object_or_404(Project, pk=project_id, user=request.user)
            form = ProjectForm(request.POST, instance=project)
            if not form.is_valid():
                return render(request, 'edit_project.html', {'form': form})
            project.last_update = now()
            project.save()
        return redirect(reverse('project_sessions', kwargs={'project_id': project.pk}))
    else:
        if project_id == '':
            project = None
            form = ProjectForm()
        else:
            # NOTE(review): unlike the POST path, this lookup does not filter
            # by user and uses .get() (raises DoesNotExist instead of 404) --
            # confirm this asymmetry is intended.
            project = Project.objects.get(pk=int(project_id))
            form = ProjectForm(instance=project)
        context = {
            'project': project,
            'form': form,
        }
        return render(request, 'edit_project.html', context)
@login_required(login_url=reverse_lazy('login'))
def delete_project(request):
    """Delete the projects selected in the POSTed form (owner-scoped)."""
    selected_ids = request.POST.getlist('projects', [])
    Project.objects.filter(pk__in=selected_ids, user=request.user).delete()
    return redirect(reverse('home_projects'))
@login_required(login_url=reverse_lazy('login'))
def project_sessions_view(request, project_id):
    """List all tuning sessions belonging to the given project."""
    project = Project.objects.get(pk=project_id)
    sessions = Session.objects.filter(project=project_id)
    labels = Session.get_labels()
    labels.update(LabelUtil.style_labels({
        'button_delete': 'delete selected session',
        'button_create': 'create a new session',
    }))
    labels['title'] = "Your Sessions"
    context = {
        "sessions": sessions,
        "project": project,
        "labels": labels,
    }
    context.update(csrf(request))
    return render(request, 'project_sessions.html', context)
@login_required(login_url=reverse_lazy('login'))
def session_view(request, project_id, session_id):
    """Render the session detail page.

    Groups the session's results by DBMS and by workload name (for the
    sidebar filters), picks default chart settings, and passes knob and
    metric metadata to the 'session.html' template.
    """
    project = get_object_or_404(Project, pk=project_id)
    session = get_object_or_404(Session, pk=session_id)
    # All results from this session
    results = Result.objects.filter(session=session)
    # Group the session's results by DBMS & workload
    dbmss = {}
    workloads = {}
    dbmss_ids = set()
    workloads_ids = set()
    for res in results:
        if res.dbms_id not in dbmss_ids:
            dbmss_ids.add(res.dbms_id)
            res_dbms = res.dbms
            dbmss[res_dbms.key] = res_dbms
        if res.workload_id not in workloads_ids:
            workloads_ids.add(res.workload_id)
            res_workload = res.workload
            # NOTE(review): this unconditionally re-creates the set, so if
            # two distinct workload ids share a name only the last one is
            # kept — confirm whether names are unique per session.
            workloads[res_workload.name] = set()
            workloads[res_workload.name].add(res_workload)
    # Sort so names will be ordered in the sidebar
    workloads = OrderedDict([(k, sorted(list(v))) for
                             k, v in sorted(workloads.items())])
    dbmss = OrderedDict(sorted(dbmss.items()))
    if len(workloads) > 0:
        # Set the default workload to whichever is first
        default_workload, default_confs = next(iter(list(workloads.items())))
        default_confs = ','.join([str(c.pk) for c in default_confs])
    else:
        # Set the default to display nothing if there are no results yet
        default_workload = 'show_none'
        default_confs = 'none'
    default_metrics = MetricCatalog.objects.get_default_metrics(session.target_objective)
    metric_meta = MetricCatalog.objects.get_metric_meta(session.dbms, session.target_objective)
    knobs = SessionKnob.objects.get_knobs_for_session(session)
    # Only tunable knobs are offered in the knob-timeline selector.
    knob_names = [knob["name"] for knob in knobs if knob["tunable"]]
    form_labels = Session.get_labels()
    form_labels['title'] = "Session Info"
    context = {
        'project': project,
        'dbmss': dbmss,
        'workloads': workloads,
        'results_per_page': [10, 50, 100, 500, 1000],
        'default_dbms': session.dbms.key,
        'default_results_per_page': 10,
        'default_equidistant': "on",
        'default_workload': default_workload,
        'defaultspe': default_confs,
        'metrics': list(metric_meta.keys()),
        'metric_meta': metric_meta,
        'default_metrics': default_metrics,
        'knob_names': knob_names,
        'filters': [],
        'session': session,
        'results': results,
        'labels': form_labels,
    }
    context.update(csrf(request))
    return render(request, 'session.html', context)
@login_required(login_url=reverse_lazy('login'))
def create_or_edit_session(request, project_id, session_id=''):
    """Create a new session (empty ``session_id``) or edit an existing one.

    On a valid POST the session is saved (new sessions get a fresh
    upload code and default knobs) and the user is redirected to the
    session page; otherwise the edit form is (re-)rendered.
    """
    project = get_object_or_404(Project, pk=project_id, user=request.user)
    if request.method == 'POST':
        if not session_id:
            # Create a new session from the form contents
            form = SessionForm(request.POST)
            if not form.is_valid():
                return render(request, 'edit_session.html',
                              {'project': project, 'form': form})
            session = form.save(commit=False)
            session.user = request.user
            session.project = project
            ts = now()
            session.creation_time = ts
            session.last_update = ts
            # The upload code is the credential the client controller
            # later uses to post results to this session.
            session.upload_code = MediaUtil.upload_code_generator()
            session.save()
            set_default_knobs(session)
        else:
            # Update an existing session with the form contents
            # NOTE(review): unlike the project lookup above, this is not
            # scoped to request.user — confirm whether that is intended.
            session = Session.objects.get(pk=session_id)
            form = SessionForm(request.POST, instance=session)
            if not form.is_valid():
                return render(request, 'edit_session.html',
                              {'project': project, 'form': form, 'session': session})
            if form.cleaned_data['gen_upload_code'] is True:
                session.upload_code = MediaUtil.upload_code_generator()
            session.last_update = now()
            form.save()
            session.save()
        return redirect(reverse('session', kwargs={'project_id': project_id,
                                                   'session_id': session.pk}))
    else:
        if session_id:
            # Return a pre-filled form for editing an existing session
            session = Session.objects.get(pk=session_id)
            form = SessionForm(instance=session)
        else:
            # Return a new form with defaults for creating a new session
            session = None
            form = SessionForm(
                initial={
                    'dbms': DBMSCatalog.objects.get(
                        type=DBMSType.POSTGRES, version='9.6'),
                    'target_objective': 'throughput_txn_per_sec',
                })
        context = {
            'project': project,
            'session': session,
            'form': form,
        }
        return render(request, 'edit_session.html', context)
@login_required(login_url=reverse_lazy('login'))
def edit_knobs(request, project_id, session_id):
    """Edit per-session knob settings.

    POST saves one SessionKnob (replacing any existing row for the same
    knob) and returns 204 No Content; GET renders one form per catalog
    knob, pre-filled with session overrides where they exist.
    """
    project = get_object_or_404(Project, pk=project_id, user=request.user)
    session = get_object_or_404(Session, pk=session_id, user=request.user)
    if request.method == 'POST':
        form = SessionKnobForm(request.POST)
        if not form.is_valid():
            return render(request, 'edit_knobs.html',
                          {'project': project, 'session': session, 'form': form})
        instance = form.instance
        instance.session = session
        # Resolve the catalog knob by name for this session's DBMS.
        instance.knob = KnobCatalog.objects.filter(dbms=session.dbms,
                                                   name=form.cleaned_data["name"])[0]
        # Replace (delete + insert) any existing override for this knob.
        SessionKnob.objects.filter(session=instance.session, knob=instance.knob).delete()
        instance.save()
        return HttpResponse(status=204)
    else:
        # Tunable knobs first so they appear at the top of the page.
        knobs = KnobCatalog.objects.filter(dbms=session.dbms).order_by('-tunable')
        forms = []
        for knob in knobs:
            knob_values = model_to_dict(knob)
            # Session-specific overrides win over catalog defaults.
            if SessionKnob.objects.filter(session=session, knob=knob).exists():
                new_knob = SessionKnob.objects.filter(session=session, knob=knob)[0]
                knob_values["minval"] = new_knob.minval
                knob_values["maxval"] = new_knob.maxval
                knob_values["tunable"] = new_knob.tunable
            forms.append(SessionKnobForm(initial=knob_values))
        context = {
            'project': project,
            'session': session,
            'forms': forms
        }
        return render(request, 'edit_knobs.html', context)
@login_required(login_url=reverse_lazy('login'))
def delete_session(request, project_id):
    """Delete the sessions selected in the POSTed form (owner-scoped)."""
    selected_ids = request.POST.getlist('sessions', [])
    Session.objects.filter(pk__in=selected_ids, user=request.user).delete()
    target = reverse('project_sessions', kwargs={'project_id': project_id})
    return redirect(target)
@login_required(login_url=reverse_lazy('login'))
def result_view(request, project_id, session_id, result_id):
    """Show one result: its scaled default metrics and the status of
    the background tuning-task chain for that result.
    """
    target = get_object_or_404(Result, pk=result_id)
    session = target.session
    default_metrics = MetricCatalog.objects.get_default_metrics(session.target_objective)
    metric_meta = MetricCatalog.objects.get_metric_meta(session.dbms, session.target_objective)
    metric_data = JSONUtil.loads(target.metric_data.data)
    # Convert each default metric into its display unit.
    default_metrics = {mname: metric_data[mname] * metric_meta[mname].scale
                       for mname in default_metrics}
    status = None
    if target.task_ids is not None:
        tasks = TaskUtil.get_tasks(target.task_ids)
        status, _ = TaskUtil.get_task_status(tasks)
    if status is None:
        status = 'UNAVAILABLE'
    # IDIOM FIX: was `True if status == 'SUCCESS' else False`.
    # The next configuration is only ready once the whole chain succeeded.
    next_conf_available = status == 'SUCCESS'
    form_labels = Result.get_labels()
    form_labels.update(LabelUtil.style_labels({
        'status': 'status',
        'next_conf_available': 'next configuration'
    }))
    form_labels['title'] = 'Result Info'
    context = {
        'result': target,
        'metric_meta': metric_meta,
        'status': status,
        'next_conf_available': next_conf_available,
        'labels': form_labels,
        'project_id': project_id,
        'session_id': session_id
    }
    return render(request, 'result.html', context)
@csrf_exempt
def new_result(request):
    """Endpoint the client controller POSTs new benchmark results to.

    Validates the form and the session upload code, then hands the
    uploaded files off to handle_result_files(). Every failure path
    returns a plain-text HttpResponse describing the problem.
    """
    if request.method != 'POST':
        LOG.warning("Request type was not POST")
        return HttpResponse("Request type was not POST")
    form = NewResultForm(request.POST, request.FILES)
    if not form.is_valid():
        LOG.warning("New result form is not valid: %s", str(form.errors))
        return HttpResponse("New result form is not valid: " + str(form.errors))
    upload_code = form.cleaned_data['upload_code']
    try:
        session = Session.objects.get(upload_code=upload_code)
    except Session.DoesNotExist:
        LOG.warning("Invalid upload code: %s", upload_code)
        return HttpResponse("Invalid upload code: " + upload_code)
    return handle_result_files(session, request.FILES)
def handle_result_files(session, files):
    """Ingest one uploaded result bundle for *session*.

    Parses the controller's summary/knob/metric files, validates the
    workload name and DBMS, stores the knob and metric data plus a raw
    backup, and — unless the session is a 'no_tuning_session' — kicks
    off the celery chain that computes the next configuration.

    Returns a plain-text HttpResponse describing the outcome.
    """
    from celery import chain
    # Combine into contiguous files
    files = {k: b''.join(v.chunks()).decode() for k, v in list(files.items())}
    # Load the contents of the controller's summary file
    summary = JSONUtil.loads(files['summary'])
    dbms_type = DBMSType.type(summary['database_type'])
    dbms_version = summary['database_version']  # TODO: fix parse_version_string
    workload_name = summary['workload_name']
    observation_time = summary['observation_time']
    # Summary timestamps are in milliseconds (the commented-out lines
    # show the old seconds-based interpretation).
    start_time = datetime.fromtimestamp(
        # int(summary['start_time']),  # unit: seconds
        int(summary['start_time']) / 1000,  # unit: ms
        timezone(TIME_ZONE))
    end_time = datetime.fromtimestamp(
        # int(summary['end_time']),  # unit: seconds
        int(summary['end_time']) / 1000,  # unit: ms
        timezone(TIME_ZONE))
    # Check if workload name only contains alpha-numeric, underscore and hyphen
    if not re.match('^[a-zA-Z0-9_-]+$', workload_name):
        return HttpResponse('Your workload name ' + workload_name + ' contains '
                            'invalid characters! It should only contain '
                            'alpha-numeric, underscore(_) and hyphen(-)')
    try:
        # Check that we support this DBMS and version
        dbms = DBMSCatalog.objects.get(
            type=dbms_type, version=dbms_version)
    except ObjectDoesNotExist:
        return HttpResponse('{} v{} is not yet supported.'.format(
            dbms_type, dbms_version))
    if dbms != session.dbms:
        return HttpResponse('The DBMS must match the type and version '
                            'specified when creating the session. '
                            '(expected=' + session.dbms.full_name + ') '
                            '(actual=' + dbms.full_name + ')')
    # Load, process, and store the knobs in the DBMS's configuration
    knob_dict, knob_diffs = Parser.parse_dbms_knobs(
        dbms.pk, JSONUtil.loads(files['knobs']))
    tunable_knob_dict = Parser.convert_dbms_knobs(
        dbms.pk, knob_dict)
    knob_data = KnobData.objects.create_knob_data(
        session, JSONUtil.dumps(knob_dict, pprint=True, sort=True),
        JSONUtil.dumps(tunable_knob_dict, pprint=True, sort=True), dbms)
    # Load, process, and store the runtime metrics exposed by the DBMS
    initial_metric_dict, initial_metric_diffs = Parser.parse_dbms_metrics(
        dbms.pk, JSONUtil.loads(files['metrics_before']))
    final_metric_dict, final_metric_diffs = Parser.parse_dbms_metrics(
        dbms.pk, JSONUtil.loads(files['metrics_after']))
    # Metrics of interest are the deltas over the observation window.
    metric_dict = Parser.calculate_change_in_metrics(
        dbms.pk, initial_metric_dict, final_metric_dict)
    initial_metric_diffs.extend(final_metric_diffs)
    numeric_metric_dict = Parser.convert_dbms_metrics(
        dbms.pk, metric_dict, observation_time, session.target_objective)
    metric_data = MetricData.objects.create_metric_data(
        session, JSONUtil.dumps(metric_dict, pprint=True, sort=True),
        JSONUtil.dumps(numeric_metric_dict, pprint=True, sort=True), dbms)
    # Create a new workload if this one does not already exist
    workload = Workload.objects.create_workload(
        dbms, session.hardware, workload_name)
    # Save this result
    result = Result.objects.create_result(
        session, dbms, workload, knob_data, metric_data,
        start_time, end_time, observation_time)
    result.save()
    # Workload is now modified so backgroundTasks can make calculations
    workload.status = WorkloadStatusType.MODIFIED
    workload.save()
    # Save all original data
    backup_data = BackupData.objects.create(
        result=result, raw_knobs=files['knobs'],
        raw_initial_metrics=files['metrics_before'],
        raw_final_metrics=files['metrics_after'],
        raw_summary=files['summary'],
        knob_log=knob_diffs,
        metric_log=initial_metric_diffs)
    backup_data.save()
    nondefault_settings = Parser.get_nondefault_knob_settings(
        dbms.pk, knob_dict)
    session.project.last_update = now()
    session.last_update = now()
    # Only record the non-default settings once, on the first upload.
    if session.nondefault_settings is None:
        session.nondefault_settings = JSONUtil.dumps(nondefault_settings)
    session.project.save()
    session.save()
    if session.tuning_session == 'no_tuning_session':
        return HttpResponse("Result stored successfully!")
    result_id = result.pk
    # Three-stage celery chain: aggregate -> map workload -> recommend.
    response = chain(aggregate_target_results.s(result.pk),
                     map_workload.s(),
                     configuration_recommendation.s()).apply_async()
    # Task ids in chain order (grandparent, parent, last).
    taskmeta_ids = [response.parent.parent.id, response.parent.id, response.id]
    result.task_ids = ','.join(taskmeta_ids)
    result.save()
    return HttpResponse("Result stored successfully! Running tuner...(status={}) Result ID:{} "
                        .format(response.status, result_id))
@login_required(login_url=reverse_lazy('login'))
def dbms_knobs_reference(request, dbms_name, version, knob_name):
    """Reference page describing one DBMS knob from the catalog.

    Builds an ordered label -> value mapping (insertion order is the
    display order) containing only the attributes the knob defines.
    """
    knob = get_object_or_404(KnobCatalog, dbms__type=DBMSType.type(dbms_name),
                             dbms__version=version, name=knob_name)
    labels = KnobCatalog.get_labels()
    list_items = OrderedDict()
    if knob.category is not None:
        list_items[labels['category']] = knob.category
    list_items[labels['scope']] = knob.scope
    list_items[labels['tunable']] = knob.tunable
    list_items[labels['vartype']] = VarType.name(knob.vartype)
    # OTHER means the knob has no meaningful unit to display.
    if knob.unit != KnobUnitType.OTHER:
        list_items[labels['unit']] = knob.unit
    list_items[labels['default']] = knob.default
    if knob.minval is not None:
        list_items[labels['minval']] = knob.minval
    if knob.maxval is not None:
        list_items[labels['maxval']] = knob.maxval
    if knob.enumvals is not None:
        list_items[labels['enumvals']] = knob.enumvals
    if knob.summary is not None:
        description = knob.summary
        if knob.description is not None:
            description += knob.description
        list_items[labels['summary']] = description
    context = {
        'title': knob.name,
        'dbms': knob.dbms,
        'is_used': knob.tunable,
        'used_label': 'TUNABLE',
        'list_items': list_items,
    }
    return render(request, 'dbms_reference.html', context)
@login_required(login_url=reverse_lazy('login'))
def dbms_metrics_reference(request, dbms_name, version, metric_name):
    """Reference page describing one DBMS metric from the catalog."""
    metric = get_object_or_404(
        MetricCatalog, dbms__type=DBMSType.type(dbms_name),
        dbms__version=version, name=metric_name)
    labels = MetricCatalog.get_labels()
    # Insertion order determines display order on the page.
    list_items = OrderedDict([
        (labels['scope'], metric.scope),
        (labels['vartype'], VarType.name(metric.vartype)),
        (labels['summary'], metric.summary),
    ])
    context = {
        'title': metric.name,
        'dbms': metric.dbms,
        'is_used': metric.metric_type == MetricType.COUNTER,
        'used_label': MetricType.name(metric.metric_type),
        'list_items': list_items,
    }
    return render(request, 'dbms_reference.html', context=context)
@login_required(login_url=reverse_lazy('login'))
def knob_data_view(request, project_id, session_id, data_id):  # pylint: disable=unused-argument
    """Display one knob (DBMS configuration) snapshot via dbms_data_view()."""
    knob_data = get_object_or_404(KnobData, pk=data_id)
    labels = KnobData.get_labels()
    labels.update(LabelUtil.style_labels({
        'featured_data': 'tunable dbms parameters',
        'all_data': 'all dbms parameters',
    }))
    labels['title'] = 'DBMS Configuration'
    context = {'labels': labels, 'data_type': 'knobs'}
    return dbms_data_view(request, context, knob_data)
@login_required(login_url=reverse_lazy('login'))
def metric_data_view(request, project_id, session_id, data_id):  # pylint: disable=unused-argument
    """Display one metric snapshot via dbms_data_view()."""
    metric_data = get_object_or_404(MetricData, pk=data_id)
    labels = MetricData.get_labels()
    labels.update(LabelUtil.style_labels({
        'featured_data': 'numeric dbms metrics',
        'all_data': 'all dbms metrics',
    }))
    labels['title'] = 'DBMS Metrics'
    context = {'labels': labels, 'data_type': 'metrics'}
    return dbms_data_view(request, context, metric_data)
def dbms_data_view(request, context, dbms_data):
    """Shared renderer for knob and metric snapshot pages.

    ``context['data_type']`` selects knob vs metric handling. When the
    request carries a ``compare`` id, each row becomes a (key, value,
    compared-value) triple; otherwise plain (key, value) pairs. Peer
    snapshots from the same session populate the compare dropdown.
    """
    if context['data_type'] == 'knobs':
        model_class = KnobData
        filter_fn = Parser.filter_tunable_knobs
        obj_data = dbms_data.knobs
    else:
        model_class = MetricData
        filter_fn = Parser.filter_numeric_metrics
        obj_data = dbms_data.metrics
    dbms_id = dbms_data.dbms.pk
    all_data_dict = JSONUtil.loads(obj_data)
    # "Featured" = the tunable/numeric subset worth highlighting.
    featured_dict = filter_fn(dbms_id, all_data_dict)
    if 'compare' in request.GET and request.GET['compare'] != 'none':
        comp_id = request.GET['compare']
        compare_obj = model_class.objects.get(pk=comp_id)
        comp_data = compare_obj.knobs if \
            context['data_type'] == 'knobs' else compare_obj.metrics
        comp_dict = JSONUtil.loads(comp_data)
        comp_featured_dict = filter_fn(dbms_id, comp_dict)
        all_data = [(k, v, comp_dict[k]) for k, v in list(all_data_dict.items())]
        featured_data = [(k, v, comp_featured_dict[k])
                         for k, v in list(featured_dict.items())]
    else:
        comp_id = None
        all_data = list(all_data_dict.items())
        featured_data = list(featured_dict.items())
    # Other snapshots of the same kind in this session (minus this one),
    # offered as comparison candidates.
    peer_data = model_class.objects.filter(
        dbms=dbms_data.dbms, session=dbms_data.session)
    peer_data = [peer for peer in peer_data if peer.pk != dbms_data.pk]
    context['all_data'] = all_data
    context['featured_data'] = featured_data
    context['dbms_data'] = dbms_data
    context['compare'] = comp_id
    context['peer_data'] = peer_data
    return render(request, 'dbms_data.html', context)
@login_required(login_url=reverse_lazy('login'))
def workload_view(request, project_id, session_id, wkld_id):  # pylint: disable=unused-argument
    """Render the workload detail page.

    Maps each knob configuration in the session to its most recent
    result for this workload, and defaults the charts to the first
    five configurations (ordered by knob-data pk).
    """
    workload = get_object_or_404(Workload, pk=wkld_id)
    session = get_object_or_404(Session, pk=session_id)
    knob_confs = KnobData.objects.filter(dbms=session.dbms,
                                         session=session)
    knob_conf_map = {}
    for conf in knob_confs:
        # Latest result for this (session, configuration, workload) triple.
        latest_result = Result.objects.filter(
            session=session, knob_data=conf, workload=workload).order_by(
                '-observation_end_time').first()
        if not latest_result:
            continue
        knob_conf_map[conf.name] = [conf, latest_result]
    # Order by the configuration's pk so the display is stable.
    knob_conf_map = OrderedDict(sorted(list(knob_conf_map.items()), key=lambda x: x[1][0].pk))
    default_knob_confs = [c for c, _ in list(knob_conf_map.values())][:5]
    LOG.debug("default_knob_confs: %s", default_knob_confs)
    metric_meta = MetricCatalog.objects.get_metric_meta(session.dbms, session.target_objective)
    default_metrics = MetricCatalog.objects.get_default_metrics(session.target_objective)
    labels = Workload.get_labels()
    labels['title'] = 'Workload Information'
    context = {'workload': workload,
               'knob_confs': knob_conf_map,
               'metric_meta': metric_meta,
               'knob_data': default_knob_confs,
               'default_metrics': default_metrics,
               'labels': labels,
               'session_id': session_id}
    return render(request, 'workload.html', context)
@login_required(login_url=reverse_lazy('login'))
def download_next_config(request):
    """Serve a result's recommended next configuration as a .cnf download."""
    result_id = request.GET['id']
    res = Result.objects.get(pk=result_id)
    response = HttpResponse(res.next_configuration,
                            content_type='text/plain')
    filename = 'result_' + str(result_id) + '.cnf'
    response['Content-Disposition'] = 'attachment; filename=' + filename
    return response
@login_required(login_url=reverse_lazy('login'))
def tuner_status_view(request, project_id, session_id, result_id):  # pylint: disable=unused-argument
    """Show progress of the 3-task tuning chain attached to one result.

    Runtime stats are only computed once the chain has left the
    PENDING/RECEIVED/STARTED states.
    """
    res = Result.objects.get(pk=result_id)
    tasks = TaskUtil.get_tasks(res.task_ids)
    overall_status, num_completed = TaskUtil.get_task_status(tasks)
    if overall_status in ['PENDING', 'RECEIVED', 'STARTED']:
        completion_time = 'N/A'
        total_runtime = 'N/A'
    else:
        # Last task in the chain finishes last, so its date_done is the
        # chain's completion time.
        completion_time = tasks[-1].date_done
        total_runtime = (completion_time - res.creation_time).total_seconds()
        total_runtime = '{0:.2f} seconds'.format(total_runtime)
    # Pair each task with its human-readable type name.
    task_info = [(tname, task) for tname, task in
                 zip(list(TaskType.TYPE_NAMES.values()), tasks)]
    context = {"id": result_id,
               "result": res,
               "overall_status": overall_status,
               "num_completed": "{} / {}".format(num_completed, 3),
               "completion_time": completion_time,
               "total_runtime": total_runtime,
               "tasks": task_info}
    return render(request, "task_status.html", context)
# Data Format
# error
# metrics as a list of selected metrics
# results
# data for each selected metric
# meta data for the metric
# Result list for the metric in a folded list
@login_required(login_url=reverse_lazy('login'))
def get_workload_data(request):
    """AJAX endpoint returning per-metric bar-chart data for a workload.

    Returns JSON with one series per selected metric; each series holds
    one bar per selected knob configuration (most recent result per
    configuration), ordered by throughput.
    """
    data = request.GET
    workload = get_object_or_404(Workload, pk=data['id'])
    session = get_object_or_404(Session, pk=data['session_id'])
    if session.user != request.user:
        return render(request, '404.html')
    results = Result.objects.filter(workload=workload)
    result_data = {r.pk: JSONUtil.loads(r.metric_data.data) for r in results}
    results = sorted(results, key=lambda x: int(result_data[x.pk][MetricManager.THROUGHPUT]))
    default_metrics = MetricCatalog.objects.get_default_metrics(session.target_objective)
    metrics = request.GET.get('met', ','.join(default_metrics)).split(',')
    metrics = [m for m in metrics if m != 'none']
    if len(metrics) == 0:
        metrics = default_metrics
    data_package = {'results': [],
                    'error': 'None',
                    'metrics': metrics}
    metric_meta = MetricCatalog.objects.get_metric_meta(session.dbms, session.target_objective)
    knob_confs = data['conf'].split(',')
    # BUG FIX: the per-result loop used to sit *outside* this metric loop,
    # so a placeholder series was appended for every metric but only the
    # last metric's series was ever populated. Nest it so each metric's
    # series is filled.
    for met in data_package['metrics']:
        met_info = metric_meta[met]
        data_package['results'].append({'data': [[]], 'tick': [],
                                        'unit': met_info.unit,
                                        'lessisbetter': met_info.improvement,
                                        'metric': met_info.pprint})
        added = set()
        i = len(knob_confs)
        for r in results:
            metric_data = JSONUtil.loads(r.metric_data.data)
            # One bar per knob configuration; skip configs not selected.
            if r.knob_data.pk in added or str(r.knob_data.pk) not in knob_confs:
                continue
            added.add(r.knob_data.pk)
            data_val = metric_data[met] * met_info.scale
            data_package['results'][-1]['data'][0].append([
                i,
                data_val,
                r.pk,
                data_val])
            data_package['results'][-1]['tick'].append(r.knob_data.name)
            i -= 1
        # Bars were collected in descending position order; flip them.
        data_package['results'][-1]['data'].reverse()
        data_package['results'][-1]['tick'].reverse()
    return HttpResponse(JSONUtil.dumps(data_package), content_type='application/json')
# Data Format:
# error
# results
# all result data after the filters for the table
# timelines
# data for each benchmark & metric pair
# meta data for the pair
# data as a map<DBMS name, result list>
@login_required(login_url=reverse_lazy('login'))
def get_timeline_data(request):
    """AJAX endpoint returning the session timeline as JSON.

    Produces (a) a table of results with one column per selected metric,
    (b) per-metric/per-workload time series grouped by DBMS, and (c) one
    time series per selected tunable knob. Requests for sessions owned
    by other users get an empty (but well-formed) payload.
    """
    result_labels = Result.get_labels()
    columnnames = [
        result_labels['id'],
        result_labels['creation_time'],
        result_labels['knob_data'],
        result_labels['metric_data'],
        result_labels['workload'],
    ]
    data_package = {
        'error': 'None',
        'timelines': [],
        'knobtimelines': [],
        'columnnames': columnnames,
    }
    session = get_object_or_404(Session, pk=request.GET['session'])
    # Ownership check: other users get the empty skeleton, not a 403.
    if session.user != request.user:
        return HttpResponse(JSONUtil.dumps(data_package), content_type='application/json')
    default_metrics = MetricCatalog.objects.get_default_metrics(session.target_objective)
    metric_meta = MetricCatalog.objects.get_metric_meta(session.dbms, session.target_objective)
    # One extra table column per default metric, labeled with its unit.
    for met in default_metrics:
        met_info = metric_meta[met]
        columnnames.append(met_info.pprint + ' (' + met_info.short_unit + ')')
    results_per_page = int(request.GET['nres'])
    # Get all results related to the selected session, sort by time
    results = Result.objects.filter(session=session)\
        .select_related('knob_data', 'metric_data', 'workload')
    results = sorted(results, key=lambda x: x.observation_end_time)
    display_type = request.GET['wkld']
    if display_type == 'show_none':
        workloads = []
        metrics = default_metrics
        results = []
    else:
        # 'met' query param overrides the default metric selection.
        metrics = request.GET.get('met', ','.join(default_metrics)).split(',')
        metrics = [m for m in metrics if m != 'none']
        if len(metrics) == 0:
            metrics = default_metrics
        workloads = [display_type]
        # 'spe' lists the workload pks to include.
        workload_confs = [wc for wc in request.GET['spe'].strip().split(',') if wc != '']
        results = [r for r in results if str(r.workload.pk) in workload_confs]
    metric_datas = {r.pk: JSONUtil.loads(r.metric_data.data) for r in results}
    result_list = []
    # Build the tabular rows: fixed columns, metric columns, then hidden
    # pk columns used by the front-end for links.
    for res in results:
        entry = [
            res.pk,
            res.observation_end_time.astimezone(timezone(TIME_ZONE)).strftime("%Y-%m-%d %H:%M:%S"),
            res.knob_data.name,
            res.metric_data.name,
            res.workload.name]
        for met in metrics:
            entry.append(metric_datas[res.pk][met] * metric_meta[met].scale)
        entry.extend([
            '',
            res.knob_data.pk,
            res.metric_data.pk,
            res.workload.pk
        ])
        result_list.append(entry)
    data_package['results'] = result_list
    # For plotting charts
    for metric in metrics:
        met_info = metric_meta[metric]
        for wkld in workloads:
            w_r = [r for r in results if r.workload.name == wkld]
            if len(w_r) == 0:
                continue
            data = {
                'workload': wkld,
                'units': met_info.unit,
                'lessisbetter': met_info.improvement,
                'data': {},
                'baseline': "None",
                'metric': metric,
                'print_metric': met_info.pprint,
            }
            # One series per requested DBMS key, limited to the last
            # results_per_page points.
            for dbms in request.GET['dbms'].split(','):
                d_r = [r for r in w_r if r.dbms.key == dbms]
                d_r = d_r[-results_per_page:]
                out = []
                for res in d_r:
                    metric_data = JSONUtil.loads(res.metric_data.data)
                    out.append([
                        res.observation_end_time.astimezone(timezone(TIME_ZONE)).
                        strftime("%m-%d-%y %H:%M"),
                        metric_data[metric] * met_info.scale,
                        "",
                        str(res.pk)
                    ])
                if len(out) > 0:
                    data['data'][dbms] = out
            data_package['timelines'].append(data)
    # Knob timelines: default to all tunable knobs, overridable via 'knb'.
    knobs = SessionKnob.objects.get_knobs_for_session(session)
    knob_names = [knob["name"] for knob in knobs if knob["tunable"]]
    knobs = request.GET.get('knb', ','.join(knob_names)).split(',')
    knobs = [knob for knob in knobs if knob != "none"]
    LOG.debug("Knobs plotted: %s", str(knobs))
    for knob in knobs:
        data = {
            'units': KnobUnitType.TYPE_NAMES[KnobCatalog.objects.filter(name=knob)[0].unit],
            'data': [],
            'knob': knob,
        }
        for res in results:
            knob_data = JSONUtil.loads(res.knob_data.data)
            data['data'].append([
                res.observation_end_time.astimezone(timezone(TIME_ZONE)).
                strftime("%m-%d-%y %H:%M"),
                knob_data[knob],
                "",
                str(res.pk)
            ])
        data_package['knobtimelines'].append(data)
    return HttpResponse(JSONUtil.dumps(data_package), content_type='application/json')
# Get the latest result for a session (polled by the client controller).
def give_result(request, upload_code):  # pylint: disable=unused-argument
    """Return the latest tuning outcome for the session identified by
    *upload_code*.

    Responds with plain-text status strings while the tuner is still
    running (or has failed), and with the recommended next configuration
    as JSON once the task chain has succeeded.
    """
    try:
        session = Session.objects.get(upload_code=upload_code)
    except Session.DoesNotExist:
        LOG.warning("Invalid upload code: %s", upload_code)
        return HttpResponse("Invalid upload code: " + upload_code)
    results = Result.objects.filter(session=session)
    LOG.info(results)
    LOG.info(len(results))
    # BUG FIX: the old else-branch indexed results[0] on an *empty*
    # queryset, raising IndexError before any result had been uploaded.
    if len(results) == 0:
        return HttpResponse("Result not ready")
    latest_result = results[len(results) - 1]
    tasks = TaskUtil.get_tasks(latest_result.task_ids)
    overall_status, _ = TaskUtil.get_task_status(tasks)
    if overall_status in ['PENDING', 'RECEIVED', 'STARTED']:
        return HttpResponse("Result not ready")
    # unclear behaviors for REVOKED and RETRY, treat as failure
    elif overall_status in ['FAILURE', 'REVOKED', 'RETRY']:
        return HttpResponse("Fail")
    # success
    res = Result.objects.get(pk=latest_result.pk)
    return HttpResponse(JSONUtil.dumps(res.next_configuration), content_type='application/json')
|
[
"django.http.HttpResponse",
"django.utils.timezone.now",
"django.template.context_processors.csrf",
"django.urls.reverse_lazy",
"django.contrib.auth.forms.AuthenticationForm",
"re.match",
"django.urls.reverse",
"django.shortcuts.get_object_or_404",
"django.contrib.auth.logout",
"django.contrib.auth.forms.UserCreationForm",
"django.contrib.auth.forms.PasswordChangeForm",
"pytz.timezone",
"django.shortcuts.render",
"django.forms.models.model_to_dict",
"django.http.QueryDict",
"collections.OrderedDict",
"logging.getLogger",
"django.contrib.auth.update_session_auth_hash"
] |
[((1675, 1702), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1692, 1702), False, 'import logging\n'), ((2524, 2561), 'django.shortcuts.render', 'render', (['request', '"""signup.html"""', 'token'], {}), "(request, 'signup.html', token)\n", (2530, 2561), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((3093, 3139), 'django.shortcuts.render', 'render', (['request', '"""change_password.html"""', 'token'], {}), "(request, 'change_password.html', token)\n", (3099, 3139), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((3706, 3742), 'django.shortcuts.render', 'render', (['request', '"""login.html"""', 'token'], {}), "(request, 'login.html', token)\n", (3712, 3742), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((3824, 3839), 'django.contrib.auth.logout', 'logout', (['request'], {}), '(request)\n', (3830, 3839), False, 'from django.contrib.auth import login, logout\n'), ((4663, 4709), 'django.shortcuts.render', 'render', (['request', '"""home_projects.html"""', 'context'], {}), "(request, 'home_projects.html', context)\n", (4669, 4709), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((6887, 6936), 'django.shortcuts.render', 'render', (['request', '"""project_sessions.html"""', 'context'], {}), "(request, 'project_sessions.html', context)\n", (6893, 6936), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((7053, 7094), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'pk': 'project_id'}), '(Project, pk=project_id)\n', (7070, 7094), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((7109, 7150), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Session'], {'pk': 'session_id'}), '(Session, pk=session_id)\n', (7126, 7150), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((9482, 9522), 
'django.shortcuts.render', 'render', (['request', '"""session.html"""', 'context'], {}), "(request, 'session.html', context)\n", (9488, 9522), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((9652, 9712), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'pk': 'project_id', 'user': 'request.user'}), '(Project, pk=project_id, user=request.user)\n', (9669, 9712), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((12034, 12094), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'pk': 'project_id', 'user': 'request.user'}), '(Project, pk=project_id, user=request.user)\n', (12051, 12094), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((12109, 12169), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Session'], {'pk': 'session_id', 'user': 'request.user'}), '(Session, pk=session_id, user=request.user)\n', (12126, 12169), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((13991, 14030), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Result'], {'pk': 'result_id'}), '(Result, pk=result_id)\n', (14008, 14030), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((15208, 15247), 'django.shortcuts.render', 'render', (['request', '"""result.html"""', 'context'], {}), "(request, 'result.html', context)\n", (15214, 15247), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((15998, 16039), 'django.http.HttpResponse', 'HttpResponse', (['"""Request type was not POST"""'], {}), "('Request type was not POST')\n", (16010, 16039), False, 'from django.http import HttpResponse, QueryDict\n'), ((20126, 20131), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (20129, 20131), False, 'from django.utils.timezone import now\n'), ((20158, 20163), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (20161, 20163), False, 'from 
django.utils.timezone import now\n'), ((21257, 21270), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (21268, 21270), False, 'from collections import OrderedDict\n'), ((22302, 22349), 'django.shortcuts.render', 'render', (['request', '"""dbms_reference.html"""', 'context'], {}), "(request, 'dbms_reference.html', context)\n", (22308, 22349), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((22669, 22682), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (22680, 22682), False, 'from collections import OrderedDict\n'), ((23092, 23147), 'django.shortcuts.render', 'render', (['request', '"""dbms_reference.html"""'], {'context': 'context'}), "(request, 'dbms_reference.html', context=context)\n", (23098, 23147), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((23312, 23351), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['KnobData'], {'pk': 'data_id'}), '(KnobData, pk=data_id)\n', (23329, 23351), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((23876, 23917), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['MetricData'], {'pk': 'data_id'}), '(MetricData, pk=data_id)\n', (23893, 23917), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((25829, 25871), 'django.shortcuts.render', 'render', (['request', '"""dbms_data.html"""', 'context'], {}), "(request, 'dbms_data.html', context)\n", (25835, 25871), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((26034, 26073), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Workload'], {'pk': 'wkld_id'}), '(Workload, pk=wkld_id)\n', (26051, 26073), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((26088, 26129), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Session'], {'pk': 'session_id'}), '(Session, pk=session_id)\n', (26105, 26129), False, 'from django.shortcuts import redirect, 
render, get_object_or_404\n'), ((27383, 27424), 'django.shortcuts.render', 'render', (['request', '"""workload.html"""', 'context'], {}), "(request, 'workload.html', context)\n", (27389, 27424), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((27619, 27682), 'django.http.HttpResponse', 'HttpResponse', (['res.next_configuration'], {'content_type': '"""text/plain"""'}), "(res.next_configuration, content_type='text/plain')\n", (27631, 27682), False, 'from django.http import HttpResponse, QueryDict\n'), ((28899, 28943), 'django.shortcuts.render', 'render', (['request', '"""task_status.html"""', 'context'], {}), "(request, 'task_status.html', context)\n", (28905, 28943), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((29281, 29323), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Workload'], {'pk': "data['id']"}), "(Workload, pk=data['id'])\n", (29298, 29323), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((29338, 29387), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Session'], {'pk': "data['session_id']"}), "(Session, pk=data['session_id'])\n", (29355, 29387), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((32087, 32140), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Session'], {'pk': "request.GET['session']"}), "(Session, pk=request.GET['session'])\n", (32104, 32140), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((2024, 2046), 'django.contrib.auth.forms.UserCreationForm', 'UserCreationForm', (['post'], {}), '(post)\n', (2040, 2046), False, 'from django.contrib.auth.forms import AuthenticationForm, UserCreationForm\n'), ((2422, 2440), 'django.contrib.auth.forms.UserCreationForm', 'UserCreationForm', ([], {}), '()\n', (2438, 2440), False, 'from django.contrib.auth.forms import AuthenticationForm, UserCreationForm\n'), ((2473, 2486), 
'django.template.context_processors.csrf', 'csrf', (['request'], {}), '(request)\n', (2477, 2486), False, 'from django.template.context_processors import csrf\n'), ((2740, 2786), 'django.contrib.auth.forms.PasswordChangeForm', 'PasswordChangeForm', (['request.user', 'request.POST'], {}), '(request.user, request.POST)\n', (2758, 2786), False, 'from django.contrib.auth.forms import PasswordChangeForm\n'), ((2977, 3009), 'django.contrib.auth.forms.PasswordChangeForm', 'PasswordChangeForm', (['request.user'], {}), '(request.user)\n', (2995, 3009), False, 'from django.contrib.auth.forms import PasswordChangeForm\n'), ((3042, 3055), 'django.template.context_processors.csrf', 'csrf', (['request'], {}), '(request)\n', (3046, 3055), False, 'from django.template.context_processors import csrf\n'), ((3333, 3363), 'django.contrib.auth.forms.AuthenticationForm', 'AuthenticationForm', (['None', 'post'], {}), '(None, post)\n', (3351, 3363), False, 'from django.contrib.auth.forms import AuthenticationForm, UserCreationForm\n'), ((3602, 3622), 'django.contrib.auth.forms.AuthenticationForm', 'AuthenticationForm', ([], {}), '()\n', (3620, 3622), False, 'from django.contrib.auth.forms import AuthenticationForm, UserCreationForm\n'), ((3655, 3668), 'django.template.context_processors.csrf', 'csrf', (['request'], {}), '(request)\n', (3659, 3668), False, 'from django.template.context_processors import csrf\n'), ((3860, 3876), 'django.urls.reverse', 'reverse', (['"""login"""'], {}), "('login')\n", (3867, 3876), False, 'from django.urls import reverse, reverse_lazy\n'), ((3771, 3792), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (3783, 3792), False, 'from django.urls import reverse, reverse_lazy\n'), ((4012, 4036), 'django.urls.reverse', 'reverse', (['"""home_projects"""'], {}), "('home_projects')\n", (4019, 4036), False, 'from django.urls import reverse, reverse_lazy\n'), ((3906, 3927), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), 
"('login')\n", (3918, 3927), False, 'from django.urls import reverse, reverse_lazy\n'), ((4637, 4650), 'django.template.context_processors.csrf', 'csrf', (['request'], {}), '(request)\n', (4641, 4650), False, 'from django.template.context_processors import csrf\n'), ((4066, 4087), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (4078, 4087), False, 'from django.urls import reverse, reverse_lazy\n'), ((5998, 6043), 'django.shortcuts.render', 'render', (['request', '"""edit_project.html"""', 'context'], {}), "(request, 'edit_project.html', context)\n", (6004, 6043), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((4738, 4759), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (4750, 4759), False, 'from django.urls import reverse, reverse_lazy\n'), ((6260, 6284), 'django.urls.reverse', 'reverse', (['"""home_projects"""'], {}), "('home_projects')\n", (6267, 6284), False, 'from django.urls import reverse, reverse_lazy\n'), ((6072, 6093), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (6084, 6093), False, 'from django.urls import reverse, reverse_lazy\n'), ((6861, 6874), 'django.template.context_processors.csrf', 'csrf', (['request'], {}), '(request)\n', (6865, 6874), False, 'from django.template.context_processors import csrf\n'), ((6314, 6335), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (6326, 6335), False, 'from django.urls import reverse, reverse_lazy\n'), ((9456, 9469), 'django.template.context_processors.csrf', 'csrf', (['request'], {}), '(request)\n', (9460, 9469), False, 'from django.template.context_processors import csrf\n'), ((6965, 6986), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (6977, 6986), False, 'from django.urls import reverse, reverse_lazy\n'), ((11874, 11919), 'django.shortcuts.render', 'render', (['request', '"""edit_session.html"""', 
'context'], {}), "(request, 'edit_session.html', context)\n", (11880, 11919), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((9551, 9572), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (9563, 9572), False, 'from django.urls import reverse, reverse_lazy\n'), ((12769, 12793), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(204)'}), '(status=204)\n', (12781, 12793), False, 'from django.http import HttpResponse, QueryDict\n'), ((13513, 13556), 'django.shortcuts.render', 'render', (['request', '"""edit_knobs.html"""', 'context'], {}), "(request, 'edit_knobs.html', context)\n", (13519, 13556), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((11948, 11969), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (11960, 11969), False, 'from django.urls import reverse, reverse_lazy\n'), ((13785, 13847), 'django.urls.reverse', 'reverse', (['"""project_sessions"""'], {'kwargs': "{'project_id': project_id}"}), "('project_sessions', kwargs={'project_id': project_id})\n", (13792, 13847), False, 'from django.urls import reverse, reverse_lazy\n'), ((13585, 13606), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (13597, 13606), False, 'from django.urls import reverse, reverse_lazy\n'), ((13894, 13915), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (13906, 13915), False, 'from django.urls import reverse, reverse_lazy\n'), ((16723, 16742), 'pytz.timezone', 'timezone', (['TIME_ZONE'], {}), '(TIME_ZONE)\n', (16731, 16742), False, 'from pytz import timezone\n'), ((16896, 16915), 'pytz.timezone', 'timezone', (['TIME_ZONE'], {}), '(TIME_ZONE)\n', (16904, 16915), False, 'from pytz import timezone\n'), ((17009, 17052), 're.match', 're.match', (['"""^[a-zA-Z0-9_-]+$"""', 'workload_name'], {}), "('^[a-zA-Z0-9_-]+$', workload_name)\n", (17017, 17052), False, 'import re\n'), ((17069, 
17229), 'django.http.HttpResponse', 'HttpResponse', (["('Your workload name ' + workload_name +\n ' contains invalid characters! It should only contain alpha-numeric, underscore(_) and hyphen(-)'\n )"], {}), "('Your workload name ' + workload_name +\n ' contains invalid characters! It should only contain alpha-numeric, underscore(_) and hyphen(-)'\n )\n", (17081, 17229), False, 'from django.http import HttpResponse, QueryDict\n'), ((17618, 17795), 'django.http.HttpResponse', 'HttpResponse', (["(\n 'The DBMS must match the type and version specified when creating the session. (expected='\n + session.dbms.full_name + ') (actual=' + dbms.full_name + ')')"], {}), "(\n 'The DBMS must match the type and version specified when creating the session. (expected='\n + session.dbms.full_name + ') (actual=' + dbms.full_name + ')')\n", (17630, 17795), False, 'from django.http import HttpResponse, QueryDict\n'), ((20398, 20441), 'django.http.HttpResponse', 'HttpResponse', (['"""Result stored successfully!"""'], {}), "('Result stored successfully!')\n", (20410, 20441), False, 'from django.http import HttpResponse, QueryDict\n'), ((20966, 20987), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (20978, 20987), False, 'from django.urls import reverse, reverse_lazy\n'), ((22378, 22399), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (22390, 22399), False, 'from django.urls import reverse, reverse_lazy\n'), ((23176, 23197), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (23188, 23197), False, 'from django.urls import reverse, reverse_lazy\n'), ((23736, 23757), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (23748, 23757), False, 'from django.urls import reverse, reverse_lazy\n'), ((25900, 25921), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (25912, 25921), False, 'from django.urls import reverse, 
reverse_lazy\n'), ((27453, 27474), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (27465, 27474), False, 'from django.urls import reverse, reverse_lazy\n'), ((27854, 27875), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (27866, 27875), False, 'from django.urls import reverse, reverse_lazy\n'), ((29440, 29467), 'django.shortcuts.render', 'render', (['request', '"""404.html"""'], {}), "(request, '404.html')\n", (29446, 29467), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((29187, 29208), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (29199, 29208), False, 'from django.urls import reverse, reverse_lazy\n'), ((31631, 31652), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""login"""'], {}), "('login')\n", (31643, 31652), False, 'from django.urls import reverse, reverse_lazy\n'), ((37169, 37201), 'django.http.HttpResponse', 'HttpResponse', (['"""Result not ready"""'], {}), "('Result not ready')\n", (37181, 37201), False, 'from django.http import HttpResponse, QueryDict\n'), ((1922, 1946), 'django.urls.reverse', 'reverse', (['"""home_projects"""'], {}), "('home_projects')\n", (1929, 1946), False, 'from django.urls import reverse, reverse_lazy\n'), ((2122, 2145), 'django.http.QueryDict', 'QueryDict', ([], {'mutable': '(True)'}), '(mutable=True)\n', (2131, 2145), False, 'from django.http import HttpResponse, QueryDict\n'), ((2667, 2690), 'django.urls.reverse', 'reverse', (['"""home_project"""'], {}), "('home_project')\n", (2674, 2690), False, 'from django.urls import reverse, reverse_lazy\n'), ((2858, 2897), 'django.contrib.auth.update_session_auth_hash', 'update_session_auth_hash', (['request', 'user'], {}), '(request, user)\n', (2882, 2897), False, 'from django.contrib.auth import update_session_auth_hash\n'), ((3231, 3255), 'django.urls.reverse', 'reverse', (['"""home_projects"""'], {}), "('home_projects')\n", (3238, 3255), False, 
'from django.urls import reverse, reverse_lazy\n'), ((5135, 5140), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (5138, 5140), False, 'from django.utils.timezone import now\n'), ((5280, 5340), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'pk': 'project_id', 'user': 'request.user'}), '(Project, pk=project_id, user=request.user)\n', (5297, 5340), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((5550, 5555), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (5553, 5555), False, 'from django.utils.timezone import now\n'), ((5607, 5669), 'django.urls.reverse', 'reverse', (['"""project_sessions"""'], {'kwargs': "{'project_id': project.pk}"}), "('project_sessions', kwargs={'project_id': project.pk})\n", (5614, 5669), False, 'from django.urls import reverse, reverse_lazy\n'), ((10179, 10184), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (10182, 10184), False, 'from django.utils.timezone import now\n'), ((10942, 10947), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (10945, 10947), False, 'from django.utils.timezone import now\n'), ((11023, 11102), 'django.urls.reverse', 'reverse', (['"""session"""'], {'kwargs': "{'project_id': project_id, 'session_id': session.pk}"}), "('session', kwargs={'project_id': project_id, 'session_id': session.pk})\n", (11030, 11102), False, 'from django.urls import reverse, reverse_lazy\n'), ((12299, 12393), 'django.shortcuts.render', 'render', (['request', '"""edit_knobs.html"""', "{'project': project, 'session': session, 'form': form}"], {}), "(request, 'edit_knobs.html', {'project': project, 'session': session,\n 'form': form})\n", (12305, 12393), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((12959, 12978), 'django.forms.models.model_to_dict', 'model_to_dict', (['knob'], {}), '(knob)\n', (12972, 12978), False, 'from django.forms.models import model_to_dict\n'), ((36703, 36754), 'django.http.HttpResponse', 'HttpResponse', 
(["('Invalid upload code: ' + upload_code)"], {}), "('Invalid upload code: ' + upload_code)\n", (36715, 36754), False, 'from django.http import HttpResponse, QueryDict\n'), ((37341, 37361), 'django.http.HttpResponse', 'HttpResponse', (['"""Fail"""'], {}), "('Fail')\n", (37353, 37361), False, 'from django.http import HttpResponse, QueryDict\n'), ((2926, 2950), 'django.urls.reverse', 'reverse', (['"""home_projects"""'], {}), "('home_projects')\n", (2933, 2950), False, 'from django.urls import reverse, reverse_lazy\n'), ((3464, 3488), 'django.urls.reverse', 'reverse', (['"""home_projects"""'], {}), "('home_projects')\n", (3471, 3488), False, 'from django.urls import reverse, reverse_lazy\n'), ((4979, 5031), 'django.shortcuts.render', 'render', (['request', '"""edit_project.html"""', "{'form': form}"], {}), "(request, 'edit_project.html', {'form': form})\n", (4985, 5031), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((5463, 5515), 'django.shortcuts.render', 'render', (['request', '"""edit_project.html"""', "{'form': form}"], {}), "(request, 'edit_project.html', {'form': form})\n", (5469, 5515), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((9935, 10007), 'django.shortcuts.render', 'render', (['request', '"""edit_session.html"""', "{'project': project, 'form': form}"], {}), "(request, 'edit_session.html', {'project': project, 'form': form})\n", (9941, 10007), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((10652, 10748), 'django.shortcuts.render', 'render', (['request', '"""edit_session.html"""', "{'project': project, 'form': form, 'session': session}"], {}), "(request, 'edit_session.html', {'project': project, 'form': form,\n 'session': session})\n", (10658, 10748), False, 'from django.shortcuts import redirect, render, get_object_or_404\n'), ((15830, 15881), 'django.http.HttpResponse', 'HttpResponse', (["('Invalid upload code: ' + upload_code)"], {}), "('Invalid upload 
code: ' + upload_code)\n", (15842, 15881), False, 'from django.http import HttpResponse, QueryDict\n'), ((33688, 33707), 'pytz.timezone', 'timezone', (['TIME_ZONE'], {}), '(TIME_ZONE)\n', (33696, 33707), False, 'from pytz import timezone\n'), ((36122, 36141), 'pytz.timezone', 'timezone', (['TIME_ZONE'], {}), '(TIME_ZONE)\n', (36130, 36141), False, 'from pytz import timezone\n'), ((35099, 35118), 'pytz.timezone', 'timezone', (['TIME_ZONE'], {}), '(TIME_ZONE)\n', (35107, 35118), False, 'from pytz import timezone\n')]
|
# -*- coding: utf-8 -*-
"""
Module containing the available commands of the game, the factory class to
create the commands...
Today, the available commands are:
- look,
- talk,
- move,
- enter,
- exit,
- take,
- drop,
- inventory,
- stats,
- help,
- quit,
- attack
- save
"""
import core.command
from core.commands import look, talk, move, enter, exit, take, drop, inventory,\
help, open, stats, attack, save
from core.localisation import _
import sys
"""
Code corresponding to the quit command
"""
quit = -1  # Sentinel returned by factory.create when the player asks to quit.
class factory:
	"""
	Class to instantiate a command from a string.
	"""

	# Available commands, keyed by the (localised) command word. Each value
	# gives the command module name in core.commands and whether the command
	# may be used while the player is fighting.
	mapping = {
		_('LOOK_COMMAND'): {'allowed_while_fighting': True, 'command': 'look'},
		_('TALK_COMMAND'): {'allowed_while_fighting': False, 'command': 'talk'},
		_('MOVE_COMMAND'): {'allowed_while_fighting': True, 'command': 'move'},
		_('ENTER_COMMAND'): {'allowed_while_fighting': True, 'command': 'enter'},
		_('EXIT_COMMAND'): {'allowed_while_fighting': True, 'command': 'exit'},
		_('TAKE_COMMAND'): {'allowed_while_fighting': False, 'command': 'take'},
		_('DROP_COMMAND'): {'allowed_while_fighting': False, 'command': 'drop'},
		_('INVENTORY_COMMAND'): {'allowed_while_fighting': True, 'command': 'inventory'},
		_('INVENTORY_SHORT_COMMAND'): {'allowed_while_fighting': True, 'command': 'inventory'},
		_('STATS_COMMAND'): {'allowed_while_fighting': True, 'command': 'stats'},
		_('HELP_COMMAND'): {'allowed_while_fighting': True, 'command': 'help'},
		_('OPEN_COMMAND'): {'allowed_while_fighting': False, 'command': 'open'},
		_('SAVE_COMMAND'): {'allowed_while_fighting': False, 'command': 'save'},
		_('ATTACK_COMMAND'): {'allowed_while_fighting': True, 'command': 'attack'}
	}

	@staticmethod
	def create(p, commandFull, savedGameId=None):
		"""
		command.factory.create(p, commandFull, savedGameId=None) -> command.command

		Create the desired command.

		@param p player.player Current player.
		@param commandFull list command to run, the first element of the list
			is the command, the other elements are the command's arguments.
		@param savedGameId identifier of the saved game, forwarded to the
			created command (may be None).
		@return the created command, or the module-level ``quit`` code (-1)
			when the player asked to leave the game.
		@raises core.command.exception if the command word is unknown, or if
			it is not allowed while the player is fighting.
		"""
		# Split the command word from its arguments without mutating the
		# caller's list (the previous implementation deleted element 0 in
		# place, a surprising side effect for the caller).
		cmdWord = commandFull[0]
		args = commandFull[1:]
		if cmdWord in (_('QUIT_COMMAND'), _('QUIT_SHORT_COMMAND')):
			return quit
		if cmdWord not in factory.mapping:
			raise core.command.exception(_('ERROR_UNKNOWN_COMMAND'))
		entry = factory.mapping[cmdWord]
		# The command module was imported at module load time; look it up
		# before the fight check to preserve the original evaluation order.
		module = sys.modules['core.commands.' + entry['command']]
		if p.isFighting() and not entry['allowed_while_fighting']:
			raise core.command.exception(_('ERROR_DENIED_COMMAND_WHILE_FIGHTING'))
		cmd = getattr(module, entry['command'])()
		cmd.setArgs(args)
		cmd.setPlayer(p)
		cmd.setSavedGameId(savedGameId)
		return cmd
|
[
"core.localisation._"
] |
[((719, 736), 'core.localisation._', '_', (['"""LOOK_COMMAND"""'], {}), "('LOOK_COMMAND')\n", (720, 736), False, 'from core.localisation import _\n'), ((793, 810), 'core.localisation._', '_', (['"""TALK_COMMAND"""'], {}), "('TALK_COMMAND')\n", (794, 810), False, 'from core.localisation import _\n'), ((868, 885), 'core.localisation._', '_', (['"""MOVE_COMMAND"""'], {}), "('MOVE_COMMAND')\n", (869, 885), False, 'from core.localisation import _\n'), ((942, 960), 'core.localisation._', '_', (['"""ENTER_COMMAND"""'], {}), "('ENTER_COMMAND')\n", (943, 960), False, 'from core.localisation import _\n'), ((1018, 1035), 'core.localisation._', '_', (['"""EXIT_COMMAND"""'], {}), "('EXIT_COMMAND')\n", (1019, 1035), False, 'from core.localisation import _\n'), ((1092, 1109), 'core.localisation._', '_', (['"""TAKE_COMMAND"""'], {}), "('TAKE_COMMAND')\n", (1093, 1109), False, 'from core.localisation import _\n'), ((1167, 1184), 'core.localisation._', '_', (['"""DROP_COMMAND"""'], {}), "('DROP_COMMAND')\n", (1168, 1184), False, 'from core.localisation import _\n'), ((1242, 1264), 'core.localisation._', '_', (['"""INVENTORY_COMMAND"""'], {}), "('INVENTORY_COMMAND')\n", (1243, 1264), False, 'from core.localisation import _\n'), ((1326, 1354), 'core.localisation._', '_', (['"""INVENTORY_SHORT_COMMAND"""'], {}), "('INVENTORY_SHORT_COMMAND')\n", (1327, 1354), False, 'from core.localisation import _\n'), ((1416, 1434), 'core.localisation._', '_', (['"""STATS_COMMAND"""'], {}), "('STATS_COMMAND')\n", (1417, 1434), False, 'from core.localisation import _\n'), ((1492, 1509), 'core.localisation._', '_', (['"""HELP_COMMAND"""'], {}), "('HELP_COMMAND')\n", (1493, 1509), False, 'from core.localisation import _\n'), ((1566, 1583), 'core.localisation._', '_', (['"""OPEN_COMMAND"""'], {}), "('OPEN_COMMAND')\n", (1567, 1583), False, 'from core.localisation import _\n'), ((1641, 1658), 'core.localisation._', '_', (['"""SAVE_COMMAND"""'], {}), "('SAVE_COMMAND')\n", (1642, 1658), False, 'from 
core.localisation import _\n'), ((1716, 1735), 'core.localisation._', '_', (['"""ATTACK_COMMAND"""'], {}), "('ATTACK_COMMAND')\n", (1717, 1735), False, 'from core.localisation import _\n'), ((2249, 2266), 'core.localisation._', '_', (['"""QUIT_COMMAND"""'], {}), "('QUIT_COMMAND')\n", (2250, 2266), False, 'from core.localisation import _\n'), ((2268, 2291), 'core.localisation._', '_', (['"""QUIT_SHORT_COMMAND"""'], {}), "('QUIT_SHORT_COMMAND')\n", (2269, 2291), False, 'from core.localisation import _\n'), ((2656, 2682), 'core.localisation._', '_', (['"""ERROR_UNKNOWN_COMMAND"""'], {}), "('ERROR_UNKNOWN_COMMAND')\n", (2657, 2682), False, 'from core.localisation import _\n'), ((2530, 2570), 'core.localisation._', '_', (['"""ERROR_DENIED_COMMAND_WHILE_FIGHTING"""'], {}), "('ERROR_DENIED_COMMAND_WHILE_FIGHTING')\n", (2531, 2570), False, 'from core.localisation import _\n')]
|
# Generated by pypy/tool/import_cffi.py
import py, sys
from pypy.module.test_lib_pypy.cffi_tests.cffi0 import backend_tests
from cffi.backend_ctypes import CTypesBackend
class TestCTypes(backend_tests.BackendTests):
    """Run the shared backend test suite against the ctypes backend.

    The individual tests live in backend_tests.py; the overrides below
    skip the features the ctypes backend does not implement.
    """

    Backend = CTypesBackend
    TypeRepr = "<class 'ffi.CData<%s>'>"

    def _skip_unsupported(self, detail):
        # All "unsupported feature" skips share the same message prefix.
        py.test.skip("ctypes backend: not supported: " + detail)

    def test_array_of_func_ptr(self):
        self._skip_unsupported("initializers for function pointers")

    def test_structptr_argument(self):
        self._skip_unsupported("passing a list for a pointer argument")

    def test_array_argument_as_list(self):
        self._skip_unsupported("passing a list for a pointer argument")

    def test_cast_to_array_type(self):
        self._skip_unsupported("casting to array")

    def test_nested_anonymous_struct(self):
        self._skip_unsupported("nested anonymous struct")

    def test_nested_field_offset_align(self):
        self._skip_unsupported("nested anonymous struct")

    def test_nested_anonymous_union(self):
        self._skip_unsupported("nested anonymous union")

    def test_CData_CType_2(self):
        # CType reprs only match the expectations on Python 2.
        if sys.version_info[0] >= 3:
            py.test.skip("ctypes backend: not supported in Python 3: CType")
        backend_tests.BackendTests.test_CData_CType_2(self)
|
[
"pypy.module.test_lib_pypy.cffi_tests.cffi0.backend_tests.BackendTests.test_CData_CType_2",
"py.test.skip"
] |
[((399, 485), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported: initializers for function pointers"""'], {}), "(\n 'ctypes backend: not supported: initializers for function pointers')\n", (411, 485), False, 'import py, sys\n'), ((553, 642), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported: passing a list for a pointer argument"""'], {}), "(\n 'ctypes backend: not supported: passing a list for a pointer argument')\n", (565, 642), False, 'import py, sys\n'), ((714, 803), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported: passing a list for a pointer argument"""'], {}), "(\n 'ctypes backend: not supported: passing a list for a pointer argument')\n", (726, 803), False, 'import py, sys\n'), ((871, 934), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported: casting to array"""'], {}), "('ctypes backend: not supported: casting to array')\n", (883, 934), False, 'import py, sys\n'), ((988, 1058), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported: nested anonymous struct"""'], {}), "('ctypes backend: not supported: nested anonymous struct')\n", (1000, 1058), False, 'import py, sys\n'), ((1114, 1184), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported: nested anonymous struct"""'], {}), "('ctypes backend: not supported: nested anonymous struct')\n", (1126, 1184), False, 'import py, sys\n'), ((1237, 1306), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported: nested anonymous union"""'], {}), "('ctypes backend: not supported: nested anonymous union')\n", (1249, 1306), False, 'import py, sys\n'), ((1464, 1515), 'pypy.module.test_lib_pypy.cffi_tests.cffi0.backend_tests.BackendTests.test_CData_CType_2', 'backend_tests.BackendTests.test_CData_CType_2', (['self'], {}), '(self)\n', (1509, 1515), False, 'from pypy.module.test_lib_pypy.cffi_tests.cffi0 import backend_tests\n'), ((1391, 1455), 'py.test.skip', 'py.test.skip', (['"""ctypes backend: not supported in 
Python 3: CType"""'], {}), "('ctypes backend: not supported in Python 3: CType')\n", (1403, 1455), False, 'import py, sys\n')]
|
# -*- coding: utf-8 -*-
"""
TODO:
Fix slowness
Fix sorting so columns are initially sorted in ascending order
"""
import logging
from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack
from wbia.guitool.__PYQT__.QtCore import Qt
from wbia.guitool import qtype
from wbia.guitool.guitool_decorators import checks_qt_error, signal_ # NOQA
from six.moves import zip # builtins # NOQA
# from utool._internal.meta_util_six import get_funcname
import functools
import utool as ut
# from .api_thumb_delegate import APIThumbDelegate
import numpy as np
from wbia.guitool import api_tree_node as _atn
import cachetools
# UTOOL PRINT STATEMENTS CAUSE RACE CONDITIONS IN QT THAT CAN LEAD TO SEGFAULTS
# DO NOT INJECT THEM IN GUITOOL
# print, rrr, profile = ut.inject2(__name__)
# Module-level logger under the 'wbia' hierarchy.
logger = logging.getLogger('wbia')
# Opt this module out of utool's print/profiler injection (see the note
# above: injected prints can race with Qt and segfault).
ut.noinject(__name__, '[APIItemModel]')
# raise ImportError('refused to import wbia.guitool')
profile = ut.profile
# Qt base class that APIItemModel derives from.
API_MODEL_BASE = QtCore.QAbstractItemModel
# Verbosity flag: enabled by utool's global VERBOSE or by command-line flags.
VERBOSE_MODEL = ut.VERBOSE or ut.get_argflag(('--verbose-qt', '--verbqt'))
VERBOSE_MODEL = VERBOSE_MODEL or ut.get_argflag(('--verbose-qt-api', '--verbqt-api'))
class ChangeLayoutContext(object):
    """
    Context manager that batches Qt layout-change notifications.

    On entry it calls ``_about_to_change`` on every model and blocks further
    change signals; on clean exit it unblocks and calls ``_change``.  A model
    already owned by another (outer) context is left untouched, so nested
    contexts do not double-fire signals.
    """

    @ut.accepts_scalar_input
    def __init__(self, model_list, *args):
        """Collect all models (positional list plus varargs) to manage."""
        self.model_list = list(model_list) + list(args)

    def __enter__(self):
        my_id = id(self)
        for model in self.model_list:
            # Claim only models that no other context already owns.
            if model._get_context_id() is None:
                model._set_context_id(my_id)
                model._about_to_change()
                model._set_changeblocked(True)
        return self

    def __exit__(self, type_, value, trace):
        if trace is not None:
            logger.info('[api_model] Error in context manager!: ' + str(value))
            return False  # falsey: let the exception propagate
        my_id = id(self)
        for model in self.model_list:
            # Release only the models this particular context claimed.
            if model._get_context_id() == my_id:
                model._set_context_id(None)
                model._set_changeblocked(False)
                model._change()
def default_method_decorator(func):
    """
    No-op decorator: return *func* unchanged.

    Kept as a single hook point so profiling or Qt error checking
    (e.g. ``checks_qt_error(profile(func))``) can be re-enabled globally.
    """
    return func
def updater(func):
    """
    Decorate a model method so layoutChanged signals bracket its execution.

    The wrapped call runs inside a :class:`ChangeLayoutContext`, which emits
    the about-to-change/changed notifications unless an enclosing context is
    already in the middle of a layout change.
    """
    wrapped = default_method_decorator(func)

    @functools.wraps(func)
    def upd_wrapper(model, *args, **kwargs):
        with ChangeLayoutContext([model]):
            return wrapped(model, *args, **kwargs)

    return upd_wrapper
class APIItemModel(API_MODEL_BASE):
"""
Item model for displaying a list of columns
Attributes:
iders (list) : functions that return ids for setters and getters
col_name_list (list) : keys or SQL-like name for column to reference
abstracted data storage using getters and setters
col_type_list (list) : column value (Python) types
col_nice_list (list) : well-formatted names of the columns
col_edit_list (list) : booleans for if column should be editable
col_setter_list (list) : setter functions
col_getter_list (list) : getter functions
col_sort_index (int) : index into col_name_list for sorting
col_sort_reverse (bool) : flag to reverse the sort ordering
"""
_rows_updated = signal_(str, int)
EditableItemColor = QtGui.QColor(242, 242, 255)
# EditableItemColor = QtGui.QColor(200, 200, 255)
TrueItemColor = QtGui.QColor(230, 250, 230)
FalseItemColor = QtGui.QColor(250, 230, 230)
    def _set_context_id(self, id_):
        # Record which ChangeLayoutContext currently owns this model
        # (the context stores id(self); None means unowned).
        self._context_id = id_

    def _get_context_id(self):
        # Owning ChangeLayoutContext's id, or None when no context owns us.
        return self._context_id

    def _set_changeblocked(self, changeblocked_):
        # When True, change notifications are suppressed until the owning
        # ChangeLayoutContext exits.
        self._changeblocked = changeblocked_

    def _get_changeblocked(self):
        return self._changeblocked
#
# Non-Qt Init Functions
    def __init__(model, headers=None, parent=None):
        """
        Build an empty item model.

        Args:
            headers (dict | None): header specification; if given, its keys
                (``col_name_list``, ``col_type_list``, ...) are forwarded to
                ``_update_headers`` at the end of construction.
            parent: Qt parent (the view); also stored as ``model.view``.
        """
        if VERBOSE_MODEL:
            logger.info('[APIItemModel] __init__')
        # FIXME: don't let the model point to the view
        model.view = parent
        API_MODEL_BASE.__init__(model, parent=parent)
        # Internal Flags (used by ChangeLayoutContext to batch layout signals)
        model._abouttochange = False
        model._context_id = None
        model._haschanged = True
        model._changeblocked = False
        # Model Data And Accessors (populated by _update_headers)
        model.name = 'None'
        model.nice = 'None'
        model.iders = [lambda: []]
        model.col_visible_list = []
        model.col_name_list = []
        model.col_type_list = []
        model.col_nice_list = []
        model.col_edit_list = []
        model.col_setter_list = []
        model.col_getter_list = []
        model.col_level_list = []
        model.col_bgrole_getter_list = None
        model.col_sort_index = None
        model.col_sort_reverse = False
        model.level_index_list = []
        model.cache = None  # FIXME: This is not sustainable
        model.cache_timeout_sec = 2.5
        model.cache_size = 512
        model.batch_size = None  # Small batch sizes give good response time
        model.scope_hack_list = []
        # Sentinel root (id -1, no parent) of the tree of row nodes.
        model.root_node = _atn.TreeNode(-1, None, -1)
        # Initialize member variables
        # model._about_to_change()
        model.headers = headers  # save the headers
        # Optional per-level row filters; see set_ider_filters / get_iders.
        model.ider_filters = None
        model.num_rows_loaded = 0
        model.num_rows_total = None
        # len(model.level_index_list)
        # model.lazy_updater = None
        if headers is not None:
            model._update_headers(**headers)
    def set_ider_filters(model, ider_filters):
        """
        Install per-level row filter functions (one per ider).

        Used to induce a filter on the rows; the caller must trigger a row
        update afterwards for the filter to take effect.
        """
        model.ider_filters = ider_filters
def get_iders(model):
# def filtfun_test(x_list):
# return [x for x in x_list if x % 2 == 0]
# model.name == 'annotations'
# if len(model.iders) == 1:
# model.ider_filters = [filtfun_test]
if model.ider_filters is None:
ider_list = model.iders
else:
assert len(model.ider_filters) == len(model.iders), 'bad filters'
# ider_list = [lambda: filtfn(ider()) for filtfn, ider in zip(model.ider_filters, model.iders)]
# with ut.embed_on_exception_context:
def wrap_ider(ider, filtfn):
def wrapped_ider(*args, **kwargs):
return filtfn(ider(*args, **kwargs))
return wrapped_ider
ider_list = [
# ider
wrap_ider(ider, filtfn)
# lambda *args: filtfn(ider(*args))
for filtfn, ider in zip(model.ider_filters, model.iders)
]
return ider_list
    @updater
    def _update_headers(model, **headers):
        """Install a new header specification and rebuild the model.

        Unpacks the (all-optional) header entries, fills missing ones with
        defaults sized to ``col_name_list``, validates that every per-column
        list is consistent, stores everything on the model, and finally
        re-sorts/rebuilds the row structure via ``_set_sort``.
        """
        if VERBOSE_MODEL:
            logger.info('[APIItemModel] _update_headers')
        # Every header entry is optional; missing ones get defaults below.
        iders = headers.get('iders', None)
        name = headers.get('name', None)
        nice = headers.get('nice', None)
        col_name_list = headers.get('col_name_list', None)
        col_type_list = headers.get('col_type_list', None)
        col_nice_list = headers.get('col_nice_list', None)
        col_edit_list = headers.get('col_edit_list', None)
        col_setter_list = headers.get('col_setter_list', None)
        col_getter_list = headers.get('col_getter_list', None)
        col_level_list = headers.get('col_level_list', None)
        col_sort_index = headers.get('col_sort_index', 0)
        col_sort_reverse = headers.get('col_sort_reverse', False)
        # New for dynamically getting non-data roles for each row
        col_bgrole_getter_list = headers.get('col_bgrole_getter_list', None)
        col_visible_list = headers.get('col_visible_list', None)
        # Default any unspecified entries.
        if iders is None:
            iders = []
        if ut.USE_ASSERT:
            assert ut.is_list(iders), 'bad type: %r' % type(iders)
            for index, ider in enumerate(iders):
                assert ut.is_funclike(ider), 'bad type at index %r: %r' % (
                    index,
                    type(ider),
                )
        if col_name_list is None:
            col_name_list = []
        if col_type_list is None:
            col_type_list = []
        if col_nice_list is None:
            col_nice_list = col_name_list[:]
        if col_edit_list is None:
            col_edit_list = [False] * len(col_name_list)
        if col_setter_list is None:
            col_setter_list = []
        if col_getter_list is None:
            col_getter_list = []
        if col_bgrole_getter_list is None:
            col_bgrole_getter_list = [None] * len(col_name_list)
        if col_visible_list is None:
            col_visible_list = [True] * len(col_name_list)
        if col_level_list is None:
            col_level_list = [0] * len(col_name_list)
        # NOTE(review): `True or ...` makes these checks unconditional
        # regardless of ut.USE_ASSERT -- presumably intentional; confirm.
        if True or ut.USE_ASSERT:
            assert len(col_name_list) == len(col_type_list), 'inconsistent colnametype'
            assert len(col_name_list) == len(col_nice_list), 'inconsistent colnice'
            assert len(col_name_list) == len(col_edit_list), 'inconsistent coledit'
            assert len(col_name_list) == len(col_setter_list), 'inconsistent colsetter'
            assert len(col_bgrole_getter_list) == len(
                col_name_list
            ), 'inconsistent col_bgrole_getter_list'
            assert len(col_name_list) == len(col_getter_list), 'inconsistent colgetter'
            assert len(col_visible_list) == len(
                col_name_list
            ), 'inconsistent col_visible_list'
            assert len(col_name_list) == len(col_level_list), 'inconsistent collevel'
            # An editable column must come with a setter function.
            for colname, flag, func in zip(col_name_list, col_edit_list, col_setter_list):
                if flag:
                    assert func is not None, 'column=%r is editable but func is None' % (
                        colname,
                    )
        # Commit the validated spec onto the model.
        model.clear_cache()
        model.name = str(name)
        model.nice = str(nice)
        model.iders = iders
        model.col_name_list = col_name_list
        model.col_type_list = col_type_list
        model.col_nice_list = col_nice_list
        model.col_edit_list = col_edit_list
        model.col_setter_list = col_setter_list
        model.col_getter_list = col_getter_list
        model.col_visible_list = col_visible_list
        model.col_level_list = col_level_list
        model.col_bgrole_getter_list = col_bgrole_getter_list
        model.col_display_role_func_dict = headers.get('col_display_role_func_dict', None)
        model.num_rows_loaded = 0
        # model.num_cols_loaded = 0
        model.num_rows_total = None
        model.lazy_rows = True
        # calls model._update_rows()
        model._set_sort(col_sort_index, col_sort_reverse, rebuild_structure=True)
def clear_cache(model):
model.cache = cachetools.TTLCache(
maxsize=model.cache_size, ttl=model.cache_timeout_sec
)
@updater
def _set_sort(model, col_sort_index, col_sort_reverse=False, rebuild_structure=False):
if VERBOSE_MODEL:
logger.info(
'[APIItemModel] _set_sort, index=%r reverse=%r, rebuild=%r'
% (col_sort_index, col_sort_reverse, rebuild_structure)
)
if len(model.col_name_list) > 0:
if ut.USE_ASSERT:
assert isinstance(col_sort_index, int) and col_sort_index < len(
model.col_name_list
), ('sort index out of bounds by: %r' % col_sort_index)
model.col_sort_index = col_sort_index
model.col_sort_reverse = col_sort_reverse
# Update the row-id order
model._update_rows(rebuild_structure=rebuild_structure)
    @updater
    def _update_rows(model, rebuild_structure=True):
        """
        Uses the current ider and col_sort_index to create
        row_indices

        Optionally rebuilds the internal tree (between begin/endResetModel),
        sorts the top-level children by the configured sort column, updates the
        lazy-loading bookkeeping, and emits ``_rows_updated``.
        """
        # with ut.Timer('[gt] update_rows (%s)' % (model.name,)):
        if True:
            # flag = model.blockSignals(True)
            if VERBOSE_MODEL:
                logger.info('[APIItemModel] +-----------')
                logger.info('[APIItemModel] _update_rows')
            # this is not slow
            # logger.info('UPDATE ROWS!')
            if len(model.col_level_list) == 0:
                return
            # old_root = model.root_node  # NOQA
            if rebuild_structure:
                # logger.info('Rebuilding api_item_model internal structure')
                model.beginResetModel()  # I think this is preventing a segfault
                model.root_node = _atn.build_internal_structure(model)
                model.endResetModel()
            if VERBOSE_MODEL:
                logger.info('[APIItemModel] lazy_update_rows')
            model.level_index_list = []
            sort_index = 0 if model.col_sort_index is None else model.col_sort_index
            children = (
                model.root_node.get_children()
            )  # THIS IS THE LINE THAT TAKES FOREVER
            id_list = [child.get_id() for child in children]
            # logger.info('ids_ generated')
            nodes = []
            if len(id_list) != 0:
                if VERBOSE_MODEL:
                    logger.info(
                        '[APIItemModel] lazy_update_rows len(id_list) = %r'
                        % (len(id_list))
                    )
                # start sort: pick the sort key values for each top-level id
                if model.col_sort_index is not None:
                    type_ = model.col_type_list[sort_index]
                    getter = model.col_getter_list[sort_index]
                    values = getter(id_list)
                    if type_ == 'PIXMAP':
                        # TODO: find a better sorting metric for pixmaps
                        values = ut.get_list_column(values, 0)
                else:
                    type_ = int
                    values = id_list
                reverse = model.col_sort_reverse
                # <NUMPY MULTIARRAY SORT>
                if True:
                    if values is None:
                        logger.info('SORTING VALUES IS NONE. VERY WEIRD')
                    if type_ is float:
                        values = np.array(ut.replace_nones(values, np.nan))
                        # Force nan to be the smallest number
                        values[np.isnan(values)] = -np.inf
                    elif type_ is str:
                        values = ut.replace_nones(values, '')
                    import vtool as vt

                    # Sort ids by (values, id) records; ties break on id.
                    sortx = vt.argsort_records([values, id_list], reverse=reverse)
                # </NUMPY MULTIARRAY SORT>
                nodes = ut.take(children, sortx)
                level = model.col_level_list[sort_index]
                if level == 0:
                    model.root_node.set_children(nodes)
                # end sort
            if ut.USE_ASSERT:
                assert nodes is not None, 'no indices'
            model.level_index_list = nodes
            # Book keeping for lazy loading rows
            model.num_rows_total = len(model.level_index_list)
            # model.num_cols_total = len(model.col_name_list)
            model.num_cols_loaded = 0
            if model.lazy_rows:
                model.num_rows_loaded = 0
            else:
                model.num_rows_loaded = model.num_rows_total
            # emit the number of rows and the name for the view to display
            # model.blockSignals(flag)
            model._rows_updated.emit(model.name, model.num_rows_total)
            if VERBOSE_MODEL:
                logger.info('[APIItemModel] finished _update_rows')
                logger.info('[APIItemModel] L__________')
# ------------------------------------
# --- Data maintainence functions ---
# ------------------------------------
@default_method_decorator
def _about_to_change(model, force=False):
if force or (not model._abouttochange and not model._changeblocked):
if VERBOSE_MODEL:
logger.info('ABOUT TO CHANGE: %r' % (model.name,))
model._abouttochange = True
model.layoutAboutToBeChanged.emit()
return True
else:
if VERBOSE_MODEL:
logger.info('NOT ABOUT TO CHANGE')
return False
@default_method_decorator
def _change(model, force=False):
if force or (model._abouttochange and not model._changeblocked):
if VERBOSE_MODEL:
logger.info('LAYOUT CHANGED: %r' % (model.name,))
model._abouttochange = False
model.clear_cache()
model.layoutChanged.emit()
return True
else:
if VERBOSE_MODEL:
logger.info('NOT LAYOUT CHANGING')
return False
    @default_method_decorator
    def _update(model, newrows=False):
        # Drop all cached cell values, then rebuild the row structure.
        # NOTE(review): resets to a plain dict rather than the TTLCache built by
        # clear_cache() -- confirm whether the TTL/size bound is wanted here too.
        model.cache = {}
        model._update_rows()
def _use_ider(model, level=0):
if level == 0:
return model.iders[level]()
else:
parent_ids = model._use_ider(level - 1)
level_ider = model.iders[level]
return level_ider(parent_ids)
def get_row_and_qtindex_from_id(model, _id):
""" uses an sqlrowid (from iders) to get a qtindex """
row = model.root_node.find_row_from_id(_id)
qtindex = model.index(row, 0) if row is not None else None
return qtindex, row
# ----------------------------------
# --- API Convineince Functions ---
# ----------------------------------
@default_method_decorator
def get_header_data(model, colname, qtindex):
""" Use _get_data if the column number is known """
if not qtindex.isValid():
return None
# row = qtindex.row()
node = qtindex.internalPointer()
col = model.col_name_list.index(colname)
getter = model.col_getter_list[col]
id_ = node.id_
# id_ = model.root_node[row].get_id()
value = getter(id_)
return value
@default_method_decorator
def get_header_name(model, column):
# TODO: use qtindex?
colname = model.col_name_list[column]
return colname
@default_method_decorator
def _get_level(model, qtindex):
node = qtindex.internalPointer()
if node is None:
return -1
level = node.get_level()
# level = model.col_level_list[column]
return level
# --------------------------------
# --- API Interface Functions ---
# --------------------------------
    @default_method_decorator
    def _get_col_align(model, col):
        # Hook for per-column alignment; not implemented by this model.
        if ut.USE_ASSERT:
            assert col is not None, 'bad column'
        raise NotImplementedError('_get_col_align')
@default_method_decorator
def _get_row_id(model, qtindex=QtCore.QModelIndex()):
"""
returns the id (specified by iders i.e. an wbia rowid) from qtindex
"""
if qtindex is not None and qtindex.isValid():
node = qtindex.internalPointer()
if ut.USE_ASSERT:
try:
assert isinstance(node, _atn.TreeNode), 'type(node)=%r, node=%r' % (
type(node),
node,
)
except AssertionError as ex:
ut.printex(
ex, 'error in _get_row_id', keys=['model', 'qtindex', 'node']
)
raise
try:
id_ = node.get_id()
except AttributeError as ex:
ut.printex(ex, key_list=['node', 'model', 'qtindex'])
raise
return id_
@default_method_decorator
def _get_adjacent_qtindex(model, qtindex=QtCore.QModelIndex(), offset=1):
# check qtindex
if qtindex is None or not qtindex.isValid():
return None
node = qtindex.internalPointer()
# check node
try:
if ut.USE_ASSERT:
assert isinstance(node, _atn.TreeNode), type(node)
except AssertionError as ex:
ut.printex(ex, key_list=['node'], pad_stdout=True)
raise
# get node parent
try:
node_parent = node.get_parent()
except Exception as ex:
ut.printex(ex, key_list=['node'], reraise=False, pad_stdout=True)
raise
# parent_node check
if node_parent is None:
logger.info('[model._get_adjacent_qtindex] node_parent is None!')
return None
# Offset to find the next qtindex
next_index = node_parent.child_index(node) + offset
nChildren = node_parent.get_num_children()
# check next index validitiy
if next_index >= 0 and next_index < nChildren:
next_node = node_parent.get_child(next_index)
next_level = next_node.get_level()
col = model.col_level_list.index(next_level)
row = next_node.get_row()
# Create qtindex for the adjacent note
parent_qtindex = model.parent(qtindex)
next_qtindex = model.index(row, col, parent_qtindex)
return next_qtindex
else:
# There is no adjacent node
return None
    @default_method_decorator
    def _get_type(model, col):
        # Declared type of column number `col` (from the header spec).
        return model.col_type_list[col]
@default_method_decorator
def _get_bgrole_value(model, qtindex):
""" Gets the background role if specified """
col = qtindex.column()
bgrole_getter = model.col_bgrole_getter_list[col]
if bgrole_getter is None:
return None
row_id = model._get_row_id(qtindex) # row_id w.r.t. to sorting
color = bgrole_getter(row_id)
if color is None:
return None
val = qtype.to_qcolor(color)
return val
    @default_method_decorator
    def _get_data(model, qtindex, **kwargs):
        """Fetch (and memoize) the value of the cell at ``qtindex``.

        Cache is keyed on (row_id, column); on a miss the column getter is
        invoked with the row id and any extra kwargs.
        """
        col = qtindex.column()
        # row_id wrt. to sorting
        row_id = model._get_row_id(qtindex)
        cachekey = (row_id, col)
        try:
            data = model.cache[cachekey]
        except KeyError:
            # getter function for this column
            getter = model.col_getter_list[col]
            try:
                # Using this getter may not be thread safe
                # Should this work around decorators?
                # data = getter((row_id,), **kwargs)[0]
                data = getter(row_id, **kwargs)
            except Exception as ex:
                # NOTE(review): printex `keys` appear to name local variables
                # for the error report -- do not rename these locals.
                qtindex_rc = (qtindex.row(), qtindex.column())  # NOQA
                ut.printex(
                    ex,
                    '[api_item_model] problem getting in column %r' % (col,),
                    keys=[
                        'model.name',
                        'getter',
                        'row_id',
                        'col',
                        'qtindex',
                        'qtindex_rc',
                    ],
                    iswarning=True,
                )
                # getting from: %r' % ut.util_str.get_callable_name(getter))
                raise
            model.cache[cachekey] = data
        # </MODEL_CACHE>
        return data
@default_method_decorator
def _set_data(model, qtindex, value):
"""
The setter function should be of the following format def
setter(column_name, row_id, value) column_name is the key or SQL-like
name for the column row_id is the corresponding row key or SQL-like id
that the row call back returned value is the value that needs to be
stored The setter function should return a boolean, if setting the
value was successfull or not
"""
col = qtindex.column()
row_id = model._get_row_id(qtindex)
# <HACK: MODEL_CACHE>
cachekey = (row_id, col)
try:
del model.cache[cachekey]
except KeyError:
pass
# </HACK: MODEL_CACHE>
setter = model.col_setter_list[col]
if VERBOSE_MODEL:
logger.info('[model] Setting data: row_id=%r, setter=%r' % (row_id, setter))
try:
return setter(row_id, value)
except Exception as ex:
ut.printex(
ex,
'ERROR: setting data: row_id=%r, setter=%r, col=%r'
% (row_id, setter, col),
)
raise
# ------------------------
# --- QtGui Functions ---
# ------------------------
    @default_method_decorator
    def parent(model, qindex):
        """
        A common convention used in models that expose tree data structures is
        that only items in the first column have children. For that case, when
        reimplementing this function in a subclass the column of the returned
        QModelIndex would be 0.

        When reimplementing this function in a subclass, be careful to avoid
        calling QModelIndex member functions, such as QModelIndex.parent(),
        since indexes belonging to your model will simply call your
        implementation, leading to infinite recursion.

        FIXME:
            seems to segfault in here
            https://riverbankcomputing.com/pipermail/pyqt/2016-February/036977.html
            https://gist.github.com/estan/c051d1f798c4c46caa7d

        Returns:
            the parent of the model item with the given index. If the item has
            no parent, an invalid QModelIndex is returned.
        """
        # model.lazy_checks()
        if qindex.isValid():
            try:
                node = qindex.internalPointer()
                # <HACK>
                # A segfault happens in isinstance when updating rows?
                if not isinstance(node, _atn.TreeNode):
                    logger.info(
                        'WARNING: tried to access parent of %r type object' % type(node)
                    )
                    return QtCore.QModelIndex()
                # assert node.__dict__, "node.__dict__=%r" % node.__dict__
                # </HACK>
                parent_node = node.get_parent()
                parent_id = parent_node.get_id()
                # id -1/None marks the synthetic root -> return invalid index
                if parent_id == -1 or parent_id is None:
                    return QtCore.QModelIndex()
                row = parent_node.get_row()
                col = model.col_level_list.index(parent_node.get_level())
                return model.createIndex(row, col, parent_node)
            except Exception as ex:
                import utool

                with utool.embed_on_exception_context:
                    qindex_rc = (qindex.row(), qindex.column())  # NOQA
                    ut.printex(
                        ex,
                        'failed to do parenty things',
                        keys=['qindex_rc', 'model.name'],
                        tb=True,
                    )
                    import utool

                    utool.embed()
                raise
        return QtCore.QModelIndex()
@default_method_decorator
def index(model, row, column, parent=QtCore.QModelIndex()):
"""
Qt Override
Returns:
the index of the item in the model specified by the given row,
column and parent index. When reimplementing this function in a
subclass, call createIndex() to generate model indexes that other
components can use to refer to items in your model.
NOTE:
Object must be specified to sort delegates.
"""
# model.lazy_checks()
if not parent.isValid():
# This is a top level == 0 index
# logger.info('[model.index] ROOT: row=%r, col=%r' % (row, column))
if row >= model.root_node.get_num_children():
return QtCore.QModelIndex()
# import traceback
# traceback.print_stack()
node = model.root_node[row]
if model.col_level_list[column] != node.get_level():
return QtCore.QModelIndex()
qtindex = model.createIndex(row, column, object=node)
return qtindex
else:
# This is a child level > 0 index
parent_node = parent.internalPointer()
node = parent_node[row]
if ut.USE_ASSERT:
assert isinstance(parent_node, _atn.TreeNode), type(parent_node)
assert isinstance(node, _atn.TreeNode), type(node)
return model.createIndex(row, column, object=node)
    def _get_level_row_count(model, qtindex):
        # Number of sibling rows at the same tree level as `qtindex`.
        return model.rowCount(qtindex.parent())
    def _get_level_row_index(model, qtindex):
        # Position of `qtindex`'s node among its siblings.
        node = qtindex.internalPointer()
        return node.get_row()
@default_method_decorator
def rowCount(model, parent=QtCore.QModelIndex()):
""" Qt Override """
# model.lazy_checks()
if not parent.isValid():
# Root row count
if len(model.level_index_list) == 0:
return 0
return model.num_rows_loaded
# nRows = len(model.level_index_list)
# # logger.info('* nRows=%r' % nRows)
# return nRows
else:
node = parent.internalPointer()
nRows = node.get_num_children()
# logger.info('+ nRows=%r' % nRows)
return nRows
    @default_method_decorator
    def columnCount(model, parent=QtCore.QModelIndex()):
        """ Qt Override """
        # FOR NOW THE COLUMN COUNT IS CONSTANT
        # (fixed by the header spec; `parent` is intentionally ignored)
        # model.lazy_checks()
        return len(model.col_name_list)
@default_method_decorator
def canFetchMore(model, parent=QtCore.QModelIndex()):
"""
Returns true if there is more data available for parent; otherwise
returns false. The default implementation always returns false. If
canFetchMore() returns true, the fetchMore() function should be called.
This is the behavior of QAbstractItemView, for example.
References:
http://doc.qt.io/qt-5/qtwidgets-itemviews-fetchmore-example.html
# Extend this to work well with QTreeViews
http://blog.tjwakeham.com/lazy-loading-pyqt-data-models/
http://stackoverflow.com/questions/38506808/pyqt4-force-view-to-fetchmore-from
"""
if parent is None:
return
if parent.isValid():
# Check if we are at a leaf node
node = parent.internalPointer()
if node.get_num_children() == 0:
return
# if node.get_level() == len(model.col_level_list):
# return
# logger.info('model.num_rows_total = %r' % (model.num_rows_total,))
# logger.info('model.num_rows_loaded = %r' % (model.num_rows_loaded,))
if model.num_rows_total is not None:
if model.num_rows_loaded < model.num_rows_total:
if VERBOSE_MODEL:
logger.info('canFetchMore %s? -- Yes' % (model.name,))
return True
if VERBOSE_MODEL:
logger.info('canFetchMore %s? -- No' % (model.name,))
return False
# if not parent.isValid():
# return False
# flags = model.flags(qtindex)
# # row = qtindex.row()
# col = qtindex.column()
# node = qtindex.internalPointer()
# return False
@default_method_decorator
def fetchMore(model, parent=QtCore.QModelIndex()):
"""
Fetches any available data for the items with the parent specified by
the parent index.
Reimplement this if you are populating your model incrementally.
The default implementation does nothing.
"""
if parent is None:
return
if parent.isValid():
# Check if we are at a leaf node
node = parent.internalPointer()
if node.get_num_children() == 0:
return
remainder = model.num_rows_total - model.num_rows_loaded
if model.batch_size is None:
num_fetching = remainder
else:
num_fetching = min(model.batch_size, remainder)
if VERBOSE_MODEL:
logger.info('Fetching %r more %s' % (num_fetching, model.name))
idx1 = model.num_rows_total
idx2 = model.num_rows_total + num_fetching - 1
# model.beginInsertRows(QtCore.QModelIndex(), idx1, idx2)
model.beginInsertRows(parent, idx1, idx2)
model.num_rows_loaded += num_fetching
# logger.info('model.num_rows_total = %r' % (model.num_rows_total,))
# logger.info('model.num_rows_loaded = %r' % (model.num_rows_loaded,))
model.endInsertRows()
if VERBOSE_MODEL:
logger.info(
'Fetched %r/%r rows' % (model.num_rows_loaded, model.num_rows_total)
)
# model.numberPopulated.emit(num_loading)
    @default_method_decorator
    def data(model, qtindex, role=Qt.DisplayRole, **kwargs):
        """
        Depending on the role, returns either data or how to display data
        Returns the data stored under the given role for the item referred to by
        the index.

        Note:
            If you do not have a value to return, return None
        """
        if not qtindex.isValid():
            return None
        flags = model.flags(qtindex)
        # row = qtindex.row()
        col = qtindex.column()
        node = qtindex.internalPointer()
        # a cell whose column level does not match the node level is blank
        if model.col_level_list[col] != node.get_level():
            return QVariantHack()
        type_ = model._get_type(col)
        #
        # Specify Text Alignment Role
        if role == Qt.TextAlignmentRole:
            if type_ in qtype.QT_IMAGE_TYPES:
                value = Qt.AlignRight | Qt.AlignVCenter
            elif type_ in qtype.QT_BUTTON_TYPES:
                value = Qt.AlignRight | Qt.AlignVCenter
            elif type_ in ut.VALID_FLOAT_TYPES:
                value = Qt.AlignRight | Qt.AlignVCenter
            else:
                value = Qt.AlignHCenter | Qt.AlignVCenter
            return value
        #
        # Specify Background Rule
        elif role == Qt.BackgroundRole:
            # per-row bgrole getter wins over flag-based coloring
            value = model._get_bgrole_value(qtindex)
            if value is not None:
                return value
            if flags & Qt.ItemIsEditable:
                # Editable fields are colored
                return QVariantHack(model.EditableItemColor)
            elif flags & Qt.ItemIsUserCheckable:
                # Checkable color depends on the truth value
                data = model._get_data(qtindex, **kwargs)
                if data:
                    return QVariantHack(model.TrueItemColor)
                else:
                    return QVariantHack(model.FalseItemColor)
            else:
                pass
        #
        # Specify Foreground Role
        elif role == Qt.ForegroundRole:
            if flags & Qt.ItemIsEditable:
                return QtGui.QBrush(QtGui.QColor(0, 0, 0))
        # Specify Decoration Role (superceded by thumbdelegate)
        # elif role == Qt.DecorationRole and type_ in qtype.QT_IMAGE_TYPES:
        # Specify CheckState Role:
        if role == Qt.CheckStateRole:
            if flags & Qt.ItemIsUserCheckable:
                data = model._get_data(qtindex, **kwargs)
                return Qt.Checked if data else Qt.Unchecked
        #
        # Return the data to edit or display
        elif role in (Qt.DisplayRole, Qt.EditRole):
            # For types displayed with custom delegates do not cast data into a
            # qvariant. This includes PIXMAP, BUTTON, and COMBO
            if type_ in qtype.QT_DELEGATE_TYPES:
                data = model._get_data(qtindex, **kwargs)
                # logger.info(data)
                return data
            else:
                # Display data with default delegate by casting to a qvariant
                data = model._get_data(qtindex, **kwargs)
                if model.col_display_role_func_dict is not None:
                    col_name = model.col_name_list[col]
                    display_role_func = model.col_display_role_func_dict.get(
                        col_name, None
                    )
                    if display_role_func is not None:
                        value = display_role_func(data)
                        return value
                value = qtype.cast_into_qt(data)
                return value
        else:
            # import builtins
            # role_name = qtype.ItemDataRoles[role]
            # builtins.print('UNHANDLED ROLE=%r' % role_name)
            pass
        # else return None
        return QVariantHack()
    @default_method_decorator
    def setData(model, qtindex, value, role=Qt.EditRole):
        """
        Sets the role data for the item at qtindex to value. value is a
        QVariant (called data in documentation) Returns a map with values for
        all predefined roles in the model for the item at the given index.
        Reimplement this function if you want to extend the default behavior of
        this function to include custom roles in the map.
        """
        try:
            if not qtindex.isValid():
                return None
            flags = model.flags(qtindex)
            # row = qtindex.row()
            col = qtindex.column()
            # refuse cells that are neither editable nor checkable
            if not (flags & Qt.ItemIsEditable or flags & Qt.ItemIsUserCheckable):
                return None
            if role == Qt.CheckStateRole:
                type_ = 'QtCheckState'
                data = value == Qt.Checked
            elif role != Qt.EditRole:
                return False
            else:
                # Cast value into datatype
                type_ = model.col_type_list[col]
                data = qtype.cast_from_qt(value, type_)
            # Do actual setting of data (only when it actually changed)
            old_data = model._get_data(qtindex)
            if old_data != data:
                model._set_data(qtindex, data)
                # This may not work with PyQt5
                # http://stackoverflow.com/questions/22560296/not-responding-datachanged
                # Emit that data was changed and return succcess
                model.dataChanged.emit(qtindex, qtindex)
            return True
        except Exception as ex:
            # value = str(value.toString())  # NOQA
            ut.printex(
                ex,
                'ignoring setData',
                '[model]',
                tb=True,
                key_list=['value'],
                iswarning=True,
            )
            return False
@default_method_decorator
def headerData(model, section, orientation, role=Qt.DisplayRole):
"""
Qt Override
Returns:
the data for the given role and section in the header with the
specified orientation. For horizontal headers, the section number
corresponds to the column number. Similarly, for vertical headers,
the section number corresponds to the row number.
"""
# model.lazy_checks()
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
column = section
if column >= len(model.col_nice_list):
return []
return model.col_nice_list[column]
if orientation == Qt.Vertical and role == Qt.DisplayRole:
# row = section
# rowid = model._get_row_id(row)
# return rowid
return section
return QVariantHack()
@updater
def sort(model, column, order):
""" Qt Override """
# model.lazy_checks()
reverse = order == QtCore.Qt.DescendingOrder
model._set_sort(column, reverse)
@default_method_decorator
def flags(model, qtindex):
"""
Qt Override
Returns:
Qt.ItemFlag:
0: 'NoItemFlags' # It does not have any properties set.
1: 'ItemIsSelectable' # It can be selected.
2: 'ItemIsEditable' # It can be edited.
4: 'ItemIsDragEnabled' # It can be dragged.
8: 'ItemIsDropEnabled' # It can be used as a drop target.
16: 'ItemIsUserCheckable' # It can be checked or unchecked by the user.
32: 'ItemIsEnabled' # The user can interact with the item.
64: 'ItemIsTristate' # The item is checkable with three separate states.
"""
# Return flags based on column properties (like type, and editable)
col = qtindex.column()
type_ = model._get_type(col)
editable = (
model.col_edit_list[col]
and model._get_level(qtindex) == model.col_level_list[col]
)
if type_ in qtype.QT_IMAGE_TYPES:
# return Qt.NoItemFlags
return Qt.ItemIsEnabled | Qt.ItemIsSelectable
elif not editable:
return Qt.ItemIsEnabled | Qt.ItemIsSelectable
elif type_ in ut.VALID_BOOL_TYPES:
return Qt.ItemIsEnabled | Qt.ItemIsUserCheckable
else:
return Qt.ItemIsEnabled | Qt.ItemIsEditable | Qt.ItemIsSelectable
def simple_thumbnail_widget():
    r"""
    Very simple example to test thumbnails
    CommandLine:
        python -m wbia.guitool.api_item_model --test-simple_thumbnail_widget --show
    Example:
        >>> # ENABLE_DOCTEST
        >>> # xdoctest: +REQUIRES(--gui)
        >>> import wbia.guitool as guitool
        >>> from wbia.guitool.api_item_model import *  # NOQA
        >>> guitool.ensure_qapp()  # must be ensured before any embeding
        >>> wgt = simple_thumbnail_widget()
        >>> ut.quit_if_noshow()
        >>> wgt.show()
        >>> guitool.qtapp_loop(wgt, frequency=100, init_signals=True)
    """
    import wbia.guitool as guitool

    guitool.ensure_qapp()

    def thumb_getter(id_, thumbsize=128):
        """ Thumb getters must conform to thumbtup structure """
        return ut.grab_test_imgpath(id_)

    def get_thumb_size():
        return 128

    column_names = ['rowid', 'image_name', 'thumb']
    api = guitool.CustomAPI(
        column_names,
        {'thumb': 'PIXMAP'},             # col_types_dict
        {
            'rowid': [1, 2, 3],
            'image_name': ['lena.png', 'carl.jpg', 'patsy.jpg'],
            'thumb': thumb_getter,
        },                               # col_getter_dict
        {},                              # col_bgrole_dict
        {'thumb': 'image_name'},         # col_ider_dict
        {},                              # col_setter_dict
        [],                              # editable_colnames
        'rowid',                         # sortby
        get_thumb_size,
        True,
        {},                              # col_width_dict
    )
    wgt = guitool.APIItemWidget()
    wgt.change_headers(api.make_headers(tblnice='Simple Example'))
    # guitool.qtapp_loop(qwin=wgt, ipy=ipy, frequency=loop_freq)
    return wgt
|
[
"utool.is_funclike",
"utool.replace_nones",
"utool.grab_test_imgpath",
"numpy.isnan",
"utool.noinject",
"six.moves.zip",
"utool.get_argflag",
"wbia.guitool.api_tree_node.TreeNode",
"wbia.guitool.__PYQT__.QVariantHack",
"utool.printex",
"wbia.guitool.ensure_qapp",
"wbia.guitool.qtype.cast_from_qt",
"wbia.guitool.__PYQT__.QtCore.QModelIndex",
"wbia.guitool.qtype.to_qcolor",
"utool.get_list_column",
"wbia.guitool.qtype.cast_into_qt",
"wbia.guitool.__PYQT__.QtGui.QColor",
"cachetools.TTLCache",
"wbia.guitool.CustomAPI",
"wbia.guitool.APIItemWidget",
"utool.is_list",
"wbia.guitool.api_tree_node.build_internal_structure",
"functools.wraps",
"vtool.argsort_records",
"utool.embed",
"utool.take",
"wbia.guitool.guitool_decorators.signal_",
"logging.getLogger"
] |
[((796, 821), 'logging.getLogger', 'logging.getLogger', (['"""wbia"""'], {}), "('wbia')\n", (813, 821), False, 'import logging\n'), ((822, 861), 'utool.noinject', 'ut.noinject', (['__name__', '"""[APIItemModel]"""'], {}), "(__name__, '[APIItemModel]')\n", (833, 861), True, 'import utool as ut\n'), ((1013, 1057), 'utool.get_argflag', 'ut.get_argflag', (["('--verbose-qt', '--verbqt')"], {}), "(('--verbose-qt', '--verbqt'))\n", (1027, 1057), True, 'import utool as ut\n'), ((1091, 1143), 'utool.get_argflag', 'ut.get_argflag', (["('--verbose-qt-api', '--verbqt-api')"], {}), "(('--verbose-qt-api', '--verbqt-api'))\n", (1105, 1143), True, 'import utool as ut\n'), ((3134, 3155), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (3149, 3155), False, 'import functools\n'), ((4126, 4143), 'wbia.guitool.guitool_decorators.signal_', 'signal_', (['str', 'int'], {}), '(str, int)\n', (4133, 4143), False, 'from wbia.guitool.guitool_decorators import checks_qt_error, signal_\n'), ((4168, 4195), 'wbia.guitool.__PYQT__.QtGui.QColor', 'QtGui.QColor', (['(242)', '(242)', '(255)'], {}), '(242, 242, 255)\n', (4180, 4195), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((4270, 4297), 'wbia.guitool.__PYQT__.QtGui.QColor', 'QtGui.QColor', (['(230)', '(250)', '(230)'], {}), '(230, 250, 230)\n', (4282, 4297), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((4319, 4346), 'wbia.guitool.__PYQT__.QtGui.QColor', 'QtGui.QColor', (['(250)', '(230)', '(230)'], {}), '(250, 230, 230)\n', (4331, 4346), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((42721, 42742), 'wbia.guitool.ensure_qapp', 'guitool.ensure_qapp', ([], {}), '()\n', (42740, 42742), True, 'import wbia.guitool as guitool\n'), ((43440, 43627), 'wbia.guitool.CustomAPI', 'guitool.CustomAPI', (['col_name_list', 'col_types_dict', 'col_getter_dict', 'col_bgrole_dict', 'col_ider_dict', 'col_setter_dict', 'editable_colnames', 'sortby', 
'get_thumb_size', '(True)', 'col_width_dict'], {}), '(col_name_list, col_types_dict, col_getter_dict,\n col_bgrole_dict, col_ider_dict, col_setter_dict, editable_colnames,\n sortby, get_thumb_size, True, col_width_dict)\n', (43457, 43627), True, 'import wbia.guitool as guitool\n'), ((43783, 43806), 'wbia.guitool.APIItemWidget', 'guitool.APIItemWidget', ([], {}), '()\n', (43804, 43806), True, 'import wbia.guitool as guitool\n'), ((5932, 5959), 'wbia.guitool.api_tree_node.TreeNode', '_atn.TreeNode', (['(-1)', 'None', '(-1)'], {}), '(-1, None, -1)\n', (5945, 5959), True, 'from wbia.guitool import api_tree_node as _atn\n'), ((11658, 11732), 'cachetools.TTLCache', 'cachetools.TTLCache', ([], {'maxsize': 'model.cache_size', 'ttl': 'model.cache_timeout_sec'}), '(maxsize=model.cache_size, ttl=model.cache_timeout_sec)\n', (11677, 11732), False, 'import cachetools\n'), ((19681, 19701), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (19699, 19701), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((20627, 20647), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (20645, 20647), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((22697, 22719), 'wbia.guitool.qtype.to_qcolor', 'qtype.to_qcolor', (['color'], {}), '(color)\n', (22712, 22719), False, 'from wbia.guitool import qtype\n'), ((27850, 27870), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (27868, 27870), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((27943, 27963), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (27961, 27963), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((29665, 29685), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (29683, 29685), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((30290, 
30310), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (30308, 30310), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((30524, 30544), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (30542, 30544), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((32308, 32328), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (32326, 32328), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((37532, 37546), 'wbia.guitool.__PYQT__.QVariantHack', 'QVariantHack', ([], {}), '()\n', (37544, 37546), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((40370, 40384), 'wbia.guitool.__PYQT__.QVariantHack', 'QVariantHack', ([], {}), '()\n', (40382, 40384), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((43002, 43027), 'utool.grab_test_imgpath', 'ut.grab_test_imgpath', (['id_'], {}), '(id_)\n', (43022, 43027), True, 'import utool as ut\n'), ((8648, 8665), 'utool.is_list', 'ut.is_list', (['iders'], {}), '(iders)\n', (8658, 8665), True, 'import utool as ut\n'), ((10486, 10536), 'six.moves.zip', 'zip', (['col_name_list', 'col_edit_list', 'col_setter_list'], {}), '(col_name_list, col_edit_list, col_setter_list)\n', (10489, 10536), False, 'from six.moves import zip\n'), ((34410, 34424), 'wbia.guitool.__PYQT__.QVariantHack', 'QVariantHack', ([], {}), '()\n', (34422, 34424), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((8768, 8788), 'utool.is_funclike', 'ut.is_funclike', (['ider'], {}), '(ider)\n', (8782, 8788), True, 'import utool as ut\n'), ((13413, 13449), 'wbia.guitool.api_tree_node.build_internal_structure', '_atn.build_internal_structure', (['model'], {}), '(model)\n', (13442, 13449), True, 'from wbia.guitool import api_tree_node as _atn\n'), ((20982, 21032), 'utool.printex', 'ut.printex', (['ex'], {'key_list': "['node']", 
'pad_stdout': '(True)'}), "(ex, key_list=['node'], pad_stdout=True)\n", (20992, 21032), True, 'import utool as ut\n'), ((21178, 21243), 'utool.printex', 'ut.printex', (['ex'], {'key_list': "['node']", 'reraise': '(False)', 'pad_stdout': '(True)'}), "(ex, key_list=['node'], reraise=False, pad_stdout=True)\n", (21188, 21243), True, 'import utool as ut\n'), ((25126, 25222), 'utool.printex', 'ut.printex', (['ex', "('ERROR: setting data: row_id=%r, setter=%r, col=%r' % (row_id, setter, col))"], {}), "(ex, 'ERROR: setting data: row_id=%r, setter=%r, col=%r' % (\n row_id, setter, col))\n", (25136, 25222), True, 'import utool as ut\n'), ((28662, 28682), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (28680, 28682), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((28888, 28908), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (28906, 28908), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((39226, 39320), 'utool.printex', 'ut.printex', (['ex', '"""ignoring setData"""', '"""[model]"""'], {'tb': '(True)', 'key_list': "['value']", 'iswarning': '(True)'}), "(ex, 'ignoring setData', '[model]', tb=True, key_list=['value'],\n iswarning=True)\n", (39236, 39320), True, 'import utool as ut\n'), ((7447, 7483), 'six.moves.zip', 'zip', (['model.ider_filters', 'model.iders'], {}), '(model.ider_filters, model.iders)\n', (7450, 7483), False, 'from six.moves import zip\n'), ((15353, 15407), 'vtool.argsort_records', 'vt.argsort_records', (['[values, id_list]'], {'reverse': 'reverse'}), '([values, id_list], reverse=reverse)\n', (15371, 15407), True, 'import vtool as vt\n'), ((15483, 15507), 'utool.take', 'ut.take', (['children', 'sortx'], {}), '(children, sortx)\n', (15490, 15507), True, 'import utool as ut\n'), ((20452, 20505), 'utool.printex', 'ut.printex', (['ex'], {'key_list': "['node', 'model', 'qtindex']"}), "(ex, key_list=['node', 'model', 'qtindex'])\n", 
(20462, 20505), True, 'import utool as ut\n'), ((26826, 26846), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (26844, 26846), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((27129, 27149), 'wbia.guitool.__PYQT__.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (27147, 27149), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((27799, 27812), 'utool.embed', 'utool.embed', ([], {}), '()\n', (27810, 27812), False, 'import utool\n'), ((35274, 35311), 'wbia.guitool.__PYQT__.QVariantHack', 'QVariantHack', (['model.EditableItemColor'], {}), '(model.EditableItemColor)\n', (35286, 35311), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((37261, 37285), 'wbia.guitool.qtype.cast_into_qt', 'qtype.cast_into_qt', (['data'], {}), '(data)\n', (37279, 37285), False, 'from wbia.guitool import qtype\n'), ((38647, 38679), 'wbia.guitool.qtype.cast_from_qt', 'qtype.cast_from_qt', (['value', 'type_'], {}), '(value, type_)\n', (38665, 38679), False, 'from wbia.guitool import qtype\n'), ((14597, 14626), 'utool.get_list_column', 'ut.get_list_column', (['values', '(0)'], {}), '(values, 0)\n', (14615, 14626), True, 'import utool as ut\n'), ((20196, 20269), 'utool.printex', 'ut.printex', (['ex', '"""error in _get_row_id"""'], {'keys': "['model', 'qtindex', 'node']"}), "(ex, 'error in _get_row_id', keys=['model', 'qtindex', 'node'])\n", (20206, 20269), True, 'import utool as ut\n'), ((23486, 23655), 'utool.printex', 'ut.printex', (['ex', "('[api_item_model] problem getting in column %r' % (col,))"], {'keys': "['model.name', 'getter', 'row_id', 'col', 'qtindex', 'qtindex_rc']", 'iswarning': '(True)'}), "(ex, '[api_item_model] problem getting in column %r' % (col,),\n keys=['model.name', 'getter', 'row_id', 'col', 'qtindex', 'qtindex_rc'],\n iswarning=True)\n", (23496, 23655), True, 'import utool as ut\n'), ((27545, 27637), 'utool.printex', 'ut.printex', (['ex', 
'"""failed to do parenty things"""'], {'keys': "['qindex_rc', 'model.name']", 'tb': '(True)'}), "(ex, 'failed to do parenty things', keys=['qindex_rc',\n 'model.name'], tb=True)\n", (27555, 27637), True, 'import utool as ut\n'), ((15029, 15061), 'utool.replace_nones', 'ut.replace_nones', (['values', 'np.nan'], {}), '(values, np.nan)\n', (15045, 15061), True, 'import utool as ut\n'), ((15156, 15172), 'numpy.isnan', 'np.isnan', (['values'], {}), '(values)\n', (15164, 15172), True, 'import numpy as np\n'), ((15256, 15284), 'utool.replace_nones', 'ut.replace_nones', (['values', '""""""'], {}), "(values, '')\n", (15272, 15284), True, 'import utool as ut\n'), ((35532, 35565), 'wbia.guitool.__PYQT__.QVariantHack', 'QVariantHack', (['model.TrueItemColor'], {}), '(model.TrueItemColor)\n', (35544, 35565), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((35615, 35649), 'wbia.guitool.__PYQT__.QVariantHack', 'QVariantHack', (['model.FalseItemColor'], {}), '(model.FalseItemColor)\n', (35627, 35649), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n'), ((35851, 35872), 'wbia.guitool.__PYQT__.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (35863, 35872), False, 'from wbia.guitool.__PYQT__ import QtCore, QtGui, QVariantHack\n')]
|
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, unicode_literals
import datetime as dt
from io import StringIO
import logging
import numpy as np
import pytest
from sys import version_info
import warnings
import aacgmv2
class TestFutureDepWarning:
    """Reusable harness verifying that a routine emits a FutureWarning."""

    def setup(self):
        """Prepare an empty routine configuration before each test method."""
        self.test_routine = None
        self.test_args = []
        self.test_kwargs = {}

    def teardown(self):
        """Discard the routine configuration after each test method."""
        del self.test_routine
        del self.test_args
        del self.test_kwargs

    def test_future_dep_warning(self):
        """Test the implementation of FutureWarning for deprecated routines"""
        if self.test_routine is None:
            # Base class runs with no routine configured; trivially succeed.
            return

        with warnings.catch_warnings(record=True) as caught:
            # Record every warning, bypassing any active filters.
            warnings.simplefilter("always")
            # Invoke the deprecated routine under test.
            self.test_routine(*self.test_args, **self.test_kwargs)

        # Exactly one FutureWarning carrying the deprecation text is expected.
        assert len(caught) == 1
        last_warning = caught[-1]
        assert issubclass(last_warning.category, FutureWarning)
        assert "Deprecated routine" in str(last_warning.message)
class TestDepAACGMV2Warning(TestFutureDepWarning):
    """FutureWarning checks for the deprecated aacgmv2 helper routines."""

    def setup(self):
        """Reset the harness and pin a reference date before each method."""
        # Explicit parent call keeps the reset logic in one place.
        TestFutureDepWarning.setup(self)
        self.dtime = dt.datetime(2015, 1, 1, 0, 0, 0)

    def teardown(self):
        """Drop the reference date, then the shared harness attributes."""
        del self.dtime
        TestFutureDepWarning.teardown(self)

    def test_gc2gd_lat_warning(self):
        """Test future deprecation warning for gc2gd_lat"""
        self.test_routine = aacgmv2.deprecated.gc2gd_lat
        self.test_args = [60.0]
        self.test_future_dep_warning()

    def test_igrf_dipole_axis_warning(self):
        """Test future deprecation warning for igrf_dipole_axis"""
        self.test_routine = aacgmv2.deprecated.igrf_dipole_axis
        self.test_args = [self.dtime]
        self.test_future_dep_warning()
class TestDepAACGMV2:
def setup(self):
"""Runs before every method to create a clean testing setup"""
self.dtime = dt.datetime(2015, 1, 1, 0, 0, 0)
self.lat = None
self.lon = None
def teardown(self):
"""Runs after every method to clean up previous testing"""
del self.dtime, self.lat, self.lon
def test_subsol(self):
"""Test the subsolar calculation"""
doy = int(self.dtime.strftime("%j"))
ut = self.dtime.hour * 3600.0 + self.dtime.minute * 60.0 + \
self.dtime.second
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.lon, self.lat = aacgmv2.deprecated.subsol(self.dtime.year,
doy, ut)
np.testing.assert_almost_equal(self.lon, -179.2004, decimal=4)
np.testing.assert_almost_equal(self.lat, -23.0431, decimal=4)
def test_gc2gd_lat(self):
"""Test the geocentric to geodetic conversion"""
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.lat = aacgmv2.deprecated.gc2gd_lat(45.0)
np.testing.assert_almost_equal(self.lat, 45.1924, decimal=4)
def test_gc2gd_lat_list(self):
"""Test the geocentric to geodetic conversion"""
self.lat = [45.0, -45.0]
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.lat = aacgmv2.deprecated.gc2gd_lat(self.lat)
np.testing.assert_allclose(self.lat, [45.1924, -45.1924], rtol=1.0e-4)
def test_gc2gd_lat_arr(self):
"""Test the geocentric to geodetic conversion"""
self.lat = np.array([45.0, -45.0])
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.lat = aacgmv2.deprecated.gc2gd_lat(self.lat)
np.testing.assert_allclose(self.lat, [45.1924, -45.1924], rtol=1.0e-4)
def test_igrf_dipole_axis(self):
"""Test the IGRF dipole axis calculation"""
with warnings.catch_warnings():
warnings.simplefilter("ignore")
m = aacgmv2.deprecated.igrf_dipole_axis(self.dtime)
np.testing.assert_allclose(m, [0.050281,-0.16057,0.98574], rtol=1.0e-4)
|
[
"warnings.simplefilter",
"numpy.testing.assert_almost_equal",
"aacgmv2.deprecated.subsol",
"aacgmv2.deprecated.gc2gd_lat",
"datetime.datetime",
"numpy.array",
"warnings.catch_warnings",
"numpy.testing.assert_allclose",
"aacgmv2.deprecated.igrf_dipole_axis"
] |
[((1307, 1339), 'datetime.datetime', 'dt.datetime', (['(2015)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2015, 1, 1, 0, 0, 0)\n', (1318, 1339), True, 'import datetime as dt\n'), ((2151, 2183), 'datetime.datetime', 'dt.datetime', (['(2015)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2015, 1, 1, 0, 0, 0)\n', (2162, 2183), True, 'import datetime as dt\n'), ((2821, 2883), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['self.lon', '(-179.2004)'], {'decimal': '(4)'}), '(self.lon, -179.2004, decimal=4)\n', (2851, 2883), True, 'import numpy as np\n'), ((2892, 2953), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['self.lat', '(-23.0431)'], {'decimal': '(4)'}), '(self.lat, -23.0431, decimal=4)\n', (2922, 2953), True, 'import numpy as np\n'), ((3193, 3253), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['self.lat', '(45.1924)'], {'decimal': '(4)'}), '(self.lat, 45.1924, decimal=4)\n', (3223, 3253), True, 'import numpy as np\n'), ((3535, 3605), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['self.lat', '[45.1924, -45.1924]'], {'rtol': '(0.0001)'}), '(self.lat, [45.1924, -45.1924], rtol=0.0001)\n', (3561, 3605), True, 'import numpy as np\n'), ((3717, 3740), 'numpy.array', 'np.array', (['[45.0, -45.0]'], {}), '([45.0, -45.0])\n', (3725, 3740), True, 'import numpy as np\n'), ((3896, 3966), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['self.lat', '[45.1924, -45.1924]'], {'rtol': '(0.0001)'}), '(self.lat, [45.1924, -45.1924], rtol=0.0001)\n', (3922, 3966), True, 'import numpy as np\n'), ((4214, 4287), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['m', '[0.050281, -0.16057, 0.98574]'], {'rtol': '(0.0001)'}), '(m, [0.050281, -0.16057, 0.98574], rtol=0.0001)\n', (4240, 4287), True, 'import numpy as np\n'), ((2597, 2622), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (2620, 2622), False, 'import warnings\n'), ((2636, 2667), 
'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (2657, 2667), False, 'import warnings\n'), ((2701, 2752), 'aacgmv2.deprecated.subsol', 'aacgmv2.deprecated.subsol', (['self.dtime.year', 'doy', 'ut'], {}), '(self.dtime.year, doy, ut)\n', (2726, 2752), False, 'import aacgmv2\n'), ((3055, 3080), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (3078, 3080), False, 'import warnings\n'), ((3094, 3125), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (3115, 3125), False, 'import warnings\n'), ((3149, 3183), 'aacgmv2.deprecated.gc2gd_lat', 'aacgmv2.deprecated.gc2gd_lat', (['(45.0)'], {}), '(45.0)\n', (3177, 3183), False, 'import aacgmv2\n'), ((3393, 3418), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (3416, 3418), False, 'import warnings\n'), ((3432, 3463), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (3453, 3463), False, 'import warnings\n'), ((3487, 3525), 'aacgmv2.deprecated.gc2gd_lat', 'aacgmv2.deprecated.gc2gd_lat', (['self.lat'], {}), '(self.lat)\n', (3515, 3525), False, 'import aacgmv2\n'), ((3754, 3779), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (3777, 3779), False, 'import warnings\n'), ((3793, 3824), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (3814, 3824), False, 'import warnings\n'), ((3848, 3886), 'aacgmv2.deprecated.gc2gd_lat', 'aacgmv2.deprecated.gc2gd_lat', (['self.lat'], {}), '(self.lat)\n', (3876, 3886), False, 'import aacgmv2\n'), ((4070, 4095), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (4093, 4095), False, 'import warnings\n'), ((4109, 4140), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (4130, 4140), False, 'import warnings\n'), ((4157, 4204), 'aacgmv2.deprecated.igrf_dipole_axis', 'aacgmv2.deprecated.igrf_dipole_axis', 
(['self.dtime'], {}), '(self.dtime)\n', (4192, 4204), False, 'import aacgmv2\n'), ((735, 771), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (758, 771), False, 'import warnings\n'), ((858, 889), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (879, 889), False, 'import warnings\n')]
|