code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from diofant.utilities.decorator import no_attrs_in_subclass
__all__ = ()
def test_no_attrs_in_subclass():
    """The decorated attribute stays visible on the owning class but is
    hidden from any subclass."""
    class Base:
        x = 'test'
    Base.x = no_attrs_in_subclass(Base, Base.x)

    class Child(Base):
        pass

    assert hasattr(Base, 'x') is True
    assert hasattr(Child, 'x') is False
| [
"diofant.utilities.decorator.no_attrs_in_subclass"
] | [((154, 182), 'diofant.utilities.decorator.no_attrs_in_subclass', 'no_attrs_in_subclass', (['A', 'A.x'], {}), '(A, A.x)\n', (174, 182), False, 'from diofant.utilities.decorator import no_attrs_in_subclass\n')] |
import argparse
from ccc_client.app_repo.AppRepoRunner import AppRepoRunner
from ccc_client.utils import print_API_response
def run(args):
    """Upload a docker image blob (and, when given, its metadata) to the
    app repository and print each API response."""
    client = AppRepoRunner(args.host, args.port, args.authToken)
    image_response = client.upload_image(args.imageBlob, args.imageName, args.imageTag)
    print_API_response(image_response)
    if args.metadata is not None:
        metadata_response = client.upload_metadata(None, args.metadata)
        print_API_response(metadata_response)
# Command-line interface definition; `runner=run` wires this sub-command's
# implementation in as a parser default.
parser = argparse.ArgumentParser()
parser.set_defaults(runner=run)

# (flags, add_argument keyword options) for each supported option.
_ARG_SPECS = [
    (("--imageBlob", "-b"),
     dict(type=str, help="name of file or path")),
    (("--imageName", "-n"),
     dict(type=str, help="name of docker image")),
    (("--imageTag", "-t"),
     dict(type=str, default="latest", help="docker image version tag")),
    (("--metadata", "-m"),
     dict(type=str, help="tool metadata; can be a filepath or json string")),
]
for _flags, _options in _ARG_SPECS:
    parser.add_argument(*_flags, **_options)
| [
"ccc_client.app_repo.AppRepoRunner.AppRepoRunner",
"ccc_client.utils.print_API_response",
"argparse.ArgumentParser"
] | [((440, 465), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (463, 465), False, 'import argparse\n'), ((155, 206), 'ccc_client.app_repo.AppRepoRunner.AppRepoRunner', 'AppRepoRunner', (['args.host', 'args.port', 'args.authToken'], {}), '(args.host, args.port, args.authToken)\n', (168, 206), False, 'from ccc_client.app_repo.AppRepoRunner import AppRepoRunner\n'), ((286, 307), 'ccc_client.utils.print_API_response', 'print_API_response', (['r'], {}), '(r)\n', (304, 307), False, 'from ccc_client.utils import print_API_response\n'), ((407, 428), 'ccc_client.utils.print_API_response', 'print_API_response', (['r'], {}), '(r)\n', (425, 428), False, 'from ccc_client.utils import print_API_response\n')] |
#!/usr/bin/python2.6
#-*- coding: utf-8 -*-
import signal
import subprocess
from glob import glob
from os import listdir
from os.path import basename, dirname
label = 'CentOS_6.9_Final'
def listifaces():
    """Return every network interface name except the loopback device."""
    return [iface for iface in listdir('/sys/class/net/') if iface != 'lo']
def listblocks():
    """Return the kernel names of all block devices (e.g. 'sda')."""
    pattern = '/sys/block/*/device'
    names = []
    for entry in glob(pattern):
        # The device name is the directory one level above '.../device'.
        names.append(basename(dirname(entry)))
    return names
def listlabel(dev):
    """Return the filesystem LABEL of *dev* as reported by blkid, or None
    when blkid fails or the device has no label.

    FIX: the original bare ``except:`` also swallowed KeyboardInterrupt
    and SystemExit; narrowed to ``except Exception``.
    NOTE(review): on Python 3 ``communicate()`` returns bytes, so the
    returned value would never compare equal to the str ``label`` constant
    used by getinstallmedia -- the shebang targets Python 2.6, where it is
    a str; confirm before porting.
    """
    command = '/usr/sbin/blkid -o value -s LABEL {0}'.format(dev)
    try:
        lsblk = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
        output = lsblk.communicate()[0].rstrip()
        return output
    except Exception:
        # Fall through and implicitly return None on any blkid failure.
        pass
def discoverdisks():
    """Return [[device_name, removable_flag], ...] for every block device.

    The removable flag is the string '0' (internal) or '1' (removable) as
    read from sysfs.

    FIX: the original left each sysfs file handle open; a context manager
    now closes it deterministically.
    """
    disklist = []
    for dev in listblocks():
        with open('/sys/block/{0}/removable'.format(dev)) as handle:
            removable = handle.readline()
        disklist.append([dev, removable.rstrip()])
    return disklist
def getinternal(disklist):
    """Return the device names whose removable flag is '0' (internal)."""
    return [name for name, removable_flag in disklist if removable_flag == '0']
def getremovable(disklist):
    """Return the device names whose removable flag is '1' (removable)."""
    return [name for name, removable_flag in disklist if removable_flag == '1']
def getinstallmedia(disklist):
    """Return the relative first-partition name (e.g. 'sdb1') of the device
    whose first partition carries the install-media label, or None if no
    device matches."""
    for name, _flag in disklist:
        first_partition = '/dev/{0}1'.format(name)
        if listlabel(first_partition) == label:
            return '{0}1'.format(name)
# ---------------------------------------------------------------------------
# Module-level script body: probe the local hardware and render a CentOS
# kickstart file from the template below.
# NOTE(review): this line rebinds the module name `discoverdisks` to the
# function's *result*, shadowing the function itself -- confirm intentional.
# ---------------------------------------------------------------------------
discoverdisks = discoverdisks()
# NOTE(review): `source` is computed but never used afterwards -- verify.
source = getinstallmedia(discoverdisks)
# Only the first two internal disks (sorted for stable ordering) are used
# for the RAID1 layout in the template.
localdisks = sorted(getinternal(discoverdisks))[:2]
nics = ','.join(listifaces())
# Kickstart template. Placeholders: {0}=bond slave NICs, {1}=target drives,
# {2}/{3}=the two internal disks paired for each RAID1 set.
kickstart = """lang en_US.UTF-8
keyboard us
network --bootproto=static --device=bond0 --bootproto=dhcp --bondopts=miimon=100,mode=active-backup --bondslaves="{0}"
firewall --enabled --ssh
timezone --utc America/Sao_Paulo
zerombr yes
clearpart --drives="{1}" --all --initlabel
bootloader --location=mbr --driveorder="{1}" --append="crashkernel=auto rhgb quiet"
# Please remember to change this. In case you don't the password encrypted bellow is "<PASSWORD>".
rootpw --iscrypted $6$JDAL2eOJcBzAkykb$o9v9XAVC2i9YLyMGWEyG60SO2vXSDO.C42CoI/M5Ai/UCVOoWD6SH1sd9e7ImZJj/rx1aljJShdVjKHJgRa8s/
authconfig --enableshadow --passalgo=<PASSWORD>
selinux --enabled
skipx
# Disk proposal bellow. You should customize it to your needs.
part raid.0 --size=512 --ondisk {2} --asprimary
part raid.1 --size=512 --ondisk {3} --asprimary
part raid.2 --size=40000 --ondisk {2} --asprimary
part raid.3 --size=40000 --ondisk {3} --asprimary
part raid.4 --size=10000 --ondisk {2} --asprimary --grow
part raid.5 --size=10000 --ondisk {3} --asprimary --grow
raid /boot --fstype xfs --level=RAID1 --device=md0 raid.0 raid.1
raid pv.1 --fstype "physical volume (LVM)" --level=RAID1 --device=md1 raid.2 raid.3
raid pv.2 --fstype "physical volume (LVM)" --level=RAID1 --device=md2 raid.4 raid.5
volgroup system --pesize=32768 pv.1
volgroup data --pesize=32768 pv.2
logvol / --fstype xfs --name=root --vgname=system --size=4096 --fsoptions="noatime,nodiratime"
logvol /usr --fstype xfs --name=usr --vgname=system --size=8192 --fsoptions="noatime,nodiratime,nodev"
logvol /var --fstype xfs --name=var --vgname=system --size=4096 --fsoptions="noatime,nodiratime,nodev,nosuid"
logvol /var/log --fstype xfs --name=varlog --vgname=system --size=4096 --fsoptions="noatime,nodiratime,nodev,nosuid,noexec"
logvol /tmp --fstype xfs --name=tmp --vgname=system --size=4096 --fsoptions="noatime,nodiratime,nodev,nosuid"
logvol /opt --fstype xfs --name=opt --vgname=system --size=512 --fsoptions="noatime,nodiratime,nodev,nosuid"
logvol /srv --fstype xfs --name=srv --vgname=system --size=5120 --fsoptions="noatime,nodiratime,nodev,nosuid,noexec"
logvol swap --fstype swap --name=swap --vgname=system --size=4096
logvol /home --fstype xfs --name=home --vgname=data --size=512 --fsoptions="noatime,nodiratime,nodev,nosuid,noexec"
%packages
@base
@console-internet
@core
@debugging
@directory-client
@hardware-monitoring
@java-platform
@large-systems
@network-file-system-client
@performance
@perl-runtime
@portuguese-support
@server-platform
@server-policy
@workstation-policy
pax
python-dmidecode
oddjob
sgpio
device-mapper-persistent-data
samba-winbind
certmonger
pam_krb5
krb5-workstation
perl-DBD-SQLite
dos2unix
ca-certificates
dhcp
nfs-utils
ipa-client
tcpdump
expect
%post""".format(nics, ','.join(localdisks), localdisks[0], localdisks[1])
# Write the rendered kickstart to a fixed path when run as a script.
if __name__ == '__main__':
    incks = open('/tmp/autogen.ks', 'w+')
    incks.write(kickstart)
    incks.close()
| [
"os.path.dirname",
"subprocess.Popen",
"os.listdir",
"glob.glob"
] | [((242, 268), 'os.listdir', 'listdir', (['"""/sys/class/net/"""'], {}), "('/sys/class/net/')\n", (249, 268), False, 'from os import listdir\n'), ((573, 634), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stdout': 'subprocess.PIPE', 'shell': '(True)'}), '(command, stdout=subprocess.PIPE, shell=True)\n', (589, 634), False, 'import subprocess\n'), ((426, 436), 'os.path.dirname', 'dirname', (['d'], {}), '(d)\n', (433, 436), False, 'from os.path import basename, dirname\n'), ((447, 458), 'glob.glob', 'glob', (['drive'], {}), '(drive)\n', (451, 458), False, 'from glob import glob\n')] |
from ckan_cloud_operator import kubectl
def get(what, *args, required=True, namespace=None, get_cmd=None, **kwargs):
    """Thin pass-through to ``kubectl.get`` that forwards all arguments
    unchanged (the named keywords keep their defaults explicit here)."""
    forwarded = dict(kwargs, required=required, namespace=namespace, get_cmd=get_cmd)
    return kubectl.get(what, *args, **forwarded)
| [
"ckan_cloud_operator.kubectl.get"
] | [((130, 226), 'ckan_cloud_operator.kubectl.get', 'kubectl.get', (['what', '*args'], {'required': 'required', 'namespace': 'namespace', 'get_cmd': 'get_cmd'}), '(what, *args, required=required, namespace=namespace, get_cmd=\n get_cmd, **kwargs)\n', (141, 226), False, 'from ckan_cloud_operator import kubectl\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: <NAME>, ph4r05, 2018
import operator
import sys
# Useful for very coarse version differentiation.
# True when running under Python 3; selects the byte-handling shims below.
PY3 = sys.version_info[0] == 3
if PY3:
    # Python 3: indexing bytes already yields ints, bytes() accepts an
    # iterable of ints, and int.to_bytes gives a single big-endian byte.
    indexbytes = operator.getitem
    intlist2bytes = bytes
    int2byte = operator.methodcaller("to_bytes", 1, "big")
else:
    # Python 2 equivalents: str is the byte type, so chr/ord do the work.
    int2byte = chr
    range = xrange
    def indexbytes(buf, i):
        # Return the integer value of byte i of a Python 2 str buffer.
        return ord(buf[i])
    def intlist2bytes(l):
        # Build a Python 2 byte string from an iterable of ints.
        return b"".join(chr(c) for c in l)
| [
"operator.methodcaller"
] | [((271, 314), 'operator.methodcaller', 'operator.methodcaller', (['"""to_bytes"""', '(1)', '"""big"""'], {}), "('to_bytes', 1, 'big')\n", (292, 314), False, 'import operator\n')] |
import os.path as osp
from argparse import ArgumentParser
from mmcv import Config
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.callbacks import ModelCheckpoint
from torch.utils.data import DataLoader
from datasets import build_dataset
from models import MODELS
import torch
def parse_args():
    """Parse the command line: positional config name and GPU count, plus
    optional checkpoint directory and RNG seed."""
    parser = ArgumentParser(description='Training with DDP.')
    parser.add_argument('config', type=str)
    parser.add_argument('gpus', type=int)
    parser.add_argument('--work_dir', type=str, default='checkpoints')
    parser.add_argument('--seed', type=int, default=1024)
    return parser.parse_args()
def main():
    """Entry point: load the config, build the dataset and model, and run
    distributed (DDP) training with periodic checkpointing."""
    torch.set_default_dtype(torch.float32)
    args = parse_args()
    cfg = Config.fromfile(osp.join(f'configs/{args.config}.yaml'))
    print(f'Now training with {args.config}...')
    # Fix all RNG seeds for reproducibility.
    seed_everything(args.seed)
    dataset = build_dataset(cfg.dataset)
    loader = DataLoader(dataset, cfg.imgs_per_gpu, shuffle=True,
                        num_workers=cfg.workers_per_gpu, drop_last=True)
    # The 'rnw' model needs a back-reference to the dataset on the config.
    if cfg.model.name == 'rnw':
        cfg.data_link = dataset
    model = MODELS.build(name=cfg.model.name, option=cfg)
    run_dir = osp.join(args.work_dir, args.config)
    # Save a full-weights checkpoint every `checkpoint_epoch_interval` epochs,
    # keeping all of them (save_top_k=-1).
    checkpoint_cb = ModelCheckpoint(dirpath=run_dir,
                                    save_weights_only=True,
                                    save_top_k=-1,
                                    filename='checkpoint_{epoch}',
                                    every_n_epochs=cfg.checkpoint_epoch_interval)
    trainer = Trainer(accelerator='ddp',
                      default_root_dir=run_dir,
                      gpus=args.gpus,
                      num_nodes=1,
                      max_epochs=cfg.total_epochs,
                      callbacks=[checkpoint_cb],
                      auto_scale_batch_size="power")
    trainer.fit(model, loader)
# Script entry point.
if __name__ == '__main__':
    main()
| [
"pytorch_lightning.callbacks.ModelCheckpoint",
"models.MODELS.build",
"argparse.ArgumentParser",
"pytorch_lightning.seed_everything",
"os.path.join",
"torch.set_default_dtype",
"datasets.build_dataset",
"pytorch_lightning.Trainer",
"torch.utils.data.DataLoader"
] | [((342, 390), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Training with DDP."""'}), "(description='Training with DDP.')\n", (356, 390), False, 'from argparse import ArgumentParser\n'), ((815, 853), 'torch.set_default_dtype', 'torch.set_default_dtype', (['torch.float32'], {}), '(torch.float32)\n', (838, 853), False, 'import torch\n'), ((1078, 1104), 'pytorch_lightning.seed_everything', 'seed_everything', (['args.seed'], {}), '(args.seed)\n', (1093, 1104), False, 'from pytorch_lightning import Trainer, seed_everything\n'), ((1146, 1172), 'datasets.build_dataset', 'build_dataset', (['cfg.dataset'], {}), '(cfg.dataset)\n', (1159, 1172), False, 'from datasets import build_dataset\n'), ((1186, 1291), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset', 'cfg.imgs_per_gpu'], {'shuffle': '(True)', 'num_workers': 'cfg.workers_per_gpu', 'drop_last': '(True)'}), '(dataset, cfg.imgs_per_gpu, shuffle=True, num_workers=cfg.\n workers_per_gpu, drop_last=True)\n', (1196, 1291), False, 'from torch.utils.data import DataLoader\n'), ((1384, 1429), 'models.MODELS.build', 'MODELS.build', ([], {'name': 'cfg.model.name', 'option': 'cfg'}), '(name=cfg.model.name, option=cfg)\n', (1396, 1429), False, 'from models import MODELS\n'), ((1467, 1503), 'os.path.join', 'osp.join', (['args.work_dir', 'args.config'], {}), '(args.work_dir, args.config)\n', (1475, 1503), True, 'import os.path as osp\n'), ((1597, 1755), 'pytorch_lightning.callbacks.ModelCheckpoint', 'ModelCheckpoint', ([], {'dirpath': 'work_dir', 'save_weights_only': '(True)', 'save_top_k': '(-1)', 'filename': '"""checkpoint_{epoch}"""', 'every_n_epochs': 'cfg.checkpoint_epoch_interval'}), "(dirpath=work_dir, save_weights_only=True, save_top_k=-1,\n filename='checkpoint_{epoch}', every_n_epochs=cfg.checkpoint_epoch_interval\n )\n", (1612, 1755), False, 'from pytorch_lightning.callbacks import ModelCheckpoint\n'), ((1929, 2113), 'pytorch_lightning.Trainer', 'Trainer', ([], {'accelerator': '"""ddp"""', 
'default_root_dir': 'work_dir', 'gpus': 'args.gpus', 'num_nodes': '(1)', 'max_epochs': 'cfg.total_epochs', 'callbacks': '[checkpoint_callback]', 'auto_scale_batch_size': '"""power"""'}), "(accelerator='ddp', default_root_dir=work_dir, gpus=args.gpus,\n num_nodes=1, max_epochs=cfg.total_epochs, callbacks=[\n checkpoint_callback], auto_scale_batch_size='power')\n", (1936, 2113), False, 'from pytorch_lightning import Trainer, seed_everything\n'), ((938, 977), 'os.path.join', 'osp.join', (['f"""configs/{args.config}.yaml"""'], {}), "(f'configs/{args.config}.yaml')\n", (946, 977), True, 'import os.path as osp\n')] |
""" This Script contain the different function used in the framework
part1. Data processing
part2. Prediction and analisys
part3. Plotting
"""
import numpy as np
import librosa
import matplotlib.pyplot as plt
from sklearn import metrics
import os
import pickle
import time
import struct
""" Data processing """
def get_mel_spectrogram(file_path, mfcc_max_padding=0, n_fft=2048, hop_length=512, n_mels=128):
    """Generates/extracts normalized Log-MEL Spectrogram coefficients with
    LibRosa, optionally zero-padding the time axis to `mfcc_max_padding`.

    Returns the (n_mels, frames) array, or None if the file cannot be parsed.

    NOTE(review): `n_fft` and `hop_length` are accepted but never passed to
    librosa -- confirm whether they should be forwarded.

    FIX: the padding condition was `mfcc_max_padding > 0 & shape <
    mfcc_max_padding`; `&` binds before the comparisons, so it reduced to
    `mfcc_max_padding > 0` and oversized inputs produced a negative pad width
    (silently turned into a None return by the except handler).
    """
    try:
        # Load audio file
        y, sr = librosa.load(file_path)

        # Normalize audio data between -1 and 1
        normalized_y = librosa.util.normalize(y)

        # Generate mel scaled filterbanks
        mel = librosa.feature.melspectrogram(normalized_y, sr=sr, n_mels=n_mels)

        # Convert sound intensity to log amplitude:
        mel_db = librosa.amplitude_to_db(abs(mel))

        # Normalize between -1 and 1
        normalized_mel = librosa.util.normalize(mel_db)

        # Pad (centered) only when the output is narrower than the target.
        shape = normalized_mel.shape[1]
        if mfcc_max_padding > 0 and shape < mfcc_max_padding:
            xDiff = mfcc_max_padding - shape
            xLeft = xDiff // 2
            xRight = xDiff - xLeft
            normalized_mel = np.pad(normalized_mel, pad_width=((0, 0), (xLeft, xRight)), mode='constant')

    except Exception as e:
        print("Error parsing wavefile: ", e)
        return None
    return normalized_mel
def get_mfcc(file_path, mfcc_max_padding=0, n_mfcc=40, robots_noise=None, noise_amp=1):
    """Generates/extracts normalized MFCC coefficients with LibRosa.

    :param file_path: audio file to load (native sample rate is kept).
    :param mfcc_max_padding: right-pad the time axis with zeros up to this
        many frames (no-op when the clip is already wider).
    :param robots_noise: optional path to a noise clip mixed into the signal.
    :param noise_amp: relative amplitude of the mixed-in noise.
    :returns: (n_mfcc, frames) array, or None if the file cannot be parsed.

    FIX: `robots_noise != None` replaced with the identity test
    `is not None` (PEP 8; `!=` can be hijacked by a custom `__eq__`).
    """
    try:
        # Load audio file
        y, sr = librosa.load(file_path, sr=None)

        if robots_noise is not None:
            # Mix in the noise clip, renormalizing the combined amplitude.
            y_n, _ = librosa.load(robots_noise)
            y = (y + noise_amp * y_n) / (noise_amp + 1)

        # Normalize audio data between -1 and 1
        normalized_y = librosa.util.normalize(y)

        # Compute MFCC coefficients
        mfcc = librosa.feature.mfcc(y=normalized_y, sr=sr, n_mfcc=n_mfcc)

        # Normalize MFCC between -1 and 1
        normalized_mfcc = librosa.util.normalize(mfcc)

        # Right-pad the time axis with zeros if required.
        shape = normalized_mfcc.shape[1]
        if shape < mfcc_max_padding:
            pad_width = mfcc_max_padding - shape
            normalized_mfcc = np.pad(normalized_mfcc,
                                     pad_width=((0, 0), (0, pad_width)),
                                     mode='constant',
                                     constant_values=(0,))
    except Exception as e:
        print("Error parsing wavefile: ", e)
        return None
    return normalized_mfcc
def add_padding(features, mfcc_max_padding=174):
    """Zero-pad each 2-D feature matrix (centered) along its second axis so
    every entry is at least `mfcc_max_padding` columns wide; wider matrices
    pass through untouched."""
    padded = []
    for feature in features:
        width = len(feature[0])
        if width < mfcc_max_padding:
            total = mfcc_max_padding - width
            left = total // 2
            right = total - left
            feature = np.pad(feature, pad_width=((0, 0), (left, right)), mode='constant')
        padded.append(feature)
    return padded
def scale(X, x_min, x_max, axis=0):
    """Min-max scale X along `axis` so each slice spans [x_min, x_max];
    constant slices (zero spread) map to x_min."""
    low = X.min(axis=axis)
    high = X.max(axis=axis)
    spread = high - low
    # Avoid division by zero for constant slices.
    spread[spread == 0] = 1
    return x_min + (X - low) * (x_max - x_min) / spread
def save_split_distributions(test_split_idx, train_split_idx, file_path=None):
    """Pickle the test/train split indices to `file_path`.

    :returns: the output path on success, or False when no path was given.

    FIXES: the guard referenced an undefined name `path` (NameError) and
    returned the undefined/lowercase `false`; the success path returned the
    name `file` (the Python 2 builtin type, never the written file).
    """
    if file_path is None:
        print("You must enter a file path to save the splits")
        return False

    # Create split dictionary
    split = {}
    split['test_split_idx'] = test_split_idx
    split['train_split_idx'] = train_split_idx

    with open(file_path, 'wb') as file_pi:
        pickle.dump(split, file_pi)

    return file_path
def load_split_distributions(file_path):
    """Load pickled split indices and return [test_split_idx, train_split_idx].

    FIX: the original never closed the file handle; a context manager now
    guarantees it is released.
    """
    with open(file_path, 'rb') as file:
        data = pickle.load(file)
    return [data['test_split_idx'], data['train_split_idx']]
def find_dupes(array):
    """Return the number of distinct values that occur more than once."""
    seen = set()
    duplicated = set()
    for item in array:
        if item in seen:
            duplicated.add(item)
        else:
            seen.add(item)
    return len(duplicated)
def read_header(filename):
    """Read a WAV file's header and return (num_channels, sample_rate,
    bit_depth).

    Only the first 48 bytes are inspected: the 12-byte RIFF descriptor is
    skipped, then the fmt sub-chunk fields are unpacked (little-endian).

    FIX: the original never closed the file handle; a context manager now
    guarantees it is released.
    """
    with open(filename, "rb") as wave:
        wave.read(12)          # skip the RIFF chunk descriptor
        fmat = wave.read(36)   # 'fmt ' sub-chunk header and fields
    num_channels = struct.unpack('<H', fmat[10:12])[0]
    sample_rate = struct.unpack("<I", fmat[12:16])[0]
    bit_depth = struct.unpack("<H", fmat[22:24])[0]
    return (num_channels, sample_rate, bit_depth)
def play_dataset_sample(dataset_row, audio_path):
    """Given a dataset row, print the audio file's properties and return an
    in-notebook audio player for it.

    FIX: the original computed `file_path` twice (two identical
    os.path.join calls); the dead first computation was removed.
    NOTE(review): `IP` (IPython) is never imported in this module, so the
    final line raises NameError unless the calling notebook defines it --
    confirm the intended `import IPython as IP`.
    """
    row = dataset_row.iloc[0]
    file_path = os.path.join(audio_path, row['fold'], row['file'])
    print("Class:", row['class'])
    print("File:", file_path)
    print("Sample rate:", row['sample_rate'])
    print("Bit depth:", row['bit_depth'])
    print("Duration {} seconds".format(row['duration']))

    # Sound preview
    return IP.display.Audio(file_path)
"""
Prediction and analisys
"""
def evaluate_model(model, X_train, y_train, X_test, y_test):
    """Silently evaluate `model` on the train and test sets and return the
    pair (train_score, test_score)."""
    scores = [model.evaluate(features, labels, verbose=0)
              for features, labels in ((X_train, y_train), (X_test, y_test))]
    return scores[0], scores[1]
def model_evaluation_report(model, X_train, y_train, X_test, y_test, calc_normal=True):
    """Print a formatted train-vs-test loss/accuracy table and, optionally,
    the loss difference normalized by the larger loss (in percent)."""
    separator = '-' * 38

    # Compute scores
    train_score, test_score = evaluate_model(model, X_train, y_train, X_test, y_test)

    # Train vs Test report
    print('{:<10s}{:>14s}{:>14s}'.format("", "LOSS", "ACCURACY"))
    print(separator)
    print('{:<10s}{:>14.4f}{:>14.4f}'.format("Training:", train_score[0], 100 * train_score[1]))
    print('{:<10s}{:>14.4f}{:>14.4f}'.format("Test:", test_score[0], 100 * test_score[1]))

    if calc_normal:
        worst_loss = max(train_score[0], test_score[0])
        best_loss = min(train_score[0], test_score[0])
        normal_diff = (worst_loss - best_loss) * 100 / worst_loss
        print('{:<10s}{:>13.2f}{:>1s}'.format("Normal diff ", normal_diff, ""))
def acc_per_class(np_probs_array):
    """Given a confusion-matrix-like NumPy array, return a list with the
    per-class accuracy in percent (diagonal count over row total)."""
    accs = []
    for idx, row in enumerate(np_probs_array):
        correct = row[idx].astype(int)
        total = row.sum().astype(int)
        accs.append((correct / total) * 100)
    return accs
"""
Plotting
"""
def plot_train_history(history, x_ticks_vertical=False):
    """Plot training-vs-validation loss and accuracy curves from a Keras-style
    History object, marking each curve's best epoch with a dot and showing
    the best values in the legend.

    :param history: object whose `.history` dict has 'loss', 'val_loss',
        'accuracy' and 'val_accuracy' lists.
    :param x_ticks_vertical: rotate the epoch tick labels vertically.

    FIX: three markers used `alpha=7`; matplotlib requires alpha in [0, 1]
    (raises ValueError otherwise), so the evidently intended 0.7 is restored.
    The unused `label='Inline label'` kwargs (overridden by the explicit
    legend lists) were removed.
    """
    history = history.history

    # Best values and the epochs at which they occur.
    min_loss = min(history['loss'])
    min_val_loss = min(history['val_loss'])
    max_accuracy = max(history['accuracy'])
    max_val_accuracy = max(history['val_accuracy'])
    min_loss_x = history['loss'].index(min_loss)
    min_val_loss_x = history['val_loss'].index(min_val_loss)
    max_accuracy_x = history['accuracy'].index(max_accuracy)
    max_val_accuracy_x = history['val_accuracy'].index(max_val_accuracy)

    # Loss figure: both curves plus a dot on each curve's minimum.
    plt.figure(figsize=(16, 8))
    plt.plot(history['loss'], color="#1f77b4", alpha=0.7)
    plt.plot(history['val_loss'], color="#ff7f0e", linestyle="--")
    plt.plot(min_loss_x, min_loss, marker='o', markersize=3, color="#1f77b4", alpha=0.7)
    plt.plot(min_val_loss_x, min_val_loss, marker='o', markersize=3, color="#ff7f0e", alpha=0.7)
    plt.title('Model loss', fontsize=20)
    plt.ylabel('Loss', fontsize=16)
    plt.xlabel('Epoch', fontsize=16)
    plt.legend(['Train',
                'Test',
                ('%.3f' % min_loss),
                ('%.3f' % min_val_loss)],
               loc='upper right',
               fancybox=True,
               framealpha=0.9,
               shadow=True,
               borderpad=1)
    if x_ticks_vertical:
        plt.xticks(np.arange(0, len(history['loss']), 5.0), rotation='vertical')
    else:
        plt.xticks(np.arange(0, len(history['loss']), 5.0))
    plt.show()

    # Accuracy figure: both curves plus a dot on each curve's maximum.
    plt.figure(figsize=(16, 6))
    plt.plot(history['accuracy'], alpha=0.7)
    plt.plot(history['val_accuracy'], linestyle="--")
    plt.plot(max_accuracy_x, max_accuracy, marker='o', markersize=3, color="#1f77b4", alpha=0.7)
    plt.plot(max_val_accuracy_x, max_val_accuracy, marker='o', markersize=3, color="orange", alpha=0.7)
    plt.title('Model accuracy', fontsize=20)
    plt.ylabel('Accuracy', fontsize=16)
    plt.xlabel('Epoch', fontsize=16)
    plt.legend(['Train',
                'Test',
                ('%.2f' % max_accuracy),
                ('%.2f' % max_val_accuracy)],
               loc='upper left',
               fancybox=True,
               framealpha=0.9,
               shadow=True,
               borderpad=1)
    # NOTE(review): `num=1` re-activates the first (loss) figure, so the tick
    # setup below applies to that figure, not the accuracy plot -- confirm.
    plt.figure(num=1, figsize=(10, 6))
    if x_ticks_vertical:
        plt.xticks(np.arange(0, len(history['accuracy']), 5.0), rotation='vertical')
    else:
        plt.xticks(np.arange(0, len(history['accuracy']), 5.0))
    plt.show()
def compute_confusion_matrix(y_true,
                             y_pred,
                             classes,
                             normalize=False):
    """Return the confusion matrix for the given labels; when `normalize`
    is True each row is divided by its total. (`classes` is accepted for
    signature compatibility but unused here.)"""
    matrix = metrics.confusion_matrix(y_true, y_pred)
    if not normalize:
        return matrix
    row_totals = matrix.sum(axis=1)[:, np.newaxis]
    return matrix.astype('float') / row_totals
def plot_confusion_matrix(cm,
                          classes,
                          normalized=False,
                          title=None,
                          cmap=plt.cm.Blues,
                          size=(10,10)):
    """Plot a confusion matrix as an annotated heat map and show it.

    :param cm: square confusion-matrix array.
    :param classes: tick labels for both axes.
    :param normalized: if True, annotate cells as 2-decimal floats instead
        of integers (controls only the label format, not the data).
    :param title: optional axes title.
    :param cmap: matplotlib colormap for the heat map.
    :param size: figure size in inches.
    """
    fig, ax = plt.subplots(figsize=size)
    im = ax.imshow(cm, interpolation='nearest', cmap=cmap)
    ax.figure.colorbar(im, ax=ax)
    # We want to show all ticks...
    ax.set(xticks=np.arange(cm.shape[1]),
           yticks=np.arange(cm.shape[0]),
           # ... and label them with the respective list entries
           xticklabels=classes, yticklabels=classes,
           title=title,
           ylabel='True label',
           xlabel='Predicted label')

    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
             rotation_mode="anchor")

    # Loop over data dimensions and create text annotations.
    fmt = '.2f' if normalized else 'd'
    thresh = cm.max() / 2.  # contrast cut-off: white text on dark cells
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            ax.text(j, i, format(cm[i, j], fmt),
                    ha="center", va="center",
                    color="white" if cm[i, j] > thresh else "black")
    fig.tight_layout()
    plt.show()
"matplotlib.pyplot.ylabel",
"librosa.feature.mfcc",
"numpy.arange",
"librosa.load",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"sklearn.metrics.confusion_matrix",
"pickle.load",
"struct.unpack",
"matplotlib.pyplot.title",
"librosa.util.normalize",
"matplotlib.pyplot.legend",
"matp... | [((3975, 3992), 'pickle.load', 'pickle.load', (['file'], {}), '(file)\n', (3986, 3992), False, 'import pickle\n'), ((5073, 5118), 'os.path.join', 'os.path.join', (['audio_path', 'fold_num', 'file_name'], {}), '(audio_path, fold_num, file_name)\n', (5085, 5118), False, 'import os\n'), ((5135, 5222), 'os.path.join', 'os.path.join', (['audio_path', "dataset_row.iloc[0]['fold']", "dataset_row.iloc[0]['file']"], {}), "(audio_path, dataset_row.iloc[0]['fold'], dataset_row.iloc[0][\n 'file'])\n", (5147, 5222), False, 'import os\n'), ((7715, 7742), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 8)'}), '(figsize=(16, 8))\n', (7725, 7742), True, 'import matplotlib.pyplot as plt\n'), ((7746, 7799), 'matplotlib.pyplot.plot', 'plt.plot', (["history['loss']"], {'color': '"""#1f77b4"""', 'alpha': '(0.7)'}), "(history['loss'], color='#1f77b4', alpha=0.7)\n", (7754, 7799), True, 'import matplotlib.pyplot as plt\n'), ((7804, 7866), 'matplotlib.pyplot.plot', 'plt.plot', (["history['val_loss']"], {'color': '"""#ff7f0e"""', 'linestyle': '"""--"""'}), "(history['val_loss'], color='#ff7f0e', linestyle='--')\n", (7812, 7866), True, 'import matplotlib.pyplot as plt\n'), ((7871, 7981), 'matplotlib.pyplot.plot', 'plt.plot', (['min_loss_x', 'min_loss'], {'marker': '"""o"""', 'markersize': '(3)', 'color': '"""#1f77b4"""', 'alpha': '(0.7)', 'label': '"""Inline label"""'}), "(min_loss_x, min_loss, marker='o', markersize=3, color='#1f77b4',\n alpha=0.7, label='Inline label')\n", (7879, 7981), True, 'import matplotlib.pyplot as plt\n'), ((7982, 8099), 'matplotlib.pyplot.plot', 'plt.plot', (['min_val_loss_x', 'min_val_loss'], {'marker': '"""o"""', 'markersize': '(3)', 'color': '"""#ff7f0e"""', 'alpha': '(7)', 'label': '"""Inline label"""'}), "(min_val_loss_x, min_val_loss, marker='o', markersize=3, color=\n '#ff7f0e', alpha=7, label='Inline label')\n", (7990, 8099), True, 'import matplotlib.pyplot as plt\n'), ((8099, 8135), 'matplotlib.pyplot.title', 'plt.title', 
(['"""Model loss"""'], {'fontsize': '(20)'}), "('Model loss', fontsize=20)\n", (8108, 8135), True, 'import matplotlib.pyplot as plt\n'), ((8140, 8171), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {'fontsize': '(16)'}), "('Loss', fontsize=16)\n", (8150, 8171), True, 'import matplotlib.pyplot as plt\n'), ((8176, 8208), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {'fontsize': '(16)'}), "('Epoch', fontsize=16)\n", (8186, 8208), True, 'import matplotlib.pyplot as plt\n'), ((8213, 8365), 'matplotlib.pyplot.legend', 'plt.legend', (["['Train', 'Test', '%.3f' % min_loss, '%.3f' % min_val_loss]"], {'loc': '"""upper right"""', 'fancybox': '(True)', 'framealpha': '(0.9)', 'shadow': '(True)', 'borderpad': '(1)'}), "(['Train', 'Test', '%.3f' % min_loss, '%.3f' % min_val_loss], loc\n ='upper right', fancybox=True, framealpha=0.9, shadow=True, borderpad=1)\n", (8223, 8365), True, 'import matplotlib.pyplot as plt\n'), ((8685, 8695), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8693, 8695), True, 'import matplotlib.pyplot as plt\n'), ((8751, 8778), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 6)'}), '(figsize=(16, 6))\n', (8761, 8778), True, 'import matplotlib.pyplot as plt\n'), ((8782, 8822), 'matplotlib.pyplot.plot', 'plt.plot', (["history['accuracy']"], {'alpha': '(0.7)'}), "(history['accuracy'], alpha=0.7)\n", (8790, 8822), True, 'import matplotlib.pyplot as plt\n'), ((8827, 8876), 'matplotlib.pyplot.plot', 'plt.plot', (["history['val_accuracy']"], {'linestyle': '"""--"""'}), "(history['val_accuracy'], linestyle='--')\n", (8835, 8876), True, 'import matplotlib.pyplot as plt\n'), ((8881, 8976), 'matplotlib.pyplot.plot', 'plt.plot', (['max_accuracy_x', 'max_accuracy'], {'marker': '"""o"""', 'markersize': '(3)', 'color': '"""#1f77b4"""', 'alpha': '(7)'}), "(max_accuracy_x, max_accuracy, marker='o', markersize=3, color=\n '#1f77b4', alpha=7)\n", (8889, 8976), True, 'import matplotlib.pyplot as plt\n'), ((8976, 9077), 
'matplotlib.pyplot.plot', 'plt.plot', (['max_val_accuracy_x', 'max_val_accuracy'], {'marker': '"""o"""', 'markersize': '(3)', 'color': '"""orange"""', 'alpha': '(7)'}), "(max_val_accuracy_x, max_val_accuracy, marker='o', markersize=3,\n color='orange', alpha=7)\n", (8984, 9077), True, 'import matplotlib.pyplot as plt\n'), ((9078, 9118), 'matplotlib.pyplot.title', 'plt.title', (['"""Model accuracy"""'], {'fontsize': '(20)'}), "('Model accuracy', fontsize=20)\n", (9087, 9118), True, 'import matplotlib.pyplot as plt\n'), ((9123, 9158), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {'fontsize': '(16)'}), "('Accuracy', fontsize=16)\n", (9133, 9158), True, 'import matplotlib.pyplot as plt\n'), ((9163, 9195), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {'fontsize': '(16)'}), "('Epoch', fontsize=16)\n", (9173, 9195), True, 'import matplotlib.pyplot as plt\n'), ((9200, 9362), 'matplotlib.pyplot.legend', 'plt.legend', (["['Train', 'Test', '%.2f' % max_accuracy, '%.2f' % max_val_accuracy]"], {'loc': '"""upper left"""', 'fancybox': '(True)', 'framealpha': '(0.9)', 'shadow': '(True)', 'borderpad': '(1)'}), "(['Train', 'Test', '%.2f' % max_accuracy, '%.2f' %\n max_val_accuracy], loc='upper left', fancybox=True, framealpha=0.9,\n shadow=True, borderpad=1)\n", (9210, 9362), True, 'import matplotlib.pyplot as plt\n'), ((9499, 9533), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'num': '(1)', 'figsize': '(10, 6)'}), '(num=1, figsize=(10, 6))\n', (9509, 9533), True, 'import matplotlib.pyplot as plt\n'), ((9726, 9736), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9734, 9736), True, 'import matplotlib.pyplot as plt\n'), ((9900, 9940), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (9924, 9940), False, 'from sklearn import metrics\n'), ((10324, 10350), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'size'}), '(figsize=size)\n', (10336, 10350), True, 'import 
matplotlib.pyplot as plt\n'), ((11317, 11327), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11325, 11327), True, 'import matplotlib.pyplot as plt\n'), ((538, 561), 'librosa.load', 'librosa.load', (['file_path'], {}), '(file_path)\n', (550, 561), False, 'import librosa\n'), ((634, 659), 'librosa.util.normalize', 'librosa.util.normalize', (['y'], {}), '(y)\n', (656, 659), False, 'import librosa\n'), ((717, 783), 'librosa.feature.melspectrogram', 'librosa.feature.melspectrogram', (['normalized_y'], {'sr': 'sr', 'n_mels': 'n_mels'}), '(normalized_y, sr=sr, n_mels=n_mels)\n', (747, 783), False, 'import librosa\n'), ((951, 981), 'librosa.util.normalize', 'librosa.util.normalize', (['mel_db'], {}), '(mel_db)\n', (973, 981), False, 'import librosa\n'), ((1660, 1692), 'librosa.load', 'librosa.load', (['file_path'], {'sr': 'None'}), '(file_path, sr=None)\n', (1672, 1692), False, 'import librosa\n'), ((1922, 1947), 'librosa.util.normalize', 'librosa.util.normalize', (['y'], {}), '(y)\n', (1944, 1947), False, 'import librosa\n'), ((2000, 2058), 'librosa.feature.mfcc', 'librosa.feature.mfcc', ([], {'y': 'normalized_y', 'sr': 'sr', 'n_mfcc': 'n_mfcc'}), '(y=normalized_y, sr=sr, n_mfcc=n_mfcc)\n', (2020, 2058), False, 'import librosa\n'), ((2128, 2156), 'librosa.util.normalize', 'librosa.util.normalize', (['mfcc'], {}), '(mfcc)\n', (2150, 2156), False, 'import librosa\n'), ((3843, 3870), 'pickle.dump', 'pickle.dump', (['split', 'file_pi'], {}), '(split, file_pi)\n', (3854, 3870), False, 'import pickle\n'), ((4548, 4588), 'struct.unpack', 'struct.unpack', (['"""<H"""', 'num_channels_string'], {}), "('<H', num_channels_string)\n", (4561, 4588), False, 'import struct\n'), ((4647, 4686), 'struct.unpack', 'struct.unpack', (['"""<I"""', 'sample_rate_string'], {}), "('<I', sample_rate_string)\n", (4660, 4686), False, 'import struct\n'), ((4740, 4777), 'struct.unpack', 'struct.unpack', (['"""<H"""', 'bit_depth_string'], {}), "('<H', bit_depth_string)\n", (4753, 4777), False, 
'import struct\n'), ((1257, 1333), 'numpy.pad', 'np.pad', (['normalized_mel'], {'pad_width': '((0, 0), (xLeft, xRight))', 'mode': '"""constant"""'}), "(normalized_mel, pad_width=((0, 0), (xLeft, xRight)), mode='constant')\n", (1263, 1333), True, 'import numpy as np\n'), ((1757, 1783), 'librosa.load', 'librosa.load', (['robots_noise'], {}), '(robots_noise)\n', (1769, 1783), False, 'import librosa\n'), ((2354, 2456), 'numpy.pad', 'np.pad', (['normalized_mfcc'], {'pad_width': '((0, 0), (0, pad_width))', 'mode': '"""constant"""', 'constant_values': '(0,)'}), "(normalized_mfcc, pad_width=((0, 0), (0, pad_width)), mode='constant',\n constant_values=(0,))\n", (2360, 2456), True, 'import numpy as np\n'), ((3104, 3168), 'numpy.pad', 'np.pad', (['px'], {'pad_width': '((0, 0), (xLeft, xRight))', 'mode': '"""constant"""'}), "(px, pad_width=((0, 0), (xLeft, xRight)), mode='constant')\n", (3110, 3168), True, 'import numpy as np\n'), ((10498, 10520), 'numpy.arange', 'np.arange', (['cm.shape[1]'], {}), '(cm.shape[1])\n', (10507, 10520), True, 'import numpy as np\n'), ((10540, 10562), 'numpy.arange', 'np.arange', (['cm.shape[0]'], {}), '(cm.shape[0])\n', (10549, 10562), True, 'import numpy as np\n')] |
"""
jb2.py
~~~~~~
Use JBIG2, and an external compressor, for black and white images.
"""
import os, sys, subprocess, struct, zipfile, random
from . import pdf_image
from . import pdf_write
from . import pdf
import PIL.Image as _PILImage
_default_jbig2_exe = os.path.join(os.path.abspath(".."), "agl-jbig2enc", "jbig2.exe")
class JBIG2Compressor():
    """Use an external compressor to compress using the JBIG2 standard.

    :param jbig2_exe_path: The path to the "jbig2.exe" executable. Or `None` to
      use the default.
    :param oversample: Can be 1, 2 or 4. Upsample by this amount before making b/w.
    """
    def __init__(self, jbig2_exe_path=None, oversample=2):
        if jbig2_exe_path is None:
            jbig2_exe_path = _default_jbig2_exe
        self._jbig2_exe_path = jbig2_exe_path
        self._upsample = oversample

    def call(self, args):
        """Run `args` as an external process, capturing stdout and stderr."""
        return subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    def encode(self, files):
        """Will generate `output.sym` and `output.0000`, `output.0001` etc.
        in the current directory.

        :param files: Iterable of input filenames to compress.
        :raises ValueError: if the oversample setting is not 1, 2 or 4.
        :raises RuntimeError: if the external compressor exits non-zero.
        :return: the completed :class:`subprocess.CompletedProcess`.
        """
        args = [self._jbig2_exe_path, "-s", "-p", "-v"]
        if self._upsample == 2:
            args += ["-2"]
        elif self._upsample == 4:
            args += ["-4"]
        elif self._upsample != 1:
            raise ValueError("{} is not supported for over-sampling".format(self._upsample))
        result = self.call(args + list(files))
        # Previously an `assert`, which is silently stripped under `python -O`;
        # raise explicitly (matching JBIG2CompressorToZip's error handling).
        if result.returncode != 0:
            raise RuntimeError("Failed to compress files", result)
        return result
class JBIG2CompressorToZip():
    """A higher-level version of :class:`JBIG2Compressor` which takes care of
    temporary output directories, and zipping the result.

    :param output_filename: The filename to write the ZIP file to.
    :param jbig2_exe_path: The path to the "jbig2.exe" executable. Or `None` to
      use the default.
    :param input_directory: The directory to find input files in, or `None` for
      the current directory.
    :param temporary_directory: The directory to write temporary files to, or
      `None` to auto-generate one (and delete at the end).
    :param oversample: Can be 1, 2 or 4. Upsample by this amount before making b/w.
    :param split: Should we ask `jbig2.exe` to attempt to split out PNG files of
      graphics?  If so, `oversample==1` seems to be the only setting which works!
    """
    def __init__(self, output_filename, jbig2_exe_path=None, input_directory=None,
            temporary_directory=None, oversample=2, split=False):
        if jbig2_exe_path is None:
            jbig2_exe_path = _default_jbig2_exe
        # Paths are made absolute up-front because `encode` changes the
        # current working directory while the compressor runs.
        self._jbig2_exe_path = os.path.abspath(jbig2_exe_path)
        self._in_dir = input_directory
        self._temp_dir = temporary_directory
        self._out_file = os.path.abspath(output_filename)
        self._upsample = oversample
        self._split = split

    def _random_dir_name(self):
        # Eight random lowercase letters, used to name an auto-generated
        # temporary directory.
        return "".join(random.choice("abcdefghijklmnopqrstuvwxyz") for _ in range(8))

    def _call(self, args):
        # Run the external compressor, capturing stdout/stderr.
        return subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    def _cleanup(self):
        # Undo `_make_temp_dir`.  If the caller supplied the temporary
        # directory we only restore the old working directory; otherwise we
        # best-effort delete our auto-generated directory and its contents.
        if self._old_directory is not None:
            os.chdir(self._old_directory)
            return
        files = list(os.listdir())
        for f in files:
            try:
                os.remove(f)
            except:
                # Best-effort deletion; leftover files are tolerated.
                pass
        os.chdir("..")
        try:
            os.rmdir(self._temp_dir)
        except:
            pass

    def _make_temp_dir(self):
        # Change into the temporary directory, remembering how to get back
        # (see `_cleanup`).  `_old_directory is None` doubles as the marker
        # that the directory was auto-generated and should be deleted.
        if self._temp_dir is not None:
            self._old_directory = os.path.abspath(os.curdir)
            os.chdir(self._temp_dir)
        else:
            self._old_directory = None
            self._temp_dir = self._random_dir_name()
            os.mkdir(self._temp_dir)
            os.chdir(self._temp_dir)

    def _write_zip_file(self):
        # Pack every file found in the (current) temporary directory into
        # the output ZIP archive, then clean up the directory.
        zf = zipfile.ZipFile(self._out_file, "w")
        try:
            files = list(os.listdir())
            for f in files:
                with open(f, "rb") as file:
                    data = file.read()
                with zf.open(f, "w") as file:
                    file.write(data)
        finally:
            zf.close()
        self._cleanup()

    def encode(self, files, threshold=None):
        """Encode the files, all to be found in the input directory.

        :param files: The files to encode
        :param threshold: If not `None`, then the level, between 0 and 255, to
          use when converting to 1 bpp.
        """
        if self._in_dir is not None:
            files = [os.path.join(self._in_dir, x) for x in files]
        # Absolute paths, since we are about to change working directory.
        files = [os.path.abspath(f) for f in files]
        self._make_temp_dir()
        args = [self._jbig2_exe_path, "-s", "-p", "-v"]
        if self._split:
            args.append("-S")
        if self._upsample == 1:
            pass
        elif self._upsample == 2:
            args += ["-2"]
        elif self._upsample == 4:
            args += ["-4"]
        else:
            raise ValueError("{} is not supported for over-sampling".format(self._upsample))
        if threshold is not None:
            args += ["-T", str(int(threshold))]
        result = self._call(args + list(files))
        if not result.returncode == 0:
            self._cleanup()
            raise Exception("Failed to compress files", result)
        self._write_zip_file()
class ImageFacade():
    """Plain attribute holder; callers assign ``width``, ``height`` and
    ``mode`` directly (see :meth:`JBIG2Image.load_jbig2`)."""
class JBIG2Image(pdf_image.PDFImage):
    """Assemble a single jbig2 output file into a PDF file."""

    def __init__(self, jbig2globals_object, file, proc_set_object, dpi=1):
        # Stash the source first: `_image()` needs it during base-class init.
        self._file = file
        super().__init__(self._image(), proc_set_object, dpi)
        self._jbig2globals_object = jbig2globals_object

    @staticmethod
    def read_file(file):
        """Read binary data from a file or filename."""
        if not isinstance(file, str):
            return file.read()
        with open(file, "rb") as f:
            return f.read()

    def _read_file(self):
        # Cache the raw bytes so repeated calls do not re-read the source.
        try:
            return self._file_cache
        except AttributeError:
            self._file_cache = self.read_file(self._file)
            return self._file_cache

    @staticmethod
    def load_jbig2(data):
        """Build a minimal image-like object from a jbig2 payload.

        Bytes 11..26 hold four big-endian 32-bit words: width, height and
        the x/y resolutions (the resolutions are not retained).
        """
        width, height, xres, yres = struct.unpack('>IIII', data[11:27])
        facade = ImageFacade()
        facade.width = width
        facade.height = height
        facade.mode = "1"
        return facade

    def _image(self):
        return self.load_jbig2(self._read_file())

    def _get_filtered_data(self, image):
        # The payload is already JBIG2-encoded; attach the shared globals.
        params = {"JBIG2Globals": self._jbig2globals_object}
        return "JBIG2Decode", self._read_file(), params
class JBIG2PNGMultiImage(pdf_image.PDFMultipleImage):
    """Combine a jbig2 image with a png image for graphics."""

    def __init__(self, jbig2globals_object, file, proc_set_object, dpi=1):
        # Read and parse the jbig2 payload before initialising the base
        # class, which is given the parsed image facade.
        self._jb2_data = JBIG2Image.read_file(file)
        self._jb2png_proc_set_object = proc_set_object
        self._jb2_image = JBIG2Image.load_jbig2(self._jb2_data)
        super().__init__(self._jb2_image, proc_set_object, dpi)
        self._jbig2globals_object = jbig2globals_object

    def _get_filtered_data(self, image):
        # Bottom layer: the raw jbig2 stream plus its shared globals.
        return "JBIG2Decode", self._jb2_data, {"JBIG2Globals": self._jbig2globals_object}

    def _get_top_filtered_data(self, image):
        # Top layer: delegate PNG encoding to a throwaway PNGImage helper.
        helper = pdf_image.PNGImage(image, self._jb2png_proc_set_object)
        return helper._get_filtered_data(image)

    @property
    def image_size(self):
        """Size of the image given by the JBIG2 layer."""
        layer = self._jb2_image
        return layer.width, layer.height
class JBIG2Output():
    """Container for the output of converting JBIG2 output to PDF format."""

    def __init__(self, pages, objects):
        self._pages = pages
        self._objects = objects

    @property
    def pages(self):
        """Iterable of page objects."""
        return self._pages

    @property
    def objects(self):
        """An iterable of objects to add to the PDF file."""
        return self._objects

    def add_to_pdf_writer(self, pdf_writer):
        """Convenience method to add directly to a :class:`pdf_write.PDFWriter`
        instance."""
        for current_page in self._pages:
            pdf_writer.add_page(current_page)
        for pdf_object in self._objects:
            pdf_writer.add_pdf_object(pdf_object)
class JBIG2Images():
    """Assemble the compressed JBIG2 files into a PDF document.

    The ZIP file should have been generated by :class:`JBIG2CompressorToZip`.

    :param zipfilename: The ZIP file to look at for data.
    :param dpi: The scaling to apply to each page.
    """
    def __init__(self, zipfilename, dpi=1):
        # NOTE(review): `_objects` appears unused by this class -- confirm
        # before removing.
        self._objects = []
        self._dpi = dpi
        self._zipfilename = zipfilename

    def _make_result(self):
        # Build the pages/objects once, caching on `self._result`.
        zf = zipfile.ZipFile(self._zipfilename, "r")
        try:
            self._add_globals(zf)
            self._proc_set_object = pdf_write.ProcedureSet().object()
            self._result = self._compile_pages(zf)
            # The shared symbol dictionary and the procedure set must also
            # be written out as objects in the final PDF file.
            self._result.objects.append(self._jb2_globals)
            self._result.objects.append(self._proc_set_object)
        finally:
            zf.close()

    @property
    def parts(self):
        """The output, lazily built as a :class:`JBIG2Output` instance."""
        if not hasattr(self, "_result"):
            self._make_result()
        return self._result

    def _compile_pages(self, zf):
        # Page files are named `<base>.0000`, `<base>.0001`, ...; walk
        # consecutive page numbers until one is missing.
        page_number = 0
        pages = []
        objects = []
        while True:
            ending = ".{:04}".format(page_number)
            choices = [x for x in zf.filelist if x.filename.endswith(ending)]
            if len(choices) == 0:
                break
            with zf.open(choices[0]) as file:
                # Calling the image object yields `.page` and `.objects`.
                parts = JBIG2Image(self._jb2_globals, file, self._proc_set_object, self._dpi)()
            pages.append(parts.page)
            objects.extend(parts.objects)
            page_number += 1
        return JBIG2Output(pages, objects)

    def _add_globals(self, zf):
        # The encoder writes the shared symbol dictionary to a `.sym` file;
        # wrap it in a PDF stream object for use as `JBIG2Globals`.
        for zfile in zf.filelist:
            if zfile.filename.endswith(".sym"):
                with zf.open(zfile) as f:
                    data = f.read()
                stream = pdf.PDFStream([(pdf.PDFName("Length"), pdf.PDFNumeric(len(data)))], data)
                self._jb2_globals = pdf.PDFObject(stream)
                return
        raise ValueError("Could not find a symbol file.")
class JBIG2MultiImages(JBIG2Images):
    """As :class:`JBIG2Images` but supports blending in a PNG file which has
    been automatically produced by the external compressor.

    The input should be a ZIP file produced by :class:`JBIG2CompressorToZip`
    with `oversample=1` and `split=True`.
    """
    def __init__(self, zipfilename, dpi=1):
        super().__init__(zipfilename, dpi)

    def _check_and_get_png(self, zf, basename):
        # The encoder may emit `<page>.png` next to `<page>`; return it as a
        # PIL image, or `None` when the page has no PNG overlay.
        # (`ZipFile.open` raises `KeyError` for a missing member.)
        try:
            with zf.open(basename + ".png") as file:
                return _PILImage.open(file)
        except KeyError:
            return None

    def _compile_pages(self, zf):
        # Overrides the base class: pages with a PNG overlay become a
        # JBIG2+PNG multi-image; the rest are plain JBIG2 images.
        page_number = 0
        pages = []
        objects = []
        while True:
            ending = ".{:04}".format(page_number)
            choices = [x for x in zf.filelist if x.filename.endswith(ending)]
            if len(choices) == 0:
                break
            png_image = self._check_and_get_png(zf, choices[0].filename)
            with zf.open(choices[0]) as file:
                if png_image is None:
                    parts = JBIG2Image(self._jb2_globals, file, self._proc_set_object, self._dpi)()
                else:
                    multi_image = JBIG2PNGMultiImage(self._jb2_globals, file, self._proc_set_object, self._dpi)
                    # NOTE(review): `(255,255)*3` expands to a 6-tuple; if an
                    # RGB colour was intended, `(255,)*3` may have been meant
                    # -- confirm against `add_top_image`'s signature.
                    multi_image.add_top_image(png_image, (0,0),
                        (png_image.width / self._dpi, png_image.height / self._dpi), (255,255)*3)
                    parts = multi_image()
            pages.append(parts.page)
            objects.extend(parts.objects)
            page_number += 1
        return JBIG2Output(pages, objects)
class JBIG2ManualMultiImages(JBIG2Images):
    """As :class:`JBIG2MultiImages` but with the extracted PNG image(s) chosen
    by hand.

    The ZIP file should have been generated by :class:`JBIG2CompressorToZip`.

    :param zipfilename: The ZIP file to look at for data.
    :param dpi: The scaling to apply to each page.
    """
    def __init__(self, zipfilename, dpi=1):
        super().__init__(zipfilename, dpi)
        # Maps page number -> (PIL image, list of rectangles); populated by
        # `add_png_section`.
        self._page_rectangles = {}

    def add_png_section(self, page_number, page_image, rectangles):
        """Overlay the given page with one or more rectangular extracts from
        the given image.  For ease, we work with the usual, :mod:`PIL`,
        coordinate system, with `(0,0)` as the top-left corner.

        :param page_number: Starting from 0, the page number to adjust.
        :param page_image: A :class:`PIL.Image` image to extract rectangles
          from.
        :param rectangles: An iterable of tuples `(xmin, ymin, xmax, ymax)`
          determining a rectangle `xmin <= x < xmax` and `ymin <= y < ymax`.
        """
        self._page_rectangles[page_number] = (page_image, list(rectangles))

    def _to_parts(self, filename, zf, page_number):
        # Build the PDF parts for one page; pages with no registered
        # rectangles fall back to a plain JBIG2 image.
        with zf.open(filename) as file:
            if page_number not in self._page_rectangles:
                return JBIG2Image(self._jb2_globals, file, self._proc_set_object, self._dpi)()
            multi_image = JBIG2PNGMultiImage(self._jb2_globals, file, self._proc_set_object, self._dpi)
            png_image, rectangles = self._page_rectangles[page_number]
            # Scale factor from PNG pixels to JBIG2 page pixels; both axes
            # must agree (i.e. the two images share an aspect ratio).
            scale_to_page = multi_image.image_size[0] / png_image.size[0]
            height_scale = multi_image.image_size[1] / png_image.size[1]
            if abs(scale_to_page - height_scale) > 1e-6:
                raise ValueError("JBIG2 image and PNG image of different aspect ratios")
            for xmin, ymin, xmax, ymax in rectangles:
                png_part = png_image.crop((xmin, ymin, xmax, ymax))
                # PNG pixel coordinates -> page pixel coordinates ...
                xmin, xmax = xmin * scale_to_page, xmax * scale_to_page
                ymin, ymax = ymin * scale_to_page, ymax * scale_to_page
                # ... -> page units, flipping y (`page_height - y2`) from the
                # top-left origin used here to a bottom-left origin.
                x1, x2 = xmin / self._dpi, xmax / self._dpi
                y1, y2 = ymin / self._dpi, ymax / self._dpi
                page_height = multi_image.image_size[1] / self._dpi
                multi_image.add_top_image(png_part, (x1, page_height - y2), (x2 - x1, y2 - y1))
            return multi_image()

    def _compile_pages(self, zf):
        # Same page-walking loop as the base class, but building each page
        # via `_to_parts` so manual PNG overlays are applied.
        page_number = 0
        pages = []
        objects = []
        while True:
            ending = ".{:04}".format(page_number)
            choices = [x for x in zf.filelist if x.filename.endswith(ending)]
            if len(choices) == 0:
                break
            parts = self._to_parts(choices[0], zf, page_number)
            pages.append(parts.page)
            objects.extend(parts.objects)
            page_number += 1
        return JBIG2Output(pages, objects)
| [
"os.listdir",
"random.choice",
"PIL.Image.open",
"zipfile.ZipFile",
"subprocess.run",
"os.path.join",
"os.chdir",
"os.rmdir",
"struct.unpack",
"os.mkdir",
"os.path.abspath",
"os.remove"
] | [((274, 295), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (289, 295), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((887, 955), 'subprocess.run', 'subprocess.run', (['args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (901, 955), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((2645, 2676), 'os.path.abspath', 'os.path.abspath', (['jbig2_exe_path'], {}), '(jbig2_exe_path)\n', (2660, 2676), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((2787, 2819), 'os.path.abspath', 'os.path.abspath', (['output_filename'], {}), '(output_filename)\n', (2802, 2819), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3046, 3114), 'subprocess.run', 'subprocess.run', (['args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (3060, 3114), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3399, 3413), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (3407, 3413), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3890, 3926), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self._out_file', '"""w"""'], {}), "(self._out_file, 'w')\n", (3905, 3926), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((6246, 6281), 'struct.unpack', 'struct.unpack', (['""">IIII"""', 'data[11:27]'], {}), "('>IIII', data[11:27])\n", (6259, 6281), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((8835, 8874), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self._zipfilename', '"""r"""'], {}), "(self._zipfilename, 'r')\n", (8850, 8874), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3196, 3225), 'os.chdir', 'os.chdir', (['self._old_directory'], {}), '(self._old_directory)\n', (3204, 3225), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3266, 3278), 'os.listdir', 
'os.listdir', ([], {}), '()\n', (3276, 3278), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3439, 3463), 'os.rmdir', 'os.rmdir', (['self._temp_dir'], {}), '(self._temp_dir)\n', (3447, 3463), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3601, 3627), 'os.path.abspath', 'os.path.abspath', (['os.curdir'], {}), '(os.curdir)\n', (3616, 3627), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3640, 3664), 'os.chdir', 'os.chdir', (['self._temp_dir'], {}), '(self._temp_dir)\n', (3648, 3664), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3783, 3807), 'os.mkdir', 'os.mkdir', (['self._temp_dir'], {}), '(self._temp_dir)\n', (3791, 3807), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3820, 3844), 'os.chdir', 'os.chdir', (['self._temp_dir'], {}), '(self._temp_dir)\n', (3828, 3844), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((4659, 4677), 'os.path.abspath', 'os.path.abspath', (['f'], {}), '(f)\n', (4674, 4677), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((2940, 2983), 'random.choice', 'random.choice', (['"""abcdefghijklmnopqrstuvwxyz"""'], {}), "('abcdefghijklmnopqrstuvwxyz')\n", (2953, 2983), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3337, 3349), 'os.remove', 'os.remove', (['f'], {}), '(f)\n', (3346, 3349), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((3965, 3977), 'os.listdir', 'os.listdir', ([], {}), '()\n', (3975, 3977), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((4596, 4625), 'os.path.join', 'os.path.join', (['self._in_dir', 'x'], {}), '(self._in_dir, x)\n', (4608, 4625), False, 'import os, sys, subprocess, struct, zipfile, random\n'), ((10947, 10967), 'PIL.Image.open', '_PILImage.open', (['file'], {}), '(file)\n', (10961, 10967), True, 'import PIL.Image as _PILImage\n')] |
#!/usr/bin/env python3
# Command-line front-end for optimizing *.jug cell-tracking models with the
# mpopt toolchain: parse the model, run the batch optimizer, optionally solve
# an ILP on top, and write out the primal solution.

import argparse
import sys
# NOTE(review): `sys` appears unused here -- confirm before removing.

from mpopt import ct, utils

if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='ct_jug', description='Optimizer for *.jug cell tracking models.')
    parser.add_argument('-B', '--batch-size', type=int, default=ct.DEFAULT_BATCH_SIZE)
    parser.add_argument('-b', '--max-batches', type=int, default=ct.DEFAULT_MAX_BATCHES)
    parser.add_argument('-o', '--output', default=None, help='Specifies the output file.')
    parser.add_argument('--ilp', choices=('standard', 'decomposed'), help='Solves the ILP after reparametrizing.')
    parser.add_argument('input_filename', metavar='INPUT', help='Specifies the *.jug input file.')
    args = parser.parse_args()

    # Parse the *.jug model and convert it into the ct representation;
    # `bimap` carries the mapping back to jug identifiers for output.
    with utils.smart_open(args.input_filename, 'rt') as f:
        model, bimap = ct.convert_jug_to_ct(ct.parse_jug_model(f))
    tracker = ct.construct_tracker(model)
    tracker.run(args.batch_size, args.max_batches)
    # Optionally solve an ILP after reparametrizing (see `--ilp` help text);
    # otherwise take the primals straight from the tracker.
    if args.ilp:
        if args.ilp == 'standard':
            gurobi = ct.GurobiStandardModel(model)
            gurobi.construct()
            gurobi.update_upper_bound(tracker)
        else:
            gurobi = ct.GurobiDecomposedModel(model, tracker)
            gurobi.construct()
            gurobi.update_upper_bound()
        gurobi.run()
        primals = gurobi.get_primals()
    else:
        primals = ct.extract_primals_from_tracker(model, tracker)
    print('final solution:', primals.evaluate())
    if args.output:
        with open(args.output, 'w') as f:
            ct.format_jug_primals(primals, bimap, f)
| [
"mpopt.ct.construct_tracker",
"mpopt.ct.GurobiDecomposedModel",
"argparse.ArgumentParser",
"mpopt.ct.format_jug_primals",
"mpopt.ct.parse_jug_model",
"mpopt.utils.smart_open",
"mpopt.ct.GurobiStandardModel",
"mpopt.ct.extract_primals_from_tracker"
] | [((122, 222), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""ct_jug"""', 'description': '"""Optimizer for *.jug cell tracking models."""'}), "(prog='ct_jug', description=\n 'Optimizer for *.jug cell tracking models.')\n", (145, 222), False, 'import argparse\n'), ((872, 899), 'mpopt.ct.construct_tracker', 'ct.construct_tracker', (['model'], {}), '(model)\n', (892, 899), False, 'from mpopt import ct, utils\n'), ((740, 783), 'mpopt.utils.smart_open', 'utils.smart_open', (['args.input_filename', '"""rt"""'], {}), "(args.input_filename, 'rt')\n", (756, 783), False, 'from mpopt import ct, utils\n'), ((1368, 1415), 'mpopt.ct.extract_primals_from_tracker', 'ct.extract_primals_from_tracker', (['model', 'tracker'], {}), '(model, tracker)\n', (1399, 1415), False, 'from mpopt import ct, utils\n'), ((834, 855), 'mpopt.ct.parse_jug_model', 'ct.parse_jug_model', (['f'], {}), '(f)\n', (852, 855), False, 'from mpopt import ct, utils\n'), ((1025, 1054), 'mpopt.ct.GurobiStandardModel', 'ct.GurobiStandardModel', (['model'], {}), '(model)\n', (1047, 1054), False, 'from mpopt import ct, utils\n'), ((1168, 1208), 'mpopt.ct.GurobiDecomposedModel', 'ct.GurobiDecomposedModel', (['model', 'tracker'], {}), '(model, tracker)\n', (1192, 1208), False, 'from mpopt import ct, utils\n'), ((1540, 1580), 'mpopt.ct.format_jug_primals', 'ct.format_jug_primals', (['primals', 'bimap', 'f'], {}), '(primals, bimap, f)\n', (1561, 1580), False, 'from mpopt import ct, utils\n')] |
import os
from setuptools import setup, find_packages

# For development and local builds use this version number, but for real builds replace it
# with the tag found in the environment
package_version = "4.0.0.dev0"
if 'BITBUCKET_TAG' in os.environ:
    # Bitbucket tag builds: tags look like "v4.0.0"; drop the leading "v".
    # NOTE(review): `lstrip('v')` strips *all* leading "v" characters, not
    # just one -- fine for "v4.0.0"-style tags, but worth confirming no tag
    # legitimately starts with multiple "v"s.
    package_version = os.environ['BITBUCKET_TAG'].lstrip('v')
elif 'BUILD_SOURCEBRANCH' in os.environ:
    # Azure-style builds: strip the "refs/tags/v" prefix from the branch ref.
    # NOTE(review): the prefix is sliced off without checking it is present;
    # a non-tag ref (e.g. "refs/heads/...") would yield a garbage version --
    # confirm this path only runs on tag builds.
    full_tag_prefix = 'refs/tags/v'
    package_version = os.environ['BUILD_SOURCEBRANCH'][len(full_tag_prefix):]

setup(
    name="assemblyline_v4_p2compat",
    version=package_version,
    description="Assemblyline 4 python2 service compatibility layer",
    long_description="This package provides common functionalities for python2 only services.",
    url="https://bitbucket.org/cse-assemblyline/assemblyline_v4_p2compat/",
    author="CCCS Assemblyline development team",
    author_email="<EMAIL>",
    license="MIT",
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7'
    ],
    keywords="assemblyline malware gc canada cse-cst cse cst cyber cccs",
    packages=find_packages(exclude=['test/*']),
    install_requires=[
        'PyYAML',
        'netifaces',
        'easydict',
        'chardet'
    ],
    package_data={
        '': []
    }
)
| [
"setuptools.find_packages"
] | [((1235, 1268), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['test/*']"}), "(exclude=['test/*'])\n", (1248, 1268), False, 'from setuptools import setup, find_packages\n')] |
import os
import tarfile
import time
import pickle
import numpy as np
from Bio.Seq import Seq
from scipy.special import expit
from scipy.special import logit
import torch
import torch.nn.functional as F
""" Get directories for model and seengenes """
module_dir = os.path.dirname(os.path.realpath(__file__))
model_dir = os.path.join(module_dir, "balrog_models")
""" Print what the program is doing."""
verbose = True
""" Use kmer prefilter to increase gene sensitivity.
May not play nice with very high GC genomes."""
protein_kmer_filter = False
""" Nucleotide to amino acid translation table. 11 for most bacteria/archaea.
4 for Mycoplasma/Spiroplasma."""
translation_table = 11
# translation_table = 4
""" Batch size for the temporal convolutional network used to score genes.
Small batches and big batches slow down the model. Very big batches may crash the
GPU. """
gene_batch_size = 200
TIS_batch_size = 1000
""" All following are internal parameters. Change at your own risk."""
weight_gene_prob = 0.9746869839852076
weight_TIS_prob = 0.25380288790532707
score_threshold = 0.47256101519707244
weight_ATG = 0.84249804151264
weight_GTG = 0.7083689705744909
weight_TTG = 0.7512400826652517
unidirectional_penalty_per_base = 3.895921717182765 # 3' 5' overlap
convergent_penalty_per_base = 4.603432608883688 # 3' 3' overlap
divergent_penalty_per_base = 3.3830814940689975 # 5' 5' overlap
k_seengene = 10
multimer_threshold = 2
nuc_encode = {"A": 0,
"T": 1,
"G": 2,
"C": 3,
"N": 0,
"M": 0,
"R": 0,
"Y": 0,
"W": 0,
"K": 0}
start_enc = {"ATG": 0,
"GTG": 1,
"TTG": 2}
aa_table = {"L": 1,
"V": 2,
"I": 3,
"M": 4,
"C": 5,
"A": 6,
"G": 7,
"S": 8,
"T": 9,
"P": 10,
"F": 11,
"Y": 12,
"W": 13,
"E": 14,
"D": 15,
"N": 16,
"Q": 17,
"K": 18,
"R": 19,
"H": 20,
"*": 0,
"X": 0}
# generate ORF sequences from coordinates
# @profile
def generate_sequence(graph_vector, nodelist, node_coords, overlap):
    """Rebuild a nucleotide sequence from a path of graph nodes.

    :param graph_vector: indexable collection of unitigs; ``graph_vector[i].seq``
      is the forward-strand sequence of node id ``i + 1``.
    :param nodelist: node ids along the path; a negative id means the node
      is traversed on the reverse strand (its sequence is reverse
      complemented before slicing).
    :param node_coords: per-node ``(start, end)`` coordinates, inclusive at
      both ends, into the (possibly reverse-complemented) node sequence.
    :param overlap: number of bases shared between consecutive nodes; those
      bases are skipped for every node after the first.
    :return: the concatenated sequence as a string.
    """
    sequence = ""
    for id, coords in zip(nodelist, node_coords):
        # calculate strand based on value of node (if negative, strand is false)
        strand = id >= 0
        unitig_seq = graph_vector[abs(id) - 1].seq
        if not strand:
            unitig_seq = str(Seq(unitig_seq).reverse_complement())
        if not sequence:
            # First node: take the full requested slice.
            sequence += unitig_seq[coords[0]:(coords[1] + 1)]
        elif coords[1] >= overlap:
            # Later nodes: drop the `overlap` bases shared with the
            # previous node.
            sequence += unitig_seq[overlap:(coords[1] + 1)]
        # Bug fix: the original left `substring` bound to the previous
        # node's slice when `coords[1] < overlap`, re-appending it; such
        # nodes (fully contained in the overlap) now contribute nothing.
    return sequence
#@profile
def tokenize_aa_seq(aa_seq):
    """ Convert amino acid letters to integers."""
    encoded = [aa_table[residue] for residue in aa_seq]
    return torch.tensor(encoded)
#@profile
def get_ORF_info(ORF_vector, graph, overlap):
    """Build per-ORF amino-acid encodings and TIS context sequences.

    Returns ``(ORF_seq_enc, TIS_seqs)`` where the first is a list of
    integer tensors (one per ORF) and the second a list of
    ``(upstream, downstream)`` nucleotide string pairs.
    """
    ORF_seq_enc = []
    TIS_seqs = []
    for ORFNodeVector in ORF_vector:
        # Paths (node ids + per-node coordinates) for the ORF body and
        # its translation initiation site.
        ORF_seq = graph.generate_sequence(ORFNodeVector[0], ORFNodeVector[1], overlap)
        upstream_TIS_seq = graph.generate_sequence(ORFNodeVector[3], ORFNodeVector[4], overlap)
        # The first 19 nt of the ORF give the downstream TIS context.
        downstream_TIS_seq = ORF_seq[0:19]
        TIS_seqs.append((upstream_TIS_seq, downstream_TIS_seq))
        # Translate once per ORF, excluding start and stop codons, then
        # map amino-acid letters to integers for the network.
        aa = str(Seq(ORF_seq)[3:-3].translate(table=translation_table, to_stop=False))
        ORF_seq_enc.append(tokenize_aa_seq(aa))
    return ORF_seq_enc, TIS_seqs
#@profile
def predict(model, X):
model.eval()
with torch.no_grad():
if torch.cuda.device_count() > 0:
X_enc = F.one_hot(X, 21).permute(0, 2, 1).float().cuda()
probs = expit(model(X_enc).cpu())
del X_enc
torch.cuda.empty_cache()
else:
X_enc = F.one_hot(X, 21).permute(0, 2, 1).float()
probs = expit(model(X_enc).cpu())
return probs
#@profile
def predict_tis(model_tis, X):
model_tis.eval()
with torch.no_grad():
if torch.cuda.device_count() > 0:
X_enc = F.one_hot(X, 4).permute(0, 2, 1).float().cuda()
else:
X_enc = F.one_hot(X, 4).permute(0, 2, 1).float()
probs = expit(model_tis(X_enc).cpu())
return probs
#@profile
def kmerize(seq, k):
    """Return the set of all length-`k` windows of `seq`, as tuples."""
    window_count = len(seq) - k + 1
    return {tuple(seq[start: start + k].tolist()) for start in range(window_count)}
def load_kmer_model():
    """Load the pickled amino-acid k-mer filter set, unpacking the model
    tarball next to this module on first use."""
    # check if directory exists. If not, unzip file
    if not os.path.exists(model_dir):
        with tarfile.open(model_dir + ".tar.gz", mode="r:gz") as tar:
            tar.extractall(module_dir)
    kmer_path = os.path.join(model_dir, "10mer_thresh2_minusARF_all.pkl")
    with open(kmer_path, "rb") as handle:
        return pickle.load(handle)
def load_gene_models():
    """Load the gene-scoring and TIS-scoring temporal convolutional networks.

    Extracts the bundled model tarball on first use, loads both models via
    `torch.hub` from the local model directory, and moves them to the GPU
    when one is available.

    :return: tuple `(model, model_tis)`.
    """
    # check if directory exists. If not, unzip file
    if not os.path.exists(model_dir):
        tar = tarfile.open(model_dir + ".tar.gz", mode="r:gz")
        tar.extractall(module_dir)
        tar.close()
    torch.hub.set_dir(model_dir)
    # Load each model once; the GPU and CPU paths previously duplicated
    # these calls, differing only in the `.cuda()` move.
    model = torch.hub.load(model_dir, "geneTCN", source='local')
    model_tis = torch.hub.load(model_dir, "tisTCN", source='local')
    if torch.cuda.device_count() > 0:
        model = model.cuda()
        model_tis = model_tis.cuda()
    time.sleep(0.5)
    return (model, model_tis)
#@profile
def score_genes(ORF_vector, graph_vector, minimum_ORF_score, overlap, model, model_tis, aa_kmer_set):
    """Score candidate ORFs with the gene and TIS networks.

    :param ORF_vector: candidate ORF path descriptions (see `get_ORF_info`).
    :param graph_vector: unitig graph used to rebuild sequences.
    :param minimum_ORF_score: ORFs scoring below this threshold are dropped.
    :param overlap: node overlap of the graph.
    :param model: gene-scoring TCN (see `load_gene_models`).
    :param model_tis: TIS-scoring TCN.
    :param aa_kmer_set: known-gene k-mer set; used only when the module-level
      `protein_kmer_filter` flag is enabled.
    :return: dict mapping ORF index -> combined score, filtered by
      `minimum_ORF_score`.
    """
    # get sequences and coordinates of ORFs
    # print("Finding and translating open reading frames...")
    ORF_seq_enc, TIS_seqs = get_ORF_info(ORF_vector, graph_vector, overlap)
    # seengene check
    if protein_kmer_filter:
        # Mark an ORF as "seen" when at least `multimer_threshold` of its
        # k-mers appear in the known-gene k-mer set.
        seengene = []
        for s in ORF_seq_enc:
            kmerset = kmerize(s, k_seengene)
            # s = [x in aa_kmer_set for x in kmerset]
            s = np.isin(list(kmerset), aa_kmer_set)
            seen = np.count_nonzero(s) >= multimer_threshold
            seengene.append(seen)
    # score
    # print("Scoring ORFs with temporal convolutional network...")
    # sort by length to minimize impact of batch padding
    ORF_lengths = np.asarray([len(x) for x in ORF_seq_enc])
    length_idx = np.argsort(ORF_lengths)
    ORF_seq_sorted = [ORF_seq_enc[i] for i in length_idx]
    # pad to allow creation of batch matrix
    prob_list = []
    for i in range(0, len(ORF_seq_sorted), gene_batch_size):
        batch = ORF_seq_sorted[i:i + gene_batch_size]
        seq_lengths = torch.LongTensor(list(map(len, batch)))
        # Zero-padded (batch, max_len) matrix of tokenized amino acids.
        seq_tensor = torch.zeros((len(batch), seq_lengths.max())).long()
        for idx, (seq, seqlen) in enumerate(zip(batch, seq_lengths)):
            seq_tensor[idx, :seqlen] = torch.LongTensor(seq)
        pred_all = predict(model, seq_tensor)
        pred = []
        for j, length in enumerate(seq_lengths):
            # Only the unpadded prefix is meaningful; average the per-position
            # scores in logit space, then map back to a probability.
            subseq = pred_all[j, 0, 0:int(length)]
            predprob = float(expit(torch.mean(logit(subseq))))
            pred.append(predprob)
        prob_list.extend(pred)
    prob_arr = np.asarray(prob_list, dtype=float)
    # unsort
    unsort_idx = np.argsort(length_idx)
    ORF_prob = prob_arr[unsort_idx]
    # recombine ORFs
    # NOTE(review): `idx` always equals `k` in this loop; it looks like a
    # remnant of an earlier per-contig recombination -- confirm.
    idx = 0
    ORF_gene_score = [None] * len(ORF_seq_enc)
    for k, coord in enumerate(ORF_gene_score):
        ORF_gene_score[k] = float(ORF_prob[idx])
        idx += 1
    # print("Scoring translation initiation sites...")
    # extract nucleotide sequence surrounding potential start codons
    ORF_TIS_seq_flat = []
    ORF_TIS_seq_idx = []
    ORF_TIS_prob = [None] * len(TIS_seqs)
    ORF_start_codon = [None] * len(ORF_seq_enc)
    for i, TIS in enumerate(TIS_seqs):
        # unpack tuple. Note, downsteam includes start codon, which needs to be removed
        upstream, downstream = TIS
        if len(upstream) == 16:
            TIS_seq = torch.tensor([nuc_encode[c] for c in (upstream + downstream[3:])[::-1]],
                                   dtype=int)  # model scores 3' to 5' direction
            ORF_TIS_seq_flat.append(TIS_seq)
            ORF_TIS_seq_idx.append(i)
        else:
            # Too little upstream context to score: fall back to a neutral
            # probability of 0.5.
            ORF_TIS_prob[i] = 0.5
        # encode start codon
        start_codon = start_enc[downstream[0:3]]
        ORF_start_codon[i] = start_codon
    # batch score TIS
    TIS_prob_list = []
    for i in range(0, len(ORF_TIS_seq_flat), TIS_batch_size):
        batch = ORF_TIS_seq_flat[i:i + TIS_batch_size]
        TIS_stacked = torch.stack(batch)
        pred = predict_tis(model_tis, TIS_stacked)
        TIS_prob_list.extend(pred)
    y_pred_TIS = np.asarray(TIS_prob_list, dtype=float)
    # reindex batched scores
    for i, prob in enumerate(y_pred_TIS):
        idx = ORF_TIS_seq_idx[i]
        ORF_TIS_prob[idx] = float(prob)
    # combine all info into single score for each ORF
    # NOTE(review): `if not geneprob` also skips a legitimate score of 0.0,
    # and the appended `None` placeholders would make the final
    # `score >= minimum_ORF_score` comparison raise on Python 3 -- confirm
    # whether gene scores can actually be falsy here.
    if protein_kmer_filter:
        ORF_score_flat = []
        for i, geneprob in enumerate(ORF_gene_score):
            if not geneprob:
                ORF_score_flat.append(None)
                continue
            # NOTE(review): `seengene_idx` is reset to 0 on *every* iteration,
            # so `seengene[0]` is used for every ORF; it likely should track
            # `i` (or only count ORFs that reach this point) -- confirm.
            seengene_idx = 0
            # calculate length by multiplying number of amino acids by 3, then adding 6 for start and stop
            length = (len(ORF_seq_enc[i]) * 3) + 6
            TIS_prob = ORF_TIS_prob[i]
            start_codon = ORF_start_codon[i]
            # Exactly one of these booleans is True; the matching weight is
            # added to the combined probability below.
            ATG = start_codon == 0
            GTG = start_codon == 1
            TTG = start_codon == 2
            combprob = geneprob * weight_gene_prob \
                       + TIS_prob * weight_TIS_prob \
                       + ATG * weight_ATG \
                       + GTG * weight_GTG \
                       + TTG * weight_TTG
            maxprob = weight_gene_prob + weight_TIS_prob + max(weight_ATG, weight_TTG, weight_GTG)
            probthresh = score_threshold * maxprob
            # A huge bonus (1e6) for "seen" genes dominates the length-scaled
            # probability term.
            score = (combprob - probthresh) * length + 1e6 * seengene[seengene_idx]
            seengene_idx += 1
            ORF_score_flat.append(score)
    else:
        ORF_score_flat = []
        for i, geneprob in enumerate(ORF_gene_score):
            if not geneprob:
                ORF_score_flat.append(None)
                continue
            # calculate length by multiplying number of amino acids by 3, then adding 6 for start and stop
            # NOTE(review): unlike the branch above, this length omits the
            # "+ 6" for start/stop codons -- confirm which is intended.
            length = len(ORF_seq_enc[i]) * 3
            TIS_prob = ORF_TIS_prob[i]
            start_codon = ORF_start_codon[i]
            ATG = start_codon == 0
            GTG = start_codon == 1
            TTG = start_codon == 2
            combprob = geneprob * weight_gene_prob \
                       + TIS_prob * weight_TIS_prob \
                       + ATG * weight_ATG \
                       + GTG * weight_GTG \
                       + TTG * weight_TTG
            maxprob = weight_gene_prob + weight_TIS_prob + max(weight_ATG, weight_TTG, weight_GTG)
            probthresh = score_threshold * maxprob
            score = (combprob - probthresh) * length
            ORF_score_flat.append(score)
    # update initial dictionary, removing low scoring ORFs and create score mapping score within a tuple
    ORF_score_dict = {}
    for i, score in enumerate(ORF_score_flat):
        # if score greater than minimum, add to the ORF_score_dict
        if score >= minimum_ORF_score:
            ORF_score_dict[i] = score
    return ORF_score_dict
| [
"tarfile.open",
"torch.LongTensor",
"Bio.Seq.Seq",
"torch.cuda.device_count",
"time.sleep",
"numpy.argsort",
"numpy.count_nonzero",
"os.path.exists",
"numpy.asarray",
"torch.hub.load",
"pickle.load",
"torch.hub.set_dir",
"torch.nn.functional.one_hot",
"torch.cuda.empty_cache",
"torch.sta... | [((322, 363), 'os.path.join', 'os.path.join', (['module_dir', '"""balrog_models"""'], {}), "(module_dir, 'balrog_models')\n", (334, 363), False, 'import os\n'), ((282, 308), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (298, 308), False, 'import os\n'), ((3091, 3136), 'torch.tensor', 'torch.tensor', (['[aa_table[aa] for aa in aa_seq]'], {}), '([aa_table[aa] for aa in aa_seq])\n', (3103, 3136), False, 'import torch\n'), ((5546, 5603), 'os.path.join', 'os.path.join', (['model_dir', '"""10mer_thresh2_minusARF_all.pkl"""'], {}), "(model_dir, '10mer_thresh2_minusARF_all.pkl')\n", (5558, 5603), False, 'import os\n'), ((5949, 5977), 'torch.hub.set_dir', 'torch.hub.set_dir', (['model_dir'], {}), '(model_dir)\n', (5966, 5977), False, 'import torch\n'), ((7391, 7414), 'numpy.argsort', 'np.argsort', (['ORF_lengths'], {}), '(ORF_lengths)\n', (7401, 7414), True, 'import numpy as np\n'), ((8229, 8263), 'numpy.asarray', 'np.asarray', (['prob_list'], {'dtype': 'float'}), '(prob_list, dtype=float)\n', (8239, 8263), True, 'import numpy as np\n'), ((8295, 8317), 'numpy.argsort', 'np.argsort', (['length_idx'], {}), '(length_idx)\n', (8305, 8317), True, 'import numpy as np\n'), ((9745, 9783), 'numpy.asarray', 'np.asarray', (['TIS_prob_list'], {'dtype': 'float'}), '(TIS_prob_list, dtype=float)\n', (9755, 9783), True, 'import numpy as np\n'), ((3892, 3904), 'Bio.Seq.Seq', 'Seq', (['ORF_seq'], {}), '(ORF_seq)\n', (3895, 3904), False, 'from Bio.Seq import Seq\n'), ((4368, 4383), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4381, 4383), False, 'import torch\n'), ((4814, 4829), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4827, 4829), False, 'import torch\n'), ((5348, 5373), 'os.path.exists', 'os.path.exists', (['model_dir'], {}), '(model_dir)\n', (5362, 5373), False, 'import os\n'), ((5389, 5437), 'tarfile.open', 'tarfile.open', (["(model_dir + '.tar.gz')"], {'mode': '"""r:gz"""'}), "(model_dir + '.tar.gz', mode='r:gz')\n", 
(5401, 5437), False, 'import tarfile\n'), ((5671, 5685), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (5682, 5685), False, 'import pickle\n'), ((5799, 5824), 'os.path.exists', 'os.path.exists', (['model_dir'], {}), '(model_dir)\n', (5813, 5824), False, 'import os\n'), ((5840, 5888), 'tarfile.open', 'tarfile.open', (["(model_dir + '.tar.gz')"], {'mode': '"""r:gz"""'}), "(model_dir + '.tar.gz', mode='r:gz')\n", (5852, 5888), False, 'import tarfile\n'), ((6031, 6056), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (6054, 6056), False, 'import torch\n'), ((6260, 6275), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (6270, 6275), False, 'import time\n'), ((6351, 6403), 'torch.hub.load', 'torch.hub.load', (['model_dir', '"""geneTCN"""'], {'source': '"""local"""'}), "(model_dir, 'geneTCN', source='local')\n", (6365, 6403), False, 'import torch\n'), ((6424, 6475), 'torch.hub.load', 'torch.hub.load', (['model_dir', '"""tisTCN"""'], {'source': '"""local"""'}), "(model_dir, 'tisTCN', source='local')\n", (6438, 6475), False, 'import torch\n'), ((6484, 6499), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (6494, 6499), False, 'import time\n'), ((9622, 9640), 'torch.stack', 'torch.stack', (['batch'], {}), '(batch)\n', (9633, 9640), False, 'import torch\n'), ((4396, 4421), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (4419, 4421), False, 'import torch\n'), ((4576, 4600), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (4598, 4600), False, 'import torch\n'), ((4842, 4867), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (4865, 4867), False, 'import torch\n'), ((7897, 7918), 'torch.LongTensor', 'torch.LongTensor', (['seq'], {}), '(seq)\n', (7913, 7918), False, 'import torch\n'), ((9032, 9119), 'torch.tensor', 'torch.tensor', (['[nuc_encode[c] for c in (upstream + downstream[3:])[::-1]]'], {'dtype': 'int'}), '([nuc_encode[c] for c in (upstream + 
downstream[3:])[::-1]],\n dtype=int)\n', (9044, 9119), False, 'import torch\n'), ((6113, 6165), 'torch.hub.load', 'torch.hub.load', (['model_dir', '"""geneTCN"""'], {'source': '"""local"""'}), "(model_dir, 'geneTCN', source='local')\n", (6127, 6165), False, 'import torch\n'), ((6193, 6244), 'torch.hub.load', 'torch.hub.load', (['model_dir', '"""tisTCN"""'], {'source': '"""local"""'}), "(model_dir, 'tisTCN', source='local')\n", (6207, 6244), False, 'import torch\n'), ((7100, 7119), 'numpy.count_nonzero', 'np.count_nonzero', (['s'], {}), '(s)\n', (7116, 7119), True, 'import numpy as np\n'), ((8131, 8144), 'scipy.special.logit', 'logit', (['subseq'], {}), '(subseq)\n', (8136, 8144), False, 'from scipy.special import logit\n'), ((4635, 4651), 'torch.nn.functional.one_hot', 'F.one_hot', (['X', '(21)'], {}), '(X, 21)\n', (4644, 4651), True, 'import torch.nn.functional as F\n'), ((4975, 4990), 'torch.nn.functional.one_hot', 'F.one_hot', (['X', '(4)'], {}), '(X, 4)\n', (4984, 4990), True, 'import torch.nn.functional as F\n'), ((4447, 4463), 'torch.nn.functional.one_hot', 'F.one_hot', (['X', '(21)'], {}), '(X, 21)\n', (4456, 4463), True, 'import torch.nn.functional as F\n'), ((4893, 4908), 'torch.nn.functional.one_hot', 'F.one_hot', (['X', '(4)'], {}), '(X, 4)\n', (4902, 4908), True, 'import torch.nn.functional as F\n')] |
#!/usr/bin/env python3
# system imports
import argparse
import sys
# obspy imports
from obspy.clients.fdsn import Client
from obspy import read, read_inventory, UTCDateTime
from scipy import signal
from obspy.signal.cross_correlation import correlate, xcorr_max
from obspy.clients.fdsn.header import FDSNNoDataException
from obspy.core.stream import Stream
# other imports
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import pickle
################################################################################
def main():
parser = argparse.ArgumentParser(description="Cross correlate sensor streams", formatter_class=SmartFormatter)
parser.add_argument("net",
help="Network code (e.g. II)",
action="store")
parser.add_argument("sta",
help="Station Code (e.g. MSEY or WRAB)",
action="store")
parser.add_argument("chan",
help="channel (e.g. BHZ or BH0",
action="store")
parser.add_argument("startdate",
help="R|start date (YYYY-JJJ OR\n"
"YYYY-MM-DD), UTC is assumed",
action="store")
parser.add_argument("enddate",
help="R|end date (YYYY-JJJ OR\n"
"YYYY-MM-DD), UTC is assumed",
action="store")
parser.add_argument("-d", "--duration",
help="the duration in seconds of the sample",
action="store",
type=int)
parser.add_argument("-i", "--interval",
help="interval in minutes to skip between segments",
action="store",
default="14400",
type=int)
parser.add_argument("-k", "--keepresponse",
help="don't use the remove_response call",
action="store_true")
parser.add_argument("-o", "--outfilename",
help="the filename for the plot output file",
action="store",
type=str)
parser.add_argument("-r", "--responsefilepath",
help="the path to the response file location, the filename is generated in code",
action="store",
type=str)
parser.add_argument("-v", "--verbose",
help="extra output for debugging",
action="store_true",
default=False)
args = parser.parse_args()
# upper case the stations and channels
args.sta = args.sta.upper()
args.chan = args.chan.upper()
doCorrelation(args.net, args.sta, args.chan, args.startdate, args.enddate, args.duration, \
args.interval, args.keepresponse, args.outfilename, args.responsefilepath, args.verbose)
################################################################################
def doCorrelation(net, sta, chan, start, end, duration, interval,
keep_response, outfilename, resp_filepath, be_verbose):
stime = UTCDateTime(start)
etime = UTCDateTime(end)
ctime = stime
skiptime = 24*60*60*10 # 10 days in seconds. Override with --interval <minutes> option
skiptime = interval*60 #
# location constants
LOC00 = '00'
LOC10 = '10'
# True to calculate values, False to read them from a pickle file
# this might be desirable when debugging the plotting code piece
calc = True
print(net, sta, LOC00, LOC10, duration, interval, stime, etime, keep_response, resp_filepath)
if calc:
times, shifts, vals = [],[], []
while ctime < etime:
cnt = 1
attach_response = True
if resp_filepath:
inv00 = read_inventory(f'{resp_filepath}/RESP.{net}.{sta}.{LOC00}.{chan}', 'RESP')
inv10 = read_inventory(f'{resp_filepath}/RESP.{net}.{sta}.{LOC10}.{chan}', 'RESP')
attach_response = False
st00 = getStream(net, sta, LOC00, chan, ctime, duration, be_verbose, attach_response)
st10 = getStream(net, sta, LOC10, chan, ctime, duration, be_verbose, attach_response)
if len(st00) == 0:
if be_verbose:
print("no traces returned for {} {} {} {} {}".format(net, sta, LOC00, chan, ctime), file=sys.stderr)
ctime += skiptime
continue
if len(st10) == 0:
if be_verbose:
print("no traces returned for {} {} {} {} {}".format(net, sta, LOC10, chan, ctime), file=sys.stderr)
ctime += skiptime
continue
if len(st00) > 1:
if be_verbose:
print("gap(s) found in segment for {} {} {} {} {}".format(net, sta, LOC00, chan, ctime), file=sys.stderr)
ctime += skiptime
continue
if len(st10) > 1:
if be_verbose:
print("gap(s) found in segment for {} {} {} {} {}".format(net, sta, LOC10, chan, ctime), file=sys.stderr)
ctime += skiptime
continue
if ((st00[0].stats.endtime - st00[0].stats.starttime) < (duration - 1.0/st00[0].stats.sampling_rate)):
if be_verbose:
print("skipping short segment in {} {} {} {} {}".format(net, sta, LOC00, chan, ctime), file=sys.stderr)
ctime += skiptime
continue
if ((st10[0].stats.endtime - st10[0].stats.starttime) < (duration - 1.0/st10[0].stats.sampling_rate)):
if be_verbose:
print("skipping short segment in {} {} {} {} {}".format(net, sta, LOC10, chan, ctime), file=sys.stderr)
ctime += skiptime
continue
if not attach_response:
st00.attach_response(inv00)
st10.attach_response(inv10)
if not keep_response:
st00.remove_response()
st10.remove_response()
# apply a bandpass filter and merge before resampling
st00.filter('bandpass', freqmax=1/4., freqmin=1./8., zerophase=True)
st00.resample(1000)
st10.filter('bandpass', freqmax=1/4., freqmin=1./8., zerophase=True)
st10.resample(1000)
# get the traces from the stream for each location
try:
tr1 = st00.select(location=LOC00)[0]
except Exception as err:
print(err, file=sys.stderr)
try:
tr2 = st10.select(location=LOC10)[0]
except Exception as err:
print(err, file=sys.stderr)
# trim sample to start and end at the same times
trace_start = max(tr1.stats.starttime, tr2.stats.starttime)
trace_end = min(tr1.stats.endtime, tr2.stats.endtime)
# debug
if be_verbose:
print("Before trim", file=sys.stderr)
print("tr1 start: {} tr2 start: {}".format(tr1.stats.starttime, tr2.stats.starttime), file=sys.stderr)
print("tr1 end: {} tr2 end: {}".format(tr1.stats.endtime, tr2.stats.endtime), file=sys.stderr)
print("max trace_start: {} min trace_end {}".format(trace_start, trace_end), file=sys.stderr)
tr1.trim(trace_start, trace_end)
tr2.trim(trace_start, trace_end)
# debug
if be_verbose:
print("After trim", file=sys.stderr)
print("tr1 start: {} tr2 start: {}".format(tr1.stats.starttime, tr2.stats.starttime), file=sys.stderr)
print("tr1 end: {} tr2 end: {}".format(tr1.stats.endtime, tr2.stats.endtime), file=sys.stderr)
# calculate time offset
time_offset = tr1.stats.starttime - tr2.stats.starttime
cc = correlate(tr1.data, tr2.data, 500)
# xcorr_max returns the shift and value of the maximum of the cross-correlation function
shift, val = xcorr_max(cc)
# append to lists for plotting
shifts.append(shift)
vals.append(val)
times.append(ctime.year + ctime.julday/365.25)
print("duration: {} to {} offset: {}\tshift: {} value: {}".format(ctime, ctime+duration, time_offset, shift, val))
# skip 10 days for next loop
if be_verbose:
print("ctime: {}".format(ctime), file=sys.stderr)
ctime += skiptime
# persist the data in a pickle file
if outfilename:
with open(outfilename + '.pickle', 'wb') as f:
pickle.dump([shifts, vals, times], f)
else:
with open(net + '_' + sta + '_' + net + '_' + sta + '.pickle', 'wb') as f:
pickle.dump([shifts, vals, times], f)
else:
# retrieve the data from the pickle file
if outfilename:
with open(outfilename + '.pickle', 'rb') as f:
shifts, vals, times = pickle.load(f)
else:
with open(net + '_' + sta + '_' + net + '_' + sta + '.pickle', 'rb') as f:
shifts, vals, times = pickle.load(f)
mpl.rc('font',serif='Times')
mpl.rc('font',size=16)
fig = plt.figure(1, figsize=(10,10))
plt.subplot(2,1,1)
plt.title(net + ' ' + sta + ' ' + LOC00 + ' compared to ' + net + ' ' + sta + ' ' + LOC10)
plt.plot(times, shifts,'.')
plt.ylabel('Time Shift (ms)')
plt.subplot(2,1,2)
plt.plot(times, vals, '.')
#plt.ylim((0.8, 1.0))
plt.ylim((0, 1.0))
plt.xlabel('Time (year)')
plt.ylabel('Correlation')
if outfilename:
plt.savefig(outfilename + '.PDF', format='PDF')
else:
plt.savefig(net + '_' + sta + '_' + net + '_' + sta + '.PDF', format='PDF')
################################################################################
def getStream(net, sta, loc, chan, ctime, duration, be_verbose, attach_response):
cnt = 1
client = Client()
st = Stream()
while cnt <= 4:
try:
# get_waveforms gets 'duration' seconds of activity for the channel/date/location
# only attach response if we're not using a response file
if attach_response:
st = client.get_waveforms(net, sta, loc, chan, ctime, ctime + duration, attach_response=True)
else:
st = client.get_waveforms(net, sta, loc, chan, ctime, ctime + duration)
break
except KeyboardInterrupt:
sys.exit()
except FDSNNoDataException:
if be_verbose:
print(f"No data available for {net}.{sta}.{loc}.{chan} {ctime} to {ctime+duration}", file=sys.stderr)
except Exception as err:
print(err, file=sys.stderr)
finally:
cnt += 1
return st
################################################################################
class SmartFormatter(argparse.HelpFormatter):
def _split_lines(self, text, width):
if text.startswith('R|'):
return text[2:].splitlines()
# this is the RawTextHelpFormatter._split_lines
return argparse.HelpFormatter._split_lines(self, text, width)
################################################################################
if __name__ == '__main__':
main()
| [
"matplotlib.pyplot.ylabel",
"matplotlib.rc",
"sys.exit",
"obspy.core.stream.Stream",
"argparse.ArgumentParser",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"obspy.signal.cross_correlation.correlate",
"obspy.clients.fdsn.Client",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.savefig",
... | [((573, 678), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Cross correlate sensor streams"""', 'formatter_class': 'SmartFormatter'}), "(description='Cross correlate sensor streams',\n formatter_class=SmartFormatter)\n", (596, 678), False, 'import argparse\n'), ((3255, 3273), 'obspy.UTCDateTime', 'UTCDateTime', (['start'], {}), '(start)\n', (3266, 3273), False, 'from obspy import read, read_inventory, UTCDateTime\n'), ((3286, 3302), 'obspy.UTCDateTime', 'UTCDateTime', (['end'], {}), '(end)\n', (3297, 3302), False, 'from obspy import read, read_inventory, UTCDateTime\n'), ((9438, 9467), 'matplotlib.rc', 'mpl.rc', (['"""font"""'], {'serif': '"""Times"""'}), "('font', serif='Times')\n", (9444, 9467), True, 'import matplotlib as mpl\n'), ((9471, 9494), 'matplotlib.rc', 'mpl.rc', (['"""font"""'], {'size': '(16)'}), "('font', size=16)\n", (9477, 9494), True, 'import matplotlib as mpl\n'), ((9509, 9540), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {'figsize': '(10, 10)'}), '(1, figsize=(10, 10))\n', (9519, 9540), True, 'import matplotlib.pyplot as plt\n'), ((9549, 9569), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (9560, 9569), True, 'import matplotlib.pyplot as plt\n'), ((9572, 9666), 'matplotlib.pyplot.title', 'plt.title', (["(net + ' ' + sta + ' ' + LOC00 + ' compared to ' + net + ' ' + sta + ' ' +\n LOC10)"], {}), "(net + ' ' + sta + ' ' + LOC00 + ' compared to ' + net + ' ' + sta +\n ' ' + LOC10)\n", (9581, 9666), True, 'import matplotlib.pyplot as plt\n'), ((9667, 9695), 'matplotlib.pyplot.plot', 'plt.plot', (['times', 'shifts', '"""."""'], {}), "(times, shifts, '.')\n", (9675, 9695), True, 'import matplotlib.pyplot as plt\n'), ((9699, 9728), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Time Shift (ms)"""'], {}), "('Time Shift (ms)')\n", (9709, 9728), True, 'import matplotlib.pyplot as plt\n'), ((9738, 9758), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', 
'(2)'], {}), '(2, 1, 2)\n', (9749, 9758), True, 'import matplotlib.pyplot as plt\n'), ((9761, 9787), 'matplotlib.pyplot.plot', 'plt.plot', (['times', 'vals', '"""."""'], {}), "(times, vals, '.')\n", (9769, 9787), True, 'import matplotlib.pyplot as plt\n'), ((9818, 9836), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0, 1.0)'], {}), '((0, 1.0))\n', (9826, 9836), True, 'import matplotlib.pyplot as plt\n'), ((9841, 9866), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (year)"""'], {}), "('Time (year)')\n", (9851, 9866), True, 'import matplotlib.pyplot as plt\n'), ((9871, 9896), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Correlation"""'], {}), "('Correlation')\n", (9881, 9896), True, 'import matplotlib.pyplot as plt\n'), ((10261, 10269), 'obspy.clients.fdsn.Client', 'Client', ([], {}), '()\n', (10267, 10269), False, 'from obspy.clients.fdsn import Client\n'), ((10279, 10287), 'obspy.core.stream.Stream', 'Stream', ([], {}), '()\n', (10285, 10287), False, 'from obspy.core.stream import Stream\n'), ((9930, 9977), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(outfilename + '.PDF')"], {'format': '"""PDF"""'}), "(outfilename + '.PDF', format='PDF')\n", (9941, 9977), True, 'import matplotlib.pyplot as plt\n'), ((9996, 10071), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(net + '_' + sta + '_' + net + '_' + sta + '.PDF')"], {'format': '"""PDF"""'}), "(net + '_' + sta + '_' + net + '_' + sta + '.PDF', format='PDF')\n", (10007, 10071), True, 'import matplotlib.pyplot as plt\n'), ((11431, 11485), 'argparse.HelpFormatter._split_lines', 'argparse.HelpFormatter._split_lines', (['self', 'text', 'width'], {}), '(self, text, width)\n', (11466, 11485), False, 'import argparse\n'), ((8091, 8125), 'obspy.signal.cross_correlation.correlate', 'correlate', (['tr1.data', 'tr2.data', '(500)'], {}), '(tr1.data, tr2.data, 500)\n', (8100, 8125), False, 'from obspy.signal.cross_correlation import correlate, xcorr_max\n'), ((8253, 8266), 'obspy.signal.cross_correlation.xcorr_max', 
'xcorr_max', (['cc'], {}), '(cc)\n', (8262, 8266), False, 'from obspy.signal.cross_correlation import correlate, xcorr_max\n'), ((3956, 4030), 'obspy.read_inventory', 'read_inventory', (['f"""{resp_filepath}/RESP.{net}.{sta}.{LOC00}.{chan}"""', '"""RESP"""'], {}), "(f'{resp_filepath}/RESP.{net}.{sta}.{LOC00}.{chan}', 'RESP')\n", (3970, 4030), False, 'from obspy import read, read_inventory, UTCDateTime\n'), ((4055, 4129), 'obspy.read_inventory', 'read_inventory', (['f"""{resp_filepath}/RESP.{net}.{sta}.{LOC10}.{chan}"""', '"""RESP"""'], {}), "(f'{resp_filepath}/RESP.{net}.{sta}.{LOC10}.{chan}', 'RESP')\n", (4069, 4129), False, 'from obspy import read, read_inventory, UTCDateTime\n'), ((8880, 8917), 'pickle.dump', 'pickle.dump', (['[shifts, vals, times]', 'f'], {}), '([shifts, vals, times], f)\n', (8891, 8917), False, 'import pickle\n'), ((9035, 9072), 'pickle.dump', 'pickle.dump', (['[shifts, vals, times]', 'f'], {}), '([shifts, vals, times], f)\n', (9046, 9072), False, 'import pickle\n'), ((9253, 9267), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (9264, 9267), False, 'import pickle\n'), ((9408, 9422), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (9419, 9422), False, 'import pickle\n'), ((10798, 10808), 'sys.exit', 'sys.exit', ([], {}), '()\n', (10806, 10808), False, 'import sys\n')] |
import urllib
from cloudbridge.cloud.interfaces.resources import TrafficDirection
from rest_auth.serializers import UserDetailsSerializer
from rest_framework import serializers
from rest_framework.reverse import reverse
from . import models
from . import view_helpers
from .drf_helpers import CustomHyperlinkedIdentityField
from .drf_helpers import PlacementZonePKRelatedField
from .drf_helpers import ProviderPKRelatedField
class ZoneSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
name = serializers.CharField(read_only=True)
class RegionSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:region-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(read_only=True)
zones = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:zone-list',
lookup_field='id',
lookup_url_kwarg='region_pk',
parent_url_kwargs=['cloud_pk'])
class MachineImageSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:machine_image-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(read_only=True)
label = serializers.CharField()
description = serializers.CharField()
class KeyPairSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:keypair-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(required=True)
material = serializers.CharField(read_only=True)
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
return provider.security.key_pairs.create(validated_data.get('name'))
class VMFirewallRuleSerializer(serializers.Serializer):
protocol = serializers.CharField(allow_blank=True)
from_port = serializers.CharField(allow_blank=True)
to_port = serializers.CharField(allow_blank=True)
cidr = serializers.CharField(label="CIDR", allow_blank=True)
firewall = ProviderPKRelatedField(
label="VM Firewall",
queryset='security.vm_firewalls',
display_fields=['name', 'id'],
display_format="{0} (ID: {1})",
required=False,
allow_null=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:vm_firewall_rule-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk', 'vm_firewall_pk'])
def validate(self, data):
"""Cursory data check."""
if data.get('protocol').lower() not in ['tcp', 'udp', 'icmp']:
raise serializers.ValidationError(
'Protocol must be one of: tcp, udp, icmp.')
try:
if not (1 < int(data['from_port']) <= 65535):
raise serializers.ValidationError(
'From port must be an integer between 1 and 65535.')
elif not (1 < int(data['to_port']) <= 65535):
raise serializers.ValidationError(
'To port must be an integer between 1 and 65535.')
except ValueError:
raise serializers.ValidationError(
'To/from ports must be integers.')
return data
def create(self, validated_data):
view = self.context.get('view')
provider = view_helpers.get_cloud_provider(view)
vmf_pk = view.kwargs.get('vm_firewall_pk')
if vmf_pk:
vmf = provider.security.vm_firewalls.get(vmf_pk)
if vmf and validated_data.get('firewall'):
return vmf.rules.create(
TrafficDirection.INBOUND,
validated_data.get('protocol'),
int(validated_data.get('from_port')),
int(validated_data.get('to_port')),
src_dest_fw=validated_data.get('firewall'))
elif vmf:
return vmf.rules.create(TrafficDirection.INBOUND,
validated_data.get('protocol'),
int(validated_data.get('from_port')),
int(validated_data.get('to_port')),
validated_data.get('cidr'))
return None
class VMFirewallSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:vm_firewall-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(read_only=True)
label = serializers.CharField(required=True)
# Technically, the description is required but when wanting to reuse an
# existing VM firewall with a different resource (eg, creating an
# instance), we need to be able to call this serializer w/o it.
description = serializers.CharField(required=False)
network_id = ProviderPKRelatedField(
queryset='networking.networks',
display_fields=['id', 'label'],
display_format="{1} ({0})")
rules = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:vm_firewall_rule-list',
lookup_field='id',
lookup_url_kwarg='vm_firewall_pk',
parent_url_kwargs=['cloud_pk'])
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
return provider.security.vm_firewalls.create(
label=validated_data.get('label'),
network_id=validated_data.get('network_id').id,
description=validated_data.get('description'))
class NetworkingSerializer(serializers.Serializer):
networks = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:network-list',
parent_url_kwargs=['cloud_pk'])
routers = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:router-list',
parent_url_kwargs=['cloud_pk'])
class NetworkSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:network-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(read_only=True)
label = serializers.CharField(required=True)
state = serializers.CharField(read_only=True)
cidr_block = serializers.CharField()
subnets = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:subnet-list',
lookup_field='id',
lookup_url_kwarg='network_pk',
parent_url_kwargs=['cloud_pk'])
gateways = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:gateway-list',
lookup_field='id',
lookup_url_kwarg='network_pk',
parent_url_kwargs=['cloud_pk'])
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
return provider.networking.networks.create(
label=validated_data.get('label'),
cidr_block=validated_data.get('cidr_block', '10.0.0.0/16'))
def update(self, instance, validated_data):
# We do not allow the cidr_block to be edited so the value is ignored
# and only the name is updated.
try:
if instance.label != validated_data.get('label'):
instance.label = validated_data.get('label')
return instance
except Exception as e:
raise serializers.ValidationError("{0}".format(e))
class SubnetSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:subnet-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk', 'network_pk'])
name = serializers.CharField(read_only=True)
label = serializers.CharField(required=True)
cidr_block = serializers.CharField()
network_id = serializers.CharField(read_only=True)
zone = PlacementZonePKRelatedField(
label="Zone",
queryset='non_empty_value',
display_fields=['id'],
display_format="{0}",
required=True)
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
net_id = self.context.get('view').kwargs.get('network_pk')
return provider.networking.subnets.create(
label=validated_data.get('label'), network=net_id,
cidr_block=validated_data.get('cidr_block'),
zone=validated_data.get('zone'))
class SubnetSerializerUpdate(SubnetSerializer):
cidr_block = serializers.CharField(read_only=True)
def update(self, instance, validated_data):
try:
if instance.label != validated_data.get('label'):
instance.label = validated_data.get('label')
return instance
except Exception as e:
raise serializers.ValidationError("{0}".format(e))
class RouterSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:router-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(read_only=True)
label = serializers.CharField(required=True)
state = serializers.CharField(read_only=True)
network_id = ProviderPKRelatedField(
queryset='networking.networks',
display_fields=['id', 'name'],
display_format="{1} ({0})")
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
return provider.networking.routers.create(
label=validated_data.get('label'),
network=validated_data.get('network_id').id)
def update(self, instance, validated_data):
try:
if instance.label != validated_data.get('label'):
instance.label = validated_data.get('label')
return instance
except Exception as e:
raise serializers.ValidationError("{0}".format(e))
class GatewaySerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:gateway-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk', 'network_pk'])
name = serializers.CharField()
state = serializers.CharField(read_only=True)
network_id = serializers.CharField(read_only=True)
floating_ips = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:floating_ip-list',
lookup_field='id',
lookup_url_kwarg='gateway_pk',
parent_url_kwargs=['cloud_pk', 'network_pk'])
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
net_id = self.context.get('view').kwargs.get('network_pk')
net = provider.networking.networks.get(net_id)
return net.gateways.get_or_create_inet_gateway(
name=validated_data.get('name'))
class FloatingIPSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
ip = serializers.CharField(read_only=True)
state = serializers.CharField(read_only=True)
class VMTypeSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:vm_type-detail',
lookup_field='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField()
family = serializers.CharField()
vcpus = serializers.CharField()
ram = serializers.CharField()
size_root_disk = serializers.CharField()
size_ephemeral_disks = serializers.CharField()
num_ephemeral_disks = serializers.CharField()
size_total_disk = serializers.CharField()
extra_data = serializers.DictField(serializers.CharField())
class AttachmentInfoSerializer(serializers.Serializer):
device = serializers.CharField(read_only=True)
instance_id = ProviderPKRelatedField(
label="Instance ID",
queryset='compute.instances',
display_fields=['name', 'id'],
display_format="{0} (ID: {1})",
required=False,
allow_null=True)
instance = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:instance-detail',
lookup_field='instance_id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
class VolumeSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:volume-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(read_only=True)
label = serializers.CharField(required=True)
description = serializers.CharField(allow_blank=True)
size = serializers.IntegerField(min_value=0)
create_time = serializers.CharField(read_only=True)
zone_id = PlacementZonePKRelatedField(
label="Zone",
queryset='non_empty_value',
display_fields=['id'],
display_format="{0}",
required=True)
state = serializers.CharField(read_only=True)
snapshot_id = ProviderPKRelatedField(
label="Snapshot ID",
queryset='storage.snapshots',
display_fields=['label', 'id', 'size'],
display_format="{0} (ID: {1}, Size: {2} GB)",
write_only=True,
required=False,
allow_null=True)
attachments = AttachmentInfoSerializer()
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
try:
return provider.storage.volumes.create(
validated_data.get('label'),
validated_data.get('size'),
validated_data.get('zone_id'),
description=validated_data.get('description'),
snapshot=validated_data.get('snapshot_id'))
except Exception as e:
raise serializers.ValidationError("{0}".format(e))
def update(self, instance, validated_data):
try:
if instance.label != validated_data.get('label'):
instance.label = validated_data.get('label')
if instance.description != validated_data.get('description'):
instance.description = validated_data.get('description')
return instance
except Exception as e:
raise serializers.ValidationError("{0}".format(e))
class SnapshotSerializer(serializers.Serializer):
id = serializers.CharField(read_only=True)
url = CustomHyperlinkedIdentityField(
view_name='djcloudbridge:snapshot-detail',
lookup_field='id',
lookup_url_kwarg='pk',
parent_url_kwargs=['cloud_pk'])
name = serializers.CharField(read_only=True)
label = serializers.CharField(required=True)
description = serializers.CharField()
state = serializers.CharField(read_only=True)
volume_id = ProviderPKRelatedField(
label="Volume ID",
queryset='storage.volumes',
display_fields=['label', 'id', 'size'],
display_format="{0} (ID: {1}, Size: {2} GB)",
required=True)
create_time = serializers.CharField(read_only=True)
size = serializers.IntegerField(min_value=0, read_only=True)
def create(self, validated_data):
provider = view_helpers.get_cloud_provider(self.context.get('view'))
try:
return provider.storage.snapshots.create(
validated_data.get('label'),
validated_data.get('volume_id'),
description=validated_data.get('description'))
except Exception as e:
raise serializers.ValidationError("{0}".format(e))
def update(self, instance, validated_data):
try:
if instance.label != validated_data.get('label'):
instance.label = validated_data.get('label')
if instance.description != validated_data.get('description'):
instance.description = validated_data.get('description')
return instance
except Exception as e:
raise serializers.ValidationError("{0}".format(e))
class InstanceSerializer(serializers.Serializer):
    """Serialize a compute instance and launch new ones.

    Launch parameters (image, vm type, key pair, subnet, zone, firewalls,
    user data) are writable; runtime attributes (IPs, name, id) are
    read-only and reflect provider state.
    """
    id = serializers.CharField(read_only=True)
    url = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:instance-detail',
        lookup_field='id',
        lookup_url_kwarg='pk',
        parent_url_kwargs=['cloud_pk'])
    name = serializers.CharField(read_only=True)
    label = serializers.CharField(required=True)
    public_ips = serializers.ListField(serializers.IPAddressField())
    private_ips = serializers.ListField(serializers.IPAddressField())
    vm_type_id = ProviderPKRelatedField(
        label="Instance Type",
        queryset='compute.vm_types',
        display_fields=['name'],
        display_format="{0}",
        required=True)
    vm_type_url = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:vm_type-detail',
        lookup_field='vm_type_id',
        lookup_url_kwarg='pk',
        parent_url_kwargs=['cloud_pk'])
    image_id = ProviderPKRelatedField(
        label="Image",
        queryset='compute.images',
        display_fields=['name', 'id', 'label'],
        display_format="{0} (ID: {1}, Label: {2})",
        required=True)
    image_id_url = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:machine_image-detail',
        lookup_field='image_id',
        lookup_url_kwarg='pk',
        parent_url_kwargs=['cloud_pk'])
    key_pair_id = ProviderPKRelatedField(
        label="Keypair",
        queryset='security.key_pairs',
        display_fields=['id'],
        display_format="{0}",
        required=True)
    subnet_id = ProviderPKRelatedField(
        label="Subnet",
        queryset='networking.subnets',
        display_fields=['id', 'label'],
        display_format="{1} ({0})")
    zone_id = PlacementZonePKRelatedField(
        label="Placement Zone",
        queryset='non_empty_value',
        display_fields=['id'],
        display_format="{0}",
        required=True)
    vm_firewall_ids = ProviderPKRelatedField(
        label="VM Firewalls",
        queryset='security.vm_firewalls',
        display_fields=['name', 'id', 'label'],
        display_format="{0} (ID: {1}, Label: {2})",
        many=True)
    user_data = serializers.CharField(write_only=True, allow_blank=True,
                                      style={'base_template': 'textarea.html'})

    def create(self, validated_data):
        """Launch a new instance on the provider bound to the current view.

        Raises:
            serializers.ValidationError: wraps any provider-side failure.
        """
        provider = view_helpers.get_cloud_provider(self.context.get('view'))
        label = validated_data.get('label')
        image_id = validated_data.get('image_id')
        vm_type = validated_data.get('vm_type_id')
        # BUGFIX: the key pair is submitted under 'key_pair_id' (the field
        # declared above); the previous lookup of 'key_pair_name' always
        # returned None, silently launching instances without a key pair.
        kp_name = validated_data.get('key_pair_id')
        zone_id = validated_data.get('zone_id')
        vm_firewall_ids = validated_data.get('vm_firewall_ids')
        subnet_id = validated_data.get('subnet_id')
        user_data = validated_data.get('user_data')
        try:
            return provider.compute.instances.create(
                label, image_id, vm_type, subnet_id, zone_id,
                key_pair=kp_name, vm_firewalls=vm_firewall_ids,
                user_data=user_data)
        except Exception as e:
            raise serializers.ValidationError("{0}".format(e))

    def update(self, instance, validated_data):
        """Relabel the instance when the submitted label differs."""
        try:
            if instance.label != validated_data.get('label'):
                instance.label = validated_data.get('label')
            return instance
        except Exception as e:
            raise serializers.ValidationError("{0}".format(e))
class BucketSerializer(serializers.Serializer):
    """Serialize an object-store bucket and create new buckets."""
    id = serializers.CharField(read_only=True)
    url = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:bucket-detail',
        lookup_field='id',
        lookup_url_kwarg='pk',
        parent_url_kwargs=['cloud_pk'])
    name = serializers.CharField()
    # Link to the listing of objects stored inside this bucket.
    objects = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:bucketobject-list',
        lookup_field='id',
        lookup_url_kwarg='bucket_pk',
        parent_url_kwargs=['cloud_pk'])
    def create(self, validated_data):
        """Create the bucket on the provider bound to the current view."""
        provider = view_helpers.get_cloud_provider(self.context.get('view'))
        try:
            return provider.storage.buckets.create(validated_data.get('name'))
        except Exception as e:
            # Surface any provider-side failure as a DRF validation error.
            raise serializers.ValidationError("{0}".format(e))
class BucketObjectSerializer(serializers.Serializer):
    """Serialize a single object (file) stored in a bucket."""
    id = serializers.CharField(read_only=True)
    # Blank name is allowed: on upload the content's own filename is used.
    name = serializers.CharField(allow_blank=True)
    size = serializers.IntegerField(read_only=True)
    last_modified = serializers.CharField(read_only=True)
    url = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:bucketobject-detail',
        lookup_field='id',
        lookup_url_kwarg='pk',
        parent_url_kwargs=['cloud_pk', 'bucket_pk'])
    download_url = serializers.SerializerMethodField()
    upload_content = serializers.FileField(write_only=True)
    def get_download_url(self, obj):
        """Build a URL that downloads this object's raw (binary) content."""
        kwargs = self.context['view'].kwargs.copy()
        kwargs.update({'pk': obj.id})
        obj_url = reverse('djcloudbridge:bucketobject-detail',
                          kwargs=kwargs,
                          request=self.context['request'])
        return urllib.parse.urljoin(obj_url, '?format=binary')
    def create(self, validated_data):
        """Create an object in the bucket from the URL and upload its body."""
        provider = view_helpers.get_cloud_provider(self.context.get('view'))
        bucket_id = self.context.get('view').kwargs.get('bucket_pk')
        bucket = provider.storage.buckets.get(bucket_id)
        try:
            name = validated_data.get('name')
            content = validated_data.get('upload_content')
            # Fall back to the uploaded file's own name when none was given.
            if name:
                obj = bucket.objects.create(name)
            else:
                obj = bucket.objects.create(content.name)
            if content:
                obj.upload(content.file.getvalue())
            return obj
        except Exception as e:
            raise serializers.ValidationError("{0}".format(e))
    def update(self, instance, validated_data):
        """Replace the stored content of an existing bucket object."""
        try:
            instance.upload(
                validated_data.get('upload_content').file.getvalue())
            return instance
        except Exception as e:
            raise serializers.ValidationError("{0}".format(e))
class CloudSerializer(serializers.ModelSerializer):
    """Serialize a Cloud model plus links to its per-service endpoints.

    ``region_name``, ``cloud_type`` and ``extra_data`` are all derived from
    whichever provider-specific child record (aws/openstack/azure/gcp) is
    attached to the Cloud instance; the dispatch is centralized in
    ``_provider_child`` so the three methods cannot drift apart.
    """
    slug = serializers.CharField(read_only=True)
    compute = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:compute-list',
        lookup_field='slug',
        lookup_url_kwarg='cloud_pk')
    security = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:security-list',
        lookup_field='slug',
        lookup_url_kwarg='cloud_pk')
    storage = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:storage-list',
        lookup_field='slug',
        lookup_url_kwarg='cloud_pk')
    networking = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:networking-list',
        lookup_field='slug',
        lookup_url_kwarg='cloud_pk')
    region_name = serializers.SerializerMethodField()
    cloud_type = serializers.SerializerMethodField()
    extra_data = serializers.SerializerMethodField()

    # Provider child attributes, probed in order; first match wins
    # (same order as the original hasattr chains).
    _PROVIDER_ATTRS = ('aws', 'openstack', 'azure', 'gcp')

    def _provider_child(self, obj):
        """Return (provider_name, child_record) for obj, or (None, None)."""
        for attr in self._PROVIDER_ATTRS:
            if hasattr(obj, attr):
                return attr, getattr(obj, attr)
        return None, None

    def get_region_name(self, obj):
        """Region of the attached provider record."""
        _, child = self._provider_child(obj)
        if child is None:
            return "Cloud provider not recognized"
        return child.region_name

    def get_cloud_type(self, obj):
        """Short provider identifier ('aws', 'openstack', ...) or 'unknown'."""
        name, _ = self._provider_child(obj)
        return name if name is not None else 'unknown'

    def get_extra_data(self, obj):
        """Provider-specific connection settings as a plain dict."""
        name, child = self._provider_child(obj)
        if name == 'aws':
            return {'region_name': child.region_name,
                    'ec2_endpoint_url': child.ec2_endpoint_url,
                    'ec2_is_secure': child.ec2_is_secure,
                    'ec2_validate_certs': child.ec2_validate_certs,
                    's3_endpoint_url': child.s3_endpoint_url,
                    's3_is_secure': child.s3_is_secure,
                    's3_validate_certs': child.s3_validate_certs
                    }
        elif name == 'openstack':
            return {'auth_url': child.auth_url,
                    'region_name': child.region_name,
                    'identity_api_version': child.identity_api_version
                    }
        elif name == 'azure':
            return {'region_name': child.region_name}
        elif name == 'gcp':
            return {'region_name': child.region_name,
                    'zone_name': child.zone_name
                    }
        else:
            return {}

    class Meta:
        model = models.Cloud
        exclude = ('kind',)
class ComputeSerializer(serializers.Serializer):
    """Index of compute-service endpoints for a cloud (links only)."""
    instances = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:instance-list',
        parent_url_kwargs=['cloud_pk'])
    machine_images = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:machine_image-list',
        parent_url_kwargs=['cloud_pk'])
    regions = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:region-list',
        parent_url_kwargs=['cloud_pk'])
    vm_types = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:vm_type-list',
        parent_url_kwargs=['cloud_pk'])
class SecuritySerializer(serializers.Serializer):
    """Index of security-service endpoints for a cloud (links only)."""
    keypairs = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:keypair-list',
        parent_url_kwargs=['cloud_pk'])
    vm_firewalls = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:vm_firewall-list',
        parent_url_kwargs=['cloud_pk'])
class StorageSerializer(serializers.Serializer):
    """Index of storage-service endpoints for a cloud (links only)."""
    volumes = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:volume-list',
        parent_url_kwargs=['cloud_pk'])
    snapshots = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:snapshot-list',
        parent_url_kwargs=['cloud_pk'])
    buckets = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:bucket-list',
        parent_url_kwargs=['cloud_pk'])
"""
User Profile and Credentials related serializers
"""
class CredentialsSerializer(serializers.Serializer):
    """Index of credential-list endpoints, one per supported provider."""
    aws = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:awscredentials-list')
    openstack = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:openstackcredentials-list')
    azure = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:azurecredentials-list')
    gcp = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:gcpcredentials-list')
class AWSCredsSerializer(serializers.HyperlinkedModelSerializer):
    """AWS credentials record; the secret key is write-only, never echoed."""
    id = serializers.IntegerField(read_only=True)
    # Rendered as a password input and excluded from responses.
    secret_key = serializers.CharField(
        style={'input_type': 'password'},
        write_only=True,
        required=False
    )
    cloud_id = serializers.CharField(write_only=True)
    cloud = CloudSerializer(read_only=True)
    class Meta:
        model = models.AWSCredentials
        exclude = ('user_profile',)
class OpenstackCredsSerializer(serializers.HyperlinkedModelSerializer):
    """OpenStack credentials record; the password is write-only."""
    id = serializers.IntegerField(read_only=True)
    # Rendered as a password input and excluded from responses.
    password = serializers.CharField(
        style={'input_type': 'password'},
        write_only=True,
        required=False
    )
    cloud_id = serializers.CharField(write_only=True)
    cloud = CloudSerializer(read_only=True)
    class Meta:
        model = models.OpenStackCredentials
        exclude = ('user_profile',)
class AzureCredsSerializer(serializers.HyperlinkedModelSerializer):
    """Azure credentials record; the secret is write-only, never echoed."""
    id = serializers.IntegerField(read_only=True)
    # Rendered as a password input and excluded from responses.
    secret = serializers.CharField(
        style={'input_type': 'password'},
        write_only=True,
        required=False
    )
    cloud_id = serializers.CharField(write_only=True)
    cloud = CloudSerializer(read_only=True)
    class Meta:
        model = models.AzureCredentials
        exclude = ('user_profile',)
class GCPCredsSerializer(serializers.HyperlinkedModelSerializer):
    """GCP credentials record; the JSON credentials blob is write-only."""
    id = serializers.IntegerField(read_only=True)
    # Multi-line textarea for pasting the service-account JSON; write-only.
    credentials = serializers.CharField(
        write_only=True,
        style={'base_template': 'textarea.html', 'rows': 20},
    )
    cloud_id = serializers.CharField(write_only=True)
    cloud = CloudSerializer(read_only=True)
    class Meta:
        model = models.GCPCredentials
        exclude = ('user_profile',)
class CloudConnectionAuthSerializer(serializers.Serializer):
    """Validate cloud credentials by attempting provider authentication.

    ``create`` returns a result dict instead of a model: ``result`` is
    'SUCCESS' or 'FAILURE', with ``details`` carrying the error text.
    """
    aws_creds = AWSCredsSerializer(write_only=True, required=False)
    openstack_creds = OpenstackCredsSerializer(write_only=True, required=False)
    azure_creds = AzureCredsSerializer(write_only=True, required=False)
    gcp_creds = GCPCredsSerializer(write_only=True, required=False)
    result = serializers.CharField(read_only=True)
    details = serializers.CharField(read_only=True)
    def create(self, validated_data):
        """Try to authenticate against the provider; never raises."""
        provider = view_helpers.get_cloud_provider(self.context.get('view'))
        try:
            provider.authenticate()
            return {'result': 'SUCCESS'}
        except Exception as e:
            return {'result': 'FAILURE', 'details': str(e)}
class UserSerializer(UserDetailsSerializer):
    """Extend the auth user representation with cloud credential listings.

    Each ``*_creds`` field lists the credentials of one provider type
    attached to the user's profile.  The four accessor methods were
    previously near-identical copies; they now share ``_creds_for``.
    """
    credentials = CustomHyperlinkedIdentityField(
        view_name='djcloudbridge:credentialsroute-list', lookup_field=None)
    aws_creds = serializers.SerializerMethodField()
    openstack_creds = serializers.SerializerMethodField()
    azure_creds = serializers.SerializerMethodField()
    gcp_creds = serializers.SerializerMethodField()

    def _creds_for(self, obj, isnull_filter, serializer_class):
        """Serialize one provider's credentials for *obj*'s profile.

        Returns "" (not an empty list) when the user has no profile yet,
        matching the contract of the original per-provider methods.
        """
        try:
            creds = obj.userprofile.credentials.filter(
                **{isnull_filter: False}).select_subclasses()
            return serializer_class(instance=creds, many=True,
                                    context=self.context).data
        except models.UserProfile.DoesNotExist:
            return ""

    def get_aws_creds(self, obj):
        """AWS credentials attached to this user's profile."""
        return self._creds_for(obj, 'awscredentials__isnull',
                               AWSCredsSerializer)

    def get_openstack_creds(self, obj):
        """OpenStack credentials attached to this user's profile."""
        return self._creds_for(obj, 'openstackcredentials__isnull',
                               OpenstackCredsSerializer)

    def get_azure_creds(self, obj):
        """Azure credentials attached to this user's profile."""
        return self._creds_for(obj, 'azurecredentials__isnull',
                               AzureCredsSerializer)

    def get_gcp_creds(self, obj):
        """GCP credentials attached to this user's profile."""
        return self._creds_for(obj, 'gcpcredentials__isnull',
                               GCPCredsSerializer)

    class Meta(UserDetailsSerializer.Meta):
        fields = UserDetailsSerializer.Meta.fields + \
            ('aws_creds', 'openstack_creds', 'azure_creds', 'gcp_creds',
             'credentials')
| [
"rest_framework.serializers.IntegerField",
"rest_framework.serializers.SerializerMethodField",
"rest_framework.serializers.ValidationError",
"rest_framework.serializers.IPAddressField",
"rest_framework.serializers.FileField",
"rest_framework.serializers.CharField",
"urllib.parse.urljoin",
"rest_framew... | [((486, 523), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (507, 523), False, 'from rest_framework import serializers\n'), ((535, 572), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (556, 572), False, 'from rest_framework import serializers\n'), ((632, 669), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (653, 669), False, 'from rest_framework import serializers\n'), ((870, 907), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (891, 907), False, 'from rest_framework import serializers\n'), ((1167, 1204), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1188, 1204), False, 'from rest_framework import serializers\n'), ((1412, 1449), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1433, 1449), False, 'from rest_framework import serializers\n'), ((1462, 1485), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (1483, 1485), False, 'from rest_framework import serializers\n'), ((1504, 1527), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (1525, 1527), False, 'from rest_framework import serializers\n'), ((1588, 1625), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1609, 1625), False, 'from rest_framework import serializers\n'), ((1827, 1863), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (1848, 1863), False, 'from rest_framework import serializers\n'), ((1879, 1916), 'rest_framework.serializers.CharField', 
'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (1900, 1916), False, 'from rest_framework import serializers\n'), ((2184, 2223), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'allow_blank': '(True)'}), '(allow_blank=True)\n', (2205, 2223), False, 'from rest_framework import serializers\n'), ((2240, 2279), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'allow_blank': '(True)'}), '(allow_blank=True)\n', (2261, 2279), False, 'from rest_framework import serializers\n'), ((2294, 2333), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'allow_blank': '(True)'}), '(allow_blank=True)\n', (2315, 2333), False, 'from rest_framework import serializers\n'), ((2345, 2398), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'label': '"""CIDR"""', 'allow_blank': '(True)'}), "(label='CIDR', allow_blank=True)\n", (2366, 2398), False, 'from rest_framework import serializers\n'), ((4721, 4758), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (4742, 4758), False, 'from rest_framework import serializers\n'), ((4964, 5001), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (4985, 5001), False, 'from rest_framework import serializers\n'), ((5014, 5050), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (5035, 5050), False, 'from rest_framework import serializers\n'), ((5283, 5320), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(False)'}), '(required=False)\n', (5304, 5320), False, 'from rest_framework import serializers\n'), ((6407, 6444), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (6428, 6444), False, 'from rest_framework import serializers\n'), ((6646, 
6683), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (6667, 6683), False, 'from rest_framework import serializers\n'), ((6696, 6732), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (6717, 6732), False, 'from rest_framework import serializers\n'), ((6745, 6782), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (6766, 6782), False, 'from rest_framework import serializers\n'), ((6800, 6823), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (6821, 6823), False, 'from rest_framework import serializers\n'), ((7995, 8032), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (8016, 8032), False, 'from rest_framework import serializers\n'), ((8247, 8284), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (8268, 8284), False, 'from rest_framework import serializers\n'), ((8297, 8333), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (8318, 8333), False, 'from rest_framework import serializers\n'), ((8351, 8374), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (8372, 8374), False, 'from rest_framework import serializers\n'), ((8392, 8429), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (8413, 8429), False, 'from rest_framework import serializers\n'), ((9078, 9115), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (9099, 9115), False, 'from rest_framework import serializers\n'), ((9482, 9519), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], 
{'read_only': '(True)'}), '(read_only=True)\n', (9503, 9519), False, 'from rest_framework import serializers\n'), ((9720, 9757), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (9741, 9757), False, 'from rest_framework import serializers\n'), ((9770, 9806), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (9791, 9806), False, 'from rest_framework import serializers\n'), ((9819, 9856), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (9840, 9856), False, 'from rest_framework import serializers\n'), ((10651, 10688), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (10672, 10688), False, 'from rest_framework import serializers\n'), ((10904, 10927), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (10925, 10927), False, 'from rest_framework import serializers\n'), ((10940, 10977), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (10961, 10977), False, 'from rest_framework import serializers\n'), ((10995, 11032), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (11016, 11032), False, 'from rest_framework import serializers\n'), ((11658, 11695), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (11679, 11695), False, 'from rest_framework import serializers\n'), ((11705, 11742), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (11726, 11742), False, 'from rest_framework import serializers\n'), ((11755, 11792), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': 
'(True)'}), '(read_only=True)\n', (11776, 11792), False, 'from rest_framework import serializers\n'), ((11852, 11889), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (11873, 11889), False, 'from rest_framework import serializers\n'), ((12060, 12083), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12081, 12083), False, 'from rest_framework import serializers\n'), ((12097, 12120), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12118, 12120), False, 'from rest_framework import serializers\n'), ((12133, 12156), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12154, 12156), False, 'from rest_framework import serializers\n'), ((12167, 12190), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12188, 12190), False, 'from rest_framework import serializers\n'), ((12212, 12235), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12233, 12235), False, 'from rest_framework import serializers\n'), ((12263, 12286), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12284, 12286), False, 'from rest_framework import serializers\n'), ((12313, 12336), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12334, 12336), False, 'from rest_framework import serializers\n'), ((12359, 12382), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12380, 12382), False, 'from rest_framework import serializers\n'), ((12518, 12555), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (12539, 12555), False, 'from rest_framework import serializers\n'), ((13058, 13095), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (13079, 
13095), False, 'from rest_framework import serializers\n'), ((13296, 13333), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (13317, 13333), False, 'from rest_framework import serializers\n'), ((13346, 13382), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (13367, 13382), False, 'from rest_framework import serializers\n'), ((13401, 13440), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'allow_blank': '(True)'}), '(allow_blank=True)\n', (13422, 13440), False, 'from rest_framework import serializers\n'), ((13452, 13489), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'min_value': '(0)'}), '(min_value=0)\n', (13476, 13489), False, 'from rest_framework import serializers\n'), ((13508, 13545), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (13529, 13545), False, 'from rest_framework import serializers\n'), ((13743, 13780), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (13764, 13780), False, 'from rest_framework import serializers\n'), ((15161, 15198), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (15182, 15198), False, 'from rest_framework import serializers\n'), ((15401, 15438), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (15422, 15438), False, 'from rest_framework import serializers\n'), ((15451, 15487), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (15472, 15487), False, 'from rest_framework import serializers\n'), ((15506, 15529), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (15527, 
15529), False, 'from rest_framework import serializers\n'), ((15542, 15579), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (15563, 15579), False, 'from rest_framework import serializers\n'), ((15826, 15863), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (15847, 15863), False, 'from rest_framework import serializers\n'), ((15875, 15928), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'min_value': '(0)', 'read_only': '(True)'}), '(min_value=0, read_only=True)\n', (15899, 15928), False, 'from rest_framework import serializers\n'), ((16878, 16915), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (16899, 16915), False, 'from rest_framework import serializers\n'), ((17118, 17155), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (17139, 17155), False, 'from rest_framework import serializers\n'), ((17168, 17204), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (17189, 17204), False, 'from rest_framework import serializers\n'), ((18993, 19096), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'write_only': '(True)', 'allow_blank': '(True)', 'style': "{'base_template': 'textarea.html'}"}), "(write_only=True, allow_blank=True, style={\n 'base_template': 'textarea.html'})\n", (19014, 19096), False, 'from rest_framework import serializers\n'), ((20351, 20388), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (20372, 20388), False, 'from rest_framework import serializers\n'), ((20589, 20612), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (20610, 20612), False, 'from 
rest_framework import serializers\n'), ((21184, 21221), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (21205, 21221), False, 'from rest_framework import serializers\n'), ((21233, 21272), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'allow_blank': '(True)'}), '(allow_blank=True)\n', (21254, 21272), False, 'from rest_framework import serializers\n'), ((21284, 21324), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (21308, 21324), False, 'from rest_framework import serializers\n'), ((21345, 21382), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (21366, 21382), False, 'from rest_framework import serializers\n'), ((21610, 21645), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (21643, 21645), False, 'from rest_framework import serializers\n'), ((21667, 21705), 'rest_framework.serializers.FileField', 'serializers.FileField', ([], {'write_only': '(True)'}), '(write_only=True)\n', (21688, 21705), False, 'from rest_framework import serializers\n'), ((23168, 23205), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (23189, 23205), False, 'from rest_framework import serializers\n'), ((23872, 23907), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (23905, 23907), False, 'from rest_framework import serializers\n'), ((23925, 23960), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (23958, 23960), False, 'from rest_framework import serializers\n'), ((23978, 24013), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (24011, 24013), False, 'from 
rest_framework import serializers\n'), ((27930, 27970), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (27954, 27970), False, 'from rest_framework import serializers\n'), ((27988, 28080), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'style': "{'input_type': 'password'}", 'write_only': '(True)', 'required': '(False)'}), "(style={'input_type': 'password'}, write_only=True,\n required=False)\n", (28009, 28080), False, 'from rest_framework import serializers\n'), ((28122, 28160), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'write_only': '(True)'}), '(write_only=True)\n', (28143, 28160), False, 'from rest_framework import serializers\n'), ((28379, 28419), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (28403, 28419), False, 'from rest_framework import serializers\n'), ((28435, 28527), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'style': "{'input_type': 'password'}", 'write_only': '(True)', 'required': '(False)'}), "(style={'input_type': 'password'}, write_only=True,\n required=False)\n", (28456, 28527), False, 'from rest_framework import serializers\n'), ((28569, 28607), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'write_only': '(True)'}), '(write_only=True)\n', (28590, 28607), False, 'from rest_framework import serializers\n'), ((28828, 28868), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (28852, 28868), False, 'from rest_framework import serializers\n'), ((28882, 28974), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'style': "{'input_type': 'password'}", 'write_only': '(True)', 'required': '(False)'}), "(style={'input_type': 'password'}, write_only=True,\n required=False)\n", (28903, 28974), False, 'from 
rest_framework import serializers\n'), ((29016, 29054), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'write_only': '(True)'}), '(write_only=True)\n', (29037, 29054), False, 'from rest_framework import serializers\n'), ((29269, 29309), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (29293, 29309), False, 'from rest_framework import serializers\n'), ((29328, 29424), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'write_only': '(True)', 'style': "{'base_template': 'textarea.html', 'rows': 20}"}), "(write_only=True, style={'base_template':\n 'textarea.html', 'rows': 20})\n", (29349, 29424), False, 'from rest_framework import serializers\n'), ((29459, 29497), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'write_only': '(True)'}), '(write_only=True)\n', (29480, 29497), False, 'from rest_framework import serializers\n'), ((29997, 30034), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (30018, 30034), False, 'from rest_framework import serializers\n'), ((30049, 30086), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (30070, 30086), False, 'from rest_framework import serializers\n'), ((30573, 30608), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (30606, 30608), False, 'from rest_framework import serializers\n'), ((30631, 30666), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (30664, 30666), False, 'from rest_framework import serializers\n'), ((30685, 30720), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (30718, 30720), False, 'from rest_framework import serializers\n'), ((30737, 30772), 
'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (30770, 30772), False, 'from rest_framework import serializers\n'), ((12422, 12445), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (12443, 12445), False, 'from rest_framework import serializers\n'), ((17244, 17272), 'rest_framework.serializers.IPAddressField', 'serializers.IPAddressField', ([], {}), '()\n', (17270, 17272), False, 'from rest_framework import serializers\n'), ((17314, 17342), 'rest_framework.serializers.IPAddressField', 'serializers.IPAddressField', ([], {}), '()\n', (17340, 17342), False, 'from rest_framework import serializers\n'), ((21912, 22009), 'rest_framework.reverse.reverse', 'reverse', (['"""djcloudbridge:bucketobject-detail"""'], {'kwargs': 'kwargs', 'request': "self.context['request']"}), "('djcloudbridge:bucketobject-detail', kwargs=kwargs, request=self.\n context['request'])\n", (21919, 22009), False, 'from rest_framework.reverse import reverse\n'), ((22072, 22119), 'urllib.parse.urljoin', 'urllib.parse.urljoin', (['obj_url', '"""?format=binary"""'], {}), "(obj_url, '?format=binary')\n", (22092, 22119), False, 'import urllib\n'), ((3008, 3079), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""Protocol must be one of: tcp, udp, icmp."""'], {}), "('Protocol must be one of: tcp, udp, icmp.')\n", (3035, 3079), False, 'from rest_framework import serializers\n'), ((3190, 3275), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""From port must be an integer between 1 and 65535."""'], {}), "('From port must be an integer between 1 and 65535.'\n )\n", (3217, 3275), False, 'from rest_framework import serializers\n'), ((3517, 3579), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""To/from ports must be integers."""'], {}), "('To/from ports must be integers.')\n", (3544, 3579), False, 'from rest_framework 
import serializers\n'), ((3372, 3450), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""To port must be an integer between 1 and 65535."""'], {}), "('To port must be an integer between 1 and 65535.')\n", (3399, 3450), False, 'from rest_framework import serializers\n')] |
# CircuitPlaygroundExpress_LightSensor
# reads the on-board light sensor and graphs the brightness with NeoPixels
import time
from adafruit_circuitplayground.express import cpx
from simpleio import map_range
# Dim the NeoPixels so the bar graph is comfortable to look at indoors.
cpx.pixels.brightness = 0.05
while True:
    # light value remaped to pixel position:
    # map_range clamps/rescales the raw reading (10..325) onto 0..9,
    # i.e. one step per NeoPixel position on the board.
    peak = map_range(cpx.light, 10, 325, 0, 9)
    print(cpx.light)
    print(int(peak))
    # Bar-graph style: light pixels up to the peak, clear the rest.
    # NOTE(review): range(0, 9, 1) only touches pixels 0-8 while peak can
    # reach 9, so the last pixel is never updated — possible off-by-one.
    for i in range(0, 9, 1):
        if i <= peak:
            cpx.pixels[i] = (0, 255, 0)
        else:
            cpx.pixels[i] = (0, 0, 0)
    # Brief pause between refreshes of the display loop.
    time.sleep(0.01)
| [
"simpleio.map_range",
"time.sleep"
] | [((307, 342), 'simpleio.map_range', 'map_range', (['cpx.light', '(10)', '(325)', '(0)', '(9)'], {}), '(cpx.light, 10, 325, 0, 9)\n', (316, 342), False, 'from simpleio import map_range\n'), ((534, 550), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (544, 550), False, 'import time\n')] |
"""
Copyright (c) 2019-present NAVER Corp.
MIT License
"""
import os
import sys
import json
import logging
import argparse
import pickle
from tqdm import tqdm
from dataset import read_data, PrefixDataset
from trie import Trie
from metric import calc_rank, calc_partial_rank, mrr_summary, mrl_summary
# Module-wide logging: timestamped INFO-level messages.
logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%m/%d/%Y %H:%M:%S', level=logging.INFO)
logger = logging.getLogger(__name__)
def get_args():
    """Build the MPC command-line interface and parse ``sys.argv``.

    Returns:
        argparse.Namespace with data locations, length filters,
        candidate/frequency limits, the --train flag, and the trie path.
    """
    parser = argparse.ArgumentParser(description="Most Popular Completion")
    parser.add_argument('--data_dir', default="data/aol/full")
    # All length/count thresholds share the same integer option shape.
    for flag, default in (('--min_len', 3),
                          ('--min_prefix_len', 2),
                          ('--min_suffix_len', 1),
                          ('--n_candidates', 10),
                          ('--min_freq', 1)):
        parser.add_argument(flag, type=int, default=default)
    parser.add_argument('--train', action='store_true')
    parser.add_argument('--model_path', default="models/mpc/trie.pkl")
    return parser.parse_args()
def main(args):
    """Build (or load) an MPC prefix trie and evaluate it on test queries.

    For every (query, prefix) pair drawn from the test set, records the
    rank and partial rank of the true query among the trie's
    most-popular-completion candidates, then logs MRR/MRL summaries.

    Args:
        args: argparse.Namespace produced by ``get_args()``.
    """
    logger.info(f"Args: {json.dumps(args.__dict__, indent=2, sort_keys=True)}")
    logger.info("Reading train dataset")
    train_data = read_data(os.path.join(args.data_dir, f"train.query.txt"), min_len=args.min_len)
    logger.info(f"  Number of train data: {len(train_data):8d}")
    # Queries observed during training; used to split seen/unseen metrics.
    seen_set = set(train_data)
    if not args.train and os.path.isfile(args.model_path):
        logger.info(f"Loading trie at {args.model_path}")
        # Fix: close the file handle after unpickling (was left open).
        with open(args.model_path, 'rb') as f:
            trie = pickle.load(f)
    else:
        logger.info("Making trie")
        trie = Trie(train_data)
        os.makedirs(os.path.dirname(args.model_path), exist_ok=True)
        logger.info(f"Saving trie at {args.model_path}")
        # Deep tries can exceed the default recursion limit while pickling.
        sys.setrecursionlimit(100000)
        # Fix: close the file handle after pickling (was left open).
        with open(args.model_path, 'wb') as f:
            pickle.dump(trie, f)
    logger.info("Reading test dataset")
    test_data = read_data(os.path.join(args.data_dir, f"test.query.txt"), min_len=args.min_len)
    logger.info(f"  Number of test data: {len(test_data):8d}")
    logger.info("Evaluating MPC")
    test_dataset = PrefixDataset(test_data, args.min_prefix_len, args.min_suffix_len)
    seens = []
    ranks = []
    pranks = []
    rls = []
    for query, prefix in tqdm(test_dataset):
        seen = int(query in seen_set)
        completions = trie.get_mpc(prefix, n_candidates=args.n_candidates, min_freq=args.min_freq)
        rank = calc_rank(query, completions)
        prank = calc_partial_rank(query, completions)
        # rl[j] appears to count how many trailing-character deletions still
        # recover the query within the top-j candidates — presumably consumed
        # by mrl_summary; TODO confirm against the metric module.
        rl = [0 for _ in range(args.n_candidates + 1)]
        if seen:
            for i in range(1, len(query) + 1):
                r = calc_rank(query, trie.get_mpc(query[:-i]))
                if r == 0:
                    # Query no longer recoverable from this shorter prefix.
                    break
                else:
                    for j in range(r, args.n_candidates + 1):
                        rl[j] += 1
        seens.append(seen)
        ranks.append(rank)
        pranks.append(prank)
        rls.append(rl)
    mrr_logs = mrr_summary(ranks, pranks, seens, args.n_candidates)
    mrl_logs = mrl_summary(rls, seens, args.n_candidates)
    for log in mrr_logs + mrl_logs:
        logger.info(log)
# Script entry point: parse CLI options and run the MPC evaluation.
if __name__ == "__main__":
    main(get_args())
| [
"logging.basicConfig",
"logging.getLogger",
"trie.Trie",
"sys.setrecursionlimit",
"argparse.ArgumentParser",
"metric.mrr_summary",
"metric.calc_partial_rank",
"tqdm.tqdm",
"metric.mrl_summary",
"dataset.PrefixDataset",
"os.path.join",
"json.dumps",
"os.path.isfile",
"os.path.dirname",
"m... | [((305, 415), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(message)s"""', 'datefmt': '"""%m/%d/%Y %H:%M:%S"""', 'level': 'logging.INFO'}), "(format='%(asctime)s - %(message)s', datefmt=\n '%m/%d/%Y %H:%M:%S', level=logging.INFO)\n", (324, 415), False, 'import logging\n'), ((420, 447), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (437, 447), False, 'import logging\n'), ((479, 541), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Most Popular Completion"""'}), "(description='Most Popular Completion')\n", (502, 541), False, 'import argparse\n'), ((2152, 2218), 'dataset.PrefixDataset', 'PrefixDataset', (['test_data', 'args.min_prefix_len', 'args.min_suffix_len'], {}), '(test_data, args.min_prefix_len, args.min_suffix_len)\n', (2165, 2218), False, 'from dataset import read_data, PrefixDataset\n'), ((2303, 2321), 'tqdm.tqdm', 'tqdm', (['test_dataset'], {}), '(test_dataset)\n', (2307, 2321), False, 'from tqdm import tqdm\n'), ((3036, 3088), 'metric.mrr_summary', 'mrr_summary', (['ranks', 'pranks', 'seens', 'args.n_candidates'], {}), '(ranks, pranks, seens, args.n_candidates)\n', (3047, 3088), False, 'from metric import calc_rank, calc_partial_rank, mrr_summary, mrl_summary\n'), ((3104, 3146), 'metric.mrl_summary', 'mrl_summary', (['rls', 'seens', 'args.n_candidates'], {}), '(rls, seens, args.n_candidates)\n', (3115, 3146), False, 'from metric import calc_rank, calc_partial_rank, mrr_summary, mrl_summary\n'), ((1259, 1306), 'os.path.join', 'os.path.join', (['args.data_dir', 'f"""train.query.txt"""'], {}), "(args.data_dir, f'train.query.txt')\n", (1271, 1306), False, 'import os\n'), ((1453, 1484), 'os.path.isfile', 'os.path.isfile', (['args.model_path'], {}), '(args.model_path)\n', (1467, 1484), False, 'import os\n'), ((1660, 1676), 'trie.Trie', 'Trie', (['train_data'], {}), '(train_data)\n', (1664, 1676), False, 'from trie import Trie\n'), ((1812, 1841), 
'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(100000)'], {}), '(100000)\n', (1833, 1841), False, 'import sys\n'), ((1964, 2010), 'os.path.join', 'os.path.join', (['args.data_dir', 'f"""test.query.txt"""'], {}), "(args.data_dir, f'test.query.txt')\n", (1976, 2010), False, 'import os\n'), ((2475, 2504), 'metric.calc_rank', 'calc_rank', (['query', 'completions'], {}), '(query, completions)\n', (2484, 2504), False, 'from metric import calc_rank, calc_partial_rank, mrr_summary, mrl_summary\n'), ((2521, 2558), 'metric.calc_partial_rank', 'calc_partial_rank', (['query', 'completions'], {}), '(query, completions)\n', (2538, 2558), False, 'from metric import calc_rank, calc_partial_rank, mrr_summary, mrl_summary\n'), ((1698, 1730), 'os.path.dirname', 'os.path.dirname', (['args.model_path'], {}), '(args.model_path)\n', (1713, 1730), False, 'import os\n'), ((1135, 1186), 'json.dumps', 'json.dumps', (['args.__dict__'], {'indent': '(2)', 'sort_keys': '(True)'}), '(args.__dict__, indent=2, sort_keys=True)\n', (1145, 1186), False, 'import json\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 16 18:18:29 2020
@author: xuhuiying
"""
import numpy as np
import matplotlib.pyplot as plt
def plotHistory(history, times, xLabelText, yLabelText, legendText):
    """Plot every series contained in *history* against iteration number.

    Args:
        history: 2-D sequence indexed as [iteration][series]; it is
            transposed so that each row becomes one plotted line.
        times: number of iterations shown on the x axis.
        xLabelText: label for the x axis.
        yLabelText: label for the y axis.
        legendText: prefix used to label each line ("<legendText> 1", ...).
    """
    series = np.array(history).T  # one row per plotted series
    iterations = range(0, times)
    for idx in range(series.shape[0]):
        plt.plot(iterations, series[idx], label="%s %d" % (legendText, idx + 1))
    plt.xlabel(xLabelText, fontsize=8)
    plt.ylabel(yLabelText, fontsize=8)
    plt.tick_params(labelsize=8)
| [
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tick_params",
"matplotlib.pyplot.plot",
"numpy.array"
] | [((273, 290), 'numpy.array', 'np.array', (['history'], {}), '(history)\n', (281, 290), True, 'import numpy as np\n'), ((589, 623), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xLabelText'], {'fontsize': '(8)'}), '(xLabelText, fontsize=8)\n', (599, 623), True, 'import matplotlib.pyplot as plt\n'), ((629, 663), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['yLabelText'], {'fontsize': '(8)'}), '(yLabelText, fontsize=8)\n', (639, 663), True, 'import matplotlib.pyplot as plt\n'), ((669, 697), 'matplotlib.pyplot.tick_params', 'plt.tick_params', ([], {'labelsize': '(8)'}), '(labelsize=8)\n', (684, 697), True, 'import matplotlib.pyplot as plt\n'), ((448, 516), 'matplotlib.pyplot.plot', 'plt.plot', (['iteration', 'history[j]'], {'label': "('%s %d' % (legendText, j + 1))"}), "(iteration, history[j], label='%s %d' % (legendText, j + 1))\n", (456, 516), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python
from VMParser import Parser
from VMCodewriter import CodeWriter
from pathlib import Path
import sys
def processDirectory(inputPath):
    """Translate every .vm file inside *inputPath* into one assembly listing.

    Emits the bootstrap header once (named after the directory), then
    appends the translation of each .vm file found in the directory.
    """
    writer = CodeWriter(str(inputPath.stem))
    output = writer.initHeader()
    for vm_file in inputPath.glob("*.vm"):
        output += processFile(vm_file)
    return output
def processFile(inputPath):
    """Translate a single .vm file into Hack assembly lines."""
    vm_parser = Parser(inputPath)
    base_name = str(inputPath.stem)
    # Materialise all parsed VM commands before code generation.
    commands = list(vm_parser.parse())
    writer = CodeWriter(base_name)
    return writer.writeCode(commands)
def main():
    """Command-line entry point for the VM translator.

    Translates a single ``.vm`` file, or every ``.vm`` file in a
    directory, into a ``.asm`` file written alongside the input.
    """
    if len(sys.argv) < 2:
        print("Enter VM file name or directory")
        print("Example:")
        print("{0} path_to_file.vm".format(sys.argv[0]))
        print("{0} path_to_dir".format(sys.argv[0]))
        input("Press Enter to exit...")
        # Bug fix: previously execution fell through to sys.argv[1]
        # and raised IndexError instead of exiting cleanly.
        return None
    realPath = Path(sys.argv[1]).resolve()
    if realPath.is_dir():
        # Directory mode: the output file is named after the directory itself.
        outName = str(realPath / realPath.name)
        output = processDirectory(realPath)
    elif realPath.suffix == ".vm":
        outName = str(realPath.parent / realPath.stem)
        output = processFile(realPath)
    else:
        print("Input file must be of .vm extension")
        return None
    # Bug fix: use a context manager so the output handle is always closed
    # (it was previously opened and never closed in either branch).
    with open("{0}.asm".format(outName), 'w') as outFile:
        outFile.write('\n'.join(output))
# Run the translator when invoked as a script.
if __name__ == "__main__":
    main()
| [
"VMParser.Parser",
"VMCodewriter.CodeWriter",
"pathlib.Path.resolve",
"pathlib.Path"
] | [((207, 227), 'VMCodewriter.CodeWriter', 'CodeWriter', (['fileName'], {}), '(fileName)\n', (217, 227), False, 'from VMCodewriter import CodeWriter\n'), ((393, 410), 'VMParser.Parser', 'Parser', (['inputPath'], {}), '(inputPath)\n', (399, 410), False, 'from VMParser import Parser\n'), ((514, 534), 'VMCodewriter.CodeWriter', 'CodeWriter', (['fileName'], {}), '(fileName)\n', (524, 534), False, 'from VMCodewriter import CodeWriter\n'), ((860, 877), 'pathlib.Path', 'Path', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (864, 877), False, 'from pathlib import Path\n'), ((893, 916), 'pathlib.Path.resolve', 'Path.resolve', (['inputPath'], {}), '(inputPath)\n', (905, 916), False, 'from pathlib import Path\n')] |
import inspect
from unittest.mock import Mock
from _pytest.monkeypatch import MonkeyPatch
from rasa.core.policies.ted_policy import TEDPolicy
from rasa.engine.training import fingerprinting
from rasa.nlu.classifiers.diet_classifier import DIETClassifier
from rasa.nlu.selectors.response_selector import ResponseSelector
from tests.engine.training.test_components import FingerprintableText
def test_fingerprint_stays_same():
    """Two identical (class, config, inputs) triples give the same key."""

    def build_key():
        return fingerprinting.calculate_fingerprint_key(
            TEDPolicy,
            TEDPolicy.get_default_config(),
            {"input": FingerprintableText("Hi")},
        )

    assert build_key() == build_key()
def test_fingerprint_changes_due_to_class():
    """Swapping only the component class must change the fingerprint key."""
    keys = []
    for component_class in (DIETClassifier, ResponseSelector):
        keys.append(
            fingerprinting.calculate_fingerprint_key(
                component_class,
                TEDPolicy.get_default_config(),
                {"input": FingerprintableText("Hi")},
            )
        )
    assert keys[0] != keys[1]
def test_fingerprint_changes_due_to_config():
    """Fingerprint key must differ when the component config differs.

    NOTE(review): key2 also swaps the class (TEDPolicy -> ResponseSelector),
    so this actually varies class AND config at once; a stricter version
    would keep TEDPolicy for both keys and change only the config.
    """
    key1 = fingerprinting.calculate_fingerprint_key(
        TEDPolicy, {}, {"input": FingerprintableText("Hi")},
    )
    key2 = fingerprinting.calculate_fingerprint_key(
        ResponseSelector,
        TEDPolicy.get_default_config(),
        {"input": FingerprintableText("Hi")},
    )
    assert key1 != key2
def test_fingerprint_changes_due_to_inputs():
    """Fingerprint key must differ when the node inputs differ.

    NOTE(review): key2 changes the class, config AND input text ("Hi" vs
    "bye") simultaneously; a stricter version would vary only the input.
    """
    key1 = fingerprinting.calculate_fingerprint_key(
        TEDPolicy, {}, {"input": FingerprintableText("Hi")},
    )
    key2 = fingerprinting.calculate_fingerprint_key(
        ResponseSelector,
        TEDPolicy.get_default_config(),
        {"input": FingerprintableText("bye")},
    )
    assert key1 != key2
def test_fingerprint_changes_due_to_changed_source(monkeypatch: MonkeyPatch):
    """Changing the component's source code must invalidate the fingerprint."""
    key_before = fingerprinting.calculate_fingerprint_key(
        TEDPolicy, {}, {"input": FingerprintableText("Hi")},
    )
    # Pretend the TEDPolicy implementation on disk changed.
    get_source_mock = Mock(return_value="other implementation")
    monkeypatch.setattr(inspect, inspect.getsource.__name__, get_source_mock)
    key_after = fingerprinting.calculate_fingerprint_key(
        TEDPolicy, {}, {"input": FingerprintableText("Hi")},
    )
    assert key_before != key_after
    get_source_mock.assert_called_once_with(TEDPolicy)
| [
"rasa.core.policies.ted_policy.TEDPolicy.get_default_config",
"unittest.mock.Mock",
"tests.engine.training.test_components.FingerprintableText"
] | [((2113, 2154), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '"""other implementation"""'}), "(return_value='other implementation')\n", (2117, 2154), False, 'from unittest.mock import Mock\n'), ((500, 530), 'rasa.core.policies.ted_policy.TEDPolicy.get_default_config', 'TEDPolicy.get_default_config', ([], {}), '()\n', (528, 530), False, 'from rasa.core.policies.ted_policy import TEDPolicy\n'), ((648, 678), 'rasa.core.policies.ted_policy.TEDPolicy.get_default_config', 'TEDPolicy.get_default_config', ([], {}), '()\n', (676, 678), False, 'from rasa.core.policies.ted_policy import TEDPolicy\n'), ((881, 911), 'rasa.core.policies.ted_policy.TEDPolicy.get_default_config', 'TEDPolicy.get_default_config', ([], {}), '()\n', (909, 911), False, 'from rasa.core.policies.ted_policy import TEDPolicy\n'), ((1052, 1082), 'rasa.core.policies.ted_policy.TEDPolicy.get_default_config', 'TEDPolicy.get_default_config', ([], {}), '()\n', (1080, 1082), False, 'from rasa.core.policies.ted_policy import TEDPolicy\n'), ((1416, 1446), 'rasa.core.policies.ted_policy.TEDPolicy.get_default_config', 'TEDPolicy.get_default_config', ([], {}), '()\n', (1444, 1446), False, 'from rasa.core.policies.ted_policy import TEDPolicy\n'), ((1780, 1810), 'rasa.core.policies.ted_policy.TEDPolicy.get_default_config', 'TEDPolicy.get_default_config', ([], {}), '()\n', (1808, 1810), False, 'from rasa.core.policies.ted_policy import TEDPolicy\n'), ((542, 567), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (561, 567), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((690, 715), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (709, 715), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((931, 956), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), 
"('Hi')\n", (950, 956), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((1102, 1127), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (1121, 1127), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((1295, 1320), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (1314, 1320), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((1466, 1491), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (1485, 1491), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((1659, 1684), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (1678, 1684), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((1830, 1856), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""bye"""'], {}), "('bye')\n", (1849, 1856), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((2056, 2081), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (2075, 2081), False, 'from tests.engine.training.test_components import FingerprintableText\n'), ((2320, 2345), 'tests.engine.training.test_components.FingerprintableText', 'FingerprintableText', (['"""Hi"""'], {}), "('Hi')\n", (2339, 2345), False, 'from tests.engine.training.test_components import FingerprintableText\n')] |
"""
Command-line driver example for SMIRKY.
"""
import sys
import string
import time
from optparse import OptionParser # For parsing of command line arguments
import smarty
from openforcefield.utils import utils
import os
import math
import copy
import re
import numpy
from numpy import random
def main():
    """Command-line driver for SMIRKY fragment-type sampling.

    Parses options, loads molecules and odds files, builds a
    ``smarty.FragmentSampler``, runs the requested number of Monte Carlo
    iterations, then writes results and a score plot.
    """
    # Create command-line argument options.
    usage_string = """\
    Sample over fragment types (atoms, bonds, angles, torsions, or impropers)
    optionally attempting to match created types to an established SMIRFF.
    For all files left blank, they will be taken from this module's
    data/odds_files/ subdirectory.
    usage %prog --molecules molfile --typetag fragmentType
            [--atomORbases AtomORbaseFile --atomORdecors AtomORdecorFile
            --atomANDdecors AtomANDdecorFile --bondORbase BondORbaseFile
            --bondANDdecors BondANDdecorFile --atomIndexOdds AtomIndexFile
            --bondIndexOdds BondIndexFile --replacements substitutions
            --initialtypes initialFragmentsFile --SMIRFF referenceSMIRFF
            --temperature float --verbose verbose
            --iterations iterations --output outputFile]
    example:
    smirky --molecules AlkEthOH_test_filt1_ff.mol2 --typetag Angle
    """
    version_string = "%prog %__version__"
    parser = OptionParser(usage=usage_string, version=version_string)
    # --- Required inputs: molecule set and the fragment type to sample ---
    parser.add_option("-m", "--molecules", metavar='MOLECULES',
            action="store", type="string", dest='molecules_filename', default=None,
            help="Small molecule set (in any OpenEye compatible file format) containing 'dG(exp)' fields with experimental hydration free energies. This filename can also be an option in this module's data/molecules sub-directory")
    #TODO: ask about the dG(exp) fields?
    parser.add_option("-T", "--typetag", metavar='TYPETAG',
            action = "store", type="choice", dest='typetag',
            default=None, choices = ['VdW', 'Bond', 'Angle', 'Torsion', 'Improper'],
            help="type of fragment being sampled, options are 'VdW', 'Bond', 'Angle', 'Torsion', 'Improper'")
    # --- Optional odds files controlling how SMIRKS decorators are sampled ---
    parser.add_option('-e', '--atomORbases', metavar="DECORATORS",
            action='store', type='string', dest='atom_OR_bases',
            default = 'odds_files/atom_OR_bases.smarts',
            help="Filename defining atom OR bases and associated probabilities. These are combined with atom OR decorators in SMIRKS, for example in '[#6X4,#7X3;R2:2]' '#6' and '#7' are atom OR bases. (OPTIONAL)")
    parser.add_option("-O", "--atomORdecors", metavar="DECORATORS",
            action='store', type='string', dest='atom_OR_decorators',
            default = 'odds_files/atom_decorators.smarts',
            help="Filename defining atom OR decorators and associated probabilities. These are combined with atom bases in SMIRKS, for example in '[#6X4,#7X3;R2:2]' 'X4' and 'X3' are ORdecorators. (OPTIONAL)")
    parser.add_option('-A', '--atomANDdecors', metavar="DECORATORS",
            action='store', type='string', dest='atom_AND_decorators',
            default='odds_files/atom_decorators.smarts',
            help="Filename defining atom AND decorators and associated probabilities. These are added to the end of an atom's SMIRKS, for example in '[#6X4,#7X3;R2:2]' 'R2' is an AND decorator. (OPTIONAL)")
    parser.add_option('-o', '--bondORbase', metavar="DECORATORS",
            action='store', type='string', dest='bond_OR_bases',
            default='odds_files/bond_OR_bases.smarts',
            help="Filename defining bond OR bases and their associated probabilities. These are OR'd together to describe a bond, for example in '[#6]-,=;@[#6]' '-' and '=' are OR bases. (OPTIONAL)")
    parser.add_option('-a', '--bondANDdecors', metavar="DECORATORS",
            action="store", type='string', dest='bond_AND_decorators',
            default='odds_files/bond_AND_decorators.smarts',
            help="Filename defining bond AND decorators and their associated probabilities. These are AND'd to the end of a bond, for example in '[#6]-,=;@[#7]' '@' is an AND decorator.(OPTIONAL)")
    parser.add_option('-D', '--atomOddsFile', metavar="ODDSFILE",
            action="store", type="string", dest="atom_odds",
            default='odds_files/atom_index_odds.smarts',
            help="Filename defining atom descriptors and probabilities with making changes to that kind of atom. Options for descriptors are integers corresponding to that indexed atom, 'Indexed', 'Unindexed', 'Alpha', 'Beta', 'All'. (OPTIONAL)")
    parser.add_option('-d', '--bondOddsFile', metavar="ODDSFILE",
            action="store", type="string", dest="bond_odds",
            default='odds_files/bond_index_odds.smarts',
            help="Filename defining bond descriptors and probabilities with making changes to that kind of bond. Options for descriptors are integers corresponding to that indexed bond, 'Indexed', 'Unindexed', 'Alpha', 'Beta', 'All'. (OPTIONAL)")
    parser.add_option("-s", "--substitutions", metavar="SUBSTITUTIONS",
            action="store", type="string", dest='substitutions_filename',
            default=None,
            help="Filename defining substitution definitions for SMARTS atom matches. (OPTIONAL).")
    parser.add_option("-f", "--initialtypes", metavar='INITIALTYPES',
            action="store", type="string", dest='initialtypes_filename',
            default=None,
            help="Filename defining initial fragment types. The file is formatted with two columns: 'SMIRKS typename'. For the default the initial type will be a generic form of the given fragment, for example '[*:1]~[*:2]' for a bond (OPTIONAL)")
    # NOTE(review): "force fielce" below is a typo ("force field") in the
    # user-visible help string; left unchanged here since it is runtime text.
    parser.add_option('-r', '--smirff', metavar='REFERENCE',
            action='store', type='string', dest='SMIRFF',
            default=None,
            help="Filename defining a SMIRFF force fielce used to determine reference fragment types in provided set of molecules. It may be an absolute file path, a path relative to the current working directory, or a path relative to this module's data subdirectory (for built in force fields). (OPTIONAL)")
    # --- Sampler run parameters ---
    parser.add_option("-i", "--iterations", metavar='ITERATIONS',
            action="store", type="int", dest='iterations',
            default=150,
            help="MCMC iterations.")
    parser.add_option("-t", "--temperature", metavar='TEMPERATURE',
            action="store", type="float", dest='temperature',
            default=0.1,
            help="Effective temperature for Monte Carlo acceptance, indicating fractional tolerance of mismatched atoms (default: 0.1). If 0 is specified, will behave in a greedy manner.")
    parser.add_option("-p", "--output", metavar='OUTPUT',
            action="store", type="string", dest='outputfile',
            default=None,
            help="Filename base for output information. This same base will be used for all output files created. If None provided then it is set to 'typetag_temperature' (OPTIONAL).")
    parser.add_option('-v', '--verbose', metavar='VERBOSE',
            action='store', type='choice', dest='verbose',
            default=False, choices = ['True', 'False'],
            help="If True prints minimal information to the commandline during iterations. (OPTIONAL)")
    # Parse command-line arguments.
    (option,args) = parser.parse_args()
    # Molecules are required; abort with usage text if missing.
    if option.molecules_filename is None:
        parser.print_help()
        parser.error("Molecules input files must be specified.")
    # --verbose arrives as the string 'True'/'False', not a bool.
    verbose = option.verbose == 'True'
    # Load and type all molecules in the specified dataset.
    molecules = utils.read_molecules(option.molecules_filename, verbose=verbose)
    # Parse input odds files into (choices, probabilities) structures.
    atom_OR_bases = smarty.parse_odds_file(option.atom_OR_bases, verbose)
    atom_OR_decorators = smarty.parse_odds_file(option.atom_OR_decorators, verbose)
    atom_AND_decorators = smarty.parse_odds_file(option.atom_AND_decorators, verbose)
    bond_OR_bases = smarty.parse_odds_file(option.bond_OR_bases, verbose)
    bond_AND_decorators = smarty.parse_odds_file(option.bond_AND_decorators, verbose)
    atom_odds = smarty.parse_odds_file(option.atom_odds, verbose)
    bond_odds = smarty.parse_odds_file(option.bond_odds, verbose)
    # get initial types if provided, otherwise none (sampler starts generic)
    if option.initialtypes_filename is None:
        initialtypes = None
    else:
        initialtypes = smarty.AtomTyper.read_typelist(option.initialtypes_filename)
    # Default output base encodes the fragment type and temperature.
    output = option.outputfile
    if output is None:
        output = "%s_%.2e" % ( option.typetag, option.temperature)
    # get replacements (SMARTS substitution shorthand definitions)
    if option.substitutions_filename is None:
        sub_file = smarty.get_data_filename('odds_files/substitutions.smarts')
    else:
        sub_file = option.substitutions_filename
    replacements = smarty.AtomTyper.read_typelist(sub_file)
    # Swap column order: file stores (smarts, shorthand), sampler wants
    # (shorthand, smarts).
    replacements = [ (short, smarts) for (smarts, short) in replacements]
    start_sampler = time.time()
    fragment_sampler = smarty.FragmentSampler(
            molecules, option.typetag, atom_OR_bases, atom_OR_decorators,
            atom_AND_decorators, bond_OR_bases, bond_AND_decorators,
            atom_odds, bond_odds, replacements, initialtypes,
            option.SMIRFF, option.temperature, output)
    # report time taken to construct the sampler
    finish_sampler = time.time()
    elapsed = finish_sampler - start_sampler
    if verbose: print("Creating %s sampler took %.3f s" % (option.typetag, elapsed))
    # Make iterations; frac_found is the fraction of reference types matched.
    frac_found = fragment_sampler.run(option.iterations, verbose)
    # NOTE(review): `results` is never used; the call is kept for its side
    # effect of writing the results SMARTS file.
    results = fragment_sampler.write_results_smarts_file()
    finished = time.time()
    elapsed = finished - finish_sampler
    per_it = elapsed / float(option.iterations)
    if verbose: print("%i iterations took %.3f s (%.3f s / iteration)" % (option.iterations, elapsed, per_it))
    if verbose: print("Final score was %.3f %%" % (frac_found*100.0))
    # plot results from the trajectory CSV written during the run
    plot_file = "%s.pdf" % output
    traj = "%s.csv" % output
    smarty.score_utils.create_plot_file(traj, plot_file, False, verbose)
| [
"openforcefield.utils.utils.read_molecules",
"optparse.OptionParser",
"smarty.score_utils.create_plot_file",
"smarty.AtomTyper.read_typelist",
"smarty.parse_odds_file",
"smarty.get_data_filename",
"smarty.FragmentSampler",
"time.time"
] | [((1312, 1368), 'optparse.OptionParser', 'OptionParser', ([], {'usage': 'usage_string', 'version': 'version_string'}), '(usage=usage_string, version=version_string)\n', (1324, 1368), False, 'from optparse import OptionParser\n'), ((7633, 7697), 'openforcefield.utils.utils.read_molecules', 'utils.read_molecules', (['option.molecules_filename'], {'verbose': 'verbose'}), '(option.molecules_filename, verbose=verbose)\n', (7653, 7697), False, 'from openforcefield.utils import utils\n'), ((7748, 7801), 'smarty.parse_odds_file', 'smarty.parse_odds_file', (['option.atom_OR_bases', 'verbose'], {}), '(option.atom_OR_bases, verbose)\n', (7770, 7801), False, 'import smarty\n'), ((7827, 7885), 'smarty.parse_odds_file', 'smarty.parse_odds_file', (['option.atom_OR_decorators', 'verbose'], {}), '(option.atom_OR_decorators, verbose)\n', (7849, 7885), False, 'import smarty\n'), ((7912, 7971), 'smarty.parse_odds_file', 'smarty.parse_odds_file', (['option.atom_AND_decorators', 'verbose'], {}), '(option.atom_AND_decorators, verbose)\n', (7934, 7971), False, 'import smarty\n'), ((7992, 8045), 'smarty.parse_odds_file', 'smarty.parse_odds_file', (['option.bond_OR_bases', 'verbose'], {}), '(option.bond_OR_bases, verbose)\n', (8014, 8045), False, 'import smarty\n'), ((8072, 8131), 'smarty.parse_odds_file', 'smarty.parse_odds_file', (['option.bond_AND_decorators', 'verbose'], {}), '(option.bond_AND_decorators, verbose)\n', (8094, 8131), False, 'import smarty\n'), ((8148, 8197), 'smarty.parse_odds_file', 'smarty.parse_odds_file', (['option.atom_odds', 'verbose'], {}), '(option.atom_odds, verbose)\n', (8170, 8197), False, 'import smarty\n'), ((8214, 8263), 'smarty.parse_odds_file', 'smarty.parse_odds_file', (['option.bond_odds', 'verbose'], {}), '(option.bond_odds, verbose)\n', (8236, 8263), False, 'import smarty\n'), ((8832, 8872), 'smarty.AtomTyper.read_typelist', 'smarty.AtomTyper.read_typelist', (['sub_file'], {}), '(sub_file)\n', (8862, 8872), False, 'import smarty\n'), ((8968, 8979), 
'time.time', 'time.time', ([], {}), '()\n', (8977, 8979), False, 'import time\n'), ((9003, 9249), 'smarty.FragmentSampler', 'smarty.FragmentSampler', (['molecules', 'option.typetag', 'atom_OR_bases', 'atom_OR_decorators', 'atom_AND_decorators', 'bond_OR_bases', 'bond_AND_decorators', 'atom_odds', 'bond_odds', 'replacements', 'initialtypes', 'option.SMIRFF', 'option.temperature', 'output'], {}), '(molecules, option.typetag, atom_OR_bases,\n atom_OR_decorators, atom_AND_decorators, bond_OR_bases,\n bond_AND_decorators, atom_odds, bond_odds, replacements, initialtypes,\n option.SMIRFF, option.temperature, output)\n', (9025, 9249), False, 'import smarty\n'), ((9326, 9337), 'time.time', 'time.time', ([], {}), '()\n', (9335, 9337), False, 'import time\n'), ((9631, 9642), 'time.time', 'time.time', ([], {}), '()\n', (9640, 9642), False, 'import time\n'), ((9999, 10067), 'smarty.score_utils.create_plot_file', 'smarty.score_utils.create_plot_file', (['traj', 'plot_file', '(False)', 'verbose'], {}), '(traj, plot_file, False, verbose)\n', (10034, 10067), False, 'import smarty\n'), ((8423, 8483), 'smarty.AtomTyper.read_typelist', 'smarty.AtomTyper.read_typelist', (['option.initialtypes_filename'], {}), '(option.initialtypes_filename)\n', (8453, 8483), False, 'import smarty\n'), ((8694, 8753), 'smarty.get_data_filename', 'smarty.get_data_filename', (['"""odds_files/substitutions.smarts"""'], {}), "('odds_files/substitutions.smarts')\n", (8718, 8753), False, 'import smarty\n')] |
from collections import defaultdict, deque
from datetime import datetime
import pandas as pd
import random
import numpy as np
import sys
sys.path.append("..") # Adds higher directory to python modules path.
from common import Label_DbFields, Synthetic_Category_Group_Names, Other_Synthetic_Group_Names, MultiLabel_Group_Name, Labels, Stations
# NOTE(review): `random` here is numpy's random module — the earlier
# `from numpy import random` shadows the stdlib `import random`. This call
# seeds numpy's global RNG for reproducibility.
random.seed(42)
from fastai.text import *
bs = 256 #224 # size of minibatch
# written with fastai v1.0.48
# Classifiers to apply, in order, to each quarter's snippets.
classifiers_to_run = ['label_category', 'label_usforeign', 'factinvestigative', 'label_tone', 'label_emotion']
# this contains the labels for each classifier
label_set = Labels.copy()
label_set['factinvestigative'] = ['investigative', 'noninvestigative', 'opinion', 'other']
# contains the labels for which a classifier is relevant:
# maps classifier name -> the label_category values whose rows it should score
# (label_category itself runs on all rows, so it has no entry here).
relevant_col_names = {}
relevant_col_names['factinvestigative'] = ['elections_hard', 'elections_soft', 'business_economics', 'government', 'current_events', 'cultural']
relevant_col_names['label_usforeign'] = ['business_economics', 'government', 'current_events', 'sports', 'cultural']
relevant_col_names['label_tone'] = ['elections_hard', 'elections_soft', 'business_economics', 'science_tech', 'government', 'entertainment', 'sports', 'products', 'anecdotes', 'current_events', 'cultural']
relevant_col_names['label_emotion'] = ['elections_hard', 'elections_soft', 'business_economics', 'science_tech', 'government', 'entertainment', 'anecdotes', 'current_events', 'cultural']
def getIndices(names, nameMap):
return tensor(sorted([nameMap.index(name) for name in names]))
modeldir = '/data/fastai-models/selected' # the second best auto-trained model among folds
# write headers for each output file
# (only needed the first time we run, in case this gets aborted midway)
for clas_name in classifiers_to_run:
with open(clas_name + '_stats.csv', 'w') as stats_f:
header = ["quarter_begin_date"]
for station in ['overall'] + Stations:
header += [station+'-'+label for label in (['total'] + label_set[clas_name])]
# if clas_name in ['label_category', 'station']:
for station in ['overall'] + Stations:
header += [station+'-'+cn+'-top_k' for cn in (['sum'] + label_set[clas_name])]
stats_f.write(",".join(header) + "\n")
dataset_rows = {}
quarters = []
for year in range(2010,2017):
for month in ["01", "04", "07", "10"]:
quarter = str(year) + '-' + month + "-01"
quarters += [quarter]
for quarter in quarters:
print('\n\n', str(datetime.now()), 'Reading in snippets for', quarter)
# read in full population of (truecased, preprocessed) snippets for this quarter
df = pd.read_csv('/data/' + quarter + '_snippets.tsv', sep='\t')
dataset_rows = {} # what rows are relevant for a particular classifier (apart from label_category)
for clas_name in classifiers_to_run:
print('Processing', clas_name)
# take subset of rows that are relevant for this classifier; both from the dataset and the original dataframe
if clas_name == 'label_category':
train_df = df
else:
train_df = df.iloc[dataset_rows[clas_name],:]
print(' - loading databunch of size', len(train_df))
learn = load_learner(modeldir, fname=clas_name + '_clas_fine_tuned.pkl',
test= TextList.from_df(train_df, cols='snippet'))
# if clas_name == 'label_category':
# learn = load_learner(modeldir, fname=clas_name + '_clas_fine_tuned.pkl',
# test= TextList.from_df(train_df, cols='snippet'))
# learn.data.save('quarter_temp_data') # need to save rather than leave in memory
# df = df.drop('snippet', axis=1) # remove no-longer-needed column to save memory
# train_df = train_df.drop('snippet', axis=1)
# else:
# learn = load_learner(modeldir, fname=clas_name + '_clas_fine_tuned.pkl')
# loaded_data = load_data(modeldir, 'quarter_temp_data', bs=bs)
# learn.data.test_dl = loaded_data.test_dl
# loaded_data.test_dl = None; loaded_data = None
# learn.data.test_ds.x.filter_subset(dataset_rows[clas_name])
# learn.data.test_ds.y.filter_subset(dataset_rows[clas_name])
# del dataset_rows[clas_name]
# gc.collect()
print(' - running classifier')
preds, _ = learn.get_preds(ds_type=DatasetType.Test, ordered=True) # second return value would be true label (if this weren't a test set)
print(' - analyzing and saving results')
yhat = np.argmax(preds.numpy(), axis=1)
labelcounts = [defaultdict(int) for s in ['overall'] + Stations]
for i, station in enumerate(Stations):
station_yhat = yhat[train_df['station'] == station]
for label in station_yhat:
labelcounts[i+1][label] += 1 # leave index 0 for the overall counts to calculate next
print(" ", preds[:5])
# print(labelcounts[0])
# add 'overall' counts:
for label_idx, _ in enumerate(learn.data.train_ds.classes):
labelcounts[0][label_idx] = sum([labelcounts[i+1][label_idx] for i, __ in enumerate(Stations)])
# translate from NN class index into class name, and make a full list of counts
all_counts_ordered = []
for i, station in enumerate(['overall'] + Stations):
namedlabelcounts = defaultdict(int)
total = 0
for k,v in labelcounts[i].items():
namedlabelcounts[learn.data.train_ds.classes[k]] = v
total += v
print(" ", station, namedlabelcounts)
# label counts in order
counts_ordered = [str(total)] + [("0" if total == 0 else str(float(namedlabelcounts[cn])/total)) for cn in label_set[clas_name]]
all_counts_ordered += counts_ordered
all_summed_likelihoods_ordered = []
# if clas_name in ['label_category', 'station']:
# tbd: how to handle binary classifiers: ads, transitions, nonsense, investigative (currently not being run)
# calculate categories using top-k precision
k = 3 if clas_name in ['label_category', 'station', 'supergroups'] else 2
likelihoods, posns = preds.topk(k, dim=-1, sorted=False)
# scale predictions so that top 3 likelihoods sum to 1
norm_factors = 1. / likelihoods.sum(dim=-1)
likelihoods = norm_factors * likelihoods.transpose(-1,0)
likelihoods.transpose_(-1,0)
overalllabelsums = defaultdict(float)
overall_sum = 0.0
for station in Stations:
# allocate their normalized likelihoods to the 3 categories for each snippet
likelihoods_sums = defaultdict(float)
station_row_idxs = tensor((train_df['station'] == station).to_numpy().nonzero()[0])
station_likelihood_rows = likelihoods.index_select(0, station_row_idxs)
station_posns_rows = posns.index_select(0, station_row_idxs)
for (snippet_lhs, snippet_posns) in zip(station_likelihood_rows, station_posns_rows): #python 3: zip is an iterator (py2 use itertools.izip)
for lh, posn in zip(snippet_lhs.tolist(), snippet_posns.tolist()):
likelihoods_sums[posn] += lh
# order the likelihoods for reporting, and sum up overall totals
namedlabelsums = defaultdict(float)
for k,v in likelihoods_sums.items():
namedlabelsums[learn.data.train_ds.classes[k]] = v
overalllabelsums[learn.data.train_ds.classes[k]] += v
station_sum = sum([namedlabelsums[cn] for cn in label_set[clas_name]])
overall_sum += station_sum
summed_likelihoods_ordered = [("0.0" if station_sum == 0. else str(namedlabelsums[cn]/station_sum)) for cn in label_set[clas_name]]
all_summed_likelihoods_ordered += [str(station_sum)] + summed_likelihoods_ordered
# prepend the overall total likelihoods (in order) before the station totals
overall_summed_likelihoods_ordered = [str(overall_sum)] + [str(overalllabelsums[cn]/overall_sum) for cn in label_set[clas_name]]
all_summed_likelihoods_ordered = overall_summed_likelihoods_ordered + all_summed_likelihoods_ordered
# append one line with counts for this learner in this quarter
with open(clas_name + '_stats.csv', 'a') as stats_f:
stats_f.write(",".join([quarter] + all_counts_ordered + all_summed_likelihoods_ordered) + "\n")
# if this is the first classifier (label_category), save the subsets of df for other classifiers to run on
if clas_name == 'label_category':
# get the column indices in this learner for the classes for which subsequent classifiers are relevant
relevant_col_idxes = {}
for clas, col_list in relevant_col_names.items():
relevant_col_idxes[clas] = getIndices(col_list, learn.data.train_ds.classes)
# save rows to be classified for the remaining classifiers
for clas, cols in relevant_col_idxes.items():
relevant_scores = preds.index_select(1, cols)
# indexes of rows (snippets) positive for relevant cols (having >0.5 probability among relevant scores)
dataset_rows[clas] = (relevant_scores.sum(dim=-1) > 0.5).nonzero().squeeze(1).numpy()
del relevant_col_idxes; del relevant_scores
del learn; del yhat; del preds; del likelihoods; del station_likelihood_rows; del posns; del _
gc.collect()
#torch.cuda.empty_cache()
| [
"pandas.read_csv",
"common.Labels.copy",
"random.seed",
"datetime.datetime.now",
"collections.defaultdict",
"sys.path.append"
] | [((137, 158), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (152, 158), False, 'import sys\n'), ((344, 359), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (355, 359), False, 'import random\n'), ((623, 636), 'common.Labels.copy', 'Labels.copy', ([], {}), '()\n', (634, 636), False, 'from common import Label_DbFields, Synthetic_Category_Group_Names, Other_Synthetic_Group_Names, MultiLabel_Group_Name, Labels, Stations\n'), ((2662, 2721), 'pandas.read_csv', 'pd.read_csv', (["('/data/' + quarter + '_snippets.tsv')"], {'sep': '"""\t"""'}), "('/data/' + quarter + '_snippets.tsv', sep='\\t')\n", (2673, 2721), True, 'import pandas as pd\n'), ((6644, 6662), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (6655, 6662), False, 'from collections import defaultdict, deque\n'), ((2515, 2529), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2527, 2529), False, 'from datetime import datetime\n'), ((4677, 4693), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (4688, 4693), False, 'from collections import defaultdict, deque\n'), ((5468, 5484), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (5479, 5484), False, 'from collections import defaultdict, deque\n'), ((6842, 6860), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (6853, 6860), False, 'from collections import defaultdict, deque\n'), ((7507, 7525), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (7518, 7525), False, 'from collections import defaultdict, deque\n')] |
import os
import cv2
import jpype
import shutil
import weasyprint
from bs4 import BeautifulSoup
jpype.startJVM()
from asposecells.api import *
def generatePDF(XLSXPath, OutPath):
workbook = Workbook(XLSXPath)
workbook.save(f"sheet.html", SaveFormat.HTML)
with open(f'./sheet_files/sheet001.htm') as f:
htmlDoc = f.read()
soup = BeautifulSoup(htmlDoc, 'html.parser')
table = soup.find_all('table')[0]
with open(f'./sheet_files/stylesheet.css') as f:
styles = f.read()
with open(f'out.html', 'w') as f:
f.write(f'''
<style>
{styles}
@page {{size: A4; margin:0;}}
table {{margin:auto; margin-top: 5mm;}}
table, tr, td {{border: 1px solid #000 !important;}}
</style>
''')
f.write(str(table.prettify()))
weasyprint.HTML('out.html').write_pdf(OutPath)
def cleanPDF(OutPath):
shutil.rmtree('./sheet_files')
os.remove('./out.html')
os.remove('./sheet.html')
os.remove(OutPath)
def generatePNG(XLSXPath, OutPath):
workbook = Workbook(XLSXPath)
workbook.save(f"sheet.png", SaveFormat.PNG)
img = cv2.imread("sheet.png")
cropped = img[20:-100]
cv2.imwrite(OutPath, cropped)
def cleanPNG(OutPath):
os.remove('./sheet.png')
os.remove(OutPath) | [
"cv2.imwrite",
"bs4.BeautifulSoup",
"weasyprint.HTML",
"shutil.rmtree",
"jpype.startJVM",
"cv2.imread",
"os.remove"
] | [((97, 113), 'jpype.startJVM', 'jpype.startJVM', ([], {}), '()\n', (111, 113), False, 'import jpype\n'), ((356, 393), 'bs4.BeautifulSoup', 'BeautifulSoup', (['htmlDoc', '"""html.parser"""'], {}), "(htmlDoc, 'html.parser')\n", (369, 393), False, 'from bs4 import BeautifulSoup\n'), ((941, 971), 'shutil.rmtree', 'shutil.rmtree', (['"""./sheet_files"""'], {}), "('./sheet_files')\n", (954, 971), False, 'import shutil\n'), ((976, 999), 'os.remove', 'os.remove', (['"""./out.html"""'], {}), "('./out.html')\n", (985, 999), False, 'import os\n'), ((1004, 1029), 'os.remove', 'os.remove', (['"""./sheet.html"""'], {}), "('./sheet.html')\n", (1013, 1029), False, 'import os\n'), ((1034, 1052), 'os.remove', 'os.remove', (['OutPath'], {}), '(OutPath)\n', (1043, 1052), False, 'import os\n'), ((1182, 1205), 'cv2.imread', 'cv2.imread', (['"""sheet.png"""'], {}), "('sheet.png')\n", (1192, 1205), False, 'import cv2\n'), ((1237, 1266), 'cv2.imwrite', 'cv2.imwrite', (['OutPath', 'cropped'], {}), '(OutPath, cropped)\n', (1248, 1266), False, 'import cv2\n'), ((1295, 1319), 'os.remove', 'os.remove', (['"""./sheet.png"""'], {}), "('./sheet.png')\n", (1304, 1319), False, 'import os\n'), ((1324, 1342), 'os.remove', 'os.remove', (['OutPath'], {}), '(OutPath)\n', (1333, 1342), False, 'import os\n'), ((866, 893), 'weasyprint.HTML', 'weasyprint.HTML', (['"""out.html"""'], {}), "('out.html')\n", (881, 893), False, 'import weasyprint\n')] |
#-*- coding: utf8 -*-
# *************************************************************************************************
# Python API for EJDB database library http://ejdb.org
# Copyright (C) 2012-2013 Softmotions Ltd.
#
# This file is part of EJDB.
# EJDB is free software; you can redistribute it and/or modify it under the terms of
# the GNU Lesser General Public License as published by the Free Software Foundation; either
# version 2.1 of the License or any later version. EJDB is distributed in the hope
# that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
# You should have received a copy of the GNU Lesser General Public License along with EJDB;
# if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA.
# *************************************************************************************************
from __future__ import with_statement
from __future__ import division
from __future__ import print_function
from datetime import datetime
import sys
PY3 = sys.version_info[0] == 3
import unittest
from pyejdb import bson
import pyejdb
if PY3:
from io import StringIO as strio
else:
from io import BytesIO as strio
class TestOne(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestOne, self).__init__(*args, **kwargs)
#super().__init__(*args, **kwargs)
_ejdb = None
@classmethod
def setUpClass(cls):
print("pyejdb version: %s" % pyejdb.version)
print("libejdb_version: %s" % pyejdb.libejdb_version)
cls._ejdb = pyejdb.EJDB("testdb", pyejdb.DEFAULT_OPEN_MODE | pyejdb.JBOTRUNC)
def test(self):
ejdb = TestOne._ejdb
self.assertEqual(ejdb.isopen, True)
doc = {"foo": "bar", "foo2": 2}
ejdb.save("foocoll", doc)
self.assertEqual(type(doc["_id"]).__name__, "str" if PY3 else "unicode")
ldoc = ejdb.load("foocoll", doc["_id"])
self.assertIsInstance(ldoc, dict)
self.assertEqual(doc["_id"], ldoc["_id"])
self.assertEqual(doc["foo"], ldoc["foo"])
self.assertEqual(doc["foo2"], ldoc["foo2"])
cur = ejdb.find("foocoll", {"foo": "bar"}, hints={"$fields": {"foo2": 0}})
self.assertEqual(len(cur), 1)
d = cur[0]
self.assertTrue(d is not None)
self.assertEqual(d["_id"], ldoc["_id"])
with ejdb.find("foocoll") as cur2:
d = cur2[0]
self.assertTrue(d is not None)
self.assertEqual(d["_id"], ldoc["_id"])
self.assertEqual(ejdb.findOne("foocoll")["foo"], "bar")
self.assertTrue(ejdb.findOne("foocoll2") is None)
self.assertEqual(ejdb.count("foocoll"), 1)
self.assertEqual(ejdb.count("foocoll2"), 0)
ejdb.ensureStringIndex("foocoll", "foo")
cur = ejdb.find("foocoll", {"foo": "bar"}, hints={"$fields": {"foo2": 0}})
self.assertEqual(len(cur), 1)
ejdb.remove("foocoll", doc["_id"])
ldoc = ejdb.load("foocoll", doc["_id"])
self.assertTrue(ldoc is None)
ejdb.sync()
ejdb.ensureCollection("ecoll1", records=90000, large=False)
ejdb.dropCollection("ecoll1", prune=True)
def test2(self):
ejdb = TestOne._ejdb
self.assertEqual(ejdb.isopen, True)
parrot1 = {
"name": "Grenny",
"type": "African Grey",
"male": True,
"age": 1,
"birthdate": datetime.utcnow(),
"likes": ["green color", "night", "toys"],
"extra1": None
}
parrot2 = {
"name": "Bounty",
"type": "Cockatoo",
"male": False,
"age": 15,
"birthdate": datetime.utcnow(),
"likes": ["sugar cane"],
"extra1": None
}
ejdb.save("parrots", *[parrot1, None, parrot2])
self.assertEqual(type(parrot1["_id"]).__name__, "str" if PY3 else "unicode")
self.assertEqual(type(parrot2["_id"]).__name__, "str" if PY3 else "unicode")
p2 = ejdb.load("parrots", parrot2["_id"])
self.assertEqual(p2["_id"], parrot2["_id"])
cur = ejdb.find("parrots")
self.assertEqual(len(cur), 2)
self.assertEqual(len(cur[1:]), 1)
self.assertEqual(len(cur[2:]), 0)
cur = ejdb.find("parrots",
{"name": bson.BSON_Regex(("(grenny|bounty)", "i"))},
hints={"$orderby": [("name", 1)]})
self.assertEqual(len(cur), 2)
self.assertEqual(cur[0]["name"], "Bounty")
self.assertEqual(cur[0]["age"], 15)
cur = ejdb.find("parrots", {}, {"name": "Grenny"}, {"name": "Bounty"},
hints={"$orderby": [("name", 1)]})
self.assertEqual(len(cur), 2)
cur = ejdb.find("parrots", {}, {"name": "Grenny"},
hints={"$orderby": [("name", 1)]})
self.assertEqual(len(cur), 1)
sally = {
"name": "Sally",
"mood": "Angry",
}
molly = {
"name": "Molly",
"mood": "Very angry",
"secret": None
}
ejdb.save("birds", *[sally, molly])
logbuf = strio()
ejdb.find("birds", {"name": "Molly"}, log=logbuf)
#print("LB=%s" % logbuf.getvalue())
self.assertTrue(logbuf.getvalue().find("RUN FULLSCAN") != -1)
ejdb.ensureStringIndex("birds", "name")
logbuf = strio()
ejdb.find("birds", {"name": "Molly"}, log=logbuf)
self.assertTrue(logbuf.getvalue().find("MAIN IDX: 'sname'") != -1)
self.assertTrue(logbuf.getvalue().find("RUN FULLSCAN") == -1)
##print("dbmeta=%s" % ejdb.dbmeta())
bar = {
"foo": "bar"
}
self.assertEqual(ejdb.isactivetx("bars"), False)
ejdb.begintx("bars")
self.assertEqual(ejdb.isactivetx("bars"), True)
ejdb.save("bars", bar)
self.assertTrue(bar["_id"] is not None)
ejdb.abortx("bars")
self.assertTrue(ejdb.load("bars", bar["_id"]) is None)
ejdb.begintx("bars")
ejdb.save("bars", bar)
self.assertTrue(ejdb.load("bars", bar["_id"]) is not None)
self.assertEqual(ejdb.isactivetx("bars"), True)
ejdb.commitx("bars")
self.assertEqual(ejdb.isactivetx("bars"), False)
self.assertTrue(ejdb.load("bars", bar["_id"]) is not None)
ejdb.update("upsertcoll",
{"foo": "bar", "$upsert": {"foo": "bar"}})
self.assertTrue(ejdb.findOne("upsertcoll", {"foo": "bar"}) is not None)
@classmethod
def tearDownClass(cls):
if cls._ejdb:
cls._ejdb.close()
cls._ejdb = None
if __name__ == '__main__':
unittest.main()
| [
"datetime.datetime.utcnow",
"pyejdb.EJDB",
"io.BytesIO",
"pyejdb.bson.BSON_Regex",
"unittest.main"
] | [((6892, 6907), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6905, 6907), False, 'import unittest\n'), ((1734, 1799), 'pyejdb.EJDB', 'pyejdb.EJDB', (['"""testdb"""', '(pyejdb.DEFAULT_OPEN_MODE | pyejdb.JBOTRUNC)'], {}), "('testdb', pyejdb.DEFAULT_OPEN_MODE | pyejdb.JBOTRUNC)\n", (1745, 1799), False, 'import pyejdb\n'), ((5354, 5361), 'io.BytesIO', 'strio', ([], {}), '()\n', (5359, 5361), True, 'from io import BytesIO as strio\n'), ((5601, 5608), 'io.BytesIO', 'strio', ([], {}), '()\n', (5606, 5608), True, 'from io import BytesIO as strio\n'), ((3604, 3621), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3619, 3621), False, 'from datetime import datetime\n'), ((3873, 3890), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3888, 3890), False, 'from datetime import datetime\n'), ((4513, 4554), 'pyejdb.bson.BSON_Regex', 'bson.BSON_Regex', (["('(grenny|bounty)', 'i')"], {}), "(('(grenny|bounty)', 'i'))\n", (4528, 4554), False, 'from pyejdb import bson\n')] |
from django.urls import path
from . import views
app_name = 'users'
urlpatterns = [
path('<int:pk>/', views.user_profile, name='user_profile'),
path('messages/<int:pk>/', views.PrivateMessageView.as_view(), name='private_message')
] | [
"django.urls.path"
] | [((90, 148), 'django.urls.path', 'path', (['"""<int:pk>/"""', 'views.user_profile'], {'name': '"""user_profile"""'}), "('<int:pk>/', views.user_profile, name='user_profile')\n", (94, 148), False, 'from django.urls import path\n')] |
#!/usr/bin/env python
def main():
"""Main flow control of vkmz
Read input data into feature objects. Results in dictionaries for samples
and features.
Then, make predictions for features. Features without predictions are removed
by default.
Finally, write results.
"""
from vkmz.arguments import args, JSON, METADATA, MODE, SQL
from vkmz.read import (
tabular as readTabular,
xcmsTabular as readXcmsTabular,
formulas as readFormulas,
)
from vkmz.predict import predict
import vkmz.write as write
# read input
if MODE == "tabular":
# read arguments here in case "input" is undeclared
tabular_f = getattr(args, "input")
samples, features = readTabular(tabular_f)
elif MODE == "w4m-xcms":
sample_f = getattr(args, "sample_metadata")
variable_f = getattr(args, "variable_metadata")
matrix_f = getattr(args, "data_matrix")
samples, features = readXcmsTabular(sample_f, variable_f, matrix_f)
else: # MODE == "formula"
formula_f = getattr(args, "input")
samples, features = readFormulas(formula_f)
if MODE == "tabular" or MODE == "w4m-xcms":
# make predictions for all features
features = {k: predict(v) for k, v in features.items()}
# remove features without a prediction
features = {k: v for k, v in features.items() if v is not None}
# remove sample feature intensities without a feature
for s in samples.values():
s.sfis = [x for x in s.sfis if len(x.feature.predictions) > 0]
# remove samples without a sample feature intensity
samples = {k: v for k, v in samples.items() if len(v.sfis) > 0}
# write results
write.tabular(samples)
j_objs = write.generateJson(samples)
if JSON:
write.json_write(j_objs)
write.html(j_objs)
if SQL:
write.sql(samples, features)
if METADATA:
write.metadata()
if __name__ == "__main__":
main()
| [
"vkmz.read.formulas",
"vkmz.predict.predict",
"vkmz.read.tabular",
"vkmz.write.tabular",
"vkmz.write.generateJson",
"vkmz.read.xcmsTabular",
"vkmz.write.html",
"vkmz.write.json_write",
"vkmz.write.metadata",
"vkmz.write.sql"
] | [((1765, 1787), 'vkmz.write.tabular', 'write.tabular', (['samples'], {}), '(samples)\n', (1778, 1787), True, 'import vkmz.write as write\n'), ((1801, 1828), 'vkmz.write.generateJson', 'write.generateJson', (['samples'], {}), '(samples)\n', (1819, 1828), True, 'import vkmz.write as write\n'), ((1879, 1897), 'vkmz.write.html', 'write.html', (['j_objs'], {}), '(j_objs)\n', (1889, 1897), True, 'import vkmz.write as write\n'), ((750, 772), 'vkmz.read.tabular', 'readTabular', (['tabular_f'], {}), '(tabular_f)\n', (761, 772), True, 'from vkmz.read import tabular as readTabular, xcmsTabular as readXcmsTabular, formulas as readFormulas\n'), ((1850, 1874), 'vkmz.write.json_write', 'write.json_write', (['j_objs'], {}), '(j_objs)\n', (1866, 1874), True, 'import vkmz.write as write\n'), ((1918, 1946), 'vkmz.write.sql', 'write.sql', (['samples', 'features'], {}), '(samples, features)\n', (1927, 1946), True, 'import vkmz.write as write\n'), ((1972, 1988), 'vkmz.write.metadata', 'write.metadata', ([], {}), '()\n', (1986, 1988), True, 'import vkmz.write as write\n'), ((986, 1033), 'vkmz.read.xcmsTabular', 'readXcmsTabular', (['sample_f', 'variable_f', 'matrix_f'], {}), '(sample_f, variable_f, matrix_f)\n', (1001, 1033), True, 'from vkmz.read import tabular as readTabular, xcmsTabular as readXcmsTabular, formulas as readFormulas\n'), ((1136, 1159), 'vkmz.read.formulas', 'readFormulas', (['formula_f'], {}), '(formula_f)\n', (1148, 1159), True, 'from vkmz.read import tabular as readTabular, xcmsTabular as readXcmsTabular, formulas as readFormulas\n'), ((1276, 1286), 'vkmz.predict.predict', 'predict', (['v'], {}), '(v)\n', (1283, 1286), False, 'from vkmz.predict import predict\n')] |
# Generated by Django 2.1 on 2018-09-06 02:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('phantomapp', '0008_auto_20180904_2102'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(blank=True, max_length=255)),
('email', models.CharField(max_length=255)),
('first_name', models.CharField(max_length=255)),
('last_name', models.CharField(max_length=255)),
('company', models.CharField(max_length=255)),
('country', models.CharField(max_length=255)),
('state', models.CharField(max_length=255)),
('address', models.CharField(max_length=255)),
('telephone', models.CharField(max_length=255)),
('created', models.DateTimeField(auto_now=True)),
('updated', models.DateTimeField(auto_now=True)),
('paid', models.BooleanField(default=False)),
],
options={
'ordering': ('-created',),
},
),
migrations.CreateModel(
name='OrderProduct',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('price', models.IntegerField()),
('product', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='products', to='phantomapp.ShopProduct')),
('purchase', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='products', to='phantomapp.Order')),
],
),
]
| [
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((363, 456), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (379, 456), False, 'from django.db import migrations, models\n'), ((484, 528), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)'}), '(blank=True, max_length=255)\n', (500, 528), False, 'from django.db import migrations, models\n'), ((557, 589), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (573, 589), False, 'from django.db import migrations, models\n'), ((623, 655), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (639, 655), False, 'from django.db import migrations, models\n'), ((688, 720), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (704, 720), False, 'from django.db import migrations, models\n'), ((751, 783), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (767, 783), False, 'from django.db import migrations, models\n'), ((814, 846), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (830, 846), False, 'from django.db import migrations, models\n'), ((875, 907), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (891, 907), False, 'from django.db import migrations, models\n'), ((938, 970), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (954, 970), False, 'from django.db import migrations, models\n'), ((1003, 1035), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1019, 1035), False, 'from django.db import migrations, models\n'), 
((1066, 1101), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1086, 1101), False, 'from django.db import migrations, models\n'), ((1132, 1167), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1152, 1167), False, 'from django.db import migrations, models\n'), ((1195, 1229), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1214, 1229), False, 'from django.db import migrations, models\n'), ((1447, 1540), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1463, 1540), False, 'from django.db import migrations, models\n'), ((1565, 1586), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1584, 1586), False, 'from django.db import migrations, models\n'), ((1617, 1738), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'related_name': '"""products"""', 'to': '"""phantomapp.ShopProduct"""'}), "(on_delete=django.db.models.deletion.PROTECT, related_name\n ='products', to='phantomapp.ShopProduct')\n", (1634, 1738), False, 'from django.db import migrations, models\n'), ((1765, 1880), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'related_name': '"""products"""', 'to': '"""phantomapp.Order"""'}), "(on_delete=django.db.models.deletion.PROTECT, related_name\n ='products', to='phantomapp.Order')\n", (1782, 1880), False, 'from django.db import migrations, models\n')] |
from sphericalquadpy.levelsymmetric.levelsymmetric import Levelsymmetric
import pytest
def test_levelsymmetric():
Q = Levelsymmetric(order=4)
assert Q.name() == "Levelsymmetric Quadrature"
assert Q.getmaximalorder() == 20
with pytest.raises(Exception):
_ = Levelsymmetric(order=-10)
Q = Levelsymmetric(nq=30)
def test_invalid():
Q = Levelsymmetric(order=4)
with pytest.raises(Exception):
_ = Q.computequadpoints(234234234234)
with pytest.raises(Exception):
_ = Q.computequadweights(234234234234)
| [
"sphericalquadpy.levelsymmetric.levelsymmetric.Levelsymmetric",
"pytest.raises"
] | [((124, 147), 'sphericalquadpy.levelsymmetric.levelsymmetric.Levelsymmetric', 'Levelsymmetric', ([], {'order': '(4)'}), '(order=4)\n', (138, 147), False, 'from sphericalquadpy.levelsymmetric.levelsymmetric import Levelsymmetric\n'), ((321, 342), 'sphericalquadpy.levelsymmetric.levelsymmetric.Levelsymmetric', 'Levelsymmetric', ([], {'nq': '(30)'}), '(nq=30)\n', (335, 342), False, 'from sphericalquadpy.levelsymmetric.levelsymmetric import Levelsymmetric\n'), ((373, 396), 'sphericalquadpy.levelsymmetric.levelsymmetric.Levelsymmetric', 'Levelsymmetric', ([], {'order': '(4)'}), '(order=4)\n', (387, 396), False, 'from sphericalquadpy.levelsymmetric.levelsymmetric import Levelsymmetric\n'), ((248, 272), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (261, 272), False, 'import pytest\n'), ((286, 311), 'sphericalquadpy.levelsymmetric.levelsymmetric.Levelsymmetric', 'Levelsymmetric', ([], {'order': '(-10)'}), '(order=-10)\n', (300, 311), False, 'from sphericalquadpy.levelsymmetric.levelsymmetric import Levelsymmetric\n'), ((406, 430), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (419, 430), False, 'import pytest\n'), ((487, 511), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (500, 511), False, 'import pytest\n')] |
from time import sleep
print('-=-' * 15)
print('Iremos calcular o preço da sua viagem (R$)')
print('-=-' * 15)
distancia = float(input('Qual a distância da viagem?\n>'))
print('CALCULANDO...')
sleep(2)
if distancia <= 200:
preco = distancia * 0.50
print(f'O preço da sua viagem vai custar R${preco:.2f}')
else:
preco = distancia * 0.45
print(f'O preço da sua viagem vai custar R${preco:.2f}')
| [
"time.sleep"
] | [((194, 202), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (199, 202), False, 'from time import sleep\n')] |
from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns
from .provider import StripeProvider
urlpatterns = default_urlpatterns(StripeProvider)
| [
"allauth.socialaccount.providers.oauth2.urls.default_urlpatterns"
] | [((128, 163), 'allauth.socialaccount.providers.oauth2.urls.default_urlpatterns', 'default_urlpatterns', (['StripeProvider'], {}), '(StripeProvider)\n', (147, 163), False, 'from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns\n')] |
import torch
import torch.nn as nn
import torchvision.models as models
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from .build import MODEL_REGISTRY
@MODEL_REGISTRY.register()
class CNNRNN(nn.Module):
def __init__(self, cfg):
super().__init__()
input_dim = 512
hidden_dim = 128
num_layers = 1
self.cnn = models.resnet50(pretrained=True)
out_features = self.cnn.fc.in_features
self.fc1 = nn.Linear(out_features, input_dim)
self.fc2 = nn.Linear(hidden_dim, 1)
self.rnn = nn.RNN(input_dim, hidden_dim, num_layers, batch_first=True)
def forward(self, vid, lengths):
B, T, *a = vid.shape
vid = vid.permute(0, 1, 4, 2, 3)
outs = []
def hook(module, input, output):
outs.append(input)
self.cnn.fc.register_forward_hook(hook)
for t in range(T):
# print(t)
frame = vid[:, t, :, :, :]
out = self.cnn(frame)
if outs[0][0].ndim == 2:
outs = [ten[0].unsqueeze(0) for ten in outs]
else:
outs = [ten[0] for ten in outs]
outs = torch.cat(outs, dim=1)
outs = self.fc1(outs)
packed_seq = pack_padded_sequence(outs, lengths, batch_first=True, enforce_sorted=False)
out, hn = self.rnn(packed_seq)
padded_seq, lengths = pad_packed_sequence(out, batch_first=True)
out = self.fc2(padded_seq)
return out
| [
"torch.nn.RNN",
"torch.nn.utils.rnn.pack_padded_sequence",
"torch.nn.Linear",
"torchvision.models.resnet50",
"torch.nn.utils.rnn.pad_packed_sequence",
"torch.cat"
] | [((380, 412), 'torchvision.models.resnet50', 'models.resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (395, 412), True, 'import torchvision.models as models\n'), ((479, 513), 'torch.nn.Linear', 'nn.Linear', (['out_features', 'input_dim'], {}), '(out_features, input_dim)\n', (488, 513), True, 'import torch.nn as nn\n'), ((533, 557), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', '(1)'], {}), '(hidden_dim, 1)\n', (542, 557), True, 'import torch.nn as nn\n'), ((578, 637), 'torch.nn.RNN', 'nn.RNN', (['input_dim', 'hidden_dim', 'num_layers'], {'batch_first': '(True)'}), '(input_dim, hidden_dim, num_layers, batch_first=True)\n', (584, 637), True, 'import torch.nn as nn\n'), ((1174, 1196), 'torch.cat', 'torch.cat', (['outs'], {'dim': '(1)'}), '(outs, dim=1)\n', (1183, 1196), False, 'import torch\n'), ((1248, 1323), 'torch.nn.utils.rnn.pack_padded_sequence', 'pack_padded_sequence', (['outs', 'lengths'], {'batch_first': '(True)', 'enforce_sorted': '(False)'}), '(outs, lengths, batch_first=True, enforce_sorted=False)\n', (1268, 1323), False, 'from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence\n'), ((1394, 1436), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['out'], {'batch_first': '(True)'}), '(out, batch_first=True)\n', (1413, 1436), False, 'from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence\n')] |
from os import environ as env
import json
import utils
import utils.aws as aws
import utils.handlers as handlers
def put_record_to_logstream(event: utils.LambdaEvent) -> str:
    """Record the outcome of a source Lambda execution in CloudWatch Logs.

    Reads the SNS message that triggered the source Lambda plus that Lambda's
    response payload, and writes one structured record to the report log
    group (one log stream per source Lambda name).
    """
    log_group_name = env["REPORT_LOG_GROUP_NAME"]
    utils.Log.info("Fetching requestPayload and responsePayload")
    req = event["requestPayload"]
    res = event["responsePayload"]
    utils.Log.info("Fetching requestPayload content")
    sns_payload = req["Records"][0]["Sns"]
    message_id = sns_payload["MessageId"]
    message = json.loads(sns_payload["Message"])
    url = message["url"]
    title = message["title"]
    try:
        body = json.loads(res["body"])
    except json.JSONDecodeError as error:
        raise utils.HandledError("Failed decoding payload: %s" % error)
    name = body["name"]
    timestamp = body["timestamp"]
    if res["statusCode"] != 200:
        raise utils.HandledError("Source lambda '%s' failed with status code %d, "
                                 "ignoring report" % (name, res["statusCode"]))
    return aws.send_event_to_logstream(log_group=log_group_name,
                                       log_stream=name,
                                       message={
                                           "url": url,
                                           "MessageId": message_id,
                                           "title": title,
                                           "timestamp": timestamp,
                                       })
def handler(event, context) -> utils.Response:
    """Lambda entry point."""
    wrapped_event = utils.LambdaEvent(event)
    wrapped_context = utils.LambdaContext(context)
    event_handler = handlers.EventHandler(
        name="send_report",
        event=wrapped_event,
        context=wrapped_context,
        action=put_record_to_logstream,
    )
    return event_handler.response
| [
"json.loads",
"utils.Log.info",
"utils.aws.send_event_to_logstream",
"utils.LambdaContext",
"utils.HandledError",
"utils.LambdaEvent"
] | [((301, 362), 'utils.Log.info', 'utils.Log.info', (['"""Fetching requestPayload and responsePayload"""'], {}), "('Fetching requestPayload and responsePayload')\n", (315, 362), False, 'import utils\n'), ((433, 482), 'utils.Log.info', 'utils.Log.info', (['"""Fetching requestPayload content"""'], {}), "('Fetching requestPayload content')\n", (447, 482), False, 'import utils\n'), ((583, 617), 'json.loads', 'json.loads', (["sns_payload['Message']"], {}), "(sns_payload['Message'])\n", (593, 617), False, 'import json\n'), ((1096, 1261), 'utils.aws.send_event_to_logstream', 'aws.send_event_to_logstream', ([], {'log_group': 'log_group_name', 'log_stream': 'name', 'message': "{'url': url, 'MessageId': message_id, 'title': title, 'timestamp': timestamp}"}), "(log_group=log_group_name, log_stream=name,\n message={'url': url, 'MessageId': message_id, 'title': title,\n 'timestamp': timestamp})\n", (1123, 1261), True, 'import utils.aws as aws\n'), ((693, 716), 'json.loads', 'json.loads', (["res['body']"], {}), "(res['body'])\n", (703, 716), False, 'import json\n'), ((935, 1057), 'utils.HandledError', 'utils.HandledError', (['("Source lambda \'%s\' failed with status code %d, ignoring report" % (name,\n res[\'statusCode\']))'], {}), '(\n "Source lambda \'%s\' failed with status code %d, ignoring report" % (\n name, res[\'statusCode\']))\n', (953, 1057), False, 'import utils\n'), ((774, 831), 'utils.HandledError', 'utils.HandledError', (["('Failed decoding payload: %s' % error)"], {}), "('Failed decoding payload: %s' % error)\n", (792, 831), False, 'import utils\n'), ((1701, 1725), 'utils.LambdaEvent', 'utils.LambdaEvent', (['event'], {}), '(event)\n', (1718, 1725), False, 'import utils\n'), ((1743, 1771), 'utils.LambdaContext', 'utils.LambdaContext', (['context'], {}), '(context)\n', (1762, 1771), False, 'import utils\n')] |
import os
from bc import Imitator
import numpy as np
from dataset import Example, Dataset
import utils
#from ale_wrapper import ALEInterfaceWrapper
from evaluator import Evaluator
from pdb import set_trace
import matplotlib.pyplot as plt
#try bmh
# Apply matplotlib's "bmh" style sheet to every figure produced by this module.
plt.style.use('bmh')
def smooth(losses, run=10):
    """Return a trailing-running-mean smoothed copy of *losses*.

    Element ``i`` of the result is ``mean(losses[max(0, i - run):i + 1])``,
    i.e. the mean over a trailing window of up to ``run + 1`` raw values.

    Bug fix: the window size previously ignored the ``run`` argument and
    always used the hard-coded value 10. The default ``run=10`` preserves
    the old behavior for existing callers.

    :param losses: sequence of numeric loss values
    :param run: how many previous values to include in each window
    :return: list of window means (same length as *losses*)
    """
    new_losses = []
    for i in range(len(losses)):
        new_losses.append(np.mean(losses[max(0, i - run):i + 1]))
    return new_losses
def plot(losses, checkpoint_dir, env_name):
    """Plot the smoothed training losses and save the figure as a PNG.

    Bug fix: the destination was printed as ``<env_name>_loss.png`` but the
    figure was actually saved as ``<env_name>loss.png`` (missing underscore).
    The path is now computed once and used for both the message and the save.
    Also drops the unused return value of ``plt.plot``.

    :param losses: raw per-update loss values
    :param checkpoint_dir: directory to write the figure into
    :param env_name: environment name used as the file-name prefix
    """
    out_path = os.path.join(checkpoint_dir, env_name + "_loss.png")
    print("Plotting losses to ", out_path)
    plt.plot(smooth(losses, 25))
    plt.xlabel("Update")
    plt.ylabel("Loss")
    plt.legend(loc='lower center')
    plt.savefig(out_path)
def train(env_name,
          minimal_action_set,
          learning_rate,
          alpha,
          l2_penalty,
          minibatch_size,
          hist_len,
          discount,
          checkpoint_dir,
          updates,
          dataset,
          validation_dataset,
          num_eval_episodes,
          epsilon_greedy,
          extra_info):
    """Train an Imitator agent by behavioral cloning, with early stopping.

    Every ``log_frequency`` updates the running loss is reported and the
    agent is scored on ``validation_dataset``; training stops early once the
    validation loss has failed to improve on more than 5 consecutive checks.
    The trained network is then checkpointed and evaluated in the environment.

    ``discount`` is accepted for interface compatibility but is unused here.

    Fixes applied:
    * ``np.float('inf')`` replaced with the builtin ``float('inf')`` — the
      ``np.float`` alias was deprecated in NumPy 1.20 and removed in 1.24,
      so the original crashes on current NumPy.
    * Removed dead code: an unused ``import tracemalloc`` and a block of
      commented-out memory/tensor debugging code.

    :return: the trained Imitator agent
    """
    # create DQN agent
    agent = Imitator(list(minimal_action_set),
                     learning_rate,
                     alpha,
                     checkpoint_dir,
                     hist_len,
                     l2_penalty)
    print("Beginning training...")
    log_frequency = 500
    log_num = log_frequency
    update = 1
    running_loss = 0.
    best_v_loss = float('inf')
    count = 0
    while update < updates:
        if update > log_num:
            print(str(update) + " updates completed. Loss {}".format(running_loss / log_frequency))
            log_num += log_frequency
            running_loss = 0
            # Run a validation pass and apply early stopping on its loss.
            # NOTE(review): the printed label says "accuracy" but the value is
            # a (normalized) validation loss -- confirm intended wording.
            v_loss = agent.validate(validation_dataset, 10)
            print("Validation accuracy = {}".format(v_loss / validation_dataset.size))
            if v_loss > best_v_loss:
                count += 1
                if count > 5:
                    print("validation not improing for {} steps. Stopping to prevent overfitting".format(count))
                    break
            else:
                best_v_loss = v_loss
                print("updating best vloss", best_v_loss)
                count = 0
        l = agent.train(dataset, minibatch_size)
        running_loss += l
        update += 1
    print("Training completed.")
    agent.checkpoint_network(env_name, extra_info)
    # Evaluation in the live environment.
    print("beginning evaluation")
    evaluator = Evaluator(env_name, num_eval_episodes, checkpoint_dir, epsilon_greedy)
    evaluator.evaluate(agent)
    return agent
def train_transitions(env_name,
        minimal_action_set,
        learning_rate,
        alpha,
        l2_penalty,
        minibatch_size,
        hist_len,
        discount,
        checkpoint_dir,
        updates,
        dataset,
        num_eval_episodes):
    """Train an Imitator on transition data and checkpoint it.

    Like ``train`` but without validation-based early stopping or a
    post-training evaluation run; the checkpoint is saved under
    ``env_name + "_transitions"``.

    :return: the trained Imitator agent
    """
    agent = Imitator(list(minimal_action_set),
                     learning_rate,
                     alpha,
                     checkpoint_dir,
                     hist_len,
                     l2_penalty)
    print("Beginning training...")
    log_frequency = 1000
    next_report = log_frequency
    step = 1
    running_loss = 0.
    while step < updates:
        if step > next_report:
            print(str(step) + " updates completed. Loss {}".format(running_loss / log_frequency))
            next_report += log_frequency
            running_loss = 0
        running_loss += agent.train(dataset, minibatch_size)
        step += 1
    print("Training completed.")
    agent.checkpoint_network(env_name + "_transitions")
    return agent
if __name__ == '__main__':
    # NOTE(review): train() takes 15 required positional arguments, so running
    # this module directly raises TypeError -- presumably a CLI/argument layer
    # is supposed to call train(); confirm the intended entry point.
    train()
| [
"numpy.float",
"matplotlib.pyplot.ylabel",
"evaluator.Evaluator",
"matplotlib.pyplot.xlabel",
"os.path.join",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.legend"
] | [((247, 267), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""bmh"""'], {}), "('bmh')\n", (260, 267), True, 'import matplotlib.pyplot as plt\n'), ((619, 639), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Update"""'], {}), "('Update')\n", (629, 639), True, 'import matplotlib.pyplot as plt\n'), ((648, 666), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (658, 666), True, 'import matplotlib.pyplot as plt\n'), ((675, 705), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower center"""'}), "(loc='lower center')\n", (685, 705), True, 'import matplotlib.pyplot as plt\n'), ((1482, 1497), 'numpy.float', 'np.float', (['"""inf"""'], {}), "('inf')\n", (1490, 1497), True, 'import numpy as np\n'), ((3025, 3095), 'evaluator.Evaluator', 'Evaluator', (['env_name', 'num_eval_episodes', 'checkpoint_dir', 'epsilon_greedy'], {}), '(env_name, num_eval_episodes, checkpoint_dir, epsilon_greedy)\n', (3034, 3095), False, 'from evaluator import Evaluator\n'), ((518, 570), 'os.path.join', 'os.path.join', (['checkpoint_dir', "(env_name + '_loss.png')"], {}), "(checkpoint_dir, env_name + '_loss.png')\n", (530, 570), False, 'import os\n'), ((726, 777), 'os.path.join', 'os.path.join', (['checkpoint_dir', "(env_name + 'loss.png')"], {}), "(checkpoint_dir, env_name + 'loss.png')\n", (738, 777), False, 'import os\n')] |
from IPython import get_ipython
from IPython.display import display
def is_ipynb():
    """Return True when running inside a Jupyter/ipykernel shell.

    Outside IPython, ``get_ipython()`` returns None, whose type lives in
    ``builtins``, so the check is False there as well.
    """
    shell = get_ipython()
    return type(shell).__module__.startswith('ipykernel.')
| [
"IPython.get_ipython"
] | [((102, 115), 'IPython.get_ipython', 'get_ipython', ([], {}), '()\n', (113, 115), False, 'from IPython import get_ipython\n')] |
"""Strategic conflict detection Subscription query tests:
- add a few Subscriptions spaced in time and footprints
- query with various combinations of arguments
"""
import datetime
from monitoring.monitorlib.infrastructure import default_scope
from monitoring.monitorlib import scd
from monitoring.monitorlib.scd import SCOPE_SC
# Fixed subscription IDs used by every test in this sequence.
SUB1_ID = '00000088-b268-481c-a32d-6be442000000'
SUB2_ID = '00000017-a3fe-42d6-9f3b-83dec2000000'
SUB3_ID = '0000001b-9c8a-475e-a82d-d81922000000'
# Center of the test footprints, in degrees latitude/longitude.
LAT0 = 23
LNG0 = 56
# This value should be large enough to ensure areas separated by this distance
# will lie in separate grid cells.
FOOTPRINT_SPACING_M = 10000
def _make_sub1_req():
  """Request body for subscription 1: open start time, ends 60 minutes from
  now, altitude 0-300, centred one footprint-spacing south of (LAT0, LNG0)."""
  now = datetime.datetime.utcnow()
  end = now + datetime.timedelta(minutes=60)
  lat = LAT0 - scd.latitude_degrees(FOOTPRINT_SPACING_M)
  extents = scd.make_vol4(None, end, 0, 300, scd.make_circle(lat, LNG0, 100))
  return {
    "extents": extents,
    "old_version": 0,
    "uss_base_url": "https://example.com/foo",
    "notify_for_operations": True,
    "notify_for_constraints": False
  }
def _make_sub2_req():
  """Request body for subscription 2: starts 2 hours from now, lasts 60
  minutes, altitude 350-650, centred exactly on (LAT0, LNG0)."""
  start = datetime.datetime.utcnow() + datetime.timedelta(hours=2)
  end = start + datetime.timedelta(minutes=60)
  extents = scd.make_vol4(start, end, 350, 650, scd.make_circle(LAT0, LNG0, 100))
  return {
    "extents": extents,
    "old_version": 0,
    "uss_base_url": "https://example.com/foo",
    "notify_for_operations": True,
    "notify_for_constraints": False
  }
def _make_sub3_req():
  """Request body for subscription 3: starts 4 hours from now, lasts 60
  minutes, altitude 700-1000, centred one footprint-spacing north of
  (LAT0, LNG0)."""
  start = datetime.datetime.utcnow() + datetime.timedelta(hours=4)
  end = start + datetime.timedelta(minutes=60)
  lat = LAT0 + scd.latitude_degrees(FOOTPRINT_SPACING_M)
  extents = scd.make_vol4(start, end, 700, 1000, scd.make_circle(lat, LNG0, 100))
  return {
    "extents": extents,
    "old_version": 0,
    "uss_base_url": "https://example.com/foo",
    "notify_for_operations": True,
    "notify_for_constraints": False
  }
def test_ensure_clean_workspace(scd_session):
  """Delete any leftover named subscriptions so later tests start clean."""
  for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID):
    resp = scd_session.get('/subscriptions/{}'.format(sub_id), scope=SCOPE_SC)
    if resp.status_code == 200:
      del_resp = scd_session.delete('/subscriptions/{}'.format(sub_id), scope=SCOPE_SC)
      assert del_resp.status_code == 200, del_resp.content
    elif resp.status_code == 404:
      continue  # Already absent, as expected.
    else:
      assert False, resp.content
# Preconditions: No named Subscriptions exist
# Mutations: None
@default_scope(SCOPE_SC)
def test_subs_do_not_exist_get(scd_session):
  """Each named subscription must 404 before it has been created."""
  responses = [scd_session.get('/subscriptions/' + sub_id)
               for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID)]
  for resp in responses:
    assert resp.status_code == 404, resp.content
# Preconditions: No named Subscriptions exist
# Mutations: None
@default_scope(SCOPE_SC)
def test_subs_do_not_exist_query(scd_session):
  """An area query covering the test footprint must not return any of the
  named subscriptions before they are created."""
  area = scd.make_vol4(None, None, 0, 5000,
                       scd.make_circle(LAT0, LNG0, FOOTPRINT_SPACING_M))
  resp = scd_session.post('/subscriptions/query', json={'area_of_interest': area})
  assert resp.status_code == 200, resp.content
  found = {sub['id'] for sub in resp.json()['subscriptions']}
  for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID):
    assert sub_id not in found
# Preconditions: No named Subscriptions exist
# Mutations: Subscriptions 1, 2, and 3 created
@default_scope(SCOPE_SC)
def test_create_subs(scd_session):
  """Create subscriptions 1, 2 and 3 used by the rest of the sequence."""
  for sub_id, make_req in ((SUB1_ID, _make_sub1_req),
                           (SUB2_ID, _make_sub2_req),
                           (SUB3_ID, _make_sub3_req)):
    resp = scd_session.put('/subscriptions/{}'.format(sub_id), json=make_req())
    assert resp.status_code == 200, resp.content
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_find_all_subs(scd_session):
  """A query spanning all altitudes and times must return all three subs."""
  area = scd.make_vol4(None, None, 0, 3000,
                       scd.make_circle(LAT0, LNG0, FOOTPRINT_SPACING_M))
  resp = scd_session.post('/subscriptions/query', json={"area_of_interest": area})
  assert resp.status_code == 200, resp.content
  found = {sub['id'] for sub in resp.json()['subscriptions']}
  assert SUB1_ID in found
  assert SUB2_ID in found
  assert SUB3_ID in found
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_footprint(scd_session):
  """Footprint-only filtering: a small circle centred on one subscription's
  area must return exactly that subscription.

  Fix: removed a leftover debug ``print(lat)`` statement.
  """
  lat = LAT0 - scd.latitude_degrees(FOOTPRINT_SPACING_M)
  resp = scd_session.post(
    '/subscriptions/query',
    json={
      "area_of_interest": scd.make_vol4(None, None, 0, 3000,
                                        scd.make_circle(lat, LNG0, 50))
    })
  assert resp.status_code == 200, resp.content
  result_ids = [x['id'] for x in resp.json()['subscriptions']]
  assert SUB1_ID in result_ids
  assert SUB2_ID not in result_ids
  assert SUB3_ID not in result_ids

  resp = scd_session.post(
    '/subscriptions/query',
    json={
      "area_of_interest": scd.make_vol4(None, None, 0, 3000,
                                        scd.make_circle(LAT0, LNG0, 50))
    })
  assert resp.status_code == 200, resp.content
  result_ids = [x['id'] for x in resp.json()['subscriptions']]
  assert SUB1_ID not in result_ids
  assert SUB2_ID in result_ids
  assert SUB3_ID not in result_ids
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_time(scd_session):
  """Time-window filtering: a near-term window matches only subscription 1,
  a window ~4 hours out matches only subscription 3; an open-ended bound
  behaves like the matching closed bound."""
  def query_ids(t0, t1):
    # POST a query over the full altitude range with the given time bounds
    # and return the IDs of the subscriptions found.
    resp = scd_session.post(
      '/subscriptions/query',
      json={
        "area_of_interest": scd.make_vol4(t0, t1, 0, 3000,
                                          scd.make_circle(LAT0, LNG0, FOOTPRINT_SPACING_M))
      })
    assert resp.status_code == 200, resp.content
    return [sub['id'] for sub in resp.json()['subscriptions']]

  now = datetime.datetime.utcnow()
  soon = now + datetime.timedelta(minutes=1)
  for bounds in ((now, soon), (None, soon)):
    ids = query_ids(*bounds)
    assert SUB1_ID in ids
    assert SUB2_ID not in ids
    assert SUB3_ID not in ids

  later = datetime.datetime.utcnow() + datetime.timedelta(hours=4)
  later_end = later + datetime.timedelta(minutes=1)
  for bounds in ((later, later_end), (later, None)):
    ids = query_ids(*bounds)
    assert SUB1_ID not in ids
    assert SUB2_ID not in ids
    assert SUB3_ID in ids
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_time_footprint(scd_session):
  """Combined time + footprint filter: only subscription 2 matches a window
  covering the next 2.5 hours around the offset footprint."""
  t0 = datetime.datetime.utcnow()
  t1 = t0 + datetime.timedelta(hours=2.5)
  lat = LAT0 + scd.latitude_degrees(FOOTPRINT_SPACING_M)
  area = scd.make_vol4(t0, t1, 0, 3000,
                       scd.make_circle(lat, LNG0, FOOTPRINT_SPACING_M))
  resp = scd_session.post('/subscriptions/query', json={"area_of_interest": area})
  assert resp.status_code == 200, resp.content
  found = [sub['id'] for sub in resp.json()['subscriptions']]
  assert SUB1_ID not in found
  assert SUB2_ID in found
  assert SUB3_ID not in found
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: Subscriptions 1, 2, and 3 deleted
@default_scope(SCOPE_SC)
def test_delete_subs(scd_session):
  """Clean up: delete all three subscriptions created by this sequence."""
  for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID):
    del_resp = scd_session.delete('/subscriptions/{}'.format(sub_id))
    assert del_resp.status_code == 200, del_resp.content
| [
"datetime.datetime.utcnow",
"monitoring.monitorlib.infrastructure.default_scope",
"monitoring.monitorlib.scd.latitude_degrees",
"monitoring.monitorlib.scd.make_circle",
"datetime.timedelta"
] | [((2451, 2474), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (2464, 2474), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((2744, 2767), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (2757, 2767), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((3273, 3296), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (3286, 3296), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((3802, 3825), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (3815, 3825), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((4363, 4386), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (4376, 4386), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((5405, 5428), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (5418, 5428), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((7537, 7560), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (7550, 7560), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((8307, 8330), 'monitoring.monitorlib.infrastructure.default_scope', 'default_scope', (['SCOPE_SC'], {}), '(SCOPE_SC)\n', (8320, 8330), False, 'from monitoring.monitorlib.infrastructure import default_scope\n'), ((688, 714), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (712, 714), False, 'import datetime\n'), ((5479, 5505), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (5503, 5505), False, 'import datetime\n'), ((7621, 7647), 
'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (7645, 7647), False, 'import datetime\n'), ((741, 771), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(60)'}), '(minutes=60)\n', (759, 771), False, 'import datetime\n'), ((787, 828), 'monitoring.monitorlib.scd.latitude_degrees', 'scd.latitude_degrees', (['FOOTPRINT_SPACING_M'], {}), '(FOOTPRINT_SPACING_M)\n', (807, 828), False, 'from monitoring.monitorlib import scd\n'), ((1110, 1136), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1134, 1136), False, 'import datetime\n'), ((1139, 1166), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (1157, 1166), False, 'import datetime\n'), ((1193, 1223), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(60)'}), '(minutes=60)\n', (1211, 1223), False, 'import datetime\n'), ((1514, 1540), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1538, 1540), False, 'import datetime\n'), ((1543, 1570), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (1561, 1570), False, 'import datetime\n'), ((1597, 1627), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(60)'}), '(minutes=60)\n', (1615, 1627), False, 'import datetime\n'), ((1643, 1684), 'monitoring.monitorlib.scd.latitude_degrees', 'scd.latitude_degrees', (['FOOTPRINT_SPACING_M'], {}), '(FOOTPRINT_SPACING_M)\n', (1663, 1684), False, 'from monitoring.monitorlib import scd\n'), ((4442, 4483), 'monitoring.monitorlib.scd.latitude_degrees', 'scd.latitude_degrees', (['FOOTPRINT_SPACING_M'], {}), '(FOOTPRINT_SPACING_M)\n', (4462, 4483), False, 'from monitoring.monitorlib import scd\n'), ((5532, 5561), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (5550, 5561), False, 'import datetime\n'), ((6464, 6490), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (6488, 6490), False, 'import datetime\n'), 
((6493, 6520), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (6511, 6520), False, 'import datetime\n'), ((6547, 6576), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (6565, 6576), False, 'import datetime\n'), ((7674, 7703), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2.5)'}), '(hours=2.5)\n', (7692, 7703), False, 'import datetime\n'), ((7719, 7760), 'monitoring.monitorlib.scd.latitude_degrees', 'scd.latitude_degrees', (['FOOTPRINT_SPACING_M'], {}), '(FOOTPRINT_SPACING_M)\n', (7739, 7760), False, 'from monitoring.monitorlib import scd\n'), ((893, 924), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['lat', 'LNG0', '(100)'], {}), '(lat, LNG0, 100)\n', (908, 924), False, 'from monitoring.monitorlib import scd\n'), ((1296, 1328), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', '(100)'], {}), '(LAT0, LNG0, 100)\n', (1311, 1328), False, 'from monitoring.monitorlib import scd\n'), ((1758, 1789), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['lat', 'LNG0', '(100)'], {}), '(lat, LNG0, 100)\n', (1773, 1789), False, 'from monitoring.monitorlib import scd\n'), ((2931, 2979), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', 'FOOTPRINT_SPACING_M'], {}), '(LAT0, LNG0, FOOTPRINT_SPACING_M)\n', (2946, 2979), False, 'from monitoring.monitorlib import scd\n'), ((4045, 4093), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', 'FOOTPRINT_SPACING_M'], {}), '(LAT0, LNG0, FOOTPRINT_SPACING_M)\n', (4060, 4093), False, 'from monitoring.monitorlib import scd\n'), ((4664, 4694), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['lat', 'LNG0', '(50)'], {}), '(lat, LNG0, 50)\n', (4679, 4694), False, 'from monitoring.monitorlib import scd\n'), ((5082, 5113), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', '(50)'], {}), '(LAT0, LNG0, 50)\n', (5097, 
5113), False, 'from monitoring.monitorlib import scd\n'), ((5740, 5788), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', 'FOOTPRINT_SPACING_M'], {}), '(LAT0, LNG0, FOOTPRINT_SPACING_M)\n', (5755, 5788), False, 'from monitoring.monitorlib import scd\n'), ((6180, 6228), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', 'FOOTPRINT_SPACING_M'], {}), '(LAT0, LNG0, FOOTPRINT_SPACING_M)\n', (6195, 6228), False, 'from monitoring.monitorlib import scd\n'), ((6755, 6803), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', 'FOOTPRINT_SPACING_M'], {}), '(LAT0, LNG0, FOOTPRINT_SPACING_M)\n', (6770, 6803), False, 'from monitoring.monitorlib import scd\n'), ((7197, 7245), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['LAT0', 'LNG0', 'FOOTPRINT_SPACING_M'], {}), '(LAT0, LNG0, FOOTPRINT_SPACING_M)\n', (7212, 7245), False, 'from monitoring.monitorlib import scd\n'), ((7938, 7985), 'monitoring.monitorlib.scd.make_circle', 'scd.make_circle', (['lat', 'LNG0', 'FOOTPRINT_SPACING_M'], {}), '(lat, LNG0, FOOTPRINT_SPACING_M)\n', (7953, 7985), False, 'from monitoring.monitorlib import scd\n')] |
# Generated by Django 3.0.4 on 2020-03-27 14:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Replaces the questionnaire's email field with a numeric user id.
    # Auto-generated by Django 3.0.4 (see the header comment above).

    dependencies = [
        ('questionnarie', '0001_initial'),
    ]

    operations = [
        # Drop the old email column...
        migrations.RemoveField(
            model_name='questionnaire',
            name='email',
        ),
        # ...and track the owner by integer user id instead (verbose_name is
        # the Chinese label "user id" shown in the admin UI).
        migrations.AddField(
            model_name='questionnaire',
            name='user_id',
            field=models.IntegerField(default=0, verbose_name='用户id'),
        ),
    ]
| [
"django.db.migrations.RemoveField",
"django.db.models.IntegerField"
] | [((230, 294), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""questionnaire"""', 'name': '"""email"""'}), "(model_name='questionnaire', name='email')\n", (252, 294), False, 'from django.db import migrations, models\n'), ((446, 497), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""用户id"""'}), "(default=0, verbose_name='用户id')\n", (465, 497), False, 'from django.db import migrations, models\n')] |
from django.test import TestCase, Client
from django.urls import reverse
from apps.pages.models import Page
class TestPageView(TestCase):
    """Tests for the slug-based page view."""

    def setUp(self):
        self.client = Client()
        Page.objects.create(slug="test_slug")

    def test_page_GET(self):
        # An existing slug renders the page template with HTTP 200.
        response = self.client.get(reverse("page_app:page", args=["test_slug"]))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "merken/pages/page.html")

    def test_page_404(self):
        # An unknown slug must return HTTP 404.
        response = self.client.get(reverse("page_app:page", args=["wrong_page"]))
        self.assertEqual(response.status_code, 404)
class TestIndexView(TestCase):
    """Tests for the index page view."""

    def setUp(self):
        self.client = Client()

    def test_index_GET(self):
        # Once an "index" page exists, the view renders it with HTTP 200.
        Page.objects.create(slug="index")
        response = self.client.get(reverse("page_app:index"))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "merken/pages/index.html")

    def test_index_404(self):
        # Without an "index" page the view must return HTTP 404.
        response = self.client.get(reverse("page_app:index"))
        self.assertEqual(response.status_code, 404)
| [
"django.urls.reverse",
"apps.pages.models.Page.objects.create",
"django.test.Client"
] | [((184, 192), 'django.test.Client', 'Client', ([], {}), '()\n', (190, 192), False, 'from django.test import TestCase, Client\n'), ((201, 238), 'apps.pages.models.Page.objects.create', 'Page.objects.create', ([], {'slug': '"""test_slug"""'}), "(slug='test_slug')\n", (220, 238), False, 'from apps.pages.models import Page\n'), ((283, 327), 'django.urls.reverse', 'reverse', (['"""page_app:page"""'], {'args': "['test_slug']"}), "('page_app:page', args=['test_slug'])\n", (290, 327), False, 'from django.urls import reverse\n'), ((532, 577), 'django.urls.reverse', 'reverse', (['"""page_app:page"""'], {'args': "['wrong_page']"}), "('page_app:page', args=['wrong_page'])\n", (539, 577), False, 'from django.urls import reverse\n'), ((746, 754), 'django.test.Client', 'Client', ([], {}), '()\n', (752, 754), False, 'from django.test import TestCase, Client\n'), ((794, 827), 'apps.pages.models.Page.objects.create', 'Page.objects.create', ([], {'slug': '"""index"""'}), "(slug='index')\n", (813, 827), False, 'from apps.pages.models import Page\n'), ((842, 867), 'django.urls.reverse', 'reverse', (['"""page_app:index"""'], {}), "('page_app:index')\n", (849, 867), False, 'from django.urls import reverse\n'), ((1074, 1099), 'django.urls.reverse', 'reverse', (['"""page_app:index"""'], {}), "('page_app:index')\n", (1081, 1099), False, 'from django.urls import reverse\n')] |
from sequal.ion import Ion
# Supported fragment-ion transition pairs: first letter is the N-terminal
# ion series, second letter the C-terminal series.
ax = "ax"
by = "by"
cz = "cz"
# calculate non-labile modifications and yield associated transition
# For example "by" would yield a tuple of "b" and "y" transitions.
def fragment_non_labile(sequence, fragment_type):
    """Yield an (N-terminal, C-terminal) Ion pair for every backbone cleavage.

    ``fragment_type`` is a two-letter transition such as "by": its first
    letter names the N-terminal ion series, the second the C-terminal one.
    """
    length = sequence.seq_length
    for pos in range(1, length, 1):
        n_term = Ion(sequence[:pos], fragment_number=pos, ion_type=fragment_type[0])
        c_term = Ion(sequence[pos:], fragment_number=length - pos, ion_type=fragment_type[1])
        yield n_term, c_term
# calculate all labile modification variants for the sequence and its associated labile modifications
def fragment_labile(sequence):
    """Return a single "Y" Ion whose fragment number is the total labile
    count over all labile modifications on the sequence."""
    total = sum(
        mod.labile_number
        for position in sequence.mods
        for mod in sequence.mods[position]
        if mod.labile)
    return Ion(sequence, fragment_number=total, ion_type="Y")
class FragmentFactory:
    """Holds a fragment transition type plus a list of positions to ignore.

    :param fragment_type: two-letter transition code such as "by"
    :param ignore: optional list of positions to skip; defaults to empty
    """

    def __init__(self, fragment_type, ignore=None):
        self.fragment_type = fragment_type
        # Keep the caller's list when a non-empty one is given, else start empty.
        self.ignore = ignore if ignore else []

    def set_ignore(self, ignore):
        """Replace the current ignore list wholesale."""
        self.ignore = ignore
| [
"sequal.ion.Ion"
] | [((805, 865), 'sequal.ion.Ion', 'Ion', (['sequence'], {'fragment_number': 'fragment_number', 'ion_type': '"""Y"""'}), "(sequence, fragment_number=fragment_number, ion_type='Y')\n", (808, 865), False, 'from sequal.ion import Ion\n'), ((307, 370), 'sequal.ion.Ion', 'Ion', (['sequence[:i]'], {'fragment_number': 'i', 'ion_type': 'fragment_type[0]'}), '(sequence[:i], fragment_number=i, ion_type=fragment_type[0])\n', (310, 370), False, 'from sequal.ion import Ion\n'), ((387, 477), 'sequal.ion.Ion', 'Ion', (['sequence[i:]'], {'fragment_number': '(sequence.seq_length - i)', 'ion_type': 'fragment_type[1]'}), '(sequence[i:], fragment_number=sequence.seq_length - i, ion_type=\n fragment_type[1])\n', (390, 477), False, 'from sequal.ion import Ion\n')] |
"""
order.py
This module contains classes needed for emulating logistics system. In particular, the following
classes are here:
Item
Vehicle
Order
Location
"""
import copy
from typing import List
class Item:
    """A class used to represent an item for logistics system.

    Improvements: added ``__repr__`` for debugger-friendly output (the
    default ``object`` repr was unhelpful) and fixed the ``__str__``
    docstring, which wrongly said "order" instead of "item".

    Attributes
    ----------
    name : str
        a name of the item, e.g. book, letter, TV, cookie
    price : float
        the price of an item in UAH
    """

    def __init__(self, name: str, price: float) -> None:
        """Initialize Item with name and price (in UAH).

        >>> item = Item("phone", 5123.4567)
        >>> item.name
        'phone'
        >>> item.price
        5123.4567
        """
        self.name = name
        self.price = price

    def __repr__(self) -> str:
        """Return an unambiguous representation of the item.

        >>> Item("shoes", 240)
        Item(name='shoes', price=240)
        """
        return f"Item(name={self.name!r}, price={self.price!r})"

    def __str__(self) -> str:
        """Return human-readable representation of the item.

        >>> item = Item("shoes", 240)
        >>> print(item)
        shoes
        """
        return self.name
class Vehicle:
    """A delivery vehicle in the logistics system.

    Attributes
    ----------
    vehicle_no : int
        number of vehicle
    is_available : bool
        tells if a vehicle is available for delivering
    """

    def __init__(self, vehicle_no: int) -> None:
        """Create a vehicle identified by *vehicle_no*; new vehicles start available."""
        self.vehicle_no = vehicle_no
        self.is_available = True
class Order:
    """A class used to represent an order in logistics system.
    Attributes
    ----------
    user_name : str
        the name of the user who created the order
    city : str
        the city of destination
    postoffice : int
        the postoffice number of Ukrposhta in the city
    items : list of items
        items listed in the order
    location : Location
        location of destination point
    vehicle : Vehicle
        vehicle for delivery of the item
    """
    # Class-level counter shared by all orders; doubles as the next order id.
    num_orders_created = 0
    def __init__(self, user_name: str, city: str, postoffice: int, items: List[Item]) -> None:
        """Initialize order with name of user, delivery city, postoffice, and items to deliver.
        >>> order = Order("Bohdan", "Stryi", 2,
        ...     [Item('Arduino',120), Item("ESP32-CAM",200), Item("Raspberri Pi Zero",1100)])
        Your order number is 0.
        >>> isinstance(order.location, Location)
        True
        >>> order.vehicle
        >>> order.user_name
        'Bohdan'
        >>> order.location.city
        'Stryi'
        >>> order.location.postoffice
        2
        >>> all(map(lambda x: isinstance(x, Item), order.items))
        True
        """
        self.order_id = Order.num_orders_created
        self.user_name = user_name
        self.location = Location(city, postoffice)
        # Shallow copy: the list is private to this order, Item objects are shared.
        self.items = copy.copy(items)
        self.vehicle = None
        Order.num_orders_created += 1
        print(f"Your order number is {self.order_id}.")
    def __str__(self) -> str:
        """Return human-readable representation of an order.
        >>> order = Order("Ivan", "Kyiv", "42", ['computer'])
        Your order number is 1.
        >>> print(order)
        The order #1 by Ivan to city Kyiv, postoffice 42. The item is computer.
        """
        text = f"The order #{self.order_id} by {self.user_name} to city {self.location.city}, post\
office {self.location.postoffice}."
        if self.items:
            text += " The item"
            # Singular vs. plural wording depending on the number of items.
            if len(self.items) == 1:
                return text + f" is {self.items[0]}."
            return text + f"s are {', '.join(map(str, self.items))}."
        return text
    def calculate_amount(self) -> float:
        """Return total cost of each Item (in UAH).
        >>> order = Order("Bohdan", "Stryi", "2",
        ...     [Item('Arduino',120), Item("ESP32-CAM",200),
        ...     Item("Raspberri Pi Zero",1100)]) #doctest: +ELLIPSIS
        Your order number is ....
        >>> order.calculate_amount()
        1420
        """
        return sum(item.price for item in self.items)
    def assign_vehicle(self, vehicle: Vehicle) -> None:
        """Assign a vehicle to an order.
        >>> order = Order("Oksana", "Zhytomyr", 5, [Item("cap", 100)]) #doctest: +ELLIPSIS
        Your order number is ....
        >>> vehicle = Vehicle(213)
        >>> order.assign_vehicle(vehicle)
        >>> order.vehicle.vehicle_no
        213
        """
        self.vehicle = vehicle
class Location:
"""A class used to represent a location in logistics system.
Attributes
----------
city : str
city of the location
postoffice : int
number of postoffice of Ukrposhta in city
"""
def __init__(self, city: str, postoffice: int) -> None:
"""Initialize location with delivery city and postoffice.
>>> location = Location("Nezhukhiv", 1)
>>> location.city
'Nezhukhiv'
>>> location.postoffice
1
"""
self.city = city
self.postoffice = postoffice
if __name__ == "__main__":
import doctest
doctest.testmod()
| [
"copy.copy",
"doctest.testmod"
] | [((4981, 4998), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (4996, 4998), False, 'import doctest\n'), ((2730, 2746), 'copy.copy', 'copy.copy', (['items'], {}), '(items)\n', (2739, 2746), False, 'import copy\n')] |
#
# See top-level LICENSE.rst file for Copyright information
#
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from collections import OrderedDict
from ..defs import (task_name_sep, task_state_to_int, task_int_to_state)
from ...util import option_list
from ...io import findfile
from .base import (BaseTask, task_classes)
from desiutil.log import get_logger
import sys,re,os,glob
import numpy as np
# NOTE: only one class in this file should have a name that starts with "Task".
class TaskPSFNight(BaseTask):
"""Class containing the properties of one PSF combined night task.
"""
def __init__(self):
super(TaskPSFNight, self).__init__()
# then put int the specifics of this class
# _cols must have a state
self._type = "psfnight"
self._cols = [
"night",
"band",
"spec",
"state"
]
self._coltypes = [
"integer",
"text",
"integer",
"integer"
]
# _name_fields must also be in _cols
self._name_fields = ["night","band","spec"]
self._name_formats = ["08d","s","d"]
def _paths(self, name):
"""See BaseTask.paths.
"""
props = self.name_split(name)
camera = "{}{}".format(props["band"], props["spec"])
return [ findfile("psfnight", night=props["night"],
camera=camera, groupname=None, nside=None, band=props["band"],
spectrograph=props["spec"]) ]
def _deps(self, name, db, inputs):
"""See BaseTask.deps.
"""
return dict()
def _run_max_procs(self):
# This is a serial task.
return 1
def _run_time(self, name, procs, db):
# Run time on one proc on machine with scale factor == 1.0
return 2.0
def _run_defaults(self):
"""See BaseTask.run_defaults.
"""
return {}
def _option_dict(self, name, opts):
"""Build the full list of options.
This includes appending the filenames and incorporating runtime
options.
"""
from .base import task_classes, task_type
options = OrderedDict()
options["output"] = self.paths(name)[0]
# look for psf for this night on disk
options["input"] = []
props = self.name_split(name)
camera = "{}{}".format(props["band"], props["spec"])
dummy_expid = 99999999
template_input = findfile("psf", night=props["night"], expid=dummy_expid,
camera=camera,
band=props["band"],
spectrograph=props["spec"])
template_input = template_input.replace("{:08d}".format(dummy_expid),"????????")
options["input"] = glob.glob(template_input)
return options
def _option_list(self, name, opts):
"""Build the full list of options.
This includes appending the filenames and incorporating runtime
options.
"""
return option_list(self._option_dict(name,opts))
def _run_cli(self, name, opts, procs, db):
"""See BaseTask.run_cli.
"""
optlist = self._option_list(name, opts)
com = "# command line for psfnight not implemented"
return com
def _run(self, name, opts, comm, db):
"""See BaseTask.run.
"""
from ...scripts import specex
optdict = self._option_dict(name, opts)
specex.mean_psf(optdict["input"], optdict["output"])
return
def getready(self, db, name, cur):
"""Checks whether dependencies are ready"""
log = get_logger()
# look for the state of psf with same night,band,spectro
props = self.name_split(name)
cmd = "select state from psf where night={} and band='{}' and spec={}".format(props["night"],props["band"],props["spec"])
cur.execute(cmd)
states = np.array([ x for (x,) in cur.fetchall() ])
log.debug("states={}".format(states))
# psfnight ready if all psf from the night have been processed, and at least one is done (failures are allowed)
n_done = np.sum(states==task_state_to_int["done"])
n_failed = np.sum(states==task_state_to_int["failed"])
ready = (n_done > 0) & ( (n_done + n_failed) == states.size )
if ready :
self.state_set(db=db,name=name,state="ready",cur=cur)
def postprocessing(self, db, name, cur):
"""For successful runs, postprocessing on DB"""
# run getready for all extraction with same night,band,spec
props = self.name_split(name)
log = get_logger()
tt = "traceshift"
cmd = "select name from {} where night={} and band='{}' and spec={} and state=0".format(tt,props["night"],props["band"],props["spec"])
cur.execute(cmd)
tasks = [ x for (x,) in cur.fetchall() ]
log.debug("checking {}".format(tasks))
for task in tasks :
task_classes[tt].getready( db=db,name=task,cur=cur)
| [
"desiutil.log.get_logger",
"collections.OrderedDict",
"numpy.sum",
"glob.glob"
] | [((2223, 2236), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (2234, 2236), False, 'from collections import OrderedDict\n'), ((2860, 2885), 'glob.glob', 'glob.glob', (['template_input'], {}), '(template_input)\n', (2869, 2885), False, 'import sys, re, os, glob\n'), ((3726, 3738), 'desiutil.log.get_logger', 'get_logger', ([], {}), '()\n', (3736, 3738), False, 'from desiutil.log import get_logger\n'), ((4245, 4288), 'numpy.sum', 'np.sum', (["(states == task_state_to_int['done'])"], {}), "(states == task_state_to_int['done'])\n", (4251, 4288), True, 'import numpy as np\n'), ((4306, 4351), 'numpy.sum', 'np.sum', (["(states == task_state_to_int['failed'])"], {}), "(states == task_state_to_int['failed'])\n", (4312, 4351), True, 'import numpy as np\n'), ((4732, 4744), 'desiutil.log.get_logger', 'get_logger', ([], {}), '()\n', (4742, 4744), False, 'from desiutil.log import get_logger\n')] |
# Load in our dependencies
# Forking from http://matplotlib.org/xkcd/examples/showcase/xkcd.html
from matplotlib import pyplot
import numpy
"""
Comments on PRs about style
20 | --------\
| |
| |
| |
| |
1 | \--\
0 | -------
-----------------------
|
Introduction of `jscs`
Time
"""
def main():
"""Generate and save an image as per the docstring above"""
# Define our style as XKCD
pyplot.xkcd()
# Start a new graph
dpi = 72
fig = pyplot.figure(1, figsize=(600 / dpi, 400 / dpi))
# Add labels and a title
pyplot.xlabel('Time')
pyplot.title('Comments on PRs about style')
# Define our axes and limits
# http://matplotlib.org/xkcd/api/pyplot_api.html#matplotlib.pyplot.subplot
ax = fig.add_subplot(1, 1, 1) # cols, rows, plot number
ax.spines['right'].set_color('none')
ax.spines['top'].set_color('none')
pyplot.xticks([])
pyplot.yticks([0, 20])
ax.set_ylim([-1, 25])
# Hide right side of ticks
# http://stackoverflow.com/questions/9051494/customizing-just-one-side-of-tick-marks-in-matplotlib-using-spines
# http://matplotlib.org/api/axis_api.html
ax.yaxis.set_ticks_position('none')
# Generate 100 nodes for our graph and draw them
# http://wiki.scipy.org/Numpy_Example_List#fill
data = numpy.zeros(100)
data.fill(20)
inflection_point = 50
data[inflection_point:inflection_point+10] = numpy.arange(20, 0, -2)
data[inflection_point+10:] = numpy.zeros(100 - (inflection_point + 10))
pyplot.plot(data)
# Add our annotation
pyplot.annotate(
'Introduction of `jscs`',
xy=(inflection_point, 20), arrowprops=dict(arrowstyle='->'), xytext=(10, 15))
# Save the image
pyplot.savefig('graph.png', dpi=dpi)
if __name__ == '__main__':
main()
| [
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.xkcd",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.yticks",
"numpy.zeros",
"matplotlib.pyplot.title",
"numpy.arange"
] | [((499, 512), 'matplotlib.pyplot.xkcd', 'pyplot.xkcd', ([], {}), '()\n', (510, 512), False, 'from matplotlib import pyplot\n'), ((561, 609), 'matplotlib.pyplot.figure', 'pyplot.figure', (['(1)'], {'figsize': '(600 / dpi, 400 / dpi)'}), '(1, figsize=(600 / dpi, 400 / dpi))\n', (574, 609), False, 'from matplotlib import pyplot\n'), ((644, 665), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Time"""'], {}), "('Time')\n", (657, 665), False, 'from matplotlib import pyplot\n'), ((670, 713), 'matplotlib.pyplot.title', 'pyplot.title', (['"""Comments on PRs about style"""'], {}), "('Comments on PRs about style')\n", (682, 713), False, 'from matplotlib import pyplot\n'), ((972, 989), 'matplotlib.pyplot.xticks', 'pyplot.xticks', (['[]'], {}), '([])\n', (985, 989), False, 'from matplotlib import pyplot\n'), ((994, 1016), 'matplotlib.pyplot.yticks', 'pyplot.yticks', (['[0, 20]'], {}), '([0, 20])\n', (1007, 1016), False, 'from matplotlib import pyplot\n'), ((1394, 1410), 'numpy.zeros', 'numpy.zeros', (['(100)'], {}), '(100)\n', (1405, 1410), False, 'import numpy\n'), ((1504, 1527), 'numpy.arange', 'numpy.arange', (['(20)', '(0)', '(-2)'], {}), '(20, 0, -2)\n', (1516, 1527), False, 'import numpy\n'), ((1561, 1603), 'numpy.zeros', 'numpy.zeros', (['(100 - (inflection_point + 10))'], {}), '(100 - (inflection_point + 10))\n', (1572, 1603), False, 'import numpy\n'), ((1608, 1625), 'matplotlib.pyplot.plot', 'pyplot.plot', (['data'], {}), '(data)\n', (1619, 1625), False, 'from matplotlib import pyplot\n'), ((1819, 1855), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['"""graph.png"""'], {'dpi': 'dpi'}), "('graph.png', dpi=dpi)\n", (1833, 1855), False, 'from matplotlib import pyplot\n')] |
import yaml
import json
yaml_list = range(5)
yaml_list.append('string1')
yaml_list.append('string2')
yaml_list.append({})
yaml_list[-1]
{}
yaml_list[-1]['critter1'] = 'hedgehog'
yaml_list[-1]['critter2'] = 'bunny'
yaml_list[-1]['dungeon_levels'] = range(5)
yaml_list.append('list_end')
with open("class1_list.yml", "w") as f:
f.write(yaml.dump(yaml_list, default_flow_style=False))
with open("class1_list.json", "w") as f:
json.dump(yaml_list, f)
| [
"json.dump",
"yaml.dump"
] | [((432, 455), 'json.dump', 'json.dump', (['yaml_list', 'f'], {}), '(yaml_list, f)\n', (441, 455), False, 'import json\n'), ((338, 384), 'yaml.dump', 'yaml.dump', (['yaml_list'], {'default_flow_style': '(False)'}), '(yaml_list, default_flow_style=False)\n', (347, 384), False, 'import yaml\n')] |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/SpecimenDefinition
Release: R4
Version: 4.0.1
Build ID: 9346c8cc45
Last updated: 2019-11-01T09:29:23.356+11:00
"""
import typing
from pydantic import Field, root_validator
from pydantic.error_wrappers import ErrorWrapper, ValidationError
from pydantic.errors import MissingError, NoneIsNotAllowedError
from . import backboneelement, domainresource, fhirtypes
class SpecimenDefinition(domainresource.DomainResource):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.
    Kind of specimen.
    A kind of specimen with associated set of requirements.
    """
    # Fixed FHIR resource-type discriminator; ``const=True`` forbids overriding it.
    resource_type = Field("SpecimenDefinition", const=True)
    collection: typing.List[fhirtypes.CodeableConceptType] = Field(
        None,
        alias="collection",
        title="Specimen collection procedure",
        description="The action to be performed for collecting the specimen.",
        # if property is element of this resource.
        element_property=True,
    )
    identifier: fhirtypes.IdentifierType = Field(
        None,
        alias="identifier",
        title="Business identifier of a kind of specimen",
        description="A business identifier associated with the kind of specimen.",
        # if property is element of this resource.
        element_property=True,
    )
    patientPreparation: typing.List[fhirtypes.CodeableConceptType] = Field(
        None,
        alias="patientPreparation",
        title="Patient preparation for collection",
        description="Preparation of the patient for specimen collection.",
        # if property is element of this resource.
        element_property=True,
    )
    timeAspect: fhirtypes.String = Field(
        None,
        alias="timeAspect",
        title="Time aspect for collection",
        description="Time aspect of specimen collection (duration or offset).",
        # if property is element of this resource.
        element_property=True,
    )
    # Companion element for the ``timeAspect`` primitive, carrying id/extension
    # metadata (serialized as ``_timeAspect`` per FHIR primitive-extension rules).
    timeAspect__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_timeAspect", title="Extension field for ``timeAspect``."
    )
    typeCollected: fhirtypes.CodeableConceptType = Field(
        None,
        alias="typeCollected",
        title="Kind of material to collect",
        description="The kind of material to be collected.",
        # if property is element of this resource.
        element_property=True,
    )
    typeTested: typing.List[fhirtypes.SpecimenDefinitionTypeTestedType] = Field(
        None,
        alias="typeTested",
        title="Specimen in container intended for testing by lab",
        description=(
            "Specimen conditioned in a container as expected by the testing "
            "laboratory."
        ),
        # if property is element of this resource.
        element_property=True,
    )
class SpecimenDefinitionTypeTested(backboneelement.BackboneElement):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.
    Specimen in container intended for testing by lab.
    Specimen conditioned in a container as expected by the testing laboratory.
    """
    # Fixed FHIR element-type discriminator; ``const=True`` forbids overriding it.
    resource_type = Field("SpecimenDefinitionTypeTested", const=True)
    container: fhirtypes.SpecimenDefinitionTypeTestedContainerType = Field(
        None,
        alias="container",
        title="The specimen's container",
        description=None,
        # if property is element of this resource.
        element_property=True,
    )
    handling: typing.List[fhirtypes.SpecimenDefinitionTypeTestedHandlingType] = Field(
        None,
        alias="handling",
        title="Specimen handling before testing",
        description=(
            "Set of instructions for preservation/transport of the specimen at a "
            "defined temperature interval, prior the testing process."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    isDerived: bool = Field(
        None,
        alias="isDerived",
        title="Primary or secondary specimen",
        description="Primary of secondary specimen.",
        # if property is element of this resource.
        element_property=True,
    )
    isDerived__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_isDerived", title="Extension field for ``isDerived``."
    )
    # ``preference`` has cardinality 1..1 in the spec; its presence is enforced by
    # ``validate_required_primitive_elements_3071`` below rather than by pydantic,
    # so an extension-only representation (``_preference``) remains acceptable.
    preference: fhirtypes.Code = Field(
        None,
        alias="preference",
        title="preferred | alternate",
        description="The preference for this type of conditioned specimen.",
        # if property is element of this resource.
        element_property=True,
        element_required=True,
        # note: Enum values can be used in validation,
        # but use in your own responsibilities, read official FHIR documentation.
        enum_values=["preferred", "alternate"],
    )
    preference__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_preference", title="Extension field for ``preference``."
    )
    rejectionCriterion: typing.List[fhirtypes.CodeableConceptType] = Field(
        None,
        alias="rejectionCriterion",
        title="Rejection criterion",
        description=(
            "Criterion for rejection of the specimen in its container by the "
            "laboratory."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    requirement: fhirtypes.String = Field(
        None,
        alias="requirement",
        title="Specimen requirements",
        description=(
            "Requirements for delivery and special handling of this kind of "
            "conditioned specimen."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    requirement__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_requirement", title="Extension field for ``requirement``."
    )
    retentionTime: fhirtypes.DurationType = Field(
        None,
        alias="retentionTime",
        title="Specimen retention time",
        description=(
            "The usual time that a specimen of this kind is retained after the "
            "ordered tests are completed, for the purpose of additional testing."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    type: fhirtypes.CodeableConceptType = Field(
        None,
        alias="type",
        title="Type of intended specimen",
        description="The kind of specimen conditioned for testing expected by lab.",
        # if property is element of this resource.
        element_property=True,
    )
    @root_validator(pre=True, allow_reuse=True)
    def validate_required_primitive_elements_3071(
        cls, values: typing.Dict[str, typing.Any]
    ) -> typing.Dict[str, typing.Any]:
        """https://www.hl7.org/fhir/extensibility.html#Special-Case
        In some cases, implementers might find that they do not have appropriate data for
        an element with minimum cardinality = 1. In this case, the element must be present,
        but unless the resource or a profile on it has made the actual value of the primitive
        data type mandatory, it is possible to provide an extension that explains why
        the primitive value is not present.
        """
        # Pairs of (primitive field, companion ``__ext`` field); at least one of
        # each pair must be populated with usable data.
        required_fields = [("preference", "preference__ext")]
        # Sentinel distinguishing "key absent" from an explicit ``None`` value.
        _missing = object()
        def _fallback():
            # Stand-in callable used when the extension object lacks a
            # ``get_resource_type`` attribute; "" never matches the check below.
            return ""
        errors: typing.List["ErrorWrapper"] = []
        for name, ext in required_fields:
            field = cls.__fields__[name]
            ext_field = cls.__fields__[ext]
            value = values.get(field.alias, _missing)
            if value not in (_missing, None):
                # The primitive itself is supplied; nothing to enforce.
                continue
            ext_value = values.get(ext_field.alias, _missing)
            missing_ext = True
            if ext_value not in (_missing, None):
                if isinstance(ext_value, dict):
                    # Raw payload: non-empty "extension" list counts as data.
                    missing_ext = len(ext_value.get("extension", [])) == 0
                elif (
                    getattr(ext_value.__class__, "get_resource_type", _fallback)()
                    == "FHIRPrimitiveExtension"
                ):
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
                else:
                    # Unknown object: coerce it through the extension field's own
                    # validators before inspecting its ``extension`` attribute.
                    validate_pass = True
                    for validator in ext_field.type_.__get_validators__():
                        try:
                            ext_value = validator(v=ext_value)
                        except ValidationError as exc:
                            errors.append(ErrorWrapper(exc, loc=ext_field.alias))
                            validate_pass = False
                    if not validate_pass:
                        continue
                    if ext_value.extension and len(ext_value.extension) > 0:
                        missing_ext = False
            if missing_ext:
                # Neither primitive value nor a meaningful extension was given.
                if value is _missing:
                    errors.append(ErrorWrapper(MissingError(), loc=field.alias))
                else:
                    errors.append(
                        ErrorWrapper(NoneIsNotAllowedError(), loc=field.alias)
                    )
        if len(errors) > 0:
            raise ValidationError(errors, cls)  # type: ignore
        return values
class SpecimenDefinitionTypeTestedContainer(backboneelement.BackboneElement):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.
    The specimen's container.
    """
    # Fixed FHIR element-type discriminator; ``const=True`` forbids overriding it.
    resource_type = Field("SpecimenDefinitionTypeTestedContainer", const=True)
    additive: typing.List[
        fhirtypes.SpecimenDefinitionTypeTestedContainerAdditiveType
    ] = Field(
        None,
        alias="additive",
        title="Additive associated with container",
        description=(
            "Substance introduced in the kind of container to preserve, maintain or"
            " enhance the specimen. Examples: Formalin, Citrate, EDTA."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    cap: fhirtypes.CodeableConceptType = Field(
        None,
        alias="cap",
        title="Color of container cap",
        description=None,
        # if property is element of this resource.
        element_property=True,
    )
    capacity: fhirtypes.QuantityType = Field(
        None,
        alias="capacity",
        title="Container capacity",
        description="The capacity (volume or other measure) of this kind of container.",
        # if property is element of this resource.
        element_property=True,
    )
    description: fhirtypes.String = Field(
        None,
        alias="description",
        title="Container description",
        description="The textual description of the kind of container.",
        # if property is element of this resource.
        element_property=True,
    )
    description__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_description", title="Extension field for ``description``."
    )
    material: fhirtypes.CodeableConceptType = Field(
        None,
        alias="material",
        title="Container material",
        description="The type of material of the container.",
        # if property is element of this resource.
        element_property=True,
    )
    # ``minimumVolume[x]`` is a FHIR choice element: at most one of the
    # Quantity/String variants may be set (enforced by the validator below).
    minimumVolumeQuantity: fhirtypes.QuantityType = Field(
        None,
        alias="minimumVolumeQuantity",
        title="Minimum volume",
        description="The minimum volume to be conditioned in the container.",
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e minimumVolume[x]
        one_of_many="minimumVolume",
        one_of_many_required=False,
    )
    minimumVolumeString: fhirtypes.String = Field(
        None,
        alias="minimumVolumeString",
        title="Minimum volume",
        description="The minimum volume to be conditioned in the container.",
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e minimumVolume[x]
        one_of_many="minimumVolume",
        one_of_many_required=False,
    )
    minimumVolumeString__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None,
        alias="_minimumVolumeString",
        title="Extension field for ``minimumVolumeString``.",
    )
    preparation: fhirtypes.String = Field(
        None,
        alias="preparation",
        title="Specimen container preparation",
        description=(
            "Special processing that should be applied to the container for this "
            "kind of specimen."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    preparation__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_preparation", title="Extension field for ``preparation``."
    )
    type: fhirtypes.CodeableConceptType = Field(
        None,
        alias="type",
        title="Kind of container associated with the kind of specimen",
        description="The type of container used to contain this kind of specimen.",
        # if property is element of this resource.
        element_property=True,
    )
    @root_validator(pre=True, allow_reuse=True)
    def validate_one_of_many_4016(
        cls, values: typing.Dict[str, typing.Any]
    ) -> typing.Dict[str, typing.Any]:
        """https://www.hl7.org/fhir/formats.html#choice
        A few elements have a choice of more than one data type for their content.
        All such elements have a name that takes the form nnn[x].
        The "nnn" part of the name is constant, and the "[x]" is replaced with
        the title-cased name of the type that is actually used.
        The table view shows each of these names explicitly.
        Elements that have a choice of data type cannot repeat - they must have a
        maximum cardinality of 1. When constructing an instance of an element with a
        choice of types, the authoring system must create a single element with a
        data type chosen from among the list of permitted data types.
        """
        # Map of choice-element prefix -> the mutually exclusive concrete fields.
        one_of_many_fields = {
            "minimumVolume": ["minimumVolumeQuantity", "minimumVolumeString"]
        }
        for prefix, fields in one_of_many_fields.items():
            # Sanity check: field metadata must agree with the mapping above.
            assert cls.__fields__[fields[0]].field_info.extra["one_of_many"] == prefix
            required = (
                cls.__fields__[fields[0]].field_info.extra["one_of_many_required"]
                is True
            )
            found = False
            for field in fields:
                if field in values and values[field] is not None:
                    if found is True:
                        # A second populated variant violates max cardinality 1.
                        raise ValueError(
                            "Any of one field value is expected from "
                            f"this list {fields}, but got multiple!"
                        )
                    else:
                        found = True
            if required is True and found is False:
                raise ValueError(f"Expect any of field value from this list {fields}.")
        return values
class SpecimenDefinitionTypeTestedContainerAdditive(backboneelement.BackboneElement):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.
    Additive associated with container.
    Substance introduced in the kind of container to preserve, maintain or
    enhance the specimen. Examples: Formalin, Citrate, EDTA.
    """
    # Fixed FHIR element-type discriminator; ``const=True`` forbids overriding it.
    resource_type = Field("SpecimenDefinitionTypeTestedContainerAdditive", const=True)
    # ``additive[x]`` is a mandatory FHIR choice element: exactly one of the
    # CodeableConcept/Reference variants must be set (enforced by the validator
    # below via ``one_of_many_required=True``).
    additiveCodeableConcept: fhirtypes.CodeableConceptType = Field(
        None,
        alias="additiveCodeableConcept",
        title="Additive associated with container",
        description=(
            "Substance introduced in the kind of container to preserve, maintain or"
            " enhance the specimen. Examples: Formalin, Citrate, EDTA."
        ),
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e additive[x]
        one_of_many="additive",
        one_of_many_required=True,
    )
    additiveReference: fhirtypes.ReferenceType = Field(
        None,
        alias="additiveReference",
        title="Additive associated with container",
        description=(
            "Substance introduced in the kind of container to preserve, maintain or"
            " enhance the specimen. Examples: Formalin, Citrate, EDTA."
        ),
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e additive[x]
        one_of_many="additive",
        one_of_many_required=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Substance"],
    )
    @root_validator(pre=True, allow_reuse=True)
    def validate_one_of_many_4813(
        cls, values: typing.Dict[str, typing.Any]
    ) -> typing.Dict[str, typing.Any]:
        """https://www.hl7.org/fhir/formats.html#choice
        A few elements have a choice of more than one data type for their content.
        All such elements have a name that takes the form nnn[x].
        The "nnn" part of the name is constant, and the "[x]" is replaced with
        the title-cased name of the type that is actually used.
        The table view shows each of these names explicitly.
        Elements that have a choice of data type cannot repeat - they must have a
        maximum cardinality of 1. When constructing an instance of an element with a
        choice of types, the authoring system must create a single element with a
        data type chosen from among the list of permitted data types.
        """
        # Map of choice-element prefix -> the mutually exclusive concrete fields.
        one_of_many_fields = {
            "additive": ["additiveCodeableConcept", "additiveReference"]
        }
        for prefix, fields in one_of_many_fields.items():
            # Sanity check: field metadata must agree with the mapping above.
            assert cls.__fields__[fields[0]].field_info.extra["one_of_many"] == prefix
            required = (
                cls.__fields__[fields[0]].field_info.extra["one_of_many_required"]
                is True
            )
            found = False
            for field in fields:
                if field in values and values[field] is not None:
                    if found is True:
                        # A second populated variant violates max cardinality 1.
                        raise ValueError(
                            "Any of one field value is expected from "
                            f"this list {fields}, but got multiple!"
                        )
                    else:
                        found = True
            if required is True and found is False:
                raise ValueError(f"Expect any of field value from this list {fields}.")
        return values
class SpecimenDefinitionTypeTestedHandling(backboneelement.BackboneElement):
    """Disclaimer: Any field name ends with ``__ext`` doesn't part of
    Resource StructureDefinition, instead used to enable Extensibility feature
    for FHIR Primitive Data Types.
    Specimen handling before testing.
    Set of instructions for preservation/transport of the specimen at a defined
    temperature interval, prior the testing process.
    """
    # Fixed FHIR element-type discriminator; ``const=True`` forbids overriding it.
    resource_type = Field("SpecimenDefinitionTypeTestedHandling", const=True)
    instruction: fhirtypes.String = Field(
        None,
        alias="instruction",
        title="Preservation instruction",
        description=(
            "Additional textual instructions for the preservation or transport of "
            "the specimen. For instance, 'Protect from light exposure'."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    # Companion element for the ``instruction`` primitive, carrying id/extension
    # metadata (serialized as ``_instruction`` per FHIR primitive-extension rules).
    instruction__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
        None, alias="_instruction", title="Extension field for ``instruction``."
    )
    maxDuration: fhirtypes.DurationType = Field(
        None,
        alias="maxDuration",
        title="Maximum preservation time",
        description=(
            "The maximum time interval of preservation of the specimen with these "
            "conditions."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    temperatureQualifier: fhirtypes.CodeableConceptType = Field(
        None,
        alias="temperatureQualifier",
        title="Temperature qualifier",
        description=(
            "It qualifies the interval of temperature, which characterizes an "
            "occurrence of handling. Conditions that are not related to temperature"
            " may be handled in the instruction element."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    temperatureRange: fhirtypes.RangeType = Field(
        None,
        alias="temperatureRange",
        title="Temperature range",
        description="The temperature interval for this set of handling instructions.",
        # if property is element of this resource.
        element_property=True,
    )
| [
"pydantic.error_wrappers.ErrorWrapper",
"pydantic.error_wrappers.ValidationError",
"pydantic.errors.NoneIsNotAllowedError",
"pydantic.root_validator",
"pydantic.Field",
"pydantic.errors.MissingError"
] | [((792, 831), 'pydantic.Field', 'Field', (['"""SpecimenDefinition"""'], {'const': '(True)'}), "('SpecimenDefinition', const=True)\n", (797, 831), False, 'from pydantic import Field, root_validator\n'), ((894, 1066), 'pydantic.Field', 'Field', (['None'], {'alias': '"""collection"""', 'title': '"""Specimen collection procedure"""', 'description': '"""The action to be performed for collecting the specimen."""', 'element_property': '(True)'}), "(None, alias='collection', title='Specimen collection procedure',\n description='The action to be performed for collecting the specimen.',\n element_property=True)\n", (899, 1066), False, 'from pydantic import Field, root_validator\n'), ((1201, 1395), 'pydantic.Field', 'Field', (['None'], {'alias': '"""identifier"""', 'title': '"""Business identifier of a kind of specimen"""', 'description': '"""A business identifier associated with the kind of specimen."""', 'element_property': '(True)'}), "(None, alias='identifier', title=\n 'Business identifier of a kind of specimen', description=\n 'A business identifier associated with the kind of specimen.',\n element_property=True)\n", (1206, 1395), False, 'from pydantic import Field, root_validator\n'), ((1550, 1738), 'pydantic.Field', 'Field', (['None'], {'alias': '"""patientPreparation"""', 'title': '"""Patient preparation for collection"""', 'description': '"""Preparation of the patient for specimen collection."""', 'element_property': '(True)'}), "(None, alias='patientPreparation', title=\n 'Patient preparation for collection', description=\n 'Preparation of the patient for specimen collection.', element_property\n =True)\n", (1555, 1738), False, 'from pydantic import Field, root_validator\n'), ((1858, 2028), 'pydantic.Field', 'Field', (['None'], {'alias': '"""timeAspect"""', 'title': '"""Time aspect for collection"""', 'description': '"""Time aspect of specimen collection (duration or offset)."""', 'element_property': '(True)'}), "(None, alias='timeAspect', title='Time aspect 
for collection',\n description='Time aspect of specimen collection (duration or offset).',\n element_property=True)\n", (1863, 2028), False, 'from pydantic import Field, root_validator\n'), ((2179, 2256), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_timeAspect"""', 'title': '"""Extension field for ``timeAspect``."""'}), "(None, alias='_timeAspect', title='Extension field for ``timeAspect``.')\n", (2184, 2256), False, 'from pydantic import Field, root_validator\n'), ((2323, 2474), 'pydantic.Field', 'Field', (['None'], {'alias': '"""typeCollected"""', 'title': '"""Kind of material to collect"""', 'description': '"""The kind of material to be collected."""', 'element_property': '(True)'}), "(None, alias='typeCollected', title='Kind of material to collect',\n description='The kind of material to be collected.', element_property=True)\n", (2328, 2474), False, 'from pydantic import Field, root_validator\n'), ((2644, 2862), 'pydantic.Field', 'Field', (['None'], {'alias': '"""typeTested"""', 'title': '"""Specimen in container intended for testing by lab"""', 'description': '"""Specimen conditioned in a container as expected by the testing laboratory."""', 'element_property': '(True)'}), "(None, alias='typeTested', title=\n 'Specimen in container intended for testing by lab', description=\n 'Specimen conditioned in a container as expected by the testing laboratory.'\n , element_property=True)\n", (2649, 2862), False, 'from pydantic import Field, root_validator\n'), ((3403, 3452), 'pydantic.Field', 'Field', (['"""SpecimenDefinitionTypeTested"""'], {'const': '(True)'}), "('SpecimenDefinitionTypeTested', const=True)\n", (3408, 3452), False, 'from pydantic import Field, root_validator\n'), ((3523, 3632), 'pydantic.Field', 'Field', (['None'], {'alias': '"""container"""', 'title': '"""The specimen\'s container"""', 'description': 'None', 'element_property': '(True)'}), '(None, alias=\'container\', title="The specimen\'s container",\n description=None, 
element_property=True)\n', (3528, 3632), False, 'from pydantic import Field, root_validator\n'), ((3808, 4056), 'pydantic.Field', 'Field', (['None'], {'alias': '"""handling"""', 'title': '"""Specimen handling before testing"""', 'description': '"""Set of instructions for preservation/transport of the specimen at a defined temperature interval, prior the testing process."""', 'element_property': '(True)'}), "(None, alias='handling', title='Specimen handling before testing',\n description=\n 'Set of instructions for preservation/transport of the specimen at a defined temperature interval, prior the testing process.'\n , element_property=True)\n", (3813, 4056), False, 'from pydantic import Field, root_validator\n'), ((4203, 4345), 'pydantic.Field', 'Field', (['None'], {'alias': '"""isDerived"""', 'title': '"""Primary or secondary specimen"""', 'description': '"""Primary of secondary specimen."""', 'element_property': '(True)'}), "(None, alias='isDerived', title='Primary or secondary specimen',\n description='Primary of secondary specimen.', element_property=True)\n", (4208, 4345), False, 'from pydantic import Field, root_validator\n'), ((4499, 4574), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_isDerived"""', 'title': '"""Extension field for ``isDerived``."""'}), "(None, alias='_isDerived', title='Extension field for ``isDerived``.')\n", (4504, 4574), False, 'from pydantic import Field, root_validator\n'), ((4623, 4853), 'pydantic.Field', 'Field', (['None'], {'alias': '"""preference"""', 'title': '"""preferred | alternate"""', 'description': '"""The preference for this type of conditioned specimen."""', 'element_property': '(True)', 'element_required': '(True)', 'enum_values': "['preferred', 'alternate']"}), "(None, alias='preference', title='preferred | alternate', description=\n 'The preference for this type of conditioned specimen.',\n element_property=True, element_required=True, enum_values=['preferred',\n 'alternate'])\n", (4628, 4853), False, 'from 
pydantic import Field, root_validator\n'), ((5152, 5229), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_preference"""', 'title': '"""Extension field for ``preference``."""'}), "(None, alias='_preference', title='Extension field for ``preference``.')\n", (5157, 5229), False, 'from pydantic import Field, root_validator\n'), ((5314, 5510), 'pydantic.Field', 'Field', (['None'], {'alias': '"""rejectionCriterion"""', 'title': '"""Rejection criterion"""', 'description': '"""Criterion for rejection of the specimen in its container by the laboratory."""', 'element_property': '(True)'}), "(None, alias='rejectionCriterion', title='Rejection criterion',\n description=\n 'Criterion for rejection of the specimen in its container by the laboratory.'\n , element_property=True)\n", (5319, 5510), False, 'from pydantic import Field, root_validator\n'), ((5671, 5872), 'pydantic.Field', 'Field', (['None'], {'alias': '"""requirement"""', 'title': '"""Specimen requirements"""', 'description': '"""Requirements for delivery and special handling of this kind of conditioned specimen."""', 'element_property': '(True)'}), "(None, alias='requirement', title='Specimen requirements', description\n =\n 'Requirements for delivery and special handling of this kind of conditioned specimen.'\n , element_property=True)\n", (5676, 5872), False, 'from pydantic import Field, root_validator\n'), ((6056, 6135), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_requirement"""', 'title': '"""Extension field for ``requirement``."""'}), "(None, alias='_requirement', title='Extension field for ``requirement``.')\n", (6061, 6135), False, 'from pydantic import Field, root_validator\n'), ((6195, 6448), 'pydantic.Field', 'Field', (['None'], {'alias': '"""retentionTime"""', 'title': '"""Specimen retention time"""', 'description': '"""The usual time that a specimen of this kind is retained after the ordered tests are completed, for the purpose of additional testing."""', 'element_property': '(True)'}), 
"(None, alias='retentionTime', title='Specimen retention time',\n description=\n 'The usual time that a specimen of this kind is retained after the ordered tests are completed, for the purpose of additional testing.'\n , element_property=True)\n", (6200, 6448), False, 'from pydantic import Field, root_validator\n'), ((6615, 6784), 'pydantic.Field', 'Field', (['None'], {'alias': '"""type"""', 'title': '"""Type of intended specimen"""', 'description': '"""The kind of specimen conditioned for testing expected by lab."""', 'element_property': '(True)'}), "(None, alias='type', title='Type of intended specimen', description=\n 'The kind of specimen conditioned for testing expected by lab.',\n element_property=True)\n", (6620, 6784), False, 'from pydantic import Field, root_validator\n'), ((6880, 6922), 'pydantic.root_validator', 'root_validator', ([], {'pre': '(True)', 'allow_reuse': '(True)'}), '(pre=True, allow_reuse=True)\n', (6894, 6922), False, 'from pydantic import Field, root_validator\n'), ((9904, 9962), 'pydantic.Field', 'Field', (['"""SpecimenDefinitionTypeTestedContainer"""'], {'const': '(True)'}), "('SpecimenDefinitionTypeTestedContainer', const=True)\n", (9909, 9962), False, 'from pydantic import Field, root_validator\n'), ((10067, 10320), 'pydantic.Field', 'Field', (['None'], {'alias': '"""additive"""', 'title': '"""Additive associated with container"""', 'description': '"""Substance introduced in the kind of container to preserve, maintain or enhance the specimen. Examples: Formalin, Citrate, EDTA."""', 'element_property': '(True)'}), "(None, alias='additive', title='Additive associated with container',\n description=\n 'Substance introduced in the kind of container to preserve, maintain or enhance the specimen. 
Examples: Formalin, Citrate, EDTA.'\n , element_property=True)\n", (10072, 10320), False, 'from pydantic import Field, root_validator\n'), ((10486, 10587), 'pydantic.Field', 'Field', (['None'], {'alias': '"""cap"""', 'title': '"""Color of container cap"""', 'description': 'None', 'element_property': '(True)'}), "(None, alias='cap', title='Color of container cap', description=None,\n element_property=True)\n", (10491, 10587), False, 'from pydantic import Field, root_validator\n'), ((10722, 10892), 'pydantic.Field', 'Field', (['None'], {'alias': '"""capacity"""', 'title': '"""Container capacity"""', 'description': '"""The capacity (volume or other measure) of this kind of container."""', 'element_property': '(True)'}), "(None, alias='capacity', title='Container capacity', description=\n 'The capacity (volume or other measure) of this kind of container.',\n element_property=True)\n", (10727, 10892), False, 'from pydantic import Field, root_validator\n'), ((11019, 11180), 'pydantic.Field', 'Field', (['None'], {'alias': '"""description"""', 'title': '"""Container description"""', 'description': '"""The textual description of the kind of container."""', 'element_property': '(True)'}), "(None, alias='description', title='Container description', description\n ='The textual description of the kind of container.', element_property=True\n )\n", (11024, 11180), False, 'from pydantic import Field, root_validator\n'), ((11330, 11409), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_description"""', 'title': '"""Extension field for ``description``."""'}), "(None, alias='_description', title='Extension field for ``description``.')\n", (11335, 11409), False, 'from pydantic import Field, root_validator\n'), ((11471, 11610), 'pydantic.Field', 'Field', (['None'], {'alias': '"""material"""', 'title': '"""Container material"""', 'description': '"""The type of material of the container."""', 'element_property': '(True)'}), "(None, alias='material', title='Container material', 
description=\n 'The type of material of the container.', element_property=True)\n", (11476, 11610), False, 'from pydantic import Field, root_validator\n'), ((11757, 11985), 'pydantic.Field', 'Field', (['None'], {'alias': '"""minimumVolumeQuantity"""', 'title': '"""Minimum volume"""', 'description': '"""The minimum volume to be conditioned in the container."""', 'element_property': '(True)', 'one_of_many': '"""minimumVolume"""', 'one_of_many_required': '(False)'}), "(None, alias='minimumVolumeQuantity', title='Minimum volume',\n description='The minimum volume to be conditioned in the container.',\n element_property=True, one_of_many='minimumVolume',\n one_of_many_required=False)\n", (11762, 11985), False, 'from pydantic import Field, root_validator\n'), ((12186, 12412), 'pydantic.Field', 'Field', (['None'], {'alias': '"""minimumVolumeString"""', 'title': '"""Minimum volume"""', 'description': '"""The minimum volume to be conditioned in the container."""', 'element_property': '(True)', 'one_of_many': '"""minimumVolume"""', 'one_of_many_required': '(False)'}), "(None, alias='minimumVolumeString', title='Minimum volume',\n description='The minimum volume to be conditioned in the container.',\n element_property=True, one_of_many='minimumVolume',\n one_of_many_required=False)\n", (12191, 12412), False, 'from pydantic import Field, root_validator\n'), ((12637, 12737), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_minimumVolumeString"""', 'title': '"""Extension field for ``minimumVolumeString``."""'}), "(None, alias='_minimumVolumeString', title=\n 'Extension field for ``minimumVolumeString``.')\n", (12642, 12737), False, 'from pydantic import Field, root_validator\n'), ((12801, 13011), 'pydantic.Field', 'Field', (['None'], {'alias': '"""preparation"""', 'title': '"""Specimen container preparation"""', 'description': '"""Special processing that should be applied to the container for this kind of specimen."""', 'element_property': '(True)'}), "(None, 
alias='preparation', title='Specimen container preparation',\n description=\n 'Special processing that should be applied to the container for this kind of specimen.'\n , element_property=True)\n", (12806, 13011), False, 'from pydantic import Field, root_validator\n'), ((13196, 13275), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_preparation"""', 'title': '"""Extension field for ``preparation``."""'}), "(None, alias='_preparation', title='Extension field for ``preparation``.')\n", (13201, 13275), False, 'from pydantic import Field, root_validator\n'), ((13333, 13535), 'pydantic.Field', 'Field', (['None'], {'alias': '"""type"""', 'title': '"""Kind of container associated with the kind of specimen"""', 'description': '"""The type of container used to contain this kind of specimen."""', 'element_property': '(True)'}), "(None, alias='type', title=\n 'Kind of container associated with the kind of specimen', description=\n 'The type of container used to contain this kind of specimen.',\n element_property=True)\n", (13338, 13535), False, 'from pydantic import Field, root_validator\n'), ((13626, 13668), 'pydantic.root_validator', 'root_validator', ([], {'pre': '(True)', 'allow_reuse': '(True)'}), '(pre=True, allow_reuse=True)\n', (13640, 13668), False, 'from pydantic import Field, root_validator\n'), ((16018, 16084), 'pydantic.Field', 'Field', (['"""SpecimenDefinitionTypeTestedContainerAdditive"""'], {'const': '(True)'}), "('SpecimenDefinitionTypeTestedContainerAdditive', const=True)\n", (16023, 16084), False, 'from pydantic import Field, root_validator\n'), ((16147, 16467), 'pydantic.Field', 'Field', (['None'], {'alias': '"""additiveCodeableConcept"""', 'title': '"""Additive associated with container"""', 'description': '"""Substance introduced in the kind of container to preserve, maintain or enhance the specimen. 
Examples: Formalin, Citrate, EDTA."""', 'element_property': '(True)', 'one_of_many': '"""additive"""', 'one_of_many_required': '(True)'}), "(None, alias='additiveCodeableConcept', title=\n 'Additive associated with container', description=\n 'Substance introduced in the kind of container to preserve, maintain or enhance the specimen. Examples: Formalin, Citrate, EDTA.'\n , element_property=True, one_of_many='additive', one_of_many_required=True)\n", (16152, 16467), False, 'from pydantic import Field, root_validator\n'), ((16704, 17059), 'pydantic.Field', 'Field', (['None'], {'alias': '"""additiveReference"""', 'title': '"""Additive associated with container"""', 'description': '"""Substance introduced in the kind of container to preserve, maintain or enhance the specimen. Examples: Formalin, Citrate, EDTA."""', 'element_property': '(True)', 'one_of_many': '"""additive"""', 'one_of_many_required': '(True)', 'enum_reference_types': "['Substance']"}), "(None, alias='additiveReference', title=\n 'Additive associated with container', description=\n 'Substance introduced in the kind of container to preserve, maintain or enhance the specimen. 
Examples: Formalin, Citrate, EDTA.'\n , element_property=True, one_of_many='additive', one_of_many_required=\n True, enum_reference_types=['Substance'])\n", (16709, 17059), False, 'from pydantic import Field, root_validator\n'), ((17327, 17369), 'pydantic.root_validator', 'root_validator', ([], {'pre': '(True)', 'allow_reuse': '(True)'}), '(pre=True, allow_reuse=True)\n', (17341, 17369), False, 'from pydantic import Field, root_validator\n'), ((19700, 19757), 'pydantic.Field', 'Field', (['"""SpecimenDefinitionTypeTestedHandling"""'], {'const': '(True)'}), "('SpecimenDefinitionTypeTestedHandling', const=True)\n", (19705, 19757), False, 'from pydantic import Field, root_validator\n'), ((19795, 20041), 'pydantic.Field', 'Field', (['None'], {'alias': '"""instruction"""', 'title': '"""Preservation instruction"""', 'description': '"""Additional textual instructions for the preservation or transport of the specimen. For instance, \'Protect from light exposure\'."""', 'element_property': '(True)'}), '(None, alias=\'instruction\', title=\'Preservation instruction\',\n description=\n "Additional textual instructions for the preservation or transport of the specimen. 
For instance, \'Protect from light exposure\'."\n , element_property=True)\n', (19800, 20041), False, 'from pydantic import Field, root_validator\n'), ((20226, 20305), 'pydantic.Field', 'Field', (['None'], {'alias': '"""_instruction"""', 'title': '"""Extension field for ``instruction``."""'}), "(None, alias='_instruction', title='Extension field for ``instruction``.')\n", (20231, 20305), False, 'from pydantic import Field, root_validator\n'), ((20363, 20563), 'pydantic.Field', 'Field', (['None'], {'alias': '"""maxDuration"""', 'title': '"""Maximum preservation time"""', 'description': '"""The maximum time interval of preservation of the specimen with these conditions."""', 'element_property': '(True)'}), "(None, alias='maxDuration', title='Maximum preservation time',\n description=\n 'The maximum time interval of preservation of the specimen with these conditions.'\n , element_property=True)\n", (20368, 20563), False, 'from pydantic import Field, root_validator\n'), ((20746, 21049), 'pydantic.Field', 'Field', (['None'], {'alias': '"""temperatureQualifier"""', 'title': '"""Temperature qualifier"""', 'description': '"""It qualifies the interval of temperature, which characterizes an occurrence of handling. Conditions that are not related to temperature may be handled in the instruction element."""', 'element_property': '(True)'}), "(None, alias='temperatureQualifier', title='Temperature qualifier',\n description=\n 'It qualifies the interval of temperature, which characterizes an occurrence of handling. 
Conditions that are not related to temperature may be handled in the instruction element.'\n , element_property=True)\n", (20751, 21049), False, 'from pydantic import Field, root_validator\n'), ((21233, 21412), 'pydantic.Field', 'Field', (['None'], {'alias': '"""temperatureRange"""', 'title': '"""Temperature range"""', 'description': '"""The temperature interval for this set of handling instructions."""', 'element_property': '(True)'}), "(None, alias='temperatureRange', title='Temperature range',\n description=\n 'The temperature interval for this set of handling instructions.',\n element_property=True)\n", (21238, 21412), False, 'from pydantic import Field, root_validator\n'), ((9513, 9541), 'pydantic.error_wrappers.ValidationError', 'ValidationError', (['errors', 'cls'], {}), '(errors, cls)\n', (9528, 9541), False, 'from pydantic.error_wrappers import ErrorWrapper, ValidationError\n'), ((9275, 9289), 'pydantic.errors.MissingError', 'MissingError', ([], {}), '()\n', (9287, 9289), False, 'from pydantic.errors import MissingError, NoneIsNotAllowedError\n'), ((9403, 9426), 'pydantic.errors.NoneIsNotAllowedError', 'NoneIsNotAllowedError', ([], {}), '()\n', (9424, 9426), False, 'from pydantic.errors import MissingError, NoneIsNotAllowedError\n'), ((8876, 8914), 'pydantic.error_wrappers.ErrorWrapper', 'ErrorWrapper', (['exc'], {'loc': 'ext_field.alias'}), '(exc, loc=ext_field.alias)\n', (8888, 8914), False, 'from pydantic.error_wrappers import ErrorWrapper, ValidationError\n')] |
import logging
from typing import Dict, List, Tuple, Union
import numpy as np
import torch
import torch.nn.functional as F
from networkx import DiGraph
from torch import Tensor, nn as nn
from torch.autograd.variable import Variable
from binlin.data.ud import index_data
from binlin.model.nn_utils import get_embed_matrix, pad_seq
from binlin.model.syn.sympairs import SymModel
from binlin.model.syn.utils.bintree import BinTreeBase
from binlin.utils.combinatorics import flatten_nested_lists
from binlin.utils.constants import VocabSymbols
logger = logging.getLogger('main')
class SymGraphEncModel(SymModel):
@property
def _data_fields(self):
return ['LEMMA', 'UPOS', 'XPOS', 'DEPREL']
@property
def _num_embedding_feats(self):
# todo: need to avoid hard-coding
# 6 = 2 * 3
# 2 because we consider two nodes at a time,
# 3 because for a node we consider itself + its context
# consisting of its parent and a random child
return 2 * 3 * self._data_fields_num
def _init_weights(self):
self._dim_emb = self.config["embedding_dim"]
self._mat_emb = get_embed_matrix(len(self._id2tok), self._dim_emb, padding_idx=self.PAD_ID)
self._dim_emb_proj_in = self._num_embedding_feats * self._dim_emb
self._dim_emb_proj_out = self._num_embedding_feats * self.config['embedding_proj_dim']
self._mat_emb_proj = nn.Linear(self._dim_emb_proj_in, self._dim_emb_proj_out)
self._dim_dense_out = self.config["dense_dim"]
self._mat_dense = nn.Linear(self._dim_emb_proj_out, self._dim_dense_out)
self._mat_attn = nn.Linear(self._dim_emb, 1)
self._dim_concat_in = self._dim_dense_out + self._dim_emb
self._dim_concat_out = self._dim_dense_out
self._mat_concat = nn.Linear(self._dim_concat_in, self._dim_concat_out)
self._dim_out = 1
self._mat_out = nn.Linear(self._dim_dense_out, self._dim_out)
def forward(self, batch_data: Tuple[Tensor, Tensor, Union[Tensor, None]]) -> Dict:
other_nodes_var, head_child_var, _ = batch_data
# head-child pair is encoded using an MLP
x_head_child = self._mat_emb(head_child_var).view(-1,
self._dim_emb_proj_in) # size(num_node_pairs, self.emb_proj_in)
x_head_child = F.leaky_relu(self._mat_emb_proj(x_head_child)) # size (num_node_pairs, self.emb_proj_out)
x_head_child = F.leaky_relu(self._mat_dense(x_head_child)) # size (num_node_pairs, self.dense1_dim)
# x_head_child = F.leaky_relu(x_head_child) # size (num_node_pairs, self.dense1_dim)
# graph nodes are encoded using embedding lookup --> summing
node_number = other_nodes_var.shape[-1]
x_graph = self._mat_emb(other_nodes_var) # size (max_num_nodes, batch_size, self.emb_dim)
# variant1: sum over all vecs
# x_graph = torch.sum(x_graph, dim=[0], keepdim=True).view(-1, self._dim_emb) # size (batch_size, emb_dim)
# variant2: use attn scores
attn_unnorm_scores = self._mat_attn(x_graph.view(-1, self._dim_emb)) # num_edges x 1
attn_weights = F.leaky_relu(attn_unnorm_scores).view(-1, 1, node_number)
# TODO: find a simpler way to do it w/o squeezing and unsqueezing?
# apply attention weights to the graph vectors to get weighted average
# size (1, dense)
x_graph = torch.bmm(attn_weights, x_graph).squeeze(1) # size: (bsize, emb_size)
# Concat head, child and graph representations
x_combined = torch.cat((x_head_child, x_graph), 1) # size (bs, emb_dim + self.dense1_dim)
# size (batch_size, self.dense1_dim)
x_combined = self._mat_concat(x_combined)
x_combined = F.leaky_relu(x_combined)
x_combined = self._mat_out(x_combined)
logits = torch.sigmoid(x_combined)
return {'logits': logits}
def extract_features(self, bt, new_node_nxid, dg, feats_d):
# pair-level features
head_nxid = bt.nxid
head_deptree_feats = feats_d[head_nxid]
child_deptree_feats = feats_d[new_node_nxid]
x_pair_ids_l = head_deptree_feats + child_deptree_feats
# extracting graph-level features
graph_ids_l = self.extract_graph_level_feats(dg, new_node_nxid, bt, feats_d)
return (graph_ids_l, x_pair_ids_l)
def extract_graph_level_feats(self, dg, new_node_nxid, bt, feats_d):
head_sbl = dg.node[bt.nxid]['sbl']
if head_sbl is None:
head_sbl_feats_l = self._dummy_node_feats_vec
else:
head_sbl_feats_l = flatten_nested_lists([feats_d[ch] for ch in head_sbl])
child_sbl = dg.node[new_node_nxid]['sbl']
if child_sbl is None:
ch_sbl_feats_l = self._dummy_node_feats_vec
else:
ch_sbl_feats_l = flatten_nested_lists([feats_d[ch] for ch in child_sbl])
child_children = dg[new_node_nxid]
if len(child_children) == 0:
ch_ch_feats_l = self._dummy_node_feats_vec
else:
ch_ch_feats_l = flatten_nested_lists([feats_d[ch] for ch in child_children])
graph_ids_l = head_sbl_feats_l + ch_sbl_feats_l + ch_ch_feats_l
return graph_ids_l
def init_data_containers(self):
return {'X': [],
'Y': [],
'Xg': []}
def add_xy_pairs(self, data_containers: Dict, y: int, model_inputs: Tuple[List[int], List[int]]):
x_graph, x_head_child = model_inputs
data_containers['X'].append(x_head_child)
data_containers['Xg'].append(x_graph)
data_containers['Y'].append(y)
def _batchify(self, data_containers: Dict, batch_size: int):
# sort according to the lemma length
sorted_data = sorted(zip(*(data_containers['Xg'],
data_containers['X'],
data_containers['Y'])), key=lambda p: len(p[0]), reverse=True)
data_size = len(sorted_data)
num_batches = data_size // batch_size
data_indices = index_data(data_size, mode='no_shuffling')
batch_pairs = []
for bi in range(num_batches + 1): # including the last (smaller) batch
batch_x_pair_feats = []
batch_x_graph_feats = []
batch_x_lens = []
batch_y = []
curr_batch_indices = data_indices[bi * batch_size: (bi + 1) * batch_size]
if len(curr_batch_indices) == 0:
break
for idx in curr_batch_indices:
graph_f_ids, node_pairs_f_ids, y_ids = sorted_data[idx]
batch_x_graph_feats.append(graph_f_ids)
batch_x_lens.append(len(graph_f_ids))
batch_x_pair_feats.append(node_pairs_f_ids)
batch_y.append(y_ids)
max_graph_f_len = max(batch_x_lens)
batch_x_graph_feats_padded = [pad_seq(x, max_graph_f_len, pad_id=self.PAD_ID) for x in batch_x_graph_feats]
# size: (num_nodes, batch_size)
batch_x_graph_feats_var = Variable(torch.LongTensor(batch_x_graph_feats_padded)).to(self.device)
# size: (batch_size, 2 * num_node_feats)
batch_x_pair_feats_var = Variable(torch.LongTensor(batch_x_pair_feats)).to(self.device)
# size: (batch_size, 1)
batch_y_var = Variable(torch.FloatTensor(batch_y)).unsqueeze(1).to(self.device)
batch_pairs.append((batch_x_graph_feats_var, batch_x_pair_feats_var, batch_y_var))
return batch_pairs
def make_decision(self,
bt: BinTreeBase,
new_node_nxid: str,
dg: DiGraph,
feats_d: Dict, *other_inputs) -> int:
x_graph_ids_l, x_ids_l = self.extract_features(bt, new_node_nxid, dg, feats_d)
x_ids_np = np.asarray(x_ids_l)
x_graph_ids_l = np.asarray([x_graph_ids_l])
outputs = self.__call__(
(
torch.from_numpy(x_graph_ids_l).to(self.device),
torch.from_numpy(x_ids_np).to(self.device),
None)
)
logit_val = outputs['logits'].cpu().data[0].numpy()
if logit_val >= 0.5:
decision = VocabSymbols.RIGHT
else:
decision = VocabSymbols.LEFT
return decision
component = SymGraphEncModel
| [
"logging.getLogger",
"torch.nn.functional.leaky_relu",
"binlin.utils.combinatorics.flatten_nested_lists",
"torch.bmm",
"torch.LongTensor",
"torch.sigmoid",
"numpy.asarray",
"torch.from_numpy",
"binlin.model.nn_utils.pad_seq",
"torch.nn.Linear",
"binlin.data.ud.index_data",
"torch.FloatTensor",... | [((552, 577), 'logging.getLogger', 'logging.getLogger', (['"""main"""'], {}), "('main')\n", (569, 577), False, 'import logging\n'), ((1419, 1475), 'torch.nn.Linear', 'nn.Linear', (['self._dim_emb_proj_in', 'self._dim_emb_proj_out'], {}), '(self._dim_emb_proj_in, self._dim_emb_proj_out)\n', (1428, 1475), True, 'from torch import Tensor, nn as nn\n'), ((1558, 1612), 'torch.nn.Linear', 'nn.Linear', (['self._dim_emb_proj_out', 'self._dim_dense_out'], {}), '(self._dim_emb_proj_out, self._dim_dense_out)\n', (1567, 1612), True, 'from torch import Tensor, nn as nn\n'), ((1639, 1666), 'torch.nn.Linear', 'nn.Linear', (['self._dim_emb', '(1)'], {}), '(self._dim_emb, 1)\n', (1648, 1666), True, 'from torch import Tensor, nn as nn\n'), ((1812, 1864), 'torch.nn.Linear', 'nn.Linear', (['self._dim_concat_in', 'self._dim_concat_out'], {}), '(self._dim_concat_in, self._dim_concat_out)\n', (1821, 1864), True, 'from torch import Tensor, nn as nn\n'), ((1916, 1961), 'torch.nn.Linear', 'nn.Linear', (['self._dim_dense_out', 'self._dim_out'], {}), '(self._dim_dense_out, self._dim_out)\n', (1925, 1961), True, 'from torch import Tensor, nn as nn\n'), ((3588, 3625), 'torch.cat', 'torch.cat', (['(x_head_child, x_graph)', '(1)'], {}), '((x_head_child, x_graph), 1)\n', (3597, 3625), False, 'import torch\n'), ((3783, 3807), 'torch.nn.functional.leaky_relu', 'F.leaky_relu', (['x_combined'], {}), '(x_combined)\n', (3795, 3807), True, 'import torch.nn.functional as F\n'), ((3872, 3897), 'torch.sigmoid', 'torch.sigmoid', (['x_combined'], {}), '(x_combined)\n', (3885, 3897), False, 'import torch\n'), ((6100, 6142), 'binlin.data.ud.index_data', 'index_data', (['data_size'], {'mode': '"""no_shuffling"""'}), "(data_size, mode='no_shuffling')\n", (6110, 6142), False, 'from binlin.data.ud import index_data\n'), ((7896, 7915), 'numpy.asarray', 'np.asarray', (['x_ids_l'], {}), '(x_ids_l)\n', (7906, 7915), True, 'import numpy as np\n'), ((7940, 7967), 'numpy.asarray', 'np.asarray', 
(['[x_graph_ids_l]'], {}), '([x_graph_ids_l])\n', (7950, 7967), True, 'import numpy as np\n'), ((4643, 4697), 'binlin.utils.combinatorics.flatten_nested_lists', 'flatten_nested_lists', (['[feats_d[ch] for ch in head_sbl]'], {}), '([feats_d[ch] for ch in head_sbl])\n', (4663, 4697), False, 'from binlin.utils.combinatorics import flatten_nested_lists\n'), ((4878, 4933), 'binlin.utils.combinatorics.flatten_nested_lists', 'flatten_nested_lists', (['[feats_d[ch] for ch in child_sbl]'], {}), '([feats_d[ch] for ch in child_sbl])\n', (4898, 4933), False, 'from binlin.utils.combinatorics import flatten_nested_lists\n'), ((5112, 5172), 'binlin.utils.combinatorics.flatten_nested_lists', 'flatten_nested_lists', (['[feats_d[ch] for ch in child_children]'], {}), '([feats_d[ch] for ch in child_children])\n', (5132, 5172), False, 'from binlin.utils.combinatorics import flatten_nested_lists\n'), ((3183, 3215), 'torch.nn.functional.leaky_relu', 'F.leaky_relu', (['attn_unnorm_scores'], {}), '(attn_unnorm_scores)\n', (3195, 3215), True, 'import torch.nn.functional as F\n'), ((3440, 3472), 'torch.bmm', 'torch.bmm', (['attn_weights', 'x_graph'], {}), '(attn_weights, x_graph)\n', (3449, 3472), False, 'import torch\n'), ((6948, 6995), 'binlin.model.nn_utils.pad_seq', 'pad_seq', (['x', 'max_graph_f_len'], {'pad_id': 'self.PAD_ID'}), '(x, max_graph_f_len, pad_id=self.PAD_ID)\n', (6955, 6995), False, 'from binlin.model.nn_utils import get_embed_matrix, pad_seq\n'), ((7118, 7162), 'torch.LongTensor', 'torch.LongTensor', (['batch_x_graph_feats_padded'], {}), '(batch_x_graph_feats_padded)\n', (7134, 7162), False, 'import torch\n'), ((7279, 7315), 'torch.LongTensor', 'torch.LongTensor', (['batch_x_pair_feats'], {}), '(batch_x_pair_feats)\n', (7295, 7315), False, 'import torch\n'), ((8031, 8062), 'torch.from_numpy', 'torch.from_numpy', (['x_graph_ids_l'], {}), '(x_graph_ids_l)\n', (8047, 8062), False, 'import torch\n'), ((8096, 8122), 'torch.from_numpy', 'torch.from_numpy', (['x_ids_np'], {}), 
'(x_ids_np)\n', (8112, 8122), False, 'import torch\n'), ((7404, 7430), 'torch.FloatTensor', 'torch.FloatTensor', (['batch_y'], {}), '(batch_y)\n', (7421, 7430), False, 'import torch\n')] |
from plotter import load_data
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
# Stampa il dataframe passatogli come istogramma
def plot_coordinata(data: pd.DataFrame, coordinata: str, alpha_value=1.0, title="Grafico 1"):
data = data[[coordinata,'color']]
rosse = data[data['color'] == 'red'].groupby(coordinata).count()
blu = data[data['color'] == 'blue'].groupby(coordinata).count()
verdi = data[data['color'] == 'green'].groupby(coordinata).count()
ax = pd.concat([rosse,verdi,blu], axis = 1).plot(kind = 'bar', color = ['r','g','b'])
ax.legend(["R", "G", "B"])
ax.set_title(title)
# Conta il totale delle celle e il colore
def cell_stats(data: pd.DataFrame):
colors = data['color'].value_counts()
print(f"\nIl totale delle celle nel labirinto è di {data['color'].count()}.\n")
# Tolgo le celle bianche
colors.drop('white', inplace=True)
for color, value in colors.iteritems():
print(f"{color}:\t{value}")
# Stampa a video gli istogrammi, se invocata con True confronta le due distribuzioni nei relativi istogrammi
def plot_stats(confronto=False):
data = load_data("data.csv")
if confronto:
data2 = load_data("data2.csv")
if not confronto:
cell_stats(data)
plot_coordinata(data, 'x')
if confronto:
plot_coordinata(data2, 'x', alpha_value=0.5, title="Grafico 2")
plot_coordinata(data, 'y')
if confronto:
plot_coordinata(data2, 'y', alpha_value=0.5, title="Grafico 2")
plt.show()
| [
"pandas.concat",
"plotter.load_data",
"matplotlib.pyplot.show"
] | [((1163, 1184), 'plotter.load_data', 'load_data', (['"""data.csv"""'], {}), "('data.csv')\n", (1172, 1184), False, 'from plotter import load_data\n'), ((1552, 1562), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1560, 1562), True, 'import matplotlib.pyplot as plt\n'), ((1220, 1242), 'plotter.load_data', 'load_data', (['"""data2.csv"""'], {}), "('data2.csv')\n", (1229, 1242), False, 'from plotter import load_data\n'), ((504, 542), 'pandas.concat', 'pd.concat', (['[rosse, verdi, blu]'], {'axis': '(1)'}), '([rosse, verdi, blu], axis=1)\n', (513, 542), True, 'import pandas as pd\n')] |
import pytest
@pytest.mark.order(4)
def test_four():
pass
@pytest.mark.order(3)
def test_three():
pass
| [
"pytest.mark.order"
] | [((17, 37), 'pytest.mark.order', 'pytest.mark.order', (['(4)'], {}), '(4)\n', (34, 37), False, 'import pytest\n'), ((67, 87), 'pytest.mark.order', 'pytest.mark.order', (['(3)'], {}), '(3)\n', (84, 87), False, 'import pytest\n')] |
## This script will define the functions used in the locate lane lines pipeline
## The end of this script will process a video file to locate and plot the lane lines
import pickle
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from moviepy.editor import VideoFileClip
import sys
## Unpickle Required Data
cam_mtx = pickle.load(open("camera_matrix.p","rb"))
dist_coef = pickle.load(open("camera_distortion_coefficients.p","rb"))
M = pickle.load(open("M.p","rb"))
Minv = pickle.load(open("Minv.p","rb"))
## Undistort Function
def undistort(img_RGB_in):
# Input RGB distorted, Output RGB undistorted
img_out = cv2.undistort(img_RGB_in, cam_mtx, dist_coef, None, cam_mtx)
return(img_out)
# Sample undistort image
if (False):
img = mpimg.imread('camera_cal/calibration1.jpg')
dst_img = undistort(img)
plt.figure(0)
plt.imshow(img)
plt.title('Original Image')
plt.savefig('output_images/distorted_image.png')
plt.figure(1)
plt.imshow(dst_img)
plt.title('Undistorted Image')
plt.savefig('output_images/undistorted_image.png')
plt.show()
# Color Threshold Function
def color_thresh(img_RGB_in,RGB_out):
# Input RGB undistorted, Output Binary (or RGB for video)
# Convert image to HSV color space
img_HSV = cv2.cvtColor(img_RGB_in, cv2.COLOR_RGB2HSV)
# Extract S layer
H_layer = img_HSV[:,:,0]*2
S_layer = img_HSV[:,:,1]/255*100
V_layer = img_HSV[:,:,2]/255*100
# Apply threshold to S layer to identify white and yellow lane lines
H_Yellow = (40,70)
S_Yellow = (30,100)
V_Yellow = (30,100)
H_White = (0,50)
S_White = (0,10)
V_White = (75,100)
img_out = np.zeros_like(H_layer)
img_out[(((H_layer >= H_Yellow[0]) & (H_layer <= H_Yellow[1])) \
& ((S_layer >= S_Yellow[0]) & (S_layer <= S_Yellow[1])) \
& ((V_layer >= V_Yellow[0]) & (V_layer <= V_Yellow[1]))) \
| (((H_layer >= H_White[0]) & (H_layer <= H_White[1])) \
& ((S_layer >= S_White[0]) & (S_layer <= S_White[1])) \
& ((V_layer >= V_White[0]) & (V_layer <= V_White[1])))] = 1
if (RGB_out):
black_out_idxs = np.where(img_out == 0)
img_out = np.copy(img_RGB_in)
img_out[black_out_idxs[0],black_out_idxs[1],:] = 0
return(img_out)
# Sample color threshold image
if (False):
img = mpimg.imread('test_images/test5.jpg')
thrsh_img = color_thresh(img,RGB_out=True)
plt.figure(2)
plt.imshow(img)
plt.title('Original Image')
plt.savefig('output_images/pre_color_thresh.png')
plt.figure(3)
plt.imshow(thrsh_img, cmap='gray')
plt.title('Color Threshold')
plt.savefig('output_images/post_color_thresh.png')
plt.show()
## Perspective Transform to Top-Down View Function
def top_down_xfrm(img_RGB_in,frwd):
# Input RGB undistorted, Output RGB top-down
# frwd is bool that specifies if normal transform is requested (true) or inverse (false)
img_size = (img_RGB_in.shape[1], img_RGB_in.shape[0])
if (frwd):
Xfrm = M
else:
Xfrm = Minv
img_RGB_out = cv2.warpPerspective(img_RGB_in, Xfrm, img_size, flags=cv2.INTER_LINEAR)
return(img_RGB_out)
# Sample top-down perspective transform on image
if (False):
img = mpimg.imread('test_images/test6.jpg')
warped = top_down_xfrm(img,frwd=True)
plt.figure(4)
plt.imshow(img)
plt.title('Original Image')
plt.savefig('output_images/pre_top_down.png')
plt.figure(5)
plt.imshow(warped)
plt.title('Top Down View Warp')
plt.savefig('output_images/post_top_down.png')
plt.show()
## Gradient Threshold Function
def grad_thresh(img_RGB_in,RGB_out):
# Input RGB top-down, Output Binary (or RGB for video)
# RGB_out boolean can be used for video testing
#Apply gradient threshold in x direction
img_GRAY = cv2.cvtColor(img_RGB_in, cv2.COLOR_RGB2GRAY)
grad_thresh = (10,100)
abs_sobel = np.absolute(cv2.Sobel(img_GRAY, cv2.CV_64F, 1, 0))
scaled_sobel = np.uint8(255*abs_sobel/np.max(abs_sobel))
img_out = np.zeros_like(img_GRAY, dtype=np.uint8)
img_out[(scaled_sobel >= grad_thresh[0]) & (scaled_sobel <= grad_thresh[1])] = 1
if (RGB_out):
black_out_idxs = np.where(img_out == 0)
img_out = np.copy(img_RGB_in)
img_out[black_out_idxs[0],black_out_idxs[1],:] = 0
# print(out.shape)
return(img_out)
# Sample gradient threshold image
if (False):
img = mpimg.imread('test_images/test6.jpg')
img = top_down_xfrm(img,frwd=True)
thrsh_img = grad_thresh(img,RGB_out=False)
plt.figure(6)
plt.imshow(img)
plt.title('Original Top Down Transformed Image')
plt.savefig('output_images/pre_grad_thresh.png')
plt.figure(7)
plt.imshow(thrsh_img, cmap='gray')
plt.title('Gradient Threshold')
plt.savefig('output_images/post_grad_thresh.png')
plt.show()
# Class to store and calculate both lane line parameters
class LaneLines():
def __init__(self,img_RGB_in,img_BIN_in):
frame_height = img_RGB_in.shape[0]
frame_width = img_RGB_in.shape[1]
# CONSTANTS
# Frame height
self.frame_height = frame_height
# Frame width
self.frame_width = frame_width
self.midpoint_width = np.int(frame_width//2)
# y values
self.ploty = np.linspace(0, frame_height-1, frame_height)
# Polynomial fit dimension
self.poly_fit_dim = 2
# FRAME
self.Frame = img_RGB_in
# Binary image for current frame
self.img_BIN_in = img_BIN_in
# Histogram for current frame
self.histogram = None
# RGB image for output of current frame
self.img_RGB_out = img_RGB_in
# Current number of consecutive failed frames
self.num_failed_frame_curr = 0
# Number of frames processed
self.frame_num = 0
# TEXT
self.font = cv2.FONT_HERSHEY_SIMPLEX
self.Ofst_Text_pos = (20,500)
self.Rad_L_Text_pos = (20,550)
self.Rad_R_Text_pos = (20,600)
self.fontScale = 1
self.fontColor = (255,255,255)
self.lineType = 2
# HYPERPARAMETERS
# Choose the number of sliding windows
self.nwindows = 9
# Set the width of the windows +/- margin
self.margin_hist = 100
# Set the width of the windows +/- margin
self.margin_poly = 100
# Set minimum number of pixels found to re-center window
self.minpix = 50
# Number of windows that must contain minpix number of pixels for lane line to be considered valid
self.nwindow_fnd = 5
# Number of pixels that must be found for poly search method to be considered valid
self.minpix_poly = 300
# Set height of windows - based on nwindows above and image shape
self.window_height = np.int(frame_height//self.nwindows)
# Define conversions in x and y from pixels space to meters
self.x_width_pix = 700 #pixel width of lane
self.y_height_pix = 720 #pixel height of lane (frame height)
self.xm_per_pix = 3.7/self.x_width_pix # meters per pixel in x dimension
self.ym_per_pix = 30/self.y_height_pix # meters per pixel in y dimension
# Number of frames that failed to find lane lines before reset
self.num_failed_frame_alwd = 25
# Number of frames for rolling average filter
self.filt_size = 25
# LINE PARAMETERS
# was the left line detected in the current frame
self.detected_L = False
self.detected_R = False
# x values of the last n fits of the left line
self.x_fit_all_L = np.empty((0,self.ploty.size), dtype='float')
self.x_fit_all_R = np.empty((0,self.ploty.size), dtype='float')
#average x values of the fitted left line over the last n iterations
self.x_fit_best_L = np.zeros((self.ploty.size), dtype='float')
self.x_fit_best_R = np.zeros((self.ploty.size), dtype='float')
#polynomial coefficients for the most recent fit
self.coef_fit_current_L = np.zeros((self.poly_fit_dim+1), dtype='float')
self.coef_fit_current_R = np.zeros((self.poly_fit_dim+1), dtype='float')
#polynomial coefficients for the previous n iterations
self.coef_fit_all_L = np.empty((0,self.poly_fit_dim+1), dtype='float')
self.coef_fit_all_R = np.empty((0,self.poly_fit_dim+1), dtype='float')
#polynomial coefficients averaged over the last n iterations
self.coef_fit_best_L = np.zeros((self.poly_fit_dim+1), dtype='float')
self.coef_fit_best_R = np.zeros((self.poly_fit_dim+1), dtype='float')
#radius of curvature of the line in [m]
self.radius_of_curvature_L = 0
self.radius_of_curvature_R = 0
#distance in meters of vehicle center from the line
self.center_line_offst = 0
#difference in fit coefficients between last and new fits
# self.diffs = np.array([0,0,0], dtype='float')
return
def update_frame(self,img_RGB_in):
'''
Stores the new frame in memory
'''
self.Frame = img_RGB_in
self.histogram = None
self.img_RGB_out = img_RGB_in
return
def hist(self):
'''
Calculate histogram of points
'''
#Grab only the bottom half of the image
#Lane lines are likely to be mostly vertical nearest to the car
#Sum across image pixels vertically - make sure to set an `axis`
#i.e. the highest areas of vertical lines should be larger values
self.histogram = np.sum(self.img_BIN_in[self.img_BIN_in.shape[0]//2:,:], axis=0)
return
def find_lane_pixels_hist(self):
'''
Find lane pixels with histogram method
'''
# Reset previous rolling average queues
self.x_fit_all_L = np.empty((0,self.ploty.size), dtype='float')
self.x_fit_all_R = np.empty((0,self.ploty.size), dtype='float')
self.coef_fit_all_L = np.empty((0,self.poly_fit_dim+1), dtype='float')
self.coef_fit_all_R = np.empty((0,self.poly_fit_dim+1), dtype='float')
# Take a histogram of the bottom half of the image
self.hist()
# Create an output image to draw on and visualize the result
self.img_RGB_out = np.dstack((self.img_BIN_in, self.img_BIN_in, self.img_BIN_in))
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint_height = np.int(self.histogram.shape[0]//2)
leftx_base = np.argmax(self.histogram[:midpoint_height])
rightx_base = np.argmax(self.histogram[midpoint_height:]) + midpoint_height
# Identify the x and y positions of all nonzero pixels in the image
nonzero = self.img_BIN_in.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated later for each window in nwindows
leftx_current = leftx_base
rightx_current = rightx_base
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
# Counter of valid windows found
cnt_wdw_fnd_L = 0
cnt_wdw_fnd_R = 0
#Step through the windows one by one
for window in range(self.nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = self.img_BIN_in.shape[0] - (window+1)*self.window_height
win_y_high = self.img_BIN_in.shape[0] - window*self.window_height
win_xleft_low = leftx_current - self.margin_hist
win_xleft_high = leftx_current + self.margin_hist
win_xright_low = rightx_current - self.margin_hist
win_xright_high = rightx_current + self.margin_hist
# Draw the windows on the visualization image
cv2.rectangle(self.img_RGB_out,(win_xleft_low,win_y_low),
(win_xleft_high,win_y_high),(0,255,0), 2)
cv2.rectangle(self.img_RGB_out,(win_xright_low,win_y_low),
(win_xright_high,win_y_high),(0,255,0), 2)
# Identify the nonzero pixels in x and y within the window
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
# If you found > minpix pixels, re-center next window on their mean position (otherwise keep previous window x position)
if len(good_left_inds) > self.minpix:
cnt_wdw_fnd_L = cnt_wdw_fnd_L + 1
leftx_current = np.int(np.mean(nonzerox[good_left_inds]))
if len(good_right_inds) > self.minpix:
cnt_wdw_fnd_R = cnt_wdw_fnd_R + 1
self.detected_R = True
rightx_current = np.int(np.mean(nonzerox[good_right_inds]))
# Create numpy arrays of indices
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
# Determine if valid number of windows was found with pixels
self.detected_L = (self.frame_num == 0) or (cnt_wdw_fnd_L >= self.nwindow_fnd)
self.detected_R = (self.frame_num == 0) or (cnt_wdw_fnd_R >= self.nwindow_fnd)
# Color in left and right line pixels
self.img_RGB_out[nonzeroy[left_lane_inds], nonzerox[left_lane_inds]] = [255, 0, 0]
self.img_RGB_out[nonzeroy[right_lane_inds], nonzerox[right_lane_inds]] = [0, 0, 255]
# Extract left and right line pixel positions
leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
return leftx, lefty, rightx, righty
def fit_polynomial(self,x,y):
# Fit a second order polynomial to data using `np.polyfit`
# coef_fit = [A, B, C] of y = A*x^2 + B*x + C
coef_fit = np.polyfit(y, x, self.poly_fit_dim)
# Generate x and y values for plotting
x_fit = coef_fit[0]*self.ploty**2 + coef_fit[1]*self.ploty + coef_fit[2]
# Limit x_fit by size of frame
x_fit = np.minimum(np.maximum(x_fit,0),self.frame_width-1)
# Visualization
# Colors in the activated pixels
self.img_RGB_out[y, x] = [255, 0, 0]
# Colors in the poly line
self.img_RGB_out[self.ploty.astype(int), x_fit.astype(int)] = [255, 255, 0]
return coef_fit, x_fit
def find_lane_pixels_poly(self):
'''
Search around polynomial for new lane pixels
'''
# Grab activated pixels
nonzero = self.img_BIN_in.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Set the area of search based on activated x-values
# within the +/- margin of our polynomial function (from previous frame)
left_lane_inds = ((nonzerox > (self.coef_fit_current_L[0]*(nonzeroy**2) + self.coef_fit_current_L[1]*nonzeroy + self.coef_fit_current_L[2] - self.margin_poly)) & (nonzerox < (self.coef_fit_current_L[0]*(nonzeroy**2) + self.coef_fit_current_L[1]*nonzeroy + self.coef_fit_current_L[2] + self.margin_poly)))
right_lane_inds = ((nonzerox > (self.coef_fit_current_R[0]*(nonzeroy**2) + self.coef_fit_current_R[1]*nonzeroy + self.coef_fit_current_R[2] - self.margin_poly)) & (nonzerox < (self.coef_fit_current_R[0]*(nonzeroy**2) + self.coef_fit_current_R[1]*nonzeroy + self.coef_fit_current_R[2] + self.margin_poly)))
# Extract left and right line pixel positions
leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
# Determine pixel find validity
self.detected_L = len(leftx) > self.minpix_poly
self.detected_R = len(rightx) > self.minpix_poly
if (self.detected_L and self.detected_R):
# Prepare output RGB image
self.img_RGB_out = np.dstack((self.img_BIN_in, self.img_BIN_in, self.img_BIN_in))
# Visualization
# Create an image to draw on and an image to show the selection window
# out_img = np.dstack((img_bin, img_bin, img_bin))*255
window_img = np.zeros_like(self.img_RGB_out)
# Color in left and right line pixels
self.img_RGB_out[nonzeroy[left_lane_inds], nonzerox[left_lane_inds]] = [255, 0, 0]
self.img_RGB_out[nonzeroy[right_lane_inds], nonzerox[right_lane_inds]] = [0, 0, 255]
# Generate a polygon to illustrate the search window area
# And recast the x and y points into usable format for cv2.fillPoly()
coef_tmp_L, x_fit_L = self.fit_polynomial(leftx,lefty)
coef_tmp_R, x_fit_R = self.fit_polynomial(rightx,righty)
left_line_window1 = np.array([np.transpose(np.vstack([x_fit_L-self.margin_poly, self.ploty]))])
left_line_window2 = np.array([np.flipud(np.transpose(np.vstack([x_fit_L+self.margin_poly, self.ploty])))])
left_line_pts = np.hstack((left_line_window1, left_line_window2))
right_line_window1 = np.array([np.transpose(np.vstack([x_fit_R-self.margin_poly, self.ploty]))])
right_line_window2 = np.array([np.flipud(np.transpose(np.vstack([x_fit_R+self.margin_poly, self.ploty])))])
right_line_pts = np.hstack((right_line_window1, right_line_window2))
# Draw the lane onto the warped blank image
cv2.fillPoly(window_img, np.int_([left_line_pts]), (0,255, 0))
cv2.fillPoly(window_img, np.int_([right_line_pts]), (0,255, 0))
self.img_RGB_out = cv2.addWeighted(self.img_RGB_out, 1, window_img, 0.3, 0)
# Plot the polynomial lines onto the image
# plt.plot(left_fitx, ploty, color='yellow')
# plt.plot(right_fitx, ploty, color='yellow')
# End visualization steps
return leftx, lefty, rightx, righty
def calc_best(self):
'''
Perform rolling average on polynomials to determine best fit.
'''
# Reset best
self.coef_fit_best_L = np.zeros((self.poly_fit_dim+1), dtype='float')
self.coef_fit_best_R = np.zeros((self.poly_fit_dim+1), dtype='float')
self.x_fit_best_L = np.zeros((self.ploty.size), dtype='float')
self.x_fit_best_R = np.zeros((self.ploty.size), dtype='float')
# Check if size of queue is larger than filter size
if (self.x_fit_all_L.shape[0] > self.filt_size):
self.x_fit_all_L = np.delete(self.x_fit_all_L,(0),axis=0)
self.x_fit_all_R = np.delete(self.x_fit_all_R,(0),axis=0)
self.coef_fit_all_L = np.delete(self.coef_fit_all_L,(0),axis=0)
self.coef_fit_all_R = np.delete(self.coef_fit_all_R,(0),axis=0)
# Loop through and compute average
n = self.x_fit_all_L.shape[0]
for row in range(n):
for col_x_fit in range(self.x_fit_all_L.shape[1]):
self.x_fit_best_L[col_x_fit] = self.x_fit_best_L[col_x_fit] + self.x_fit_all_L[row,col_x_fit]
self.x_fit_best_R[col_x_fit] = self.x_fit_best_R[col_x_fit] + self.x_fit_all_R[row,col_x_fit]
for col_coef_fit in range(self.coef_fit_all_L.shape[1]):
self.coef_fit_best_L[col_coef_fit] = self.coef_fit_best_L[col_coef_fit] + self.coef_fit_all_L[row,col_coef_fit]
self.coef_fit_best_R[col_coef_fit] = self.coef_fit_best_R[col_coef_fit] + self.coef_fit_all_R[row,col_coef_fit]
self.x_fit_best_L = self.x_fit_best_L/n
self.x_fit_best_R = self.x_fit_best_R/n
self.coef_fit_best_L = self.coef_fit_best_L/n
self.coef_fit_best_R = self.coef_fit_best_R/n
return
def calc_rad_real(self):
'''
Calculates the radius of polynomial functions in meters.
'''
# Convert parabola coefficients into pixels
A_L = self.xm_per_pix / (self.ym_per_pix**2) * self.coef_fit_best_L[0]
B_L = self.xm_per_pix / (self.ym_per_pix) * self.coef_fit_best_L[1]
A_R = self.xm_per_pix / (self.ym_per_pix**2) * self.coef_fit_best_R[0]
B_R = self.xm_per_pix / (self.ym_per_pix) * self.coef_fit_best_R[1]
# Define y-value where we want radius of curvature
# We'll choose the maximum y-value, corresponding to the bottom of the image
y_eval = (self.frame_height - 1)*self.ym_per_pix
# Calculation of R_curve (radius of curvature)
self.radius_of_curvature_L = ((1 + (2*A_L*y_eval + B_L)**2)**1.5) / np.absolute(2*A_L)
self.radius_of_curvature_R = ((1 + (2*A_R*y_eval + B_R)**2)**1.5) / np.absolute(2*A_R)
return
def calc_offset(self):
'''
Calculates the offset between vehicle and center of lane
'''
self.center_line_offst = abs(self.midpoint_width - (self.x_fit_best_L[-1] + self.x_fit_best_R[-1])/2) * self.xm_per_pix
return
def find_lane_lines(self):
'''
Find lane lines with an appropriate method
'''
## Find lane pixels
# If left or right detection from previous loop is false: Use histogram method
if (not(self.detected_L)) or (not(self.detected_R)):
print("Histogram search method used.")
# Call histogram method to find pixel locations of lanes and determine current frame detection validity
leftx, lefty, rightx, righty = self.find_lane_pixels_hist()
else:
print("Polynomial search method used")
# Call poly search method to find pixel locations of lanes and determine current frame detection validity
leftx, lefty, rightx, righty = self.find_lane_pixels_poly()
if (not(self.detected_L)) or (not(self.detected_R)):
print("Polynomial search method failed. Histogram search method used.")
# Neither lane was found, must use histogram method
leftx, lefty, rightx, righty = self.find_lane_pixels_hist()
## Check if both lane lines were found
if (self.detected_L and self.detected_R):
# Reset failed counter
self.num_failed_frame_curr = 0
# Fit new polynomials for both lanes
self.coef_fit_current_L, x_fit_L = self.fit_polynomial(leftx,lefty)
self.coef_fit_current_R, x_fit_R = self.fit_polynomial(rightx,righty)
# Append x_fit to list
self.x_fit_all_L = np.vstack((self.x_fit_all_L, x_fit_L))
self.x_fit_all_R = np.vstack((self.x_fit_all_R, x_fit_R))
# Append coefficients to list
self.coef_fit_all_L = np.vstack((self.coef_fit_all_L, self.coef_fit_current_L))
self.coef_fit_all_R = np.vstack((self.coef_fit_all_R, self.coef_fit_current_R))
# Calculate rolling average
self.calc_best()
else:
# Increment failed counter
self.num_failed_frame_curr = self.num_failed_frame_curr + 1
print("Number of failed frames: " + str(self.num_failed_frame_curr))
# Do not compute new polynomial, use previous best
# Check if number of consecutive failed frames has exceed max
if (self.num_failed_frame_curr > self.num_failed_frame_alwd):
print("Number of consecutive failed frames exceeded.")
sys.exit()
# Calculate radius of curvature
self.calc_rad_real()
# Calculate center line offset
self.calc_offset()
return
def draw_frame(self,img_RGB_in):
'''
Draws the frame with desired polynomials in original image perspective
'''
print("\n")
#print("Processing Frame # " + str(self.frame_num))
# Store new frame
self.update_frame(img_RGB_in)
# Calculate binary image of color and gradient thresholds
self.img_BIN_in = grad_thresh(top_down_xfrm(color_thresh(undistort(img_RGB_in),RGB_out=True),frwd=True),RGB_out=False)
# Create an image to draw the lines on
warp_zero = np.zeros_like(self.img_BIN_in).astype(np.uint8)
color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
# Find lane lines
self.find_lane_lines()
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array([np.transpose(np.vstack([self.x_fit_best_L, self.ploty]))])
pts_right = np.array([np.flipud(np.transpose(np.vstack([self.x_fit_best_R, self.ploty])))])
pts = np.hstack((pts_left, pts_right))
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, Minv, (img_RGB_in.shape[1], img_RGB_in.shape[0]))
# Combine the result with the original image
self.Frame = cv2.addWeighted(img_RGB_in, 1, newwarp, 0.3, 0)
# Draw text on image
cv2.putText(self.Frame,"Lane Center Offset [m]: " + str(round(self.center_line_offst,2)),self.Ofst_Text_pos,self.font,self.fontScale,self.fontColor,self.lineType)
cv2.putText(self.Frame,"Radius Left [m]: " + str(round(self.radius_of_curvature_L,0)),self.Rad_L_Text_pos,self.font,self.fontScale,self.fontColor,self.lineType)
cv2.putText(self.Frame,"Radius Right [m]: " + str(round(self.radius_of_curvature_R,0)),self.Rad_R_Text_pos,self.font,self.fontScale,self.fontColor,self.lineType)
self.frame_num = self.frame_num + 1
#print("Left Radius: " + str(self.radius_of_curvature_L))
#print("Right Radius: " + str(self.radius_of_curvature_R))
#print("Lane Center Offset: " + str(lane_lines.center_line_offst))
#return(self.img_RGB_out)
return(self.Frame)
# Sample histogram
if (False):
img = mpimg.imread('test_images/test6.jpg')
img_BIN_in = grad_thresh(top_down_xfrm(color_thresh(undistort(img),RGB_out=True),frwd=True),RGB_out=False);
lane_lines = LaneLines(img,img_BIN_in)
lane_lines.hist()
histogram = lane_lines.histogram
plt.figure(7)
plt.imshow(img)
plt.title('Original Image')
plt.savefig('output_images/original_histogram.png')
plt.figure(8)
plt.imshow(img_BIN_in, cmap='gray')
plt.title('Original Binary Image')
plt.savefig('output_images/original_bin_histogram.png')
plt.figure(9)
plt.plot(histogram)
plt.title('Histogram')
plt.savefig('output_images/histogram.png')
plt.show()
# Sample polyfit with histogram search
if (False):
img = mpimg.imread('test_images/test6.jpg')
plt.figure(10)
plt.imshow(img)
plt.title('Original Image')
img_BIN_in = grad_thresh(top_down_xfrm(color_thresh(undistort(img),RGB_out=True),frwd=True),RGB_out=False)
lane_lines = LaneLines(img,img_BIN_in)
# Search for lane lines using histogram method
leftx, lefty, rightx, righty = lane_lines.find_lane_pixels_hist()
# Fit new polynomials for both lanes
lane_lines.coef_fit_current_L, x_fit_L = lane_lines.fit_polynomial(leftx,lefty)
lane_lines.coef_fit_current_R, x_fit_R = lane_lines.fit_polynomial(rightx,righty)
print("Current Left Coefficients: " + str(lane_lines.coef_fit_current_L))
print("Current Right Coefficients: " + str(lane_lines.coef_fit_current_R))
plt.figure(11)
plt.imshow(lane_lines.img_RGB_out)
plt.title('2nd Order Polynomial Fit - Histogram Search Method')
plt.savefig('output_images/poly_hist.png')
# Sample search around poly
if (False):
# Append x_fit to list
lane_lines.x_fit_all_L = np.vstack((lane_lines.x_fit_all_L, x_fit_L))
lane_lines.x_fit_all_R = np.vstack((lane_lines.x_fit_all_R, x_fit_R))
# Append coefficients to list
lane_lines.coef_fit_all_L = np.vstack((lane_lines.coef_fit_all_L, lane_lines.coef_fit_current_L))
lane_lines.coef_fit_all_R = np.vstack((lane_lines.coef_fit_all_R, lane_lines.coef_fit_current_R))
print("All Left Coefficients: " + str(lane_lines.coef_fit_all_L))
print("All Right Coefficients: " + str(lane_lines.coef_fit_all_R))
# Calculate rolling average
lane_lines.calc_best()
print("Best Left Coefficients: " + str(lane_lines.coef_fit_best_L))
print("Best Right Coefficients: " + str(lane_lines.coef_fit_best_R))
# Calculate real radius of curvature
lane_lines.calc_rad_real()
print("Left Radius: " + str(lane_lines.radius_of_curvature_L))
print("Right Radius: " + str(lane_lines.radius_of_curvature_R))
lane_lines.calc_offset()
print("Center Lane Offset: " + str(lane_lines.center_line_offst))
# Search for lane lines around previous best polynomial
leftx, lefty, rightx, righty = lane_lines.find_lane_pixels_poly()
# Fit new polynomials for both lanes
lane_lines.coef_fit_current_L, x_fit_L = lane_lines.fit_polynomial(leftx,lefty)
lane_lines.coef_fit_current_R, x_fit_R = lane_lines.fit_polynomial(rightx,righty)
plt.figure(12)
plt.imshow(lane_lines.img_RGB_out)
plt.title('2nd Order Polynomial Fit - Polynomial Search Method')
plt.savefig('output_images/poly_poly.png')
plt.show()
# Test full pipeline
if (True):
img = mpimg.imread('test_images/test6.jpg')
lane_lines = LaneLines(img,img)
plt.figure(13)
plt.imshow(img)
plt.title('Original Image')
plt.figure(14)
plt.imshow(lane_lines.draw_frame(img))
plt.title('Found Lines')
plt.savefig('output_images/full_pipeline.png')
plt.show()
## Process video
if (False):
img = mpimg.imread('test_images/test6.jpg')
lane_lines = LaneLines(img,img)
video_output = 'output_videos/challenge_video_processed.mp4'
#video_output = 'output_videos/project_video_processed.mp4'
## To speed up the testing process you may want to try your pipeline on a shorter subclip of the video
## To do so add .subclip(start_second,end_second) to the end of the line below
## Where start_second and end_second are integer values representing the start and end of the subclip
## You may also uncomment the following line for a subclip of the first 5 seconds
##clip1 = VideoFileClip("test_videos/solidWhiteRight.mp4").subclip(0,5)
clip = VideoFileClip("test_videos/challenge_video.mp4")
video_clip = clip.fl_image(lane_lines.draw_frame) #NOTE: this function expects color images!!
video_clip.write_videofile(video_output, audio=False)
| [
"cv2.rectangle",
"numpy.polyfit",
"numpy.hstack",
"matplotlib.image.imread",
"numpy.array",
"cv2.warpPerspective",
"sys.exit",
"matplotlib.pyplot.imshow",
"numpy.mean",
"numpy.where",
"numpy.delete",
"matplotlib.pyplot.plot",
"cv2.undistort",
"numpy.max",
"cv2.addWeighted",
"numpy.lins... | [((666, 726), 'cv2.undistort', 'cv2.undistort', (['img_RGB_in', 'cam_mtx', 'dist_coef', 'None', 'cam_mtx'], {}), '(img_RGB_in, cam_mtx, dist_coef, None, cam_mtx)\n', (679, 726), False, 'import cv2\n'), ((795, 838), 'matplotlib.image.imread', 'mpimg.imread', (['"""camera_cal/calibration1.jpg"""'], {}), "('camera_cal/calibration1.jpg')\n", (807, 838), True, 'import matplotlib.image as mpimg\n'), ((872, 885), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {}), '(0)\n', (882, 885), True, 'import matplotlib.pyplot as plt\n'), ((890, 905), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (900, 905), True, 'import matplotlib.pyplot as plt\n'), ((910, 937), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Image"""'], {}), "('Original Image')\n", (919, 937), True, 'import matplotlib.pyplot as plt\n'), ((942, 990), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/distorted_image.png"""'], {}), "('output_images/distorted_image.png')\n", (953, 990), True, 'import matplotlib.pyplot as plt\n'), ((995, 1008), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (1005, 1008), True, 'import matplotlib.pyplot as plt\n'), ((1013, 1032), 'matplotlib.pyplot.imshow', 'plt.imshow', (['dst_img'], {}), '(dst_img)\n', (1023, 1032), True, 'import matplotlib.pyplot as plt\n'), ((1037, 1067), 'matplotlib.pyplot.title', 'plt.title', (['"""Undistorted Image"""'], {}), "('Undistorted Image')\n", (1046, 1067), True, 'import matplotlib.pyplot as plt\n'), ((1072, 1122), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/undistorted_image.png"""'], {}), "('output_images/undistorted_image.png')\n", (1083, 1122), True, 'import matplotlib.pyplot as plt\n'), ((1127, 1137), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1135, 1137), True, 'import matplotlib.pyplot as plt\n'), ((1323, 1366), 'cv2.cvtColor', 'cv2.cvtColor', (['img_RGB_in', 'cv2.COLOR_RGB2HSV'], {}), '(img_RGB_in, cv2.COLOR_RGB2HSV)\n', (1335, 
1366), False, 'import cv2\n'), ((1717, 1739), 'numpy.zeros_like', 'np.zeros_like', (['H_layer'], {}), '(H_layer)\n', (1730, 1739), True, 'import numpy as np\n'), ((2417, 2454), 'matplotlib.image.imread', 'mpimg.imread', (['"""test_images/test5.jpg"""'], {}), "('test_images/test5.jpg')\n", (2429, 2454), True, 'import matplotlib.image as mpimg\n'), ((2506, 2519), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (2516, 2519), True, 'import matplotlib.pyplot as plt\n'), ((2524, 2539), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (2534, 2539), True, 'import matplotlib.pyplot as plt\n'), ((2544, 2571), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Image"""'], {}), "('Original Image')\n", (2553, 2571), True, 'import matplotlib.pyplot as plt\n'), ((2576, 2625), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/pre_color_thresh.png"""'], {}), "('output_images/pre_color_thresh.png')\n", (2587, 2625), True, 'import matplotlib.pyplot as plt\n'), ((2630, 2643), 'matplotlib.pyplot.figure', 'plt.figure', (['(3)'], {}), '(3)\n', (2640, 2643), True, 'import matplotlib.pyplot as plt\n'), ((2648, 2682), 'matplotlib.pyplot.imshow', 'plt.imshow', (['thrsh_img'], {'cmap': '"""gray"""'}), "(thrsh_img, cmap='gray')\n", (2658, 2682), True, 'import matplotlib.pyplot as plt\n'), ((2687, 2715), 'matplotlib.pyplot.title', 'plt.title', (['"""Color Threshold"""'], {}), "('Color Threshold')\n", (2696, 2715), True, 'import matplotlib.pyplot as plt\n'), ((2720, 2770), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/post_color_thresh.png"""'], {}), "('output_images/post_color_thresh.png')\n", (2731, 2770), True, 'import matplotlib.pyplot as plt\n'), ((2775, 2785), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2783, 2785), True, 'import matplotlib.pyplot as plt\n'), ((3154, 3225), 'cv2.warpPerspective', 'cv2.warpPerspective', (['img_RGB_in', 'Xfrm', 'img_size'], {'flags': 'cv2.INTER_LINEAR'}), '(img_RGB_in, 
Xfrm, img_size, flags=cv2.INTER_LINEAR)\n', (3173, 3225), False, 'import cv2\n'), ((3322, 3359), 'matplotlib.image.imread', 'mpimg.imread', (['"""test_images/test6.jpg"""'], {}), "('test_images/test6.jpg')\n", (3334, 3359), True, 'import matplotlib.image as mpimg\n'), ((3406, 3419), 'matplotlib.pyplot.figure', 'plt.figure', (['(4)'], {}), '(4)\n', (3416, 3419), True, 'import matplotlib.pyplot as plt\n'), ((3424, 3439), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (3434, 3439), True, 'import matplotlib.pyplot as plt\n'), ((3444, 3471), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Image"""'], {}), "('Original Image')\n", (3453, 3471), True, 'import matplotlib.pyplot as plt\n'), ((3476, 3521), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/pre_top_down.png"""'], {}), "('output_images/pre_top_down.png')\n", (3487, 3521), True, 'import matplotlib.pyplot as plt\n'), ((3526, 3539), 'matplotlib.pyplot.figure', 'plt.figure', (['(5)'], {}), '(5)\n', (3536, 3539), True, 'import matplotlib.pyplot as plt\n'), ((3544, 3562), 'matplotlib.pyplot.imshow', 'plt.imshow', (['warped'], {}), '(warped)\n', (3554, 3562), True, 'import matplotlib.pyplot as plt\n'), ((3567, 3598), 'matplotlib.pyplot.title', 'plt.title', (['"""Top Down View Warp"""'], {}), "('Top Down View Warp')\n", (3576, 3598), True, 'import matplotlib.pyplot as plt\n'), ((3603, 3649), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/post_top_down.png"""'], {}), "('output_images/post_top_down.png')\n", (3614, 3649), True, 'import matplotlib.pyplot as plt\n'), ((3654, 3664), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3662, 3664), True, 'import matplotlib.pyplot as plt\n'), ((3905, 3949), 'cv2.cvtColor', 'cv2.cvtColor', (['img_RGB_in', 'cv2.COLOR_RGB2GRAY'], {}), '(img_RGB_in, cv2.COLOR_RGB2GRAY)\n', (3917, 3949), False, 'import cv2\n'), ((4119, 4158), 'numpy.zeros_like', 'np.zeros_like', (['img_GRAY'], {'dtype': 'np.uint8'}), '(img_GRAY, 
dtype=np.uint8)\n', (4132, 4158), True, 'import numpy as np\n'), ((4529, 4566), 'matplotlib.image.imread', 'mpimg.imread', (['"""test_images/test6.jpg"""'], {}), "('test_images/test6.jpg')\n", (4541, 4566), True, 'import matplotlib.image as mpimg\n'), ((4657, 4670), 'matplotlib.pyplot.figure', 'plt.figure', (['(6)'], {}), '(6)\n', (4667, 4670), True, 'import matplotlib.pyplot as plt\n'), ((4675, 4690), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (4685, 4690), True, 'import matplotlib.pyplot as plt\n'), ((4695, 4743), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Top Down Transformed Image"""'], {}), "('Original Top Down Transformed Image')\n", (4704, 4743), True, 'import matplotlib.pyplot as plt\n'), ((4748, 4796), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/pre_grad_thresh.png"""'], {}), "('output_images/pre_grad_thresh.png')\n", (4759, 4796), True, 'import matplotlib.pyplot as plt\n'), ((4801, 4814), 'matplotlib.pyplot.figure', 'plt.figure', (['(7)'], {}), '(7)\n', (4811, 4814), True, 'import matplotlib.pyplot as plt\n'), ((4819, 4853), 'matplotlib.pyplot.imshow', 'plt.imshow', (['thrsh_img'], {'cmap': '"""gray"""'}), "(thrsh_img, cmap='gray')\n", (4829, 4853), True, 'import matplotlib.pyplot as plt\n'), ((4858, 4889), 'matplotlib.pyplot.title', 'plt.title', (['"""Gradient Threshold"""'], {}), "('Gradient Threshold')\n", (4867, 4889), True, 'import matplotlib.pyplot as plt\n'), ((4894, 4943), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/post_grad_thresh.png"""'], {}), "('output_images/post_grad_thresh.png')\n", (4905, 4943), True, 'import matplotlib.pyplot as plt\n'), ((4948, 4958), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4956, 4958), True, 'import matplotlib.pyplot as plt\n'), ((27203, 27240), 'matplotlib.image.imread', 'mpimg.imread', (['"""test_images/test6.jpg"""'], {}), "('test_images/test6.jpg')\n", (27215, 27240), True, 'import matplotlib.image as mpimg\n'), ((27459, 
27472), 'matplotlib.pyplot.figure', 'plt.figure', (['(7)'], {}), '(7)\n', (27469, 27472), True, 'import matplotlib.pyplot as plt\n'), ((27477, 27492), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (27487, 27492), True, 'import matplotlib.pyplot as plt\n'), ((27497, 27524), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Image"""'], {}), "('Original Image')\n", (27506, 27524), True, 'import matplotlib.pyplot as plt\n'), ((27529, 27580), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/original_histogram.png"""'], {}), "('output_images/original_histogram.png')\n", (27540, 27580), True, 'import matplotlib.pyplot as plt\n'), ((27585, 27598), 'matplotlib.pyplot.figure', 'plt.figure', (['(8)'], {}), '(8)\n', (27595, 27598), True, 'import matplotlib.pyplot as plt\n'), ((27603, 27638), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img_BIN_in'], {'cmap': '"""gray"""'}), "(img_BIN_in, cmap='gray')\n", (27613, 27638), True, 'import matplotlib.pyplot as plt\n'), ((27643, 27677), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Binary Image"""'], {}), "('Original Binary Image')\n", (27652, 27677), True, 'import matplotlib.pyplot as plt\n'), ((27682, 27737), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/original_bin_histogram.png"""'], {}), "('output_images/original_bin_histogram.png')\n", (27693, 27737), True, 'import matplotlib.pyplot as plt\n'), ((27742, 27755), 'matplotlib.pyplot.figure', 'plt.figure', (['(9)'], {}), '(9)\n', (27752, 27755), True, 'import matplotlib.pyplot as plt\n'), ((27760, 27779), 'matplotlib.pyplot.plot', 'plt.plot', (['histogram'], {}), '(histogram)\n', (27768, 27779), True, 'import matplotlib.pyplot as plt\n'), ((27784, 27806), 'matplotlib.pyplot.title', 'plt.title', (['"""Histogram"""'], {}), "('Histogram')\n", (27793, 27806), True, 'import matplotlib.pyplot as plt\n'), ((27811, 27853), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/histogram.png"""'], {}), 
"('output_images/histogram.png')\n", (27822, 27853), True, 'import matplotlib.pyplot as plt\n'), ((27858, 27868), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (27866, 27868), True, 'import matplotlib.pyplot as plt\n'), ((27935, 27972), 'matplotlib.image.imread', 'mpimg.imread', (['"""test_images/test6.jpg"""'], {}), "('test_images/test6.jpg')\n", (27947, 27972), True, 'import matplotlib.image as mpimg\n'), ((27977, 27991), 'matplotlib.pyplot.figure', 'plt.figure', (['(10)'], {}), '(10)\n', (27987, 27991), True, 'import matplotlib.pyplot as plt\n'), ((27996, 28011), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (28006, 28011), True, 'import matplotlib.pyplot as plt\n'), ((28016, 28043), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Image"""'], {}), "('Original Image')\n", (28025, 28043), True, 'import matplotlib.pyplot as plt\n'), ((28692, 28706), 'matplotlib.pyplot.figure', 'plt.figure', (['(11)'], {}), '(11)\n', (28702, 28706), True, 'import matplotlib.pyplot as plt\n'), ((28711, 28745), 'matplotlib.pyplot.imshow', 'plt.imshow', (['lane_lines.img_RGB_out'], {}), '(lane_lines.img_RGB_out)\n', (28721, 28745), True, 'import matplotlib.pyplot as plt\n'), ((28750, 28813), 'matplotlib.pyplot.title', 'plt.title', (['"""2nd Order Polynomial Fit - Histogram Search Method"""'], {}), "('2nd Order Polynomial Fit - Histogram Search Method')\n", (28759, 28813), True, 'import matplotlib.pyplot as plt\n'), ((28818, 28860), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/poly_hist.png"""'], {}), "('output_images/poly_hist.png')\n", (28829, 28860), True, 'import matplotlib.pyplot as plt\n'), ((30607, 30617), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (30615, 30617), True, 'import matplotlib.pyplot as plt\n'), ((30665, 30702), 'matplotlib.image.imread', 'mpimg.imread', (['"""test_images/test6.jpg"""'], {}), "('test_images/test6.jpg')\n", (30677, 30702), True, 'import matplotlib.image as mpimg\n'), ((30743, 
30757), 'matplotlib.pyplot.figure', 'plt.figure', (['(13)'], {}), '(13)\n', (30753, 30757), True, 'import matplotlib.pyplot as plt\n'), ((30762, 30777), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (30772, 30777), True, 'import matplotlib.pyplot as plt\n'), ((30782, 30809), 'matplotlib.pyplot.title', 'plt.title', (['"""Original Image"""'], {}), "('Original Image')\n", (30791, 30809), True, 'import matplotlib.pyplot as plt\n'), ((30814, 30828), 'matplotlib.pyplot.figure', 'plt.figure', (['(14)'], {}), '(14)\n', (30824, 30828), True, 'import matplotlib.pyplot as plt\n'), ((30876, 30900), 'matplotlib.pyplot.title', 'plt.title', (['"""Found Lines"""'], {}), "('Found Lines')\n", (30885, 30900), True, 'import matplotlib.pyplot as plt\n'), ((30905, 30951), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/full_pipeline.png"""'], {}), "('output_images/full_pipeline.png')\n", (30916, 30951), True, 'import matplotlib.pyplot as plt\n'), ((30956, 30966), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (30964, 30966), True, 'import matplotlib.pyplot as plt\n'), ((31011, 31048), 'matplotlib.image.imread', 'mpimg.imread', (['"""test_images/test6.jpg"""'], {}), "('test_images/test6.jpg')\n", (31023, 31048), True, 'import matplotlib.image as mpimg\n'), ((31685, 31733), 'moviepy.editor.VideoFileClip', 'VideoFileClip', (['"""test_videos/challenge_video.mp4"""'], {}), "('test_videos/challenge_video.mp4')\n", (31698, 31733), False, 'from moviepy.editor import VideoFileClip\n'), ((2218, 2240), 'numpy.where', 'np.where', (['(img_out == 0)'], {}), '(img_out == 0)\n', (2226, 2240), True, 'import numpy as np\n'), ((2259, 2278), 'numpy.copy', 'np.copy', (['img_RGB_in'], {}), '(img_RGB_in)\n', (2266, 2278), True, 'import numpy as np\n'), ((4005, 4042), 'cv2.Sobel', 'cv2.Sobel', (['img_GRAY', 'cv2.CV_64F', '(1)', '(0)'], {}), '(img_GRAY, cv2.CV_64F, 1, 0)\n', (4014, 4042), False, 'import cv2\n'), ((4296, 4318), 'numpy.where', 'np.where', (['(img_out 
== 0)'], {}), '(img_out == 0)\n', (4304, 4318), True, 'import numpy as np\n'), ((4337, 4356), 'numpy.copy', 'np.copy', (['img_RGB_in'], {}), '(img_RGB_in)\n', (4344, 4356), True, 'import numpy as np\n'), ((5368, 5392), 'numpy.int', 'np.int', (['(frame_width // 2)'], {}), '(frame_width // 2)\n', (5374, 5392), True, 'import numpy as np\n'), ((5431, 5477), 'numpy.linspace', 'np.linspace', (['(0)', '(frame_height - 1)', 'frame_height'], {}), '(0, frame_height - 1, frame_height)\n', (5442, 5477), True, 'import numpy as np\n'), ((6986, 7023), 'numpy.int', 'np.int', (['(frame_height // self.nwindows)'], {}), '(frame_height // self.nwindows)\n', (6992, 7023), True, 'import numpy as np\n'), ((7808, 7853), 'numpy.empty', 'np.empty', (['(0, self.ploty.size)'], {'dtype': '"""float"""'}), "((0, self.ploty.size), dtype='float')\n", (7816, 7853), True, 'import numpy as np\n'), ((7881, 7926), 'numpy.empty', 'np.empty', (['(0, self.ploty.size)'], {'dtype': '"""float"""'}), "((0, self.ploty.size), dtype='float')\n", (7889, 7926), True, 'import numpy as np\n'), ((8033, 8073), 'numpy.zeros', 'np.zeros', (['self.ploty.size'], {'dtype': '"""float"""'}), "(self.ploty.size, dtype='float')\n", (8041, 8073), True, 'import numpy as np\n'), ((8108, 8148), 'numpy.zeros', 'np.zeros', (['self.ploty.size'], {'dtype': '"""float"""'}), "(self.ploty.size, dtype='float')\n", (8116, 8148), True, 'import numpy as np\n'), ((8245, 8291), 'numpy.zeros', 'np.zeros', (['(self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "(self.poly_fit_dim + 1, dtype='float')\n", (8253, 8291), True, 'import numpy as np\n'), ((8326, 8372), 'numpy.zeros', 'np.zeros', (['(self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "(self.poly_fit_dim + 1, dtype='float')\n", (8334, 8372), True, 'import numpy as np\n'), ((8468, 8519), 'numpy.empty', 'np.empty', (['(0, self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "((0, self.poly_fit_dim + 1), dtype='float')\n", (8476, 8519), True, 'import numpy as np\n'), ((8549, 8600), 
'numpy.empty', 'np.empty', (['(0, self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "((0, self.poly_fit_dim + 1), dtype='float')\n", (8557, 8600), True, 'import numpy as np\n'), ((8700, 8746), 'numpy.zeros', 'np.zeros', (['(self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "(self.poly_fit_dim + 1, dtype='float')\n", (8708, 8746), True, 'import numpy as np\n'), ((8780, 8826), 'numpy.zeros', 'np.zeros', (['(self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "(self.poly_fit_dim + 1, dtype='float')\n", (8788, 8826), True, 'import numpy as np\n'), ((9813, 9879), 'numpy.sum', 'np.sum', (['self.img_BIN_in[self.img_BIN_in.shape[0] // 2:, :]'], {'axis': '(0)'}), '(self.img_BIN_in[self.img_BIN_in.shape[0] // 2:, :], axis=0)\n', (9819, 9879), True, 'import numpy as np\n'), ((10093, 10138), 'numpy.empty', 'np.empty', (['(0, self.ploty.size)'], {'dtype': '"""float"""'}), "((0, self.ploty.size), dtype='float')\n", (10101, 10138), True, 'import numpy as np\n'), ((10166, 10211), 'numpy.empty', 'np.empty', (['(0, self.ploty.size)'], {'dtype': '"""float"""'}), "((0, self.ploty.size), dtype='float')\n", (10174, 10211), True, 'import numpy as np\n'), ((10241, 10292), 'numpy.empty', 'np.empty', (['(0, self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "((0, self.poly_fit_dim + 1), dtype='float')\n", (10249, 10292), True, 'import numpy as np\n'), ((10322, 10373), 'numpy.empty', 'np.empty', (['(0, self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "((0, self.poly_fit_dim + 1), dtype='float')\n", (10330, 10373), True, 'import numpy as np\n'), ((10547, 10609), 'numpy.dstack', 'np.dstack', (['(self.img_BIN_in, self.img_BIN_in, self.img_BIN_in)'], {}), '((self.img_BIN_in, self.img_BIN_in, self.img_BIN_in))\n', (10556, 10609), True, 'import numpy as np\n'), ((10778, 10814), 'numpy.int', 'np.int', (['(self.histogram.shape[0] // 2)'], {}), '(self.histogram.shape[0] // 2)\n', (10784, 10814), True, 'import numpy as np\n'), ((10834, 10877), 'numpy.argmax', 'np.argmax', 
(['self.histogram[:midpoint_height]'], {}), '(self.histogram[:midpoint_height])\n', (10843, 10877), True, 'import numpy as np\n'), ((11110, 11130), 'numpy.array', 'np.array', (['nonzero[0]'], {}), '(nonzero[0])\n', (11118, 11130), True, 'import numpy as np\n'), ((11150, 11170), 'numpy.array', 'np.array', (['nonzero[1]'], {}), '(nonzero[1])\n', (11158, 11170), True, 'import numpy as np\n'), ((13661, 13691), 'numpy.concatenate', 'np.concatenate', (['left_lane_inds'], {}), '(left_lane_inds)\n', (13675, 13691), True, 'import numpy as np\n'), ((13718, 13749), 'numpy.concatenate', 'np.concatenate', (['right_lane_inds'], {}), '(right_lane_inds)\n', (13732, 13749), True, 'import numpy as np\n'), ((14714, 14749), 'numpy.polyfit', 'np.polyfit', (['y', 'x', 'self.poly_fit_dim'], {}), '(y, x, self.poly_fit_dim)\n', (14724, 14749), True, 'import numpy as np\n'), ((15502, 15522), 'numpy.array', 'np.array', (['nonzero[0]'], {}), '(nonzero[0])\n', (15510, 15522), True, 'import numpy as np\n'), ((15542, 15562), 'numpy.array', 'np.array', (['nonzero[1]'], {}), '(nonzero[1])\n', (15550, 15562), True, 'import numpy as np\n'), ((19095, 19141), 'numpy.zeros', 'np.zeros', (['(self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "(self.poly_fit_dim + 1, dtype='float')\n", (19103, 19141), True, 'import numpy as np\n'), ((19175, 19221), 'numpy.zeros', 'np.zeros', (['(self.poly_fit_dim + 1)'], {'dtype': '"""float"""'}), "(self.poly_fit_dim + 1, dtype='float')\n", (19183, 19221), True, 'import numpy as np\n'), ((19251, 19291), 'numpy.zeros', 'np.zeros', (['self.ploty.size'], {'dtype': '"""float"""'}), "(self.ploty.size, dtype='float')\n", (19259, 19291), True, 'import numpy as np\n'), ((19326, 19366), 'numpy.zeros', 'np.zeros', (['self.ploty.size'], {'dtype': '"""float"""'}), "(self.ploty.size, dtype='float')\n", (19334, 19366), True, 'import numpy as np\n'), ((25391, 25435), 'numpy.dstack', 'np.dstack', (['(warp_zero, warp_zero, warp_zero)'], {}), '((warp_zero, warp_zero, warp_zero))\n', 
(25400, 25435), True, 'import numpy as np\n'), ((25787, 25819), 'numpy.hstack', 'np.hstack', (['(pts_left, pts_right)'], {}), '((pts_left, pts_right))\n', (25796, 25819), True, 'import numpy as np\n'), ((26063, 26149), 'cv2.warpPerspective', 'cv2.warpPerspective', (['color_warp', 'Minv', '(img_RGB_in.shape[1], img_RGB_in.shape[0])'], {}), '(color_warp, Minv, (img_RGB_in.shape[1], img_RGB_in.\n shape[0]))\n', (26082, 26149), False, 'import cv2\n'), ((26220, 26267), 'cv2.addWeighted', 'cv2.addWeighted', (['img_RGB_in', '(1)', 'newwarp', '(0.3)', '(0)'], {}), '(img_RGB_in, 1, newwarp, 0.3, 0)\n', (26235, 26267), False, 'import cv2\n'), ((28973, 29017), 'numpy.vstack', 'np.vstack', (['(lane_lines.x_fit_all_L, x_fit_L)'], {}), '((lane_lines.x_fit_all_L, x_fit_L))\n', (28982, 29017), True, 'import numpy as np\n'), ((29051, 29095), 'numpy.vstack', 'np.vstack', (['(lane_lines.x_fit_all_R, x_fit_R)'], {}), '((lane_lines.x_fit_all_R, x_fit_R))\n', (29060, 29095), True, 'import numpy as np\n'), ((29170, 29239), 'numpy.vstack', 'np.vstack', (['(lane_lines.coef_fit_all_L, lane_lines.coef_fit_current_L)'], {}), '((lane_lines.coef_fit_all_L, lane_lines.coef_fit_current_L))\n', (29179, 29239), True, 'import numpy as np\n'), ((29276, 29345), 'numpy.vstack', 'np.vstack', (['(lane_lines.coef_fit_all_R, lane_lines.coef_fit_current_R)'], {}), '((lane_lines.coef_fit_all_R, lane_lines.coef_fit_current_R))\n', (29285, 29345), True, 'import numpy as np\n'), ((30421, 30435), 'matplotlib.pyplot.figure', 'plt.figure', (['(12)'], {}), '(12)\n', (30431, 30435), True, 'import matplotlib.pyplot as plt\n'), ((30444, 30478), 'matplotlib.pyplot.imshow', 'plt.imshow', (['lane_lines.img_RGB_out'], {}), '(lane_lines.img_RGB_out)\n', (30454, 30478), True, 'import matplotlib.pyplot as plt\n'), ((30487, 30551), 'matplotlib.pyplot.title', 'plt.title', (['"""2nd Order Polynomial Fit - Polynomial Search Method"""'], {}), "('2nd Order Polynomial Fit - Polynomial Search Method')\n", (30496, 30551), True, 
'import matplotlib.pyplot as plt\n'), ((30560, 30602), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""output_images/poly_poly.png"""'], {}), "('output_images/poly_poly.png')\n", (30571, 30602), True, 'import matplotlib.pyplot as plt\n'), ((4086, 4103), 'numpy.max', 'np.max', (['abs_sobel'], {}), '(abs_sobel)\n', (4092, 4103), True, 'import numpy as np\n'), ((10900, 10943), 'numpy.argmax', 'np.argmax', (['self.histogram[midpoint_height:]'], {}), '(self.histogram[midpoint_height:])\n', (10909, 10943), True, 'import numpy as np\n'), ((12219, 12328), 'cv2.rectangle', 'cv2.rectangle', (['self.img_RGB_out', '(win_xleft_low, win_y_low)', '(win_xleft_high, win_y_high)', '(0, 255, 0)', '(2)'], {}), '(self.img_RGB_out, (win_xleft_low, win_y_low), (win_xleft_high,\n win_y_high), (0, 255, 0), 2)\n', (12232, 12328), False, 'import cv2\n'), ((12344, 12456), 'cv2.rectangle', 'cv2.rectangle', (['self.img_RGB_out', '(win_xright_low, win_y_low)', '(win_xright_high, win_y_high)', '(0, 255, 0)', '(2)'], {}), '(self.img_RGB_out, (win_xright_low, win_y_low), (\n win_xright_high, win_y_high), (0, 255, 0), 2)\n', (12357, 12456), False, 'import cv2\n'), ((14962, 14982), 'numpy.maximum', 'np.maximum', (['x_fit', '(0)'], {}), '(x_fit, 0)\n', (14972, 14982), True, 'import numpy as np\n'), ((16866, 16928), 'numpy.dstack', 'np.dstack', (['(self.img_BIN_in, self.img_BIN_in, self.img_BIN_in)'], {}), '((self.img_BIN_in, self.img_BIN_in, self.img_BIN_in))\n', (16875, 16928), True, 'import numpy as np\n'), ((17146, 17177), 'numpy.zeros_like', 'np.zeros_like', (['self.img_RGB_out'], {}), '(self.img_RGB_out)\n', (17159, 17177), True, 'import numpy as np\n'), ((17976, 18025), 'numpy.hstack', 'np.hstack', (['(left_line_window1, left_line_window2)'], {}), '((left_line_window1, left_line_window2))\n', (17985, 18025), True, 'import numpy as np\n'), ((18284, 18335), 'numpy.hstack', 'np.hstack', (['(right_line_window1, right_line_window2)'], {}), '((right_line_window1, right_line_window2))\n', (18293, 
18335), True, 'import numpy as np\n'), ((18587, 18643), 'cv2.addWeighted', 'cv2.addWeighted', (['self.img_RGB_out', '(1)', 'window_img', '(0.3)', '(0)'], {}), '(self.img_RGB_out, 1, window_img, 0.3, 0)\n', (18602, 18643), False, 'import cv2\n'), ((19528, 19566), 'numpy.delete', 'np.delete', (['self.x_fit_all_L', '(0)'], {'axis': '(0)'}), '(self.x_fit_all_L, 0, axis=0)\n', (19537, 19566), True, 'import numpy as np\n'), ((19598, 19636), 'numpy.delete', 'np.delete', (['self.x_fit_all_R', '(0)'], {'axis': '(0)'}), '(self.x_fit_all_R, 0, axis=0)\n', (19607, 19636), True, 'import numpy as np\n'), ((19671, 19712), 'numpy.delete', 'np.delete', (['self.coef_fit_all_L', '(0)'], {'axis': '(0)'}), '(self.coef_fit_all_L, 0, axis=0)\n', (19680, 19712), True, 'import numpy as np\n'), ((19747, 19788), 'numpy.delete', 'np.delete', (['self.coef_fit_all_R', '(0)'], {'axis': '(0)'}), '(self.coef_fit_all_R, 0, axis=0)\n', (19756, 19788), True, 'import numpy as np\n'), ((21603, 21623), 'numpy.absolute', 'np.absolute', (['(2 * A_L)'], {}), '(2 * A_L)\n', (21614, 21623), True, 'import numpy as np\n'), ((21698, 21718), 'numpy.absolute', 'np.absolute', (['(2 * A_R)'], {}), '(2 * A_R)\n', (21709, 21718), True, 'import numpy as np\n'), ((23613, 23651), 'numpy.vstack', 'np.vstack', (['(self.x_fit_all_L, x_fit_L)'], {}), '((self.x_fit_all_L, x_fit_L))\n', (23622, 23651), True, 'import numpy as np\n'), ((23683, 23721), 'numpy.vstack', 'np.vstack', (['(self.x_fit_all_R, x_fit_R)'], {}), '((self.x_fit_all_R, x_fit_R))\n', (23692, 23721), True, 'import numpy as np\n'), ((23811, 23868), 'numpy.vstack', 'np.vstack', (['(self.coef_fit_all_L, self.coef_fit_current_L)'], {}), '((self.coef_fit_all_L, self.coef_fit_current_L))\n', (23820, 23868), True, 'import numpy as np\n'), ((23903, 23960), 'numpy.vstack', 'np.vstack', (['(self.coef_fit_all_R, self.coef_fit_current_R)'], {}), '((self.coef_fit_all_R, self.coef_fit_current_R))\n', (23912, 23960), True, 'import numpy as np\n'), ((25914, 25928), 
'numpy.int_', 'np.int_', (['[pts]'], {}), '([pts])\n', (25921, 25928), True, 'import numpy as np\n'), ((18442, 18466), 'numpy.int_', 'np.int_', (['[left_line_pts]'], {}), '([left_line_pts])\n', (18449, 18466), True, 'import numpy as np\n'), ((18517, 18542), 'numpy.int_', 'np.int_', (['[right_line_pts]'], {}), '([right_line_pts])\n', (18524, 18542), True, 'import numpy as np\n'), ((24559, 24569), 'sys.exit', 'sys.exit', ([], {}), '()\n', (24567, 24569), False, 'import sys\n'), ((25322, 25352), 'numpy.zeros_like', 'np.zeros_like', (['self.img_BIN_in'], {}), '(self.img_BIN_in)\n', (25335, 25352), True, 'import numpy as np\n'), ((13327, 13360), 'numpy.mean', 'np.mean', (['nonzerox[good_left_inds]'], {}), '(nonzerox[good_left_inds])\n', (13334, 13360), True, 'import numpy as np\n'), ((13550, 13584), 'numpy.mean', 'np.mean', (['nonzerox[good_right_inds]'], {}), '(nonzerox[good_right_inds])\n', (13557, 13584), True, 'import numpy as np\n'), ((25627, 25669), 'numpy.vstack', 'np.vstack', (['[self.x_fit_best_L, self.ploty]'], {}), '([self.x_fit_best_L, self.ploty])\n', (25636, 25669), True, 'import numpy as np\n'), ((17776, 17827), 'numpy.vstack', 'np.vstack', (['[x_fit_L - self.margin_poly, self.ploty]'], {}), '([x_fit_L - self.margin_poly, self.ploty])\n', (17785, 17827), True, 'import numpy as np\n'), ((18082, 18133), 'numpy.vstack', 'np.vstack', (['[x_fit_R - self.margin_poly, self.ploty]'], {}), '([x_fit_R - self.margin_poly, self.ploty])\n', (18091, 18133), True, 'import numpy as np\n'), ((25726, 25768), 'numpy.vstack', 'np.vstack', (['[self.x_fit_best_R, self.ploty]'], {}), '([self.x_fit_best_R, self.ploty])\n', (25735, 25768), True, 'import numpy as np\n'), ((17894, 17945), 'numpy.vstack', 'np.vstack', (['[x_fit_L + self.margin_poly, self.ploty]'], {}), '([x_fit_L + self.margin_poly, self.ploty])\n', (17903, 17945), True, 'import numpy as np\n'), ((18201, 18252), 'numpy.vstack', 'np.vstack', (['[x_fit_R + self.margin_poly, self.ploty]'], {}), '([x_fit_R + 
self.margin_poly, self.ploty])\n', (18210, 18252), True, 'import numpy as np\n')] |
# coding=utf-8
# Copyright 2022 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Vizier for linear VRNN for MultiWoZSynthDataset.
"""
import multiwoz_synth_tmpl as tmpl # local file import from experimental.language_structure.vrnn.experiments.linear_vrnn
def get_config():
  """Returns the configuration for this experiment.

  Builds on the shared MultiWoZSynth linear-VRNN template (shared base BERT
  embeddings) and applies the platform/scheduling overrides for this study.
  """
  config = tmpl.get_config(
      shared_bert_embedding=True, bert_embedding_type='base')
  # Platform and retry settings specific to this experiment.
  overrides = {
      'platform': 'df',
      'tpu_topology': '4x2',
      'max_task_failures': -1,  # unlimited task-level restarts
      'max_per_task_failures': 10,
  }
  for name, value in overrides.items():
    setattr(config, name, value)
  return config
def get_sweep(hyper):
  """Returns the hyperparameter sweep for this experiment.

  The grid covers the word-weight mixing factor, the PSL constraint weight,
  the encoder width and the base learning rate.
  """
  search_space = [
      hyper.sweep('config.word_weights_file_weight',
                  hyper.discrete([0.0, 0.25, 0.5, 0.75, 1.0])),
      hyper.sweep('config.psl_constraint_learning_weight',
                  hyper.discrete([0.0, 0.001, 0.005, 0.01, 0.05, 0.1])),
      hyper.sweep('config.model.vae_cell.encoder_hidden_size',
                  hyper.discrete([200, 300, 400])),
      hyper.sweep('config.base_learning_rate',
                  hyper.discrete([3e-05, 5e-05, 0.0001, 0.0003])),
  ]
  return hyper.product(search_space)
| [
"multiwoz_synth_tmpl.get_config"
] | [((881, 952), 'multiwoz_synth_tmpl.get_config', 'tmpl.get_config', ([], {'shared_bert_embedding': '(True)', 'bert_embedding_type': '"""base"""'}), "(shared_bert_embedding=True, bert_embedding_type='base')\n", (896, 952), True, 'import multiwoz_synth_tmpl as tmpl\n')] |
from typing import List
from wai.json.object import StrictJSONObject
from wai.json.object.property import ArrayProperty, OneOfProperty, BoolProperty
from .field import *
from .logical import *
from ._FilterExpression import FilterExpression
from ._OrderBy import OrderBy
class FilterSpec(StrictJSONObject['FilterSpec']):
    """
    The top-level document describing how to filter a list request.

    A filter consists of an optional sequence of filter expressions, an
    optional ordering, and an optional flag for including inactive models.
    """
    # The sequential stages of filters of the list request.
    # Each element may be a logical combinator (And/Or) or any one of the
    # concrete per-field filter expressions.
    expressions: List[FilterExpression] = ArrayProperty(
        element_property=OneOfProperty(
            sub_properties=(
                And.as_property(),
                Or.as_property(),
                *(field_filter_expression.as_property()
                  for field_filter_expression in ALL_FIELD_FILTER_EXPRESSIONS)
            )
        ),
        optional=True
    )
    # An optional final ordering on the result, in order of precedence
    # (earlier entries take priority over later ones).
    order_by: List[OrderBy] = ArrayProperty(
        element_property=OrderBy.as_property(),
        optional=True
    )
    # An optional flag to include soft-deleted models as well
    # (defaults to excluding them).
    include_inactive: bool = BoolProperty(
        optional=True,
        default=False
    )
| [
"wai.json.object.property.BoolProperty"
] | [((1136, 1178), 'wai.json.object.property.BoolProperty', 'BoolProperty', ([], {'optional': '(True)', 'default': '(False)'}), '(optional=True, default=False)\n', (1148, 1178), False, 'from wai.json.object.property import ArrayProperty, OneOfProperty, BoolProperty\n')] |
# encoding: utf-8
# flake8: noqa
from sdsstools import get_package_version
# Distribution name used for the package-metadata version lookup below.
NAME = "sdss-basecam"
# Resolve the installed version from package metadata; the `or "dev"`
# fallback implies the lookup can return a falsy value (e.g. when running
# from an uninstalled source checkout) -- TODO confirm against sdsstools.
__version__ = get_package_version(__file__, "sdss-basecam") or "dev"
from .camera import *
from .events import *
from .exceptions import *
from .exposure import *
from .notifier import *
| [
"sdsstools.get_package_version"
] | [((115, 160), 'sdsstools.get_package_version', 'get_package_version', (['__file__', '"""sdss-basecam"""'], {}), "(__file__, 'sdss-basecam')\n", (134, 160), False, 'from sdsstools import get_package_version\n')] |
import math
import random
from typing import Tuple
import cv2
import numpy as np
def np_free_form_mask(
    max_vertex: int, max_length: int, max_brush_width: int, max_angle: int, height: int, width: int
) -> np.ndarray:
    """Draw one random free-form brush stroke onto a blank (height, width) mask.

    Starting from a random point, walks up to ``max_vertex`` random segments
    (alternating turn direction so the stroke meanders), rasterizing each
    segment as a thick line capped with circles.

    Args:
        max_vertex: upper bound on the number of stroke segments.
        max_length: upper bound on each segment's length in pixels.
        max_brush_width: upper bound on the (even) brush thickness.
        max_angle: upper bound on the segment angle, in degrees.
        height, width: size of the returned mask.

    Returns:
        A float32 array of shape (height, width); stroked pixels are non-zero.
    """
    mask = np.zeros((height, width), np.float32)
    num_vertex = random.randint(0, max_vertex)
    start_y = random.randint(0, height - 1)
    start_x = random.randint(0, width - 1)
    brush_width = 0
    for i in range(num_vertex):
        angle = math.radians(random.random() * max_angle)
        if i % 2 == 0:
            # Flip direction on every other segment so the stroke meanders.
            angle = 2 * math.pi - angle
        length = random.randint(0, max_length)
        brush_width = random.randint(10, max_brush_width) // 2 * 2  # force even width
        # NOTE(review): cos drives the y step and sin the x step (swapped versus
        # the usual convention); kept as-is to preserve the stroke statistics.
        # np.int was removed in NumPy 1.24 -- use the builtin int (same
        # truncation behavior for these non-negative, clipped values).
        next_y = int(np.clip(start_y + length * np.cos(angle), 0, height - 1))
        next_x = int(np.clip(start_x + length * np.sin(angle), 0, width - 1))
        cv2.line(mask, (start_y, start_x), (next_y, next_x), 1, brush_width)
        cv2.circle(mask, (start_y, start_x), brush_width // 2, 2)
        start_y, start_x = next_y, next_x
    cv2.circle(mask, (start_y, start_x), brush_width // 2, 2)
    return mask
return mask
def generate_stroke_mask(
    image_size: Tuple[int, int],
    parts: int = 7,
    max_vertex: int = 25,
    max_length: int = 80,
    max_brush_width: int = 80,
    max_angle: int = 360,
) -> np.ndarray:
    """Compose ``parts`` random free-form strokes into one mask.

    Individual strokes are summed and the result is clamped to [0, 1], so
    overlapping strokes do not exceed 1.

    Returns a float32 array of shape ``image_size``.
    """
    height, width = image_size[0], image_size[1]
    strokes = (
        np_free_form_mask(max_vertex, max_length, max_brush_width, max_angle, height, width)
        for _ in range(parts)
    )
    combined = sum(strokes, np.zeros(image_size, dtype=np.float32))
    return np.minimum(combined, 1.0)
| [
"numpy.minimum",
"cv2.line",
"math.radians",
"cv2.circle",
"numpy.zeros",
"numpy.cos",
"numpy.sin",
"random.random",
"random.randint"
] | [((235, 272), 'numpy.zeros', 'np.zeros', (['(height, width)', 'np.float32'], {}), '((height, width), np.float32)\n', (243, 272), True, 'import numpy as np\n'), ((291, 320), 'random.randint', 'random.randint', (['(0)', 'max_vertex'], {}), '(0, max_vertex)\n', (305, 320), False, 'import random\n'), ((335, 364), 'random.randint', 'random.randint', (['(0)', '(height - 1)'], {}), '(0, height - 1)\n', (349, 364), False, 'import random\n'), ((379, 407), 'random.randint', 'random.randint', (['(0)', '(width - 1)'], {}), '(0, width - 1)\n', (393, 407), False, 'import random\n'), ((1168, 1225), 'cv2.circle', 'cv2.circle', (['mask', '(start_y, start_x)', '(brush_width // 2)', '(2)'], {}), '(mask, (start_y, start_x), brush_width // 2, 2)\n', (1178, 1225), False, 'import cv2\n'), ((1461, 1499), 'numpy.zeros', 'np.zeros', (['image_size'], {'dtype': 'np.float32'}), '(image_size, dtype=np.float32)\n', (1469, 1499), True, 'import numpy as np\n'), ((1683, 1704), 'numpy.minimum', 'np.minimum', (['mask', '(1.0)'], {}), '(mask, 1.0)\n', (1693, 1704), True, 'import numpy as np\n'), ((521, 540), 'math.radians', 'math.radians', (['angle'], {}), '(angle)\n', (533, 540), False, 'import math\n'), ((623, 652), 'random.randint', 'random.randint', (['(0)', 'max_length'], {}), '(0, max_length)\n', (637, 652), False, 'import random\n'), ((986, 1054), 'cv2.line', 'cv2.line', (['mask', '(start_y, start_x)', '(next_y, next_x)', '(1)', 'brush_width'], {}), '(mask, (start_y, start_x), (next_y, next_x), 1, brush_width)\n', (994, 1054), False, 'import cv2\n'), ((1063, 1120), 'cv2.circle', 'cv2.circle', (['mask', '(start_y, start_x)', '(brush_width // 2)', '(2)'], {}), '(mask, (start_y, start_x), brush_width // 2, 2)\n', (1073, 1120), False, 'import cv2\n'), ((477, 492), 'random.random', 'random.random', ([], {}), '()\n', (490, 492), False, 'import random\n'), ((675, 710), 'random.randint', 'random.randint', (['(10)', 'max_brush_width'], {}), '(10, max_brush_width)\n', (689, 710), False, 'import 
random\n'), ((757, 770), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (763, 770), True, 'import numpy as np\n'), ((807, 820), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (813, 820), True, 'import numpy as np\n'), ((850, 880), 'numpy.minimum', 'np.minimum', (['next_y', '(height - 1)'], {}), '(next_y, height - 1)\n', (860, 880), True, 'import numpy as np\n'), ((928, 957), 'numpy.minimum', 'np.minimum', (['next_x', '(width - 1)'], {}), '(next_x, width - 1)\n', (938, 957), True, 'import numpy as np\n')] |
from django.db import models
from django.db.models import Count, F, Max
from binder.models import BinderModel
class Caretaker(BinderModel):
	"""Caretaker model with Binder history tracking and annotation aliases."""

	name = models.TextField()
	# When this caretaker was last seen; may be unset.
	last_seen = models.DateTimeField(null=True, blank=True)

	# Social security number. This must never be exposed through the API in
	# any way -- it should stay completely inaccessible.
	ssn = models.TextField(default='my secret ssn')

	def __str__(self):
		"""Human-readable identifier: primary key plus name."""
		return 'caretaker %d: %s' % (self.pk, self.name)

	class Binder:
		# Record change history for this model.
		history = True

	class Annotations:
		# Computed fields exposed by Binder alongside the concrete columns.
		best_animal = Max('animals__name')  # greatest related animal name
		animal_count = Count('animals')  # number of related animals
		bsn = F('ssn') # simple alias
		last_present = F('last_seen')  # alias for last_seen
| [
"django.db.models.TextField",
"django.db.models.Count",
"django.db.models.F",
"django.db.models.DateTimeField",
"django.db.models.Max"
] | [((149, 167), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (165, 167), False, 'from django.db import models\n'), ((181, 224), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (201, 224), False, 'from django.db import models\n'), ((374, 415), 'django.db.models.TextField', 'models.TextField', ([], {'default': '"""my secret ssn"""'}), "(default='my secret ssn')\n", (390, 415), False, 'from django.db import models\n'), ((558, 578), 'django.db.models.Max', 'Max', (['"""animals__name"""'], {}), "('animals__name')\n", (561, 578), False, 'from django.db.models import Count, F, Max\n'), ((596, 612), 'django.db.models.Count', 'Count', (['"""animals"""'], {}), "('animals')\n", (601, 612), False, 'from django.db.models import Count, F, Max\n'), ((621, 629), 'django.db.models.F', 'F', (['"""ssn"""'], {}), "('ssn')\n", (622, 629), False, 'from django.db.models import Count, F, Max\n'), ((663, 677), 'django.db.models.F', 'F', (['"""last_seen"""'], {}), "('last_seen')\n", (664, 677), False, 'from django.db.models import Count, F, Max\n')] |
import threading
# Create threads for each SSH connection
def create_threads(list, function):
    """Run ``function(item)`` in its own thread for every item in ``list``.

    Threads are started as they are created, and the call blocks until all
    of them have finished. (The parameter name ``list`` shadows the builtin;
    it is kept unchanged for backward compatibility with existing callers.)
    """
    workers = []
    for item in list:
        worker = threading.Thread(target=function, args=(item,))
        worker.start()
        workers.append(worker)
    # Wait for every worker to complete before returning.
    for worker in workers:
        worker.join()
"threading.Thread"
] | [((147, 192), 'threading.Thread', 'threading.Thread', ([], {'target': 'function', 'args': '(ip,)'}), '(target=function, args=(ip,))\n', (163, 192), False, 'import threading\n')] |
import cv2
from darkflow.net.build import TFNet
import numpy as np
import glob
import matplotlib.pyplot as plt
# darkflow/YOLOv2 configuration: resume from checkpoint 8375, allow 80% of
# GPU memory, and keep detections above a low 0.1 confidence threshold.
options = {
    'model': 'cfg/yolo-v2.cfg',
    'load':8375,
    'gpu':0.8,
    'threshold':0.1
}

# Running row id written as the first column of csvfile.txt.
count = 1

tfnet = TFNet(options)

# Green in BGR order -- presumably intended for drawing boxes; currently unused.
color = [0, 255, 0]

# Every .jpg under data_from_imd (Windows-style path separator).
files_path = glob.glob('data_from_imd' + "\\*.jpg")

for file in files_path:
    print('Working on {}, Please wait...'.format(file))
    img = cv2.imread(file, cv2.IMREAD_COLOR)
    results = tfnet.return_predict(img)
    try:
        # Keep at most the first detection returned for this image.
        top_detection = results[:1]
        for result in top_detection:
            # first we will be trying to store the x_coordinate, y_coordinate in the file
            x1, y1 = (result['topleft']['x'], result['topleft']['y'])
            x2, y2 = (result['bottomright']['x'], result['bottomright']['y'])
            # Centre of the detected bounding box.
            x_coordinate = (x1 + x2) // 2
            y_coordinate = (y1 + y2) // 2
            # Append one "<id>,<x>,<y>" row per successfully processed image.
            with open('csvfile.txt', 'a') as myfile:
                temp = str(count) + ',' + str(x_coordinate) + "," + str(y_coordinate) + '\n'
                myfile.writelines(temp)
            count += 1
    except Exception as e:
        # Best-effort: log the error and continue with the next image.
        print(str(e))
| [
"darkflow.net.build.TFNet",
"glob.glob",
"cv2.imread"
] | [((211, 225), 'darkflow.net.build.TFNet', 'TFNet', (['options'], {}), '(options)\n', (216, 225), False, 'from darkflow.net.build import TFNet\n'), ((259, 297), 'glob.glob', 'glob.glob', (["('data_from_imd' + '\\\\*.jpg')"], {}), "('data_from_imd' + '\\\\*.jpg')\n", (268, 297), False, 'import glob\n'), ((388, 422), 'cv2.imread', 'cv2.imread', (['file', 'cv2.IMREAD_COLOR'], {}), '(file, cv2.IMREAD_COLOR)\n', (398, 422), False, 'import cv2\n')] |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class WeiboVMblogsItem(scrapy.Item):
    """A scraped Weibo (m.weibo.cn) microblog post."""

    domain = scrapy.Field()
    uid = scrapy.Field()
    mblog_id = scrapy.Field()
    mblog_content = scrapy.Field()
    created_time = scrapy.Field()
    crawled_time = scrapy.Field()

    def get_insert_sql(self):
        """Return the parameterised INSERT statement and its value tuple."""
        statement = """
            insert into crawled_weibov_mblogs(domain, uid, mblog_id, mblog_content, created_time, crawled_time)
            VALUES (%s, %s, %s, %s, %s, %s)
        """
        # Values must line up with the column order in the statement above.
        values = tuple(self[field] for field in (
            "domain", "uid", "mblog_id", "mblog_content", "created_time",
            "crawled_time"))
        return statement, values
class WeiboVCommentsItem(scrapy.Item):
mblog_id = scrapy.Field()
uid = scrapy.Field()
comment_id = scrapy.Field()
comment_content = scrapy.Field()
created_time = scrapy.Field()
crawled_time = scrapy.Field()
def get_insert_sql(self):
insert_sql = """
insert into crawled_weibov_comments(mblog_id, uid, comment_id, comment_content, created_time, crawled_time)
VALUES (%s, %s, %s, %s, %s, %s)
"""
params = (self["mblog_id"], self["uid"], self["comment_id"], self["comment_content"], self["created_time"], self["crawled_time"])
return insert_sql, params | [
"scrapy.Field"
] | [((218, 232), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (230, 232), False, 'import scrapy\n'), ((243, 257), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (255, 257), False, 'import scrapy\n'), ((273, 287), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (285, 287), False, 'import scrapy\n'), ((308, 322), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (320, 322), False, 'import scrapy\n'), ((342, 356), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (354, 356), False, 'import scrapy\n'), ((376, 390), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (388, 390), False, 'import scrapy\n'), ((839, 853), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (851, 853), False, 'import scrapy\n'), ((864, 878), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (876, 878), False, 'import scrapy\n'), ((896, 910), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (908, 910), False, 'import scrapy\n'), ((933, 947), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (945, 947), False, 'import scrapy\n'), ((967, 981), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (979, 981), False, 'import scrapy\n'), ((1001, 1015), 'scrapy.Field', 'scrapy.Field', ([], {}), '()\n', (1013, 1015), False, 'import scrapy\n')] |
from django import forms
from app.models import Application, Owner, Questionnaire, Tag, Rule, TagType
from crispy_forms.bootstrap import Field
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit, ButtonHolder, Fieldset, Hidden, HTML
class ApplicationForm(forms.ModelForm):
class Meta:
model = Application
fields = ['name', 'description', 'primary_owner', 'secondary_owner', 'logo', 'review_cycle', 'next_review_date']
# def __init__(self, *args, **kwargs):
# super(ApplicationForm, self).__init__(*args, )
# self.helper = FormHelper()
# self.helper.form_tag = False
# self.helper.form_class = 'form-horizontal'
# self.helper.layout = Layout(
# Field('name'),
# Field('description'),
# Field('primary_owner'),
# Field('secondary_owner'),
# Field('logo'),
# Field('review_cycle'),
# Field('next_review_date', css_class='input-small dateinput'),
# Submit('submit', 'Submit', css_class="btn-success"),
# )
class OwnerForm(forms.ModelForm):
class Meta:
model = Owner
fields = ['name', 'email']
class QuestionnaireForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(QuestionnaireForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_class = 'form-inline'
self.helper.field_template = 'bootstrap3/layout/inline_field.html'
self.helper.layout = Layout(
Field('application_name')
)
class Meta:
model = Questionnaire
fields = ['application_name']
class TagForm(forms.ModelForm):
class Meta:
model = Tag
fields = ['name', 'type', 'description']
class TagTypeForm(forms.ModelForm):
class Meta:
model = TagType
fields = ['name', 'description']
class RuleForm(forms.ModelForm):
class Meta:
model = Rule
fields = '__all__'
| [
"crispy_forms.bootstrap.Field",
"crispy_forms.helper.FormHelper"
] | [((1453, 1469), 'crispy_forms.helper.FormHelper', 'FormHelper', (['self'], {}), '(self)\n', (1463, 1469), False, 'from crispy_forms.helper import FormHelper\n'), ((1641, 1666), 'crispy_forms.bootstrap.Field', 'Field', (['"""application_name"""'], {}), "('application_name')\n", (1646, 1666), False, 'from crispy_forms.bootstrap import Field\n')] |
# encoding: utf-8
import os
from website import settings
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
| [
"os.path.join"
] | [((204, 255), 'os.path.join', 'os.path.join', (['settings.BASE_PATH', '"""osfstoragecache"""'], {}), "(settings.BASE_PATH, 'osfstoragecache')\n", (216, 255), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# Copyright (c) 2015-2019 Analog Devices, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Modified versions of the software must be conspicuously marked as such.
# - This software is licensed solely and exclusively for use with
# processors/products manufactured by or for Analog Devices, Inc.
# - This software may not be combined or merged with other code in any manner
# that would cause the software to become subject to terms and conditions
# which differ from those listed here.
# - Neither the name of Analog Devices, Inc. nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
# - The use of this software may or may not infringe the patent rights of one
# or more patent holders. This license does not release you from the
# requirement that you obtain separate licenses from these patent holders
# to use this software.
#
# THIS SOFTWARE IS PROVIDED BY ANALOG DEVICES, INC. AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# NON-INFRINGEMENT, TITLE, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ANALOG DEVICES, INC. OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, PUNITIVE OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# DAMAGES ARISING OUT OF CLAIMS OF INTELLECTUAL PROPERTY RIGHTS INFRINGEMENT;
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# 2019-01-10-7CBSD SLA
# -----------------------------------------------------------------------
'''
Simulation of some of the AD7124's filters.
This program QUALITATIVELY derives a filter of a type similar to that
used in the AD7124 family of ADCs, that is, it is not bit-accurate, refer
to the datasheet for guaranteed specifications.
Tested with Python 3.7, Anaconda distribution
'''
from numpy import min, max, convolve, random, average, ones, zeros, amax, log
import numpy as np
from scipy import linspace, fft
from scipy import signal
from scipy.signal import lti, step
from matplotlib import pyplot as plt
plot_sinc4 = True
# Base sample rate in high-power mode, From AD7124 datasheet
f0 = 19200
# Calculate SINC1 oversample ratios for 50, 60Hz
osr50 = int(f0/50)
osr60 = int(f0/60)
# Create "boxcar" SINC1 filters
sinc1_50 = np.ones(osr50)
sinc1_60 = np.ones(osr60)
# Calculate higher order filters
sinc2_50 = np.convolve(sinc1_50, sinc1_50)
sinc3_50 = np.convolve(sinc2_50, sinc1_50)
sinc4_50 = np.convolve(sinc2_50, sinc2_50)
# Here's the filter from datasheet Figure 91,
# SINC4-ish filter with one three zeros at 50Hz, one at 60Hz.
filt_50_60_rej = np.convolve(sinc3_50, sinc1_60)
# Normalize to unity gain by dividing by sum of all taps
sinc1_50 /= np.sum(sinc1_50)
sinc1_60 /= np.sum(sinc1_60)
sinc2_50 /= np.sum(sinc2_50)
sinc3_50 /= np.sum(sinc3_50)
sinc4_50 /= np.sum(sinc4_50)
filt_50_60_rej /= np.sum(filt_50_60_rej)
# freqz: Compute the frequency response of a digital filter.
# Older versions of SicPy return w as radians / sample, newer take an optional
# sample rate argument (fs). Computing frequencies (freqs)
# manually for backwards compatibility.
w, h = signal.freqz(filt_50_60_rej, 1, worN=16385, whole=False) #, fs=f0)
freqs = w * f0/(2.0*np.pi)
hmax = abs(max(h)) #Normalize to unity
response_dB = 20.0 * np.log10(abs(h)/hmax)
plt.figure(1)
plt.title('50Hz SINC1,2,4, and 50/60Hz SINC4 impulse responses')
plt.ylabel('tap val.')
plt.plot(sinc1_50)
plt.plot(sinc2_50)
plt.plot(sinc4_50)
plt.plot(filt_50_60_rej)
plt.xlabel('tap number')
plt.xlim(left=-100, right= 1.1* len(filt_50_60_rej))
plt.grid()
plt.figure(2)
plt.plot(freqs, response_dB, zorder=1)
plt.title('50/60Hz reject filter response')
plt.xlabel('Frequency')
plt.ylabel('Rejection')
plt.axis([0, 150, -120, 1])
plt.show()
| [
"numpy.convolve",
"matplotlib.pyplot.grid",
"numpy.ones",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.axis",
"numpy.max",
"numpy.sum",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.title",
"scipy.signal.freqz",
"matplotlib.pyplot.show... | [((3234, 3248), 'numpy.ones', 'np.ones', (['osr50'], {}), '(osr50)\n', (3241, 3248), True, 'import numpy as np\n'), ((3260, 3274), 'numpy.ones', 'np.ones', (['osr60'], {}), '(osr60)\n', (3267, 3274), True, 'import numpy as np\n'), ((3320, 3351), 'numpy.convolve', 'np.convolve', (['sinc1_50', 'sinc1_50'], {}), '(sinc1_50, sinc1_50)\n', (3331, 3351), True, 'import numpy as np\n'), ((3363, 3394), 'numpy.convolve', 'np.convolve', (['sinc2_50', 'sinc1_50'], {}), '(sinc2_50, sinc1_50)\n', (3374, 3394), True, 'import numpy as np\n'), ((3406, 3437), 'numpy.convolve', 'np.convolve', (['sinc2_50', 'sinc2_50'], {}), '(sinc2_50, sinc2_50)\n', (3417, 3437), True, 'import numpy as np\n'), ((3564, 3595), 'numpy.convolve', 'np.convolve', (['sinc3_50', 'sinc1_60'], {}), '(sinc3_50, sinc1_60)\n', (3575, 3595), True, 'import numpy as np\n'), ((3666, 3682), 'numpy.sum', 'np.sum', (['sinc1_50'], {}), '(sinc1_50)\n', (3672, 3682), True, 'import numpy as np\n'), ((3695, 3711), 'numpy.sum', 'np.sum', (['sinc1_60'], {}), '(sinc1_60)\n', (3701, 3711), True, 'import numpy as np\n'), ((3724, 3740), 'numpy.sum', 'np.sum', (['sinc2_50'], {}), '(sinc2_50)\n', (3730, 3740), True, 'import numpy as np\n'), ((3753, 3769), 'numpy.sum', 'np.sum', (['sinc3_50'], {}), '(sinc3_50)\n', (3759, 3769), True, 'import numpy as np\n'), ((3782, 3798), 'numpy.sum', 'np.sum', (['sinc4_50'], {}), '(sinc4_50)\n', (3788, 3798), True, 'import numpy as np\n'), ((3817, 3839), 'numpy.sum', 'np.sum', (['filt_50_60_rej'], {}), '(filt_50_60_rej)\n', (3823, 3839), True, 'import numpy as np\n'), ((4089, 4145), 'scipy.signal.freqz', 'signal.freqz', (['filt_50_60_rej', '(1)'], {'worN': '(16385)', 'whole': '(False)'}), '(filt_50_60_rej, 1, worN=16385, whole=False)\n', (4101, 4145), False, 'from scipy import signal\n'), ((4267, 4280), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (4277, 4280), True, 'from matplotlib import pyplot as plt\n'), ((4281, 4345), 
'matplotlib.pyplot.title', 'plt.title', (['"""50Hz SINC1,2,4, and 50/60Hz SINC4 impulse responses"""'], {}), "('50Hz SINC1,2,4, and 50/60Hz SINC4 impulse responses')\n", (4290, 4345), True, 'from matplotlib import pyplot as plt\n'), ((4346, 4368), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""tap val."""'], {}), "('tap val.')\n", (4356, 4368), True, 'from matplotlib import pyplot as plt\n'), ((4369, 4387), 'matplotlib.pyplot.plot', 'plt.plot', (['sinc1_50'], {}), '(sinc1_50)\n', (4377, 4387), True, 'from matplotlib import pyplot as plt\n'), ((4388, 4406), 'matplotlib.pyplot.plot', 'plt.plot', (['sinc2_50'], {}), '(sinc2_50)\n', (4396, 4406), True, 'from matplotlib import pyplot as plt\n'), ((4407, 4425), 'matplotlib.pyplot.plot', 'plt.plot', (['sinc4_50'], {}), '(sinc4_50)\n', (4415, 4425), True, 'from matplotlib import pyplot as plt\n'), ((4426, 4450), 'matplotlib.pyplot.plot', 'plt.plot', (['filt_50_60_rej'], {}), '(filt_50_60_rej)\n', (4434, 4450), True, 'from matplotlib import pyplot as plt\n'), ((4451, 4475), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""tap number"""'], {}), "('tap number')\n", (4461, 4475), True, 'from matplotlib import pyplot as plt\n'), ((4529, 4539), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (4537, 4539), True, 'from matplotlib import pyplot as plt\n'), ((4541, 4554), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (4551, 4554), True, 'from matplotlib import pyplot as plt\n'), ((4555, 4593), 'matplotlib.pyplot.plot', 'plt.plot', (['freqs', 'response_dB'], {'zorder': '(1)'}), '(freqs, response_dB, zorder=1)\n', (4563, 4593), True, 'from matplotlib import pyplot as plt\n'), ((4594, 4637), 'matplotlib.pyplot.title', 'plt.title', (['"""50/60Hz reject filter response"""'], {}), "('50/60Hz reject filter response')\n", (4603, 4637), True, 'from matplotlib import pyplot as plt\n'), ((4638, 4661), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Frequency"""'], {}), "('Frequency')\n", (4648, 4661), True, 'from 
matplotlib import pyplot as plt\n'), ((4662, 4685), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Rejection"""'], {}), "('Rejection')\n", (4672, 4685), True, 'from matplotlib import pyplot as plt\n'), ((4686, 4713), 'matplotlib.pyplot.axis', 'plt.axis', (['[0, 150, -120, 1]'], {}), '([0, 150, -120, 1])\n', (4694, 4713), True, 'from matplotlib import pyplot as plt\n'), ((4714, 4724), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4722, 4724), True, 'from matplotlib import pyplot as plt\n'), ((4194, 4200), 'numpy.max', 'max', (['h'], {}), '(h)\n', (4197, 4200), False, 'from numpy import min, max, convolve, random, average, ones, zeros, amax, log\n')] |
# Generated by Django 3.2 on 2021-06-16 16:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('adminweb', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='profissional',
name='cep',
field=models.CharField(max_length=8),
),
]
| [
"django.db.models.CharField"
] | [((327, 357), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(8)'}), '(max_length=8)\n', (343, 357), False, 'from django.db import migrations, models\n')] |
from .models import Input, Output, InputToOutput, Device
from rest_framework import serializers
from taggit_serializer.serializers import (TagListSerializerField, TaggitSerializer)
from taggit.models import Tag
class DeviceSerializer(serializers.ModelSerializer):
class Meta:
model = Device
fields = ('id', 'description')
class InputSerializer(TaggitSerializer, serializers.HyperlinkedModelSerializer):
#url = serializers.HyperlinkedIdentityField(view_name="input:id")
pk = serializers.ReadOnlyField()
tags = TagListSerializerField()
type = serializers.SerializerMethodField()
device = DeviceSerializer(many=False)
#device = serializers.PrimaryKeyRelatedField(queryset=Device.objects.all(), allow_null=True)
#highlight = serializers.HyperlinkedIdentityField(view_name='set_down', format='html')
class Meta:
model = Input
#fields = ('url', 'ph_sn', 'index', 'input_type', 'deleted', 'description', 'outputs')
#fields = ('url', 'url2', 'ph_sn', 'index', 'input_type', 'deleted', 'description', 'outputs', 'tags',)
fields = '__all__'
def get_type(self, obj):
try:
return Input.INPUT_TYPES[obj.input_type-1][1]
except Exception as ex:
return 'UNKNOWN'
# def get_device(self, obj):
# try:
# return obj.device.pk
# except Exception as ex:
# return 'NONE'
def set_device(self, obj, value):
try:
obj.device.pk = value
except Exception as ex:
return 'NONE'
class InputSimpleSerializer(TaggitSerializer, serializers.ModelSerializer):
tags = TagListSerializerField()
type = serializers.SerializerMethodField()
class Meta:
model = Input
fields = 'pk', 'description', 'type', 'tags', 'state'
def get_type(self, obj):
try:
return Input.INPUT_TYPES[obj.input_type-1][1]
except Exception as ex:
return 'UNKNOWN'
class InputAdminSerializer(InputSimpleSerializer):
class Meta(InputSimpleSerializer.Meta):
model = Input
fields = 'pk', 'description', 'type', 'tags', 'state', 'device', 'index'
#
# class OutputSerializer_base(TaggitSerializer, serializers.HyperlinkedModelSerializer):
# def get_type(self, obj):
# try:
# import logging
# logger = logging.getLogger('ios.views.IOsView')
# logger.debug('??????????????????????????????????????????????????')
#
# return Output.OUTPUT_TYPES[obj.output_type-1][1]
# except Exception as ex:
# return 'UNKNOWN'
#
# def get_permissions(self, obj):
# try:
# return obj.permissions
# except Exception as ex:
# return 'UNKNOWN'
#
#
# class OutputSerializer(OutputSerializer_base):
# #url = serializers.HyperlinkedIdentityField(view_name="input:id")
# pk = serializers.ReadOnlyField()
# type = serializers.SerializerMethodField()
# tags = TagListSerializerField()
# permissions = serializers.SerializerMethodField()
#
# class Meta:
# model = Output
# fields = '__all__'
# extra_fields = ['permissions']
# #fields = ('pk', 'url', 'ph_sn', 'index', 'output_type', 'deleted', 'description', 'total_progress', '_my_state',)
#
# def get_field_names(self, declared_fields, info):
# """
# Adds the 'extra_fields' to '_all_'
# https://stackoverflow.com/questions/38245414/django-rest-framework-how-to-include-all-fields-and-a-related-field-in-mo
# :param declared_fields:
# :param info:
# :return:
# """
# import logging
# logger = logging.getLogger('ios.views.IOsView')
# logger.debug('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
# expanded_fields = super(OutputSerializer, self).get_field_names(declared_fields, info)
# logger.debug('*************************************')
# logger.debug(expanded_fields)
#
# if getattr(self.Meta, 'extra_fields', None):
# logger.debug('++++++++++++++++++++++++++++++++++++++')
# logger.debug(expanded_fields)
# return expanded_fields + self.Meta.extra_fields
# else:
# logger.debug('--------------------------------------')
# logger.debug(expanded_fields)
# return expanded_fields
#
#
#
# class OutputSimpleSerializer(OutputSerializer_base):
# type = serializers.SerializerMethodField()
# tags = TagListSerializerField()
# permissions = serializers.SerializerMethodField()
#
# class Meta:
# model = Output
# fields = 'pk', 'description', 'state', 'type', 'tags', 'execution_limit', 'started_time', 'current_position', 'permissions'
#
#
# class OutputAdminSerializer(OutputSimpleSerializer):
# class Meta(OutputSimpleSerializer.Meta):
# model = Output
# fields = 'pk', 'description', 'state', 'type', 'tags', 'execution_limit', 'started_time', 'current_position', 'ph_sn', 'index'
#
class OutputSerializer(TaggitSerializer, serializers.HyperlinkedModelSerializer):
#url = serializers.HyperlinkedIdentityField(view_name="input:id")
pk = serializers.ReadOnlyField()
type = serializers.SerializerMethodField()
tags = TagListSerializerField()
permissions = serializers.SerializerMethodField()
device = DeviceSerializer(many=False)
class Meta:
model = Output
fields = '__all__'
#fields = ('pk', 'url', 'ph_sn', 'index', 'output_type', 'deleted', 'description', 'total_progress', '_my_state',)
def get_type(self, obj):
try:
return Output.OUTPUT_TYPES[obj.output_type-1][1]
except Exception as ex:
return 'UNKNOWN'
def get_permissions(self, obj):
try:
return obj.permissions
except Exception as ex:
return 'UNKNOWN'
class OutputSimpleSerializer(OutputSerializer):
#type = serializers.SerializerMethodField()
#tags = TagListSerializerField()
class Meta(OutputSerializer.Meta):
#model = Output
fields = 'pk', 'description', 'state', 'type', 'tags', 'execution_limit', 'started_time', 'current_position', 'permissions', 'supports_schedules'
#def get_type(self, obj):
# try:
# return Output.OUTPUT_TYPES[obj.output_type-1][1]
# except Exception as ex:
# return 'UNKNOWN'
class OutputAdminSerializer(OutputSimpleSerializer):
class Meta(OutputSimpleSerializer.Meta):
#model = Output
fields = 'pk', 'description', 'state', 'type', 'tags', 'execution_limit', 'started_time', 'current_position', 'permissions', 'supports_schedules', 'device', 'index'
#class IOsSerializer(serializers.Serializer):
# inputs = InputSerializer(many=True, read_only=True)
# outputs = OutputSerializer(many=True, read_only=True)
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
fields = '__all__'
| [
"rest_framework.serializers.SerializerMethodField",
"taggit_serializer.serializers.TagListSerializerField",
"rest_framework.serializers.ReadOnlyField"
] | [((506, 533), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (531, 533), False, 'from rest_framework import serializers\n'), ((545, 569), 'taggit_serializer.serializers.TagListSerializerField', 'TagListSerializerField', ([], {}), '()\n', (567, 569), False, 'from taggit_serializer.serializers import TagListSerializerField, TaggitSerializer\n'), ((581, 616), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (614, 616), False, 'from rest_framework import serializers\n'), ((1663, 1687), 'taggit_serializer.serializers.TagListSerializerField', 'TagListSerializerField', ([], {}), '()\n', (1685, 1687), False, 'from taggit_serializer.serializers import TagListSerializerField, TaggitSerializer\n'), ((1699, 1734), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (1732, 1734), False, 'from rest_framework import serializers\n'), ((5247, 5274), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (5272, 5274), False, 'from rest_framework import serializers\n'), ((5286, 5321), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (5319, 5321), False, 'from rest_framework import serializers\n'), ((5333, 5357), 'taggit_serializer.serializers.TagListSerializerField', 'TagListSerializerField', ([], {}), '()\n', (5355, 5357), False, 'from taggit_serializer.serializers import TagListSerializerField, TaggitSerializer\n'), ((5376, 5411), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (5409, 5411), False, 'from rest_framework import serializers\n')] |
"""
"""
import support
support.compileJPythonc("test256a.py", core=1, jar="test256.jar", output="test256.err")
#raise support.TestError("" + `x`)
| [
"support.compileJPythonc"
] | [((26, 118), 'support.compileJPythonc', 'support.compileJPythonc', (['"""test256a.py"""'], {'core': '(1)', 'jar': '"""test256.jar"""', 'output': '"""test256.err"""'}), "('test256a.py', core=1, jar='test256.jar', output=\n 'test256.err')\n", (49, 118), False, 'import support\n')] |
import json
from pathlib import Path
from typing import Any, Dict
from git import Repo
from cruft.exceptions import CruftAlreadyPresent, NoCruftFound
CruftState = Dict[str, Any]
#######################
# Cruft related utils #
#######################
def get_cruft_file(project_dir_path: Path, exists: bool = True) -> Path:
cruft_file = project_dir_path / ".cruft.json"
if not exists and cruft_file.is_file():
raise CruftAlreadyPresent(cruft_file)
if exists and not cruft_file.is_file():
raise NoCruftFound(project_dir_path.resolve())
return cruft_file
def is_project_updated(repo: Repo, current_commit: str, latest_commit: str, strict: bool) -> bool:
return (
# If the latest commit exactly matches the current commit
latest_commit == current_commit
# Or if there have been no changes to the cookiecutter
or not repo.index.diff(current_commit)
# or if the strict flag is off, we allow for newer commits to count as up to date
or (
repo.is_ancestor(repo.commit(latest_commit), repo.commit(current_commit)) and not strict
)
)
def json_dumps(cruft_state: Dict[str, Any]) -> str:
text = json.dumps(cruft_state, ensure_ascii=False, indent=2, separators=(",", ": "))
return text + "\n"
| [
"cruft.exceptions.CruftAlreadyPresent",
"json.dumps"
] | [((1206, 1283), 'json.dumps', 'json.dumps', (['cruft_state'], {'ensure_ascii': '(False)', 'indent': '(2)', 'separators': "(',', ': ')"}), "(cruft_state, ensure_ascii=False, indent=2, separators=(',', ': '))\n", (1216, 1283), False, 'import json\n'), ((438, 469), 'cruft.exceptions.CruftAlreadyPresent', 'CruftAlreadyPresent', (['cruft_file'], {}), '(cruft_file)\n', (457, 469), False, 'from cruft.exceptions import CruftAlreadyPresent, NoCruftFound\n')] |
"""Main entrypoint for the repobee CLI application.
.. module:: main
:synopsis: Main entrypoint for the repobee CLI application.
.. moduleauthor:: <NAME>
"""
import argparse
import contextlib
import io
import logging
import os
import pathlib
import sys
from typing import List, Optional, Union, Mapping
from types import ModuleType
import repobee_plug as plug
import _repobee.cli.dispatch
import _repobee.cli.parsing
import _repobee.cli.preparser
import _repobee.cli.mainparser
from _repobee import plugin
from _repobee import exception
from _repobee import config
from _repobee.cli.preparser import separate_args
from _repobee import distinfo
from _repobee import disthelpers
_PRE_INIT_ERROR_MESSAGE = """exception was raised before pre-initialization was
complete. This is usually due to incorrect settings.
Try running the `verify-settings` command and see if
the problem can be resolved. If all fails, please open
an issue at https://github.com/repobee/repobee/issues/new
and supply the stack trace below.""".replace(
"\n", " "
)
def run(
cmd: List[str],
config_file: Union[str, pathlib.Path] = "",
plugins: Optional[List[Union[ModuleType, plug.Plugin]]] = None,
workdir: Union[str, pathlib.Path] = ".",
) -> Mapping[str, List[plug.Result]]:
"""Run RepoBee with the provided options. This function is mostly intended
to be used for testing plugins.
.. important::
This function will always unregister all plugins after execution,
including anly plugins that may have been registered prior to running
this function.
Running this function is almost equivalent to running RepoBee from the CLI,
with the following exceptions:
1. Preparser options must be passed as arguments to this function (i.e.
cannot be given as part of ``cmd``).
2. There is no error handling at the top level, so exceptions are raised
instead of just logged.
As an example, the following CLI call:
.. code-block:: bash
$ repobee --plug ext.py --config-file config.ini config show
Can be executed as follows:
.. code-block:: python
import ext
from repobee import run
run(["config", "show"], config_file="config.ini", plugins=[ext])
Args:
cmd: The command to run.
config_file: Path to the configuration file.
plugins: A list of plugin modules and/or plugin classes.
workdir: The working directory to run RepoBee in.
Returns:
A mapping (plugin_name -> plugin_results).
"""
config_file = pathlib.Path(config_file)
cur_workdir = pathlib.Path(".").absolute()
requested_workdir = pathlib.Path(str(workdir)).resolve(strict=True)
@contextlib.contextmanager
def _in_requested_workdir():
try:
os.chdir(requested_workdir)
yield
finally:
os.chdir(cur_workdir)
def _ensure_is_module(p: Union[ModuleType, plug.Plugin]):
if isinstance(p, type) and issubclass(p, plug.Plugin):
mod = ModuleType(p.__name__.lower())
mod.__package__ = f"__{p.__name__}"
setattr(mod, p.__name__, p)
return mod
elif isinstance(p, ModuleType):
return p
else:
raise TypeError(f"not plugin or module: {p}")
wrapped_plugins = list(map(_ensure_is_module, plugins or []))
with _in_requested_workdir():
try:
_repobee.cli.parsing.setup_logging()
# FIXME calling _initialize_plugins like this is ugly, should be
# refactored
_initialize_plugins(argparse.Namespace(no_plugins=False, plug=[]))
plugin.register_plugins(wrapped_plugins)
parsed_args, api = _parse_args(cmd, config_file)
with _set_output_verbosity(getattr(parsed_args, "quiet", 0)):
return _repobee.cli.dispatch.dispatch_command(
parsed_args, api, config_file
)
finally:
plugin.unregister_all_plugins()
def main(sys_args: List[str], unload_plugins: bool = True):
"""Start the repobee CLI.
Args:
sys_args: Arguments from the command line.
unload_plugins: If True, plugins are automatically unloaded just before
the function returns.
"""
try:
_main(sys_args, unload_plugins)
except Exception:
plug.log.error(
"RepoBee exited unexpectedly. "
"Please visit the FAQ to try to resolve the problem: "
"https://repobee.readthedocs.io/en/stable/faq.html"
)
sys.exit(1)
def _main(sys_args: List[str], unload_plugins: bool = True):
_repobee.cli.parsing.setup_logging()
args = sys_args[1:] # drop the name of the program
traceback = False
pre_init = True
try:
preparser_args, app_args = separate_args(args)
parsed_preparser_args = _repobee.cli.preparser.parse_args(
preparser_args
)
_initialize_plugins(parsed_preparser_args)
parsed_args, api = _parse_args(
app_args, parsed_preparser_args.config_file
)
traceback = parsed_args.traceback
pre_init = False
with _set_output_verbosity(getattr(parsed_args, "quiet", 0)):
_repobee.cli.dispatch.dispatch_command(
parsed_args, api, parsed_preparser_args.config_file
)
except exception.PluginLoadError as exc:
plug.log.error(f"{exc.__class__.__name__}: {exc}")
raise
except exception.ParseError as exc:
plug.log.error(str(exc))
raise
except Exception as exc:
# FileErrors can occur during pre-init because of reading the config
# and we don't want tracebacks for those (afaik at this time)
if traceback or (
pre_init and not isinstance(exc, exception.FileError)
):
plug.log.error(str(exc))
if pre_init:
plug.echo(_PRE_INIT_ERROR_MESSAGE)
plug.log.exception("Critical exception")
else:
plug.log.error("{.__class__.__name__}: {}".format(exc, str(exc)))
raise
finally:
if unload_plugins:
plugin.unregister_all_plugins()
def _initialize_plugins(parsed_preparser_args: argparse.Namespace) -> None:
# IMPORTANT: the default plugins must be loaded before user-defined
# plugins to ensure that the user-defined plugins override the defaults
# in firstresult hooks
plug.log.debug("Initializing default plugins")
plugin.initialize_default_plugins()
if distinfo.DIST_INSTALL:
plug.log.debug("Initializing dist plugins")
plugin.initialize_dist_plugins()
if not parsed_preparser_args.no_plugins:
if distinfo.DIST_INSTALL:
plug.log.debug("Initializing active plugins")
plugin.initialize_plugins(
disthelpers.get_active_plugins(), allow_filepath=True
)
plug.log.debug("Initializing preparser-specified plugins")
plugin_names = parsed_preparser_args.plug or []
plugin.initialize_plugins(plugin_names, allow_filepath=True)
def _parse_args(args, config_file):
config.execute_config_hooks(config_file)
parsed_args, api = _repobee.cli.parsing.handle_args(
args, config_file=config_file
)
plug.manager.hook.handle_processed_args(args=parsed_args)
return parsed_args, api
@contextlib.contextmanager
def _set_output_verbosity(quietness: int):
    """Set the output verbosity, expecting `quietness` to be a non-negative
    integer.

    0 = do nothing, all output goes
    1 = silence "regular" user feedback
    2 = silence warnings
    >=3 = silence everything
    """
    assert quietness >= 0
    if quietness == 0:
        # Nothing to silence; yield exactly once and leave stdout untouched.
        yield
    else:
        # Silence "regular" user feedback by swallowing stdout for the
        # duration of the context.
        with contextlib.redirect_stdout(io.StringIO()):
            if quietness == 2:
                # Additionally silence warnings.
                _repobee.cli.parsing.setup_logging(
                    terminal_level=logging.ERROR
                )
            elif quietness >= 3:
                # Additionally silence errors and warnings.
                _repobee.cli.parsing.setup_logging(
                    terminal_level=logging.CRITICAL
                )
            yield
if __name__ == "__main__":
    # Script entry point: delegate to main() with the full argv list
    # (argv[0], the program name, included).
    main(sys.argv)
| [
"_repobee.plugin.initialize_default_plugins",
"_repobee.plugin.initialize_dist_plugins",
"repobee_plug.log.exception",
"pathlib.Path",
"_repobee.plugin.initialize_plugins",
"repobee_plug.log.debug",
"_repobee.plugin.register_plugins",
"os.chdir",
"repobee_plug.echo",
"repobee_plug.log.error",
"_... | [((2572, 2597), 'pathlib.Path', 'pathlib.Path', (['config_file'], {}), '(config_file)\n', (2584, 2597), False, 'import pathlib\n'), ((6523, 6569), 'repobee_plug.log.debug', 'plug.log.debug', (['"""Initializing default plugins"""'], {}), "('Initializing default plugins')\n", (6537, 6569), True, 'import repobee_plug as plug\n'), ((6574, 6609), '_repobee.plugin.initialize_default_plugins', 'plugin.initialize_default_plugins', ([], {}), '()\n', (6607, 6609), False, 'from _repobee import plugin\n'), ((7230, 7270), '_repobee.config.execute_config_hooks', 'config.execute_config_hooks', (['config_file'], {}), '(config_file)\n', (7257, 7270), False, 'from _repobee import config\n'), ((7376, 7433), 'repobee_plug.manager.hook.handle_processed_args', 'plug.manager.hook.handle_processed_args', ([], {'args': 'parsed_args'}), '(args=parsed_args)\n', (7415, 7433), True, 'import repobee_plug as plug\n'), ((4871, 4890), '_repobee.cli.preparser.separate_args', 'separate_args', (['args'], {}), '(args)\n', (4884, 4890), False, 'from _repobee.cli.preparser import separate_args\n'), ((6649, 6692), 'repobee_plug.log.debug', 'plug.log.debug', (['"""Initializing dist plugins"""'], {}), "('Initializing dist plugins')\n", (6663, 6692), True, 'import repobee_plug as plug\n'), ((6701, 6733), '_repobee.plugin.initialize_dist_plugins', 'plugin.initialize_dist_plugins', ([], {}), '()\n', (6731, 6733), False, 'from _repobee import plugin\n'), ((7004, 7062), 'repobee_plug.log.debug', 'plug.log.debug', (['"""Initializing preparser-specified plugins"""'], {}), "('Initializing preparser-specified plugins')\n", (7018, 7062), True, 'import repobee_plug as plug\n'), ((7127, 7187), '_repobee.plugin.initialize_plugins', 'plugin.initialize_plugins', (['plugin_names'], {'allow_filepath': '(True)'}), '(plugin_names, allow_filepath=True)\n', (7152, 7187), False, 'from _repobee import plugin\n'), ((2616, 2633), 'pathlib.Path', 'pathlib.Path', (['"""."""'], {}), "('.')\n", (2628, 2633), False, 'import 
pathlib\n'), ((2807, 2834), 'os.chdir', 'os.chdir', (['requested_workdir'], {}), '(requested_workdir)\n', (2815, 2834), False, 'import os\n'), ((2882, 2903), 'os.chdir', 'os.chdir', (['cur_workdir'], {}), '(cur_workdir)\n', (2890, 2903), False, 'import os\n'), ((3680, 3720), '_repobee.plugin.register_plugins', 'plugin.register_plugins', (['wrapped_plugins'], {}), '(wrapped_plugins)\n', (3703, 3720), False, 'from _repobee import plugin\n'), ((4017, 4048), '_repobee.plugin.unregister_all_plugins', 'plugin.unregister_all_plugins', ([], {}), '()\n', (4046, 4048), False, 'from _repobee import plugin\n'), ((4404, 4562), 'repobee_plug.log.error', 'plug.log.error', (['"""RepoBee exited unexpectedly. Please visit the FAQ to try to resolve the problem: https://repobee.readthedocs.io/en/stable/faq.html"""'], {}), "(\n 'RepoBee exited unexpectedly. Please visit the FAQ to try to resolve the problem: https://repobee.readthedocs.io/en/stable/faq.html'\n )\n", (4418, 4562), True, 'import repobee_plug as plug\n'), ((4613, 4624), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4621, 4624), False, 'import sys\n'), ((5479, 5529), 'repobee_plug.log.error', 'plug.log.error', (['f"""{exc.__class__.__name__}: {exc}"""'], {}), "(f'{exc.__class__.__name__}: {exc}')\n", (5493, 5529), True, 'import repobee_plug as plug\n'), ((6234, 6265), '_repobee.plugin.unregister_all_plugins', 'plugin.unregister_all_plugins', ([], {}), '()\n', (6263, 6265), False, 'from _repobee import plugin\n'), ((6826, 6871), 'repobee_plug.log.debug', 'plug.log.debug', (['"""Initializing active plugins"""'], {}), "('Initializing active plugins')\n", (6840, 6871), True, 'import repobee_plug as plug\n'), ((3621, 3666), 'argparse.Namespace', 'argparse.Namespace', ([], {'no_plugins': '(False)', 'plug': '[]'}), '(no_plugins=False, plug=[])\n', (3639, 3666), False, 'import argparse\n'), ((6035, 6075), 'repobee_plug.log.exception', 'plug.log.exception', (['"""Critical exception"""'], {}), "('Critical exception')\n", (6053, 
6075), True, 'import repobee_plug as plug\n'), ((6927, 6959), '_repobee.disthelpers.get_active_plugins', 'disthelpers.get_active_plugins', ([], {}), '()\n', (6957, 6959), False, 'from _repobee import disthelpers\n'), ((7915, 7928), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (7926, 7928), False, 'import io\n'), ((5988, 6022), 'repobee_plug.echo', 'plug.echo', (['_PRE_INIT_ERROR_MESSAGE'], {}), '(_PRE_INIT_ERROR_MESSAGE)\n', (5997, 6022), True, 'import repobee_plug as plug\n')] |
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import eslint
from py_vulcanize import strip_js_comments
from catapult_build import parse_html
class JSChecker(object):
  """Presubmit checker enforcing the Chromium JavaScript style guide."""

  def __init__(self, input_api, output_api, file_filter=None):
    self.input_api = input_api
    self.output_api = output_api
    # Default filter accepts every affected file.
    self.file_filter = file_filter if file_filter else (lambda x: True)

  def RunChecks(self):
    """Checks for violations of the Chromium JavaScript style guide.

    See:
    http://chromium.org/developers/web-development-style-guide#TOC-JavaScript
    """
    results = []
    affected = self.input_api.AffectedFiles(
        file_filter=self.file_filter,
        include_deletes=False)

    # Only .js files and .html files (which may embed scripts) are checked.
    js_files = [
        f for f in affected
        if f.LocalPath().endswith(('.js', '.html'))
    ]

    error_lines = []
    for f in js_files:
      contents = list(f.NewContents())
      error_lines += CheckStrictMode(
          '\n'.join(contents),
          is_html_file=f.LocalPath().endswith('.html'))

    if js_files:
      success, eslint_output = eslint.RunEslint(
          [f.AbsoluteLocalPath() for f in js_files])
      if not success:
        error_lines.append('\neslint found lint errors:')
        error_lines.append(eslint_output)

    if error_lines:
      error_lines.insert(0, 'Found JavaScript style violations:')
      results.append(
          _MakeErrorOrWarning(self.output_api, '\n'.join(error_lines)))
    return results
def _ErrorHighlight(start, length):
  """Produces a row of '^'s to underline part of a string."""
  padding = ' ' * start
  carets = '^' * length
  return padding + carets
def _MakeErrorOrWarning(output_api, error_text):
  """Wraps error_text in a presubmit error result.

  Despite the name, this always produces an error (never a warning).
  """
  result = output_api.PresubmitError(error_text)
  return result
def CheckStrictMode(contents, is_html_file=False):
  """Returns error strings for code whose first statement isn't 'use strict'."""
  if is_html_file:
    statements = _FirstStatementsInScriptElements(contents)
  else:
    statements = [_FirstStatement(contents)]
  return [
      'Expected "\'use strict\'" as first statement, '
      'but found "%s" instead.' % s
      for s in statements
      if s != "'use strict'"
  ]
def _FirstStatementsInScriptElements(contents):
  """Returns a list of first statements found in each <script> element."""
  # src=None restricts the search to inline scripts (no external src attr).
  soup = parse_html.BeautifulSoup(contents)
  script_elements = soup.find_all('script', src=None)
  return [_FirstStatement(e.get_text()) for e in script_elements]
def _FirstStatement(contents):
  """Extracts the first statement in some JS source code."""
  # Strip comments first so a leading comment doesn't count as the statement.
  source = strip_js_comments.StripJSComments(contents).strip()
  match = re.match('^(.*?);', source, re.DOTALL)
  if match:
    return match.group(1).strip()
  return ''
def RunChecks(input_api, output_api, excluded_paths=None):
  """Module entry point: run the JS checks, skipping excluded paths."""
  def _included(affected_file):
    # With no exclusion patterns, every file is checked.
    if not excluded_paths:
      return True
    local_path = affected_file.LocalPath()
    for pattern in excluded_paths:
      if re.match(pattern, local_path):
        return False
    return True
  return JSChecker(input_api, output_api, file_filter=_included).RunChecks()
| [
"catapult_build.parse_html.BeautifulSoup",
"re.match",
"py_vulcanize.strip_js_comments.StripJSComments"
] | [((2617, 2651), 'catapult_build.parse_html.BeautifulSoup', 'parse_html.BeautifulSoup', (['contents'], {}), '(contents)\n', (2641, 2651), False, 'from catapult_build import parse_html\n'), ((2952, 3001), 're.match', 're.match', (['"""^(.*?);"""', 'stripped_contents', 're.DOTALL'], {}), "('^(.*?);', stripped_contents, re.DOTALL)\n", (2960, 3001), False, 'import re\n'), ((2888, 2931), 'py_vulcanize.strip_js_comments.StripJSComments', 'strip_js_comments.StripJSComments', (['contents'], {}), '(contents)\n', (2921, 2931), False, 'from py_vulcanize import strip_js_comments\n'), ((3265, 3288), 're.match', 're.match', (['pattern', 'path'], {}), '(pattern, path)\n', (3273, 3288), False, 'import re\n')] |
import enum
from django.db import models
from care.facility.models import FacilityBaseModel
from care.users.models import User
from django.contrib.postgres.fields import JSONField
class Notification(FacilityBaseModel):
    """A notification delivered to a user about an event in the system."""

    class EventType(enum.Enum):
        # Distinguishes automatic notifications from user-authored messages.
        SYSTEM_GENERATED = 50
        CUSTOM_MESSAGE = 100

    # Django "choices": (value, label) pairs derived from the enum members.
    EventTypeChoices = [(e.value, e.name) for e in EventType]

    class Medium(enum.Enum):
        # Channel through which the notification is delivered.
        SYSTEM = 0
        SMS = 100
        WHATSAPP = 200

    MediumChoices = [(e.value, e.name) for e in Medium]

    class Event(enum.Enum):
        # Kind of domain event the notification describes.
        MESSAGE = 0
        PATIENT_CREATED = 20
        PATIENT_UPDATED = 30
        PATIENT_DELETED = 40
        PATIENT_CONSULTATION_CREATED = 50
        PATIENT_CONSULTATION_UPDATED = 60
        PATIENT_CONSULTATION_DELETED = 70
        INVESTIGATION_SESSION_CREATED = 80
        INVESTIGATION_UPDATED = 90
        PATIENT_FILE_UPLOAD_CREATED = 100
        CONSULTATION_FILE_UPLOAD_CREATED = 110
        PATIENT_CONSULTATION_UPDATE_CREATED = 120
        PATIENT_CONSULTATION_UPDATE_UPDATED = 130
        PATIENT_CONSULTATION_ASSIGNMENT = 140
        SHIFTING_UPDATED = 200

    EventChoices = [(e.value, e.name) for e in Event]

    # Recipient; the row survives (with NULL) if the user is deleted.
    intended_for = models.ForeignKey(
        User, on_delete=models.SET_NULL, null=True, related_name="notification_intended_for",
    )
    medium_sent = models.IntegerField(choices=MediumChoices, default=Medium.SYSTEM.value)
    # User whose action triggered this notification, if any.
    caused_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, related_name="notification_caused_by",)
    # When the recipient read the notification; NULL while unread.
    read_at = models.DateTimeField(null=True, blank=True)
    event_type = models.IntegerField(choices=EventTypeChoices, default=EventType.SYSTEM_GENERATED.value)
    event = models.IntegerField(choices=EventChoices, default=Event.MESSAGE.value)
    # Free-form message body, only meaningful for custom messages.
    message = models.TextField(max_length=2000, null=True, default=None)
    # NOTE(review): presumably a mapping of related object ids (patient,
    # consultation, ...) — confirm against the code that populates it.
    caused_objects = JSONField(null=True, blank=True, default=dict)
| [
"django.contrib.postgres.fields.JSONField",
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.DateTimeField"
] | [((1205, 1313), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'related_name': '"""notification_intended_for"""'}), "(User, on_delete=models.SET_NULL, null=True, related_name=\n 'notification_intended_for')\n", (1222, 1313), False, 'from django.db import models\n'), ((1342, 1413), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'MediumChoices', 'default': 'Medium.SYSTEM.value'}), '(choices=MediumChoices, default=Medium.SYSTEM.value)\n', (1361, 1413), False, 'from django.db import models\n'), ((1430, 1535), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'related_name': '"""notification_caused_by"""'}), "(User, on_delete=models.SET_NULL, null=True, related_name=\n 'notification_caused_by')\n", (1447, 1535), False, 'from django.db import models\n'), ((1546, 1589), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1566, 1589), False, 'from django.db import models\n'), ((1607, 1699), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'EventTypeChoices', 'default': 'EventType.SYSTEM_GENERATED.value'}), '(choices=EventTypeChoices, default=EventType.\n SYSTEM_GENERATED.value)\n', (1626, 1699), False, 'from django.db import models\n'), ((1707, 1777), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': 'EventChoices', 'default': 'Event.MESSAGE.value'}), '(choices=EventChoices, default=Event.MESSAGE.value)\n', (1726, 1777), False, 'from django.db import models\n'), ((1792, 1850), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(2000)', 'null': '(True)', 'default': 'None'}), '(max_length=2000, null=True, default=None)\n', (1808, 1850), False, 'from django.db import models\n'), ((1872, 1918), 'django.contrib.postgres.fields.JSONField', 'JSONField', ([], {'null': '(True)', 
'blank': '(True)', 'default': 'dict'}), '(null=True, blank=True, default=dict)\n', (1881, 1918), False, 'from django.contrib.postgres.fields import JSONField\n')] |
from selenium import webdriver
from selenium import *
def start():
    """Launch Chrome via the bundled chromedriver binary and return the driver."""
    driver = webdriver.Chrome(executable_path='./services/chromedriver')
    return driver
if __name__ == "__main__":
    # No standalone behavior; this module is meant to be imported for start().
    pass
| [
"selenium.webdriver.Chrome"
] | [((79, 138), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': '"""./services/chromedriver"""'}), "(executable_path='./services/chromedriver')\n", (95, 138), False, 'from selenium import webdriver\n')] |
import numpy as np
def calculate_iou(bboxes1, bboxes2):
    """
    This calculates the intersection over union of N bounding boxes
    in the form N x [left, top, right, bottom], e.g for N=2:

    >> bb = [[21,34,45,67], [67,120, 89, 190]]

    :param bboxes1: np array: N x 4 ground truth bounding boxes
    :param bboxes2: np array: N x 4 target bounding boxes
    :return: iou: np array of N ratios between 0 and 1
    """
    # Promote 1-D inputs (a single box) to shape (1, 4).
    if len(bboxes1.shape) == 1:
        bboxes1 = bboxes1.reshape(1, bboxes1.shape[0])
    if len(bboxes2.shape) == 1:
        bboxes2 = bboxes2.reshape(1, bboxes2.shape[0])

    if bboxes1.shape[0] != bboxes2.shape[0] or bboxes1.shape[1] != bboxes2.shape[1]:
        raise ValueError('Bounding boxes must be of equal dimension')

    left_intersection = np.maximum(bboxes1[:, 0], bboxes2[:, 0])
    top_intersection = np.maximum(bboxes1[:, 1], bboxes2[:, 1])
    right_intersection = np.minimum(bboxes1[:, 2], bboxes2[:, 2])
    bottom_intersection = np.minimum(bboxes1[:, 3], bboxes2[:, 3])

    # BUGFIX: clamp widths/heights at zero. For fully disjoint boxes both
    # deltas go negative and their product was a spurious *positive* overlap
    # area, which could report IoU up to 1.0 for non-overlapping boxes.
    w_intersection = np.maximum(right_intersection - left_intersection, 0)
    h_intersection = np.maximum(bottom_intersection - top_intersection, 0)
    intersection_area = w_intersection * h_intersection

    bboxes1_area = (bboxes1[:, 2] - bboxes1[:, 0]) * (bboxes1[:, 3] - bboxes1[:, 1])
    bboxes2_area = (bboxes2[:, 2] - bboxes2[:, 0]) * (bboxes2[:, 3] - bboxes2[:, 1])
    union_area = bboxes1_area + bboxes2_area - intersection_area

    # clip guards against tiny floating-point excursions outside [0, 1]
    iou = np.clip(intersection_area / union_area, 0, 1)
    return iou
"numpy.clip",
"numpy.maximum",
"numpy.minimum"
] | [((770, 810), 'numpy.maximum', 'np.maximum', (['bboxes1[:, 0]', 'bboxes2[:, 0]'], {}), '(bboxes1[:, 0], bboxes2[:, 0])\n', (780, 810), True, 'import numpy as np\n'), ((834, 874), 'numpy.maximum', 'np.maximum', (['bboxes1[:, 1]', 'bboxes2[:, 1]'], {}), '(bboxes1[:, 1], bboxes2[:, 1])\n', (844, 874), True, 'import numpy as np\n'), ((900, 940), 'numpy.minimum', 'np.minimum', (['bboxes1[:, 2]', 'bboxes2[:, 2]'], {}), '(bboxes1[:, 2], bboxes2[:, 2])\n', (910, 940), True, 'import numpy as np\n'), ((967, 1007), 'numpy.minimum', 'np.minimum', (['bboxes1[:, 3]', 'bboxes2[:, 3]'], {}), '(bboxes1[:, 3], bboxes2[:, 3])\n', (977, 1007), True, 'import numpy as np\n'), ((1434, 1479), 'numpy.clip', 'np.clip', (['(intersection_area / union_area)', '(0)', '(1)'], {}), '(intersection_area / union_area, 0, 1)\n', (1441, 1479), True, 'import numpy as np\n')] |
from __future__ import print_function, unicode_literals
import os
from datetime import timedelta
import multiuploader.default_settings as DEFAULTS
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from multiuploader.models import MultiuploaderFile
class Command(BaseCommand):
    help = 'Clean all temporary attachments loaded to MultiuploaderFile model'

    def handle(self, *args, **options):
        """Remove expired MultiuploaderFile rows along with their on-disk files."""
        max_age_seconds = getattr(
            settings,
            "MULTIUPLOADER_FILE_EXPIRATION_TIME",
            DEFAULTS.MULTIUPLOADER_FILE_EXPIRATION_TIME,
        )
        cutoff = now() - timedelta(seconds=max_age_seconds)

        # Best effort: a file missing on disk must not stop the cleanup.
        for record in MultiuploaderFile.objects.filter(upload_date__lt=cutoff):
            try:
                os.remove(record.file.path)
            except Exception as ex:
                print(ex)

        MultiuploaderFile.objects.filter(upload_date__lt=cutoff).delete()
        print("Cleaning temporary upload files complete")
| [
"django.utils.timezone.now",
"datetime.timedelta",
"os.remove",
"multiuploader.models.MultiuploaderFile.objects.filter"
] | [((725, 789), 'multiuploader.models.MultiuploaderFile.objects.filter', 'MultiuploaderFile.objects.filter', ([], {'upload_date__lt': 'time_threshold'}), '(upload_date__lt=time_threshold)\n', (757, 789), False, 'from multiuploader.models import MultiuploaderFile\n'), ((659, 664), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (662, 664), False, 'from django.utils.timezone import now\n'), ((667, 701), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'expiration_time'}), '(seconds=expiration_time)\n', (676, 701), False, 'from datetime import timedelta\n'), ((824, 851), 'os.remove', 'os.remove', (['attach.file.path'], {}), '(attach.file.path)\n', (833, 851), False, 'import os\n'), ((923, 987), 'multiuploader.models.MultiuploaderFile.objects.filter', 'MultiuploaderFile.objects.filter', ([], {'upload_date__lt': 'time_threshold'}), '(upload_date__lt=time_threshold)\n', (955, 987), False, 'from multiuploader.models import MultiuploaderFile\n')] |
import unittest
from app.models import Source
class testSource(unittest.TestCase):
    """
    SourcesTest class to test the behavior of the Sources class
    """

    def setUp(self):
        """
        Method that runs before each other test runs
        """
        self.new_source = Source(
            'abc-news',
            'ABC news',
            'Your trusted source for breaking news',
            "https://abcnews.go.com",
            "general",
            "en",
            "us",
        )

    def test_instance(self):
        """Check that setUp produced a Source instance."""
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)).
        self.assertIsInstance(self.new_source, Source)
if __name__ == "__main__":
    # Run the tests when this module is executed directly.
    unittest.main()
"unittest.main",
"app.models.Source"
] | [((525, 540), 'unittest.main', 'unittest.main', ([], {}), '()\n', (538, 540), False, 'import unittest\n'), ((288, 412), 'app.models.Source', 'Source', (['"""abc-news"""', '"""ABC news"""', '"""Your trusted source for breaking news"""', '"""https://abcnews.go.com"""', '"""general"""', '"""en"""', '"""us"""'], {}), "('abc-news', 'ABC news', 'Your trusted source for breaking news',\n 'https://abcnews.go.com', 'general', 'en', 'us')\n", (294, 412), False, 'from app.models import Source\n')] |
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import os
import urllib.parse as urlparse
class PostgresBaseManager:
    """Minimal psycopg2 helper for a Heroku or local Postgres database.

    When ``local`` is True it connects with hard-coded localhost credentials;
    otherwise the connection string is read from the DATABASE_URL environment
    variable (Heroku style). All connections run in autocommit mode.
    """

    def __init__(self, local):
        self.database = 'postgres'
        self.user = 'postgres'
        self.password = '<PASSWORD>'
        self.host = 'localhost'
        self.port = '5432'
        self.localTest = local
        self.conn = self.connect()
        # DDL used to (re)create the application tables; kept verbatim,
        # including its original (Chinese) SQL comments, because this is a
        # runtime string executed against the database.
        self.setupSQLCMD = """-- 使用者藥品表
        --Drop Table If Exists UserMedicine;
        Create Table If Not Exists UserMedicine
        (
            ID int GENERATED ALWAYS AS IDENTITY Primary Key,
            UserID varchar(1024),
            MedicineName varchar(1024),
            Amount int,
            TakeAmount int
        );
        -- 提醒時間表
        --Drop Table If Exists Notify;
        Create Table If Not Exists Notify
        (
            ID int GENERATED ALWAYS AS IDENTITY Primary Key,
            UserID varchar(1024),
            Description text,
            TargetMedicine varchar(1024),
            TargetTime varchar(128),
            LastNotifyDate varchar(512),
            TakeDate varchar(512)
        );
        -- 吃藥紀錄表
        --Drop Table If Exists TakeMedicineHistory;
        Create Table If Not Exists TakeMedicineHistory
        (
            ID int GENERATED ALWAYS AS IDENTITY Primary Key,
            UserID varchar(1024),
            Description text,
            AnwTime varchar(128)
        );
        -- 使用者狀態表
        --Drop Table If Exists UserStatus;
        Create Table If Not Exists UserStatus
        (
            UserID varchar(1024) Primary Key,
            Stat varchar(1024),
            TempValue text
        );
        """

    def connect(self):
        """Open and return a new autocommit connection to the server."""
        if self.localTest:
            conn = psycopg2.connect(
                database=self.database,
                user=self.user,
                password=self.password,
                host=self.host,
                port=self.port)
        else:
            # On Heroku the full connection string comes from the environment.
            conn = psycopg2.connect(os.environ['DATABASE_URL'], sslmode='require')
        conn.autocommit = True
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        return conn

    def disconnect(self):
        """Close the current database connection."""
        self.conn.close()

    def testConnection(self):
        """Print the server version to verify that the connection works."""
        print("testing connection...")
        cur = self.conn.cursor()
        try:
            cur.execute('SELECT VERSION()')
            results = cur.fetchall()
            print("Database version : {0} ".format(results))
            self.conn.commit()
        finally:
            # Always release the cursor, even if the query fails.
            cur.close()

    def execute(self, cmd):
        """Run one SQL statement; return rows for non-empty Selects, else None.

        NOTE: a fresh connection is opened per call (original behavior kept);
        the previous connection object is left for the server/GC to reclaim.
        """
        self.conn = self.connect()
        cur = self.conn.cursor()
        try:
            cur.execute(cmd)
            self.conn.commit()
            if cmd.startswith("Select") and (cur.rowcount > 0):
                return cur.fetchall()
            return None
        finally:
            # BUGFIX: the original leaked the cursor on the non-Select path.
            cur.close()

    def executeFile(self, path):
        """Run the SQL statements contained in the file at ``path``."""
        self.conn = self.connect()
        cur = self.conn.cursor()
        try:
            # BUGFIX: close the file deterministically (original left it open).
            with open(path, 'r', encoding="utf-8") as sql_file:
                print("running sql file:" + path)
                cur.execute(sql_file.read())
            self.conn.commit()
        finally:
            cur.close()
"psycopg2.connect"
] | [((1544, 1661), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': 'self.database', 'user': 'self.user', 'password': 'self.password', 'host': 'self.host', 'port': 'self.port'}), '(database=self.database, user=self.user, password=self.\n password, host=self.host, port=self.port)\n', (1560, 1661), False, 'import psycopg2\n'), ((1949, 1998), 'psycopg2.connect', 'psycopg2.connect', (['DATABASE_URL'], {'sslmode': '"""require"""'}), "(DATABASE_URL, sslmode='require')\n", (1965, 1998), False, 'import psycopg2\n')] |
#!/usr/bin/env python
import setpath
from bike.testutils import *
from bike.transformer.save import save
from moveToModule import *
class TestMoveClass(BRMTestCase):
    """Tests for moving a class definition into a new module."""
    def test_movesTheText(self):
        # The moved class should disappear from the source module and appear
        # verbatim in the target module.
        src1=trimLines("""
        def before(): pass
        class TheClass:
            pass
        def after(): pass
        """)
        src1after=trimLines("""
        def before(): pass
        def after(): pass
        """)
        src2after=trimLines("""
        class TheClass:
            pass
        """)
        try:
            createPackageStructure(src1, "")
            moveClassToNewModule(pkgstructureFile1,2,
                                 pkgstructureFile2)
            save()
            self.assertEqual(src1after,file(pkgstructureFile1).read())
            self.assertEqual(src2after,file(pkgstructureFile2).read())
        finally:
            # Always clean up the on-disk package fixture.
            removePackageStructure()
class TestMoveFunction(BRMTestCase):
    """Tests for moving a function into a new module.

    NOTE: the original class defined test_doesntImportRefCreatedInFunction
    twice with identical bodies; the second definition silently shadowed the
    first, so the duplicate has been removed.
    """
    def test_importsNameReference(self):
        """A module-level name used by the function gets imported."""
        src1=trimLines("""
        a = 'hello'
        def theFunction(self):
            print a
        """)
        src2after=trimLines("""
        from a.foo import a
        def theFunction(self):
            print a
        """)
        self.helper(src1, src2after)

    def test_importsExternalReference(self):
        """An import the function relies on is carried to the new module."""
        src0=("""
        a = 'hello'
        """)
        src1=trimLines("""
        from top import a
        def theFunction(self):
            print a
        """)
        src2after=trimLines("""
        from top import a
        def theFunction(self):
            print a
        """)
        try:
            createPackageStructure(src1, "", src0)
            moveFunctionToNewModule(pkgstructureFile1,2,
                                    pkgstructureFile2)
            save()
            self.assertEqual(src2after,file(pkgstructureFile2).read())
        finally:
            removePackageStructure()

    def test_doesntImportRefCreatedInFunction(self):
        """Names local to the function must not trigger an import."""
        src1=trimLines("""
        def theFunction(self):
            a = 'hello'
            print a
        """)
        src2after=trimLines("""
        def theFunction(self):
            a = 'hello'
            print a
        """)
        self.helper(src1, src2after)

    def test_addsImportStatementToOriginalFileIfRequired(self):
        """The source module imports the function it still uses."""
        src1=trimLines("""
        def theFunction(self):
            pass
        b = theFunction()
        """)
        src1after=trimLines("""
        from a.b.bah import theFunction
        b = theFunction()
        """)
        try:
            createPackageStructure(src1,"")
            moveFunctionToNewModule(pkgstructureFile1,1,
                                    pkgstructureFile2)
            save()
            self.assertEqual(src1after,file(pkgstructureFile1).read())
        finally:
            removePackageStructure()

    def test_updatesFromImportStatementsInOtherModules(self):
        """Other modules' from-imports are rewritten to the new location."""
        src0=trimLines("""
        from a.foo import theFunction
        print theFunction()
        """)
        src1=trimLines("""
        def theFunction(self):
            pass
        """)
        src0after=trimLines("""
        from a.b.bah import theFunction
        print theFunction()
        """)
        try:
            createPackageStructure(src1,"",src0)
            moveFunctionToNewModule(pkgstructureFile1,1,
                                    pkgstructureFile2)
            save()
            self.assertEqual(src0after,file(pkgstructureFile0).read())
        finally:
            removePackageStructure()

    def test_updatesFromImportMultiplesInOtherModules(self):
        """Only the moved name is split out of a multi-name from-import."""
        src0=trimLines("""
        from a.foo import something,theFunction,somethingelse #comment
        print theFunction()
        """)
        src1=trimLines("""
        def theFunction(self):
            pass
        something = ''
        somethingelse = 0
        """)
        src0after=trimLines("""
        from a.foo import something,somethingelse #comment
        from a.b.bah import theFunction
        print theFunction()
        """)
        try:
            createPackageStructure(src1,"",src0)
            moveFunctionToNewModule(pkgstructureFile1,1,
                                    pkgstructureFile2)
            save()
            self.assertEqual(src0after,file(pkgstructureFile0).read())
        finally:
            removePackageStructure()

    def test_updatesFromImportMultiplesInTargetModule(self):
        """Moving into a module that imported the name drops the import."""
        src0=trimLines("""
        from a.foo import something,theFunction,somethingelse #comment
        print theFunction()
        """)
        src1=trimLines("""
        def theFunction(self):
            pass
        something = ''
        somethingelse = 0
        """)
        src0after=trimLines("""
        from a.foo import something,somethingelse #comment
        print theFunction()
        def theFunction(self):
            pass
        """)
        try:
            createPackageStructure(src1,"",src0)
            moveFunctionToNewModule(pkgstructureFile1,1,
                                    pkgstructureFile0)
            save()
            self.assertEqual(src0after,file(pkgstructureFile0).read())
        finally:
            removePackageStructure()

    def test_updatesFromImportInTargetModule(self):
        """A single-name from-import in the target module is removed."""
        src0=trimLines("""
        from a.foo import theFunction
        print theFunction()
        """)
        src1=trimLines("""
        def theFunction(self):
            pass
        """)
        src0after=trimLines("""
        print theFunction()
        def theFunction(self):
            pass
        """)
        try:
            createPackageStructure(src1,"",src0)
            moveFunctionToNewModule(pkgstructureFile1,1,
                                    pkgstructureFile0)
            save()
            self.assertEqual(src0after,file(pkgstructureFile0).read())
        finally:
            removePackageStructure()

    def helper(self, src1, src2after):
        """Move line 2 of file1 into file2 and compare file2's content."""
        try:
            createPackageStructure(src1, "")
            moveFunctionToNewModule(pkgstructureFile1,2,
                                    pkgstructureFile2)
            save()
            self.assertEqual(src2after,file(pkgstructureFile2).read())
        finally:
            removePackageStructure()
if __name__ == "__main__":
    # Run this module's tests directly via unittest's CLI runner.
    unittest.main()
| [
"bike.transformer.save.save"
] | [((704, 710), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (708, 710), False, 'from bike.transformer.save import save\n'), ((1792, 1798), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (1796, 1798), False, 'from bike.transformer.save import save\n'), ((3076, 3082), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (3080, 3082), False, 'from bike.transformer.save import save\n'), ((3773, 3779), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (3777, 3779), False, 'from bike.transformer.save import save\n'), ((4610, 4616), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (4614, 4616), False, 'from bike.transformer.save import save\n'), ((5463, 5469), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (5467, 5469), False, 'from bike.transformer.save import save\n'), ((6209, 6215), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (6213, 6215), False, 'from bike.transformer.save import save\n'), ((6565, 6571), 'bike.transformer.save.save', 'save', ([], {}), '()\n', (6569, 6571), False, 'from bike.transformer.save import save\n')] |
import _ast
import ast
from typing import Dict, Union
import os
from pychecktext import teamcity, teamcity_messages
class CheckTextVisitor(ast.NodeVisitor):
    """AST visitor that records calls to gettext-style translation functions.

    Calls whose message arguments are all string literals end up in
    ``literal_calls``; calls with at least one non-literal argument end up in
    ``expression_calls`` (their AST argument nodes can later be resolved to
    source text via :meth:`process_calls`).
    """

    def __init__(self, aliases: Union[Dict[str, str], None] = None):
        # BUGFIX: use None (not {}) as the default to avoid the shared
        # mutable-default-argument pitfall; passing a dict behaves as before.
        self.aliases = {} if aliases is None else aliases
        self.literal_calls = []
        self.expression_calls = []
        # Maps each known function name to the positional indices of its
        # translatable message arguments.
        self.function_signatures = {
            "dgettext": [0, 1],
            "dngettext": [0, 1, 2],
            "dnpgettext": [0, 1, 2, 3],
            "dpgettext": [0, 1, 2],
            "gettext": [0],
            "ldgettext": [0, 1],
            "ldngettext": [0, 1, 2],
            "lgettext": [0],
            "lngettext": [0, 1],
            "ngettext": [0, 1],
            "npgettext": [0, 1, 2],
            "pgettext": [0, 1]}
        # Aliases share the signature of the function they alias.
        for alias, source in self.aliases.items():
            self.function_signatures[alias] = self.function_signatures[source]

    def visit_Call(self, node: _ast.Call):
        func_object = node.func
        # Recurse into nested calls used as arguments, e.g. f(gettext('x')).
        if hasattr(node, 'args'):
            for arg in node.args:
                if isinstance(arg, _ast.Call):
                    self.visit_Call(arg)
        if hasattr(func_object, 'id') and func_object.id in self.function_signatures:
            # Resolve aliases to the canonical gettext function name.
            if func_object.id in self.aliases:
                calling_name = self.aliases[func_object.id]
            else:
                calling_name = func_object.id
            indices = self.function_signatures[func_object.id]
            if len(node.args) <= max(indices):
                # BUGFIX: a call with too few positional arguments (e.g. a
                # bare ``gettext()``) used to raise IndexError; skip instead.
                return
            called_args = []
            message_args = [node.args[index] for index in indices]
            has_complex_arg = False
            for arg in message_args:
                if isinstance(arg, _ast.Constant):
                    called_args.append(arg.value)
                else:
                    has_complex_arg = True
                    called_args.append(arg)
            call_struct = {
                "function": calling_name,
                "args": called_args
            }
            if has_complex_arg:
                self.expression_calls.append(call_struct)
            else:
                self.literal_calls.append(call_struct)

    def process_calls(self, source: str):
        """Replace AST nodes stored in expression_calls with their source text."""
        for call in self.expression_calls:
            for index, call_arg in enumerate(call['args']):
                if not isinstance(call_arg, _ast.Constant):
                    call['args'][index] = ast.get_source_segment(source, call_arg)
def parse_folder(folder_path: str, alias: Dict[str, Union[str, None]]):
    """Recursively parse every .py file under folder_path.

    Returns a dict mapping each file path to that file's gettext call report.
    """
    if teamcity:
        teamcity_messages.customMessage('Checking tokens in folder {}'.format(folder_path), status='INFO', errorDetails=None)
    else:
        print("Checking gettext tokens in folder '{}'".format(folder_path))
    results = {}
    for subdir, _, filenames in os.walk(folder_path):
        for name in filenames:
            path = subdir + os.sep + name
            # Skip hidden files and anything that is not Python source.
            if not name.startswith('.') and path.endswith('.py'):
                results[path] = parse_file(path, alias)
    return results
def parse_file(file_path: str, alias: Union[Dict[str, Union[str, None]], None] = None):
    """Parse one Python file and collect its gettext-style calls.

    :param file_path: path of the .py file to scan.
    :param alias: optional mapping of alias name -> canonical gettext function.
    :return: dict with 'literal_calls' and 'complex_calls', or None when the
        file fails to parse.
    """
    # BUGFIX: None default avoids a shared mutable default argument; normalize
    # before use so the visitor always receives a real dict.
    if alias is None:
        alias = {}
    if teamcity:
        teamcity_messages.customMessage('Checking tokens in file {}'.format(file_path),
                                        status='INFO', errorDetails=None)
    else:
        print("Checking gettext tokens in file '{}'".format(file_path))
    with open(file_path, 'r') as f:
        data = f.read()
    try:
        tree = ast.parse(data)
    except SyntaxError as excinfo:
        if teamcity:
            # BUGFIX: the '{}' placeholder was never formatted with the path.
            teamcity_messages.customMessage("Syntax error whilst parsing file '{}'".format(file_path),
                                            status="ERROR", errorDetails=excinfo.msg)
        else:
            print("Syntax error in file '{}': {}".format(file_path, excinfo))
        return None
    treeVisitor = CheckTextVisitor(alias)
    treeVisitor.visit(tree)
    treeVisitor.process_calls(data)
    return {
        'literal_calls': treeVisitor.literal_calls,
        'complex_calls': treeVisitor.expression_calls
    }
| [
"ast.parse",
"ast.get_source_segment",
"pychecktext.teamcity_messages.customMessage",
"os.walk"
] | [((2994, 3014), 'os.walk', 'os.walk', (['folder_path'], {}), '(folder_path)\n', (3001, 3014), False, 'import os\n'), ((3708, 3723), 'ast.parse', 'ast.parse', (['data'], {}), '(data)\n', (3717, 3723), False, 'import ast\n'), ((2546, 2586), 'ast.get_source_segment', 'ast.get_source_segment', (['source', 'call_arg'], {}), '(source, call_arg)\n', (2568, 2586), False, 'import ast\n'), ((3804, 3922), 'pychecktext.teamcity_messages.customMessage', 'teamcity_messages.customMessage', (['"""Syntax error whilst parsing file \'{}\'"""'], {'status': '"""ERROR"""', 'errorDetails': 'excinfo.msg'}), '("Syntax error whilst parsing file \'{}\'",\n status=\'ERROR\', errorDetails=excinfo.msg)\n', (3835, 3922), False, 'from pychecktext import teamcity, teamcity_messages\n')] |
import discord
from discord.ext import commands
import os
from .player import Player
from extra.menu import ConfirmSkill
import os
from datetime import datetime
# Channel where players must issue bot commands; resolved from the environment
# at import time (int(None) raises TypeError if the variable is unset).
bots_and_commands_channel_id = int(os.getenv('BOTS_AND_COMMANDS_CHANNEL_ID'))
class Agares(Player):
emoji = '<:Agares:839497855621660693>'
def __init__(self, client) -> None:
self.client = client
self.safe_categories = [
int(os.getenv('LESSON_CAT_ID')),
int(os.getenv('CASE_CAT_ID')),
int(os.getenv('EVENTS_CAT_ID')),
int(os.getenv('DEBATE_CAT_ID')),
int(os.getenv('CULTURE_CAT_ID')),
int(os.getenv('TEACHER_APPLICATION_CAT_ID'))
]
@commands.command(aliases=['ma'])
@Player.skill_on_cooldown()
@Player.user_is_class('agares')
@Player.skill_mark()
async def magic_pull(self, ctx, target: discord.Member = None) -> None:
""" Moves a member to the channel you are in.
:param target: The target member. """
attacker = ctx.author
if ctx.channel.id != bots_and_commands_channel_id:
return await ctx.send(f"**{attacker.mention}, you can only use this command in {self.bots_txt.mention}!**")
if await self.is_user_knocked_out(attacker.id):
return await ctx.send(f"**{attacker.mention}, you can't use your skill, because you are knocked-out!**")
attacker_state = attacker.voice
if not attacker_state or not (attacker_vc := attacker_state.channel):
return await ctx.send(f"**{attacker.mention}, you first need to be in a voice channel to magic pull someone!**")
if not target:
return await ctx.send(f"**Please, inform a target member, {attacker.mention}!**")
if attacker.id == target.id:
return await ctx.send(f"**{attacker.mention}, you cannot magic pull yourself!**")
if target.bot:
return await ctx.send(f"**{attacker.mention}, you cannot magic pull a bot!**")
target_currency = await self.get_user_currency(target.id)
if not target_currency:
return await ctx.send(f"**You cannot magic pull someone who doesn't have an account, {attacker.mention}!**")
if target_currency[7] == 'default':
return await ctx.send(f"**You cannot magic pull someone who has a `default` Sloth class, {attacker.mention}!**")
target_state = target.voice
if not target_state or not (target_vc := target_state.channel):
return await ctx.send(f"**{attacker.mention}, you cannot magic pull {target.mention}, because they are not in a voice channel!!**")
if target_vc.category and target_vc.category.id in self.safe_categories:
return await ctx.send(
f"**{attacker.mention}, you can't magic pull {target.mention} from `{target_vc}`, because it's a safe channel.**")
if await self.is_user_protected(target.id):
return await ctx.send(f"**{attacker.mention}, {target.mention} is protected, you can't magic pull them!**")
try:
await target.move_to(attacker_vc)
except Exception as e:
print(e)
await ctx.send(
f"**{attacker.mention}, for some reason I couldn't magic pull {target.mention} from `{target_vc}` to `{attacker_vc}`**")
else:
# Puts the attacker's skill on cooldown
current_ts = await self.get_timestamp()
await self.update_user_action_skill_ts(attacker.id, current_ts)
# Updates user's skills used counter
await self.update_user_skills_used(user_id=attacker.id)
# Sends embedded message into the channel
magic_pull_embed = await self.get_magic_pull_embed(
channel=ctx.channel, perpetrator_id=attacker.id, target_id=target.id,
t_before_vc=target_vc, t_after_vc=attacker_vc)
await ctx.send(content=target.mention, embed=magic_pull_embed)
@commands.command()
@Player.skills_used(requirement=5)
@Player.skill_on_cooldown(skill_number=2)
@Player.user_is_class('agares')
@Player.skill_mark()
# @Player.not_ready()
async def recharge(self, ctx, target: discord.Member = None) -> None:
""" Recharges someone's first skill by removing its cooldown.
:param target: The target person who you want to recharge the skill for. """
perpetrator = ctx.author
if ctx.channel.id != bots_and_commands_channel_id:
return await ctx.send(f"**{perpetrator.mention}, you can only use this command in {self.bots_txt.mention}!**")
if not target:
return await ctx.send(f"**Please, inform a target, {perpetrator.mention}**")
if target.bot:
return await ctx.send(f"**{perpetrator.mention}, you cannot use this on a bot!**")
if await self.is_user_knocked_out(perpetrator.id):
return await ctx.send(f"**{perpetrator.mention}, you can't use your skill, because you are knocked-out!**")
target_currency = await self.get_user_currency(target.id)
if not target_currency:
return await ctx.send(f"**You cannot recharge the skill of someone who doesn't have an account, {perpetrator.mention}!**")
if target_currency[7] == 'default':
return await ctx.send(f"**You cannot recharge the skill of someone who has a `default` Sloth class, {perpetrator.mention}!**")
confirm = await ConfirmSkill(f"**Are you sure you to reset {target.mention}'s first skill cooldown, {perpetrator.mention}?**").prompt(ctx)
if not confirm:
return await ctx.send(f"**Not resetting it, then!**")
await self.check_cooldown(user_id=perpetrator.id, skill_number=2)
try:
await self.reset_user_action_skill_cooldown(target.id)
except Exception as e:
print(e)
await ctx.send(f"**For some reason I couldn't reset {target.menion}'s cooldown, {perpetrator.mention}!**")
else:
# Puts the perpetrator's skill on cooldown
current_ts = await self.get_timestamp()
await self.update_user_action_skill_two_ts(perpetrator.id, current_ts)
# Updates user's skills used counter
await self.update_user_skills_used(user_id=perpetrator.id)
# Sends embedded message into the channel
recharge_embed = await self.get_recharge_embed(
channel=ctx.channel, perpetrator_id=perpetrator.id, target_id=target.id)
await ctx.send(embed=recharge_embed)
async def get_magic_pull_embed(self, channel, perpetrator_id: int, target_id: int, t_before_vc: discord.VoiceChannel, t_after_vc: discord.VoiceChannel) -> discord.Embed:
""" Makes an embedded message for a magic pull action.
:param channel: The context channel.
:param perpetrator_id: The ID of the perpetrator of the magic pulling.
:param target_id: The ID of the target of the magic pulling. """
timestamp = await self.get_timestamp()
magic_pull_embed = discord.Embed(
title="A Magic Pull has been Successfully Pulled Off!",
timestamp=datetime.utcfromtimestamp(timestamp)
)
magic_pull_embed.description = f"**<@{perpetrator_id}> magic pulled <@{target_id}> from `{t_before_vc}` to `{t_after_vc}`!** 🧲"
magic_pull_embed.color = discord.Color.green()
magic_pull_embed.set_thumbnail(url="https://thelanguagesloth.com/media/sloth_classes/Agares.png")
magic_pull_embed.set_footer(text=channel.guild, icon_url=channel.guild.icon_url)
return magic_pull_embed
async def get_recharge_embed(self, channel, perpetrator_id: int, target_id: int) -> discord.Embed:
""" Makes an embedded message for a recharge action.
:param channel: The context channel.
:param perpetrator_id: The ID of the perpetrator of the magic pulling.
:param target_id: The ID of the target of the magic pulling. """
timestamp = await self.get_timestamp()
recharge_embed = discord.Embed(
title="A Cooldown Recharge just Happend!",
timestamp=datetime.utcfromtimestamp(timestamp)
)
recharge_embed.description = f"**<@{perpetrator_id}> reset <@{target_id}>'s first skill cooldown!** 🔁"
recharge_embed.color = discord.Color.green()
recharge_embed.set_thumbnail(url="https://thelanguagesloth.com/media/sloth_classes/Agares.png")
recharge_embed.set_footer(text=channel.guild, icon_url=channel.guild.icon_url)
recharge_embed.set_image(url='https://media1.tenor.com/images/623500b09831e08eb963bdc7d75797c4/tenor.gif?itemid=20299439')
return recharge_embed
| [
"datetime.datetime.utcfromtimestamp",
"extra.menu.ConfirmSkill",
"os.getenv",
"discord.Color.green",
"discord.ext.commands.command"
] | [((197, 238), 'os.getenv', 'os.getenv', (['"""BOTS_AND_COMMANDS_CHANNEL_ID"""'], {}), "('BOTS_AND_COMMANDS_CHANNEL_ID')\n", (206, 238), False, 'import os\n'), ((709, 741), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['ma']"}), "(aliases=['ma'])\n", (725, 741), False, 'from discord.ext import commands\n'), ((4006, 4024), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (4022, 4024), False, 'from discord.ext import commands\n'), ((7447, 7468), 'discord.Color.green', 'discord.Color.green', ([], {}), '()\n', (7466, 7468), False, 'import discord\n'), ((8415, 8436), 'discord.Color.green', 'discord.Color.green', ([], {}), '()\n', (8434, 8436), False, 'import discord\n'), ((428, 454), 'os.getenv', 'os.getenv', (['"""LESSON_CAT_ID"""'], {}), "('LESSON_CAT_ID')\n", (437, 454), False, 'import os\n'), ((473, 497), 'os.getenv', 'os.getenv', (['"""CASE_CAT_ID"""'], {}), "('CASE_CAT_ID')\n", (482, 497), False, 'import os\n'), ((516, 542), 'os.getenv', 'os.getenv', (['"""EVENTS_CAT_ID"""'], {}), "('EVENTS_CAT_ID')\n", (525, 542), False, 'import os\n'), ((561, 587), 'os.getenv', 'os.getenv', (['"""DEBATE_CAT_ID"""'], {}), "('DEBATE_CAT_ID')\n", (570, 587), False, 'import os\n'), ((606, 633), 'os.getenv', 'os.getenv', (['"""CULTURE_CAT_ID"""'], {}), "('CULTURE_CAT_ID')\n", (615, 633), False, 'import os\n'), ((652, 691), 'os.getenv', 'os.getenv', (['"""TEACHER_APPLICATION_CAT_ID"""'], {}), "('TEACHER_APPLICATION_CAT_ID')\n", (661, 691), False, 'import os\n'), ((7231, 7267), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['timestamp'], {}), '(timestamp)\n', (7256, 7267), False, 'from datetime import datetime\n'), ((8226, 8262), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['timestamp'], {}), '(timestamp)\n', (8251, 8262), False, 'from datetime import datetime\n'), ((5498, 5618), 'extra.menu.ConfirmSkill', 'ConfirmSkill', (['f"""**Are you sure you to reset {target.mention}\'s first skill 
cooldown, {perpetrator.mention}?**"""'], {}), '(\n f"**Are you sure you to reset {target.mention}\'s first skill cooldown, {perpetrator.mention}?**"\n )\n', (5510, 5618), False, 'from extra.menu import ConfirmSkill\n')] |
# -*- coding: utf-8 -*-
#
import pytest
import optimesh
from helpers import download_mesh
@pytest.mark.parametrize(
"options",
[
["--method", "cpt-dp"],
["--method", "cpt-uniform-fp"],
["--method", "cpt-uniform-qn"],
#
["--method", "cvt-uniform-lloyd"],
["--method", "cvt-uniform-lloyd", "--omega", "2.0"],
["--method", "cvt-uniform-qnb"],
["--method", "cvt-uniform-qnf", "--omega", "0.9"],
#
["--method", "odt-dp-fp"],
["--method", "odt-uniform-fp"],
["--method", "odt-uniform-bfgs"],
],
)
def test_cli(options):
input_file = download_mesh(
# "circle.vtk", "614fcabc0388e1b43723ac64f8555ef52ee4ddda1466368c450741eb"
"pacman.vtk",
"19a0c0466a4714b057b88e339ab5bd57020a04cdf1d564c86dc4add6",
)
output_file = "out.vtk"
optimesh.cli.main([input_file, output_file, "-t", "1.0e-5", "-n", "5"] + options)
return
def test_info():
input_file = download_mesh(
"pacman.vtk", "19a0c0466a4714b057b88e339ab5bd57020a04cdf1d564c86dc4add6"
)
optimesh.cli.info([input_file])
return
if __name__ == "__main__":
test_cli("odt")
| [
"pytest.mark.parametrize",
"optimesh.cli.info",
"helpers.download_mesh",
"optimesh.cli.main"
] | [((95, 504), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""options"""', "[['--method', 'cpt-dp'], ['--method', 'cpt-uniform-fp'], ['--method',\n 'cpt-uniform-qn'], ['--method', 'cvt-uniform-lloyd'], ['--method',\n 'cvt-uniform-lloyd', '--omega', '2.0'], ['--method', 'cvt-uniform-qnb'],\n ['--method', 'cvt-uniform-qnf', '--omega', '0.9'], ['--method',\n 'odt-dp-fp'], ['--method', 'odt-uniform-fp'], ['--method',\n 'odt-uniform-bfgs']]"], {}), "('options', [['--method', 'cpt-dp'], ['--method',\n 'cpt-uniform-fp'], ['--method', 'cpt-uniform-qn'], ['--method',\n 'cvt-uniform-lloyd'], ['--method', 'cvt-uniform-lloyd', '--omega',\n '2.0'], ['--method', 'cvt-uniform-qnb'], ['--method', 'cvt-uniform-qnf',\n '--omega', '0.9'], ['--method', 'odt-dp-fp'], ['--method',\n 'odt-uniform-fp'], ['--method', 'odt-uniform-bfgs']])\n", (118, 504), False, 'import pytest\n'), ((643, 734), 'helpers.download_mesh', 'download_mesh', (['"""pacman.vtk"""', '"""19a0c0466a4714b057b88e339ab5bd57020a04cdf1d564c86dc4add6"""'], {}), "('pacman.vtk',\n '19a0c0466a4714b057b88e339ab5bd57020a04cdf1d564c86dc4add6')\n", (656, 734), False, 'from helpers import download_mesh\n'), ((869, 954), 'optimesh.cli.main', 'optimesh.cli.main', (["([input_file, output_file, '-t', '1.0e-5', '-n', '5'] + options)"], {}), "([input_file, output_file, '-t', '1.0e-5', '-n', '5'] +\n options)\n", (886, 954), False, 'import optimesh\n'), ((998, 1089), 'helpers.download_mesh', 'download_mesh', (['"""pacman.vtk"""', '"""19a0c0466a4714b057b88e339ab5bd57020a04cdf1d564c86dc4add6"""'], {}), "('pacman.vtk',\n '19a0c0466a4714b057b88e339ab5bd57020a04cdf1d564c86dc4add6')\n", (1011, 1089), False, 'from helpers import download_mesh\n'), ((1104, 1135), 'optimesh.cli.info', 'optimesh.cli.info', (['[input_file]'], {}), '([input_file])\n', (1121, 1135), False, 'import optimesh\n')] |
from unittest import TestCase
from bavard_ml_utils.gcp.gcs import GCSClient
from test.utils import DirSpec, FileSpec
class TestGCSClient(TestCase):
test_data_spec = DirSpec(
path="gcs-test",
children=[
FileSpec(path="test-file.txt", content="This is a test."),
FileSpec(path="test-file-2.txt", content="This is also a test."),
DirSpec(path="subdir", children=[FileSpec(path="test-file-3.txt", content="This one too.")]),
],
)
test_bucket_name = "gcs-client-bucket"
@classmethod
def setUpClass(cls):
cls.test_data_spec.write()
cls.client = GCSClient()
cls.client.create_bucket(cls.test_bucket_name)
@classmethod
def tearDownClass(cls):
cls.test_data_spec.remove()
def tes_can_upload_and_download_blob(self):
test_file = self.test_data_spec.children[0]
# Can upload blob.
gcs_uri = f"gs://{self.test_bucket_name}/{test_file.path}"
blob = self.client.upload_filename_to_blob(test_file.path, gcs_uri)
# Can download blob; contents are correct.
self.assertEqual(test_file.content, blob.download_as_text())
# Can delete blob.
blob.delete()
def test_can_upload_and_download_directory(self):
gcs_upload_dir = f"gs://{self.test_bucket_name}/temp-data"
# Upload directory (including a subdirectory).
self.client.upload_dir(self.test_data_spec.path, gcs_upload_dir)
# Download directory.
self.client.download_dir(gcs_upload_dir, "gcs-test-copy")
# Folder that was uploaded and downloaded should recursively have
# the same contents as the original one.
downloaded_spec = DirSpec.from_path("gcs-test-copy")
for child, dchild in zip(
sorted(self.test_data_spec.children, key=lambda c: c.path),
sorted(downloaded_spec.children, key=lambda c: c.path),
):
self.assertEqual(child, dchild)
# Clean up.
downloaded_spec.remove()
| [
"bavard_ml_utils.gcp.gcs.GCSClient",
"test.utils.FileSpec",
"test.utils.DirSpec.from_path"
] | [((639, 650), 'bavard_ml_utils.gcp.gcs.GCSClient', 'GCSClient', ([], {}), '()\n', (648, 650), False, 'from bavard_ml_utils.gcp.gcs import GCSClient\n'), ((1724, 1758), 'test.utils.DirSpec.from_path', 'DirSpec.from_path', (['"""gcs-test-copy"""'], {}), "('gcs-test-copy')\n", (1741, 1758), False, 'from test.utils import DirSpec, FileSpec\n'), ((237, 294), 'test.utils.FileSpec', 'FileSpec', ([], {'path': '"""test-file.txt"""', 'content': '"""This is a test."""'}), "(path='test-file.txt', content='This is a test.')\n", (245, 294), False, 'from test.utils import DirSpec, FileSpec\n'), ((308, 372), 'test.utils.FileSpec', 'FileSpec', ([], {'path': '"""test-file-2.txt"""', 'content': '"""This is also a test."""'}), "(path='test-file-2.txt', content='This is also a test.')\n", (316, 372), False, 'from test.utils import DirSpec, FileSpec\n'), ((419, 476), 'test.utils.FileSpec', 'FileSpec', ([], {'path': '"""test-file-3.txt"""', 'content': '"""This one too."""'}), "(path='test-file-3.txt', content='This one too.')\n", (427, 476), False, 'from test.utils import DirSpec, FileSpec\n')] |
from fim_mission import *
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torch.optim as optim
from torch.optim import lr_scheduler
import torchvision.transforms as transforms
from torchvision import datasets, models
import matplotlib.pyplot as plt
from tensorboardX import SummaryWriter, writer
import os
import argparse
import random
import numpy as np
import warnings
from PIL import Image
plt.ion() # interactive mode
warnings.filterwarnings('ignore')
os.environ['CUDA_VISIBLE_DEVICES'] = '1' # opt.cuda
device = torch.device("cuda" if torch.cuda.is_available() else "cpu") # "cpu" #
datawriter = SummaryWriter()
data_root = './Dataset' # '../Dataset/Kaggle265'
target_root = './Targetset' # '../Dataset/Kaggle265'
num_workers = 0
loader = aim_loader(data_root, target_root, num_workers)
model_ft = models.resnet50(pretrained=True).to(device)
print(model_ft)
'''
# criterion = torch.nn.MSELoss()
criterion = torch.nn.L1Loss()
step = 16
model_ft.eval()
with torch.no_grad():
for num_iter, (data, target, data_size, target_size) in enumerate(tqdm(loader)):
target = target.to(device)
data = data.to(device)
data_size = data_size.item()
target_size = target_size.item()
min_loss = 1024.
min_i, min_j = -1, -1
for i in range(0, data_size-target_size, step):
for j in range(0, data_size-target_size, step):
trans_T = model_ft(target)
trans_D = model_ft(data[:,:,i:i+target_size,j:j+target_size])
loss = criterion(trans_T, trans_D).item()
if min_loss>loss:
min_i, min_j = i, j
min_loss = loss
head_i = max(0, min_i-step)
head_j = max(0, min_j-step)
tail_i = min(min_i+step, data_size)
tail_j = min(min_j+step, data_size)
for i in range(head_i, tail_i):
for j in range(head_j, tail_j):
trans_T = model_ft(target)
trans_D = model_ft(data[:,:,i:i+target_size,j:j+target_size])
loss = criterion(trans_T, trans_D).item()
if min_loss>loss:
min_i, min_j = i, j
min_loss = loss
data[0,:,min_i:min_i+target_size,min_j:min_j+target_size] = target[0,:,:,:]
datawriter.add_image('new_img', data[0,:,:,:], num_iter)
datawriter.add_scalar('img_loss', min_loss, num_iter)
x, y = get_position(min_i, min_j, data_size, target_size)
print('Iter : {}'.format(num_iter))
print('Pos = ({}, {})'.format(x, y))
print('Loss = {}'.format(min_loss))
datawriter.close()
''' | [
"tensorboardX.SummaryWriter",
"torch.cuda.is_available",
"matplotlib.pyplot.ion",
"warnings.filterwarnings",
"torchvision.models.resnet50"
] | [((484, 493), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (491, 493), True, 'import matplotlib.pyplot as plt\n'), ((514, 547), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (537, 547), False, 'import warnings\n'), ((702, 717), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {}), '()\n', (715, 717), False, 'from tensorboardX import SummaryWriter, writer\n'), ((636, 661), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (659, 661), False, 'import torch\n'), ((910, 942), 'torchvision.models.resnet50', 'models.resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (925, 942), False, 'from torchvision import datasets, models\n')] |
import subprocess
import socket
import redis
import time
import os
import os.path
import sys
import warnings
import random
REDIS_DEBUGGER = os.environ.get('REDIS_DEBUGGER', None)
REDIS_SHOW_OUTPUT = int(os.environ.get(
'REDIS_VERBOSE', 1 if REDIS_DEBUGGER else 0))
def get_random_port():
while True:
port = random.randrange(1025, 10000)
sock = socket.socket()
try:
sock.listen(port)
except Error:
continue
#_, port = sock.getsockname()
sock.close()
return port
class Client(redis.StrictRedis):
def __init__(self, disposable_redis, port):
redis.StrictRedis.__init__(self, port=port)
self.dr = disposable_redis
def retry_with_rdb_reload(self):
yield 1
self.dr.dump_and_reload()
yield 2
class DisposableRedis(object):
def __init__(self, port=None, path='redis-server', **extra_args):
"""
:param port: port number to start the redis server on.
Specify none to automatically generate
:type port: int|None
:param extra_args: any extra arguments kwargs will
be passed to redis server as --key val
"""
self._port = port
# this will hold the actual port the redis is listening on.
# It's equal to `_port` unless `_port` is None
# in that case `port` is randomly generated
self.port = None
self._is_external = True if port else False
self.use_aof = extra_args.pop('use_aof', False)
self.extra_args = []
for k, v in extra_args.items():
self.extra_args.append('--%s' % k)
if isinstance(v, (list, tuple)):
self.extra_args += list(v)
else:
self.extra_args.append(v)
self.path = path
self.errored = False
self.dumpfile = None
self.aoffile = None
self.pollfile = None
self.process = None
def force_start(self):
self._is_external = False
def _get_output(self):
if not self.process:
return ''
return '' if REDIS_SHOW_OUTPUT else self.process.stdout.read()
def _start_process(self):
if self._is_external:
return
if REDIS_DEBUGGER:
debugger = REDIS_DEBUGGER.split()
args = debugger + self.args
else:
args = self.args
stdout = None if REDIS_SHOW_OUTPUT else subprocess.PIPE
if REDIS_SHOW_OUTPUT:
sys.stderr.write("Executing: {}".format(repr(args)))
self.process = subprocess.Popen(
args,
stdin=sys.stdin,
stdout=stdout,
stderr=sys.stderr,
)
begin = time.time()
while True:
try:
self.client().ping()
break
except (redis.ConnectionError, redis.ResponseError):
self.process.poll()
if self.process.returncode is not None:
raise RuntimeError(
"Process has exited with code {}\n. Redis output: {}"
.format(self.process.returncode, self._get_output()))
if time.time() - begin > 300:
raise RuntimeError(
'Cannot initialize client (waited 5mins)')
time.sleep(0.1)
def start(self):
"""
Start the server. To stop the server you should call stop()
accordingly
"""
if self._port is None:
self.port = get_random_port()
else:
self.port = self._port
if not self.dumpfile:
self.dumpfile = 'dump.%s.rdb' % self.port
if not self.aoffile:
self.aoffile = 'appendonly.%s.aof' % self.port
self.args = [self.path,
'--port', str(self.port),
'--save', '',
'--dbfilename', self.dumpfile]
if self.use_aof:
self.args += ['--appendonly', 'yes',
'--appendfilename', self.aoffile]
self.args += self.extra_args
self._start_process()
def _cleanup_files(self):
for f in (self.aoffile, self.dumpfile):
try:
os.unlink(f)
except OSError:
pass
def stop(self, for_restart=False):
if self._is_external:
return
self.process.terminate()
if not for_restart:
self._cleanup_files()
def __enter__(self):
self.start()
return self.client()
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
if exc_val or self.errored:
sys.stderr.write("Redis output: {}\n".format(self._get_output()))
def _wait_for_child(self):
# Wait until file is available
r = self.client()
while True:
info = r.info('persistence')
if info['aof_rewrite_scheduled'] or info['aof_rewrite_in_progress']:
time.sleep(0.1)
else:
break
def dump_and_reload(self, restart_process=False):
"""
Dump the rdb and reload it, to test for serialization errors
"""
conn = self.client()
if restart_process:
if self._is_external:
warnings.warn('Tied to an external process. Cannot restart')
return
import time
conn.bgrewriteaof()
self._wait_for_child()
self.stop(for_restart=True)
self.start()
else:
conn.save()
try:
conn.execute_command('DEBUG', 'RELOAD')
except redis.RedisError as err:
self.errored = True
raise err
def client(self):
"""
:rtype: redis.StrictRedis
"""
return Client(self, self.port)
| [
"redis.StrictRedis.__init__",
"socket.socket",
"random.randrange",
"subprocess.Popen",
"os.environ.get",
"time.sleep",
"os.unlink",
"warnings.warn",
"time.time"
] | [((141, 179), 'os.environ.get', 'os.environ.get', (['"""REDIS_DEBUGGER"""', 'None'], {}), "('REDIS_DEBUGGER', None)\n", (155, 179), False, 'import os\n'), ((204, 263), 'os.environ.get', 'os.environ.get', (['"""REDIS_VERBOSE"""', '(1 if REDIS_DEBUGGER else 0)'], {}), "('REDIS_VERBOSE', 1 if REDIS_DEBUGGER else 0)\n", (218, 263), False, 'import os\n'), ((327, 356), 'random.randrange', 'random.randrange', (['(1025)', '(10000)'], {}), '(1025, 10000)\n', (343, 356), False, 'import random\n'), ((372, 387), 'socket.socket', 'socket.socket', ([], {}), '()\n', (385, 387), False, 'import socket\n'), ((645, 688), 'redis.StrictRedis.__init__', 'redis.StrictRedis.__init__', (['self'], {'port': 'port'}), '(self, port=port)\n', (671, 688), False, 'import redis\n'), ((2608, 2681), 'subprocess.Popen', 'subprocess.Popen', (['args'], {'stdin': 'sys.stdin', 'stdout': 'stdout', 'stderr': 'sys.stderr'}), '(args, stdin=sys.stdin, stdout=stdout, stderr=sys.stderr)\n', (2624, 2681), False, 'import subprocess\n'), ((2758, 2769), 'time.time', 'time.time', ([], {}), '()\n', (2767, 2769), False, 'import time\n'), ((4317, 4329), 'os.unlink', 'os.unlink', (['f'], {}), '(f)\n', (4326, 4329), False, 'import os\n'), ((5081, 5096), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (5091, 5096), False, 'import time\n'), ((5393, 5453), 'warnings.warn', 'warnings.warn', (['"""Tied to an external process. Cannot restart"""'], {}), "('Tied to an external process. Cannot restart')\n", (5406, 5453), False, 'import warnings\n'), ((3390, 3405), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3400, 3405), False, 'import time\n'), ((3239, 3250), 'time.time', 'time.time', ([], {}), '()\n', (3248, 3250), False, 'import time\n')] |
'''Contains DiscoAutoscale class that orchestrates AWS Autoscaling'''
import logging
import random
import boto
import boto.ec2
import boto.ec2.autoscale
import boto.ec2.autoscale.launchconfig
import boto.ec2.autoscale.group
from boto.ec2.autoscale.policy import ScalingPolicy
from boto.exception import BotoServerError
import boto3
from .resource_helper import throttled_call
# Default ASG termination policy: retire instances running the oldest
# launch configuration first.
DEFAULT_TERMINATION_POLICIES = ["OldestLaunchConfiguration"]
class DiscoAutoscale(object):
'''Class orchestrating autoscaling'''
def __init__(self, environment_name, autoscaling_connection=None, boto3_autoscaling_connection=None):
self.environment_name = environment_name
self.connection = autoscaling_connection or boto.ec2.autoscale.AutoScaleConnection(
use_block_device_types=True
)
self.boto3_autoscale = boto3_autoscaling_connection or boto3.client('autoscaling')
def get_groupname(self, hostclass):
'''Returns the autoscaling group name when given a hostclass'''
return self.environment_name + '_' + hostclass
def _filter_by_environment(self, items):
'''Filters autoscaling groups and launch configs by environment'''
return [
item for item in items
if item.name.startswith("{0}_".format(self.environment_name))
]
def _filter_instance_by_environment(self, items):
return [
item for item in items
if item.group_name.startswith("{0}_".format(self.environment_name))
]
def get_hostclass(self, groupname):
'''Returns the hostclass when given an autoscaling group name'''
return groupname.replace(self.environment_name + '_', '')
def _get_group_generator(self):
'''Yields groups in current environment'''
next_token = None
while True:
groups = throttled_call(self.connection.get_all_groups,
next_token=next_token)
for group in self._filter_by_environment(groups):
yield group
next_token = groups.next_token
if not next_token:
break
def get_groups(self):
'''Returns Autoscaling groups in current environment'''
return list(self._get_group_generator())
def _get_instance_generator(self, instance_ids=None):
'''Yields autoscaled instances in current environment'''
next_token = None
while True:
instances = throttled_call(
self.connection.get_all_autoscaling_instances,
instance_ids=instance_ids, next_token=next_token)
for instance in self._filter_instance_by_environment(instances):
yield instance
next_token = instances.next_token
if not next_token:
break
def get_instances(self, instance_ids=None):
'''Returns autoscaled instances in the current environment'''
return list(self._get_instance_generator(instance_ids=instance_ids))
def _get_config_generator(self, names=None):
'''Yields Launch Configurations in current environment'''
next_token = None
while True:
configs = throttled_call(self.connection.get_all_launch_configurations,
names=names, next_token=next_token)
for config in self._filter_by_environment(configs):
yield config
next_token = configs.next_token
if not next_token:
break
def get_configs(self, names=None):
'''Returns Launch Configurations in current environment'''
return list(self._get_config_generator(names=names))
def get_config(self, *args, **kwargs):
'''Returns a new launch configuration'''
config = boto.ec2.autoscale.launchconfig.LaunchConfiguration(
connection=self.connection, *args, **kwargs
)
throttled_call(self.connection.create_launch_configuration, config)
return config
    def delete_config(self, config_name):
        '''Delete a specific Launch Configuration.

        :param config_name: full name of the launch configuration to delete
        May raise BotoServerError (e.g. when the config is still in use,
        see clean_configs which relies on that).
        '''
        throttled_call(self.connection.delete_launch_configuration, config_name)
def clean_configs(self):
'''Delete unused Launch Configurations in current environment'''
for config in self._get_config_generator():
try:
self.delete_config(config.name)
except BotoServerError:
pass
    def delete_group(self, hostclass, force=False):
        '''Delete a specific Autoscaling Group.

        :param hostclass: hostclass whose group (in this environment) is deleted
        :param force: passed through as force_delete, deleting the group even
                      if it is non-empty
        '''
        throttled_call(self.connection.delete_auto_scaling_group,
                       self.get_groupname(hostclass), force_delete=force)
def clean_groups(self, force=False):
'''Delete unused Autoscaling Groups in current environment'''
for group in self._filter_by_environment(self.get_groups()):
try:
self.delete_group(self.get_hostclass(group.name), force)
except BotoServerError:
pass
def scaledown_group(self, hostclass):
'''Scales down number of instances in Autoscaling group to zero'''
group_list = throttled_call(self.connection.get_all_groups,
names=[self.get_groupname(hostclass)])
if group_list:
group = group_list[0]
group.min_size = group.max_size = group.desired_capacity = 0
throttled_call(group.update)
def has_group(self, hostclass):
'''Returns True iff current environment has an autoscaling group a hostclass'''
return len(throttled_call(self.connection.get_all_groups,
names=[self.get_groupname(hostclass)])) != 0
@staticmethod
def create_autoscale_tags(group_name, tags):
'''Given a python dictionary return list of boto autoscale Tag objects'''
return [boto.ec2.autoscale.Tag(key=key, value=value, resource_id=group_name, propagate_at_launch=True)
for key, value in tags.iteritems()] if tags else None
    def update_group(self, group, launch_config, vpc_zone_id=None,
                     min_size=None, max_size=None, desired_size=None,
                     termination_policies=None, tags=None,
                     load_balancers=None):
        '''Update an existing autoscaling group.

        Only attributes for which a value was supplied are changed; the group
        update is pushed to AWS before tags are created/updated and load
        balancers are attached.

        :param group: boto autoscaling group object to modify
        :param launch_config: name of the launch configuration to use
        :param vpc_zone_id: VPC zone identifier for the group (optional)
        :param min_size: minimum number of instances (optional)
        :param max_size: maximum number of instances (optional)
        :param desired_size: desired number of instances (optional)
        :param termination_policies: list of termination policies (optional)
        :param tags: dict of tags to create or update (optional; existing
                     tags are never deleted here)
        :param load_balancers: list of ELB names to attach (optional;
                               existing ELBs are never detached here)
        :returns: the (mutated) group object
        '''
        group.launch_config_name = launch_config
        if vpc_zone_id:
            group.vpc_zone_identifier = vpc_zone_id
        if min_size is not None:
            group.min_size = min_size
        if max_size is not None:
            group.max_size = max_size
        if desired_size is not None:
            group.desired_capacity = desired_size
        if termination_policies:
            group.termination_policies = termination_policies
        throttled_call(group.update)
        if tags:
            throttled_call(self.connection.create_or_update_tags,
                           DiscoAutoscale.create_autoscale_tags(group.name, tags))
        if load_balancers:
            # ELB attachment is only available through the boto3 client.
            throttled_call(self.boto3_autoscale.attach_load_balancers,
                           AutoScalingGroupName=group.name,
                           LoadBalancerNames=load_balancers)
        return group
def create_group(self, hostclass, launch_config, vpc_zone_id,
min_size=None, max_size=None, desired_size=None,
termination_policies=None, tags=None,
load_balancers=None):
'''
Create an autoscaling group.
The group must not already exist. Use get_group() instead if you want to update a group if it
exits or create it if it does not.
'''
_min_size = min_size or 0
_max_size = max([min_size, max_size, desired_size, 0])
_desired_capacity = desired_size or max_size
termination_policies = termination_policies or DEFAULT_TERMINATION_POLICIES
group_name = self.get_groupname(hostclass)
group = boto.ec2.autoscale.group.AutoScalingGroup(
connection=self.connection,
name=group_name,
launch_config=launch_config,
load_balancers=load_balancers,
default_cooldown=None,
health_check_type=None,
health_check_period=None,
placement_group=None,
vpc_zone_identifier=vpc_zone_id,
desired_capacity=_desired_capacity,
min_size=_min_size,
max_size=_max_size,
tags=DiscoAutoscale.create_autoscale_tags(group_name, tags),
termination_policies=termination_policies,
instance_id=None)
throttled_call(self.connection.create_auto_scaling_group, group)
return group
def get_group(self, hostclass, launch_config, vpc_zone_id=None,
min_size=None, max_size=None, desired_size=None,
termination_policies=None, tags=None,
load_balancers=None):
'''
Returns autoscaling group.
This updates an existing autoscaling group if it exists,
otherwise this creates a new autoscaling group.
NOTE: Deleting tags is not currently supported.
NOTE: Detaching ELB is not currently supported.
'''
group = self.get_existing_group(hostclass)
if group:
return self.update_group(
group=group, launch_config=launch_config, vpc_zone_id=vpc_zone_id,
min_size=min_size, max_size=max_size, desired_size=desired_size,
termination_policies=termination_policies, tags=tags, load_balancers=load_balancers)
else:
return self.create_group(
hostclass=hostclass, launch_config=launch_config, vpc_zone_id=vpc_zone_id,
min_size=min_size, max_size=max_size, desired_size=desired_size,
termination_policies=termination_policies, tags=tags, load_balancers=load_balancers)
def get_existing_group(self, hostclass):
"""Returns autoscaling group for a hostclass iff it already exists"""
group_name = self.get_groupname(hostclass)
group_list = throttled_call(self.connection.get_all_groups, names=[group_name])
return group_list[0] if group_list else None
def terminate(self, instance_id, decrement_capacity=True):
"""
Terminates an instance using the autoscaling API.
When decrement_capacity is True this allows us to avoid
autoscaling immediately replacing a terminated instance.
"""
throttled_call(self.connection.terminate_instance,
instance_id, decrement_capacity=decrement_capacity)
def get_launch_config_for_hostclass(self, hostclass):
"""Returns an launch configuration for a hostclass if it exists, None otherwise"""
group_list = throttled_call(self.connection.get_all_groups,
names=[self.get_groupname(hostclass)])
if not group_list:
return None
config_list = self.get_configs(names=[group_list[0].launch_config_name])
return config_list[0] if config_list else None
def list_policies(self):
"""Returns all autoscaling policies"""
return throttled_call(self.connection.get_all_policies)
def create_policy(self, policy_name, group_name, adjustment, cooldown):
"""Creates an autoscaling policy and associates it with an autoscaling group"""
policy = ScalingPolicy(name=policy_name, adjustment_type='ChangeInCapacity',
as_name=group_name, scaling_adjustment=adjustment, cooldown=cooldown)
throttled_call(self.connection.create_scaling_policy, policy)
def delete_policy(self, policy_name, group_name):
"""Deletes an autoscaling policy"""
return throttled_call(self.connection.delete_policy, policy_name, group_name)
def delete_all_recurring_group_actions(self, hostclass):
"""Deletes all recurring scheduled actions for a hostclass"""
as_group = self.get_groupname(hostclass)
actions = throttled_call(self.connection.get_all_scheduled_actions, as_group=as_group)
recurring_actions = [action for action in actions if action.recurrence is not None]
for action in recurring_actions:
throttled_call(self.connection.delete_scheduled_action,
scheduled_action_name=action.name, autoscale_group=as_group)
def create_recurring_group_action(self, hostclass, recurrance,
min_size=None, desired_capacity=None, max_size=None):
"""Creates a recurring scheduled action for a hostclass"""
as_group = self.get_groupname(hostclass)
action_name = "{0}_{1}".format(as_group, recurrance.replace('*', 'star').replace(' ', '_'))
throttled_call(self.connection.create_scheduled_group_action,
as_group=as_group, name=action_name,
min_size=min_size,
desired_capacity=desired_capacity,
max_size=max_size,
recurrence=recurrance)
@staticmethod
def _get_snapshot_dev(launch_config, hostclass):
snapshot_devs = [key for key, value in launch_config.block_device_mappings.iteritems()
if value.snapshot_id]
if not snapshot_devs:
raise Exception("Hostclass {0} does not mount a snapshot".format(hostclass))
elif len(snapshot_devs) > 1:
raise Exception("Unsupported configuration: hostclass {0} has multiple snapshot based devices."
.format(hostclass))
return snapshot_devs[0]
def _create_new_launchconfig(self, hostclass, launch_config):
return self.get_config(
name='{0}_{1}_{2}'.format(self.environment_name, hostclass, str(random.randrange(0, 9999999))),
image_id=launch_config.image_id,
key_name=launch_config.key_name,
security_groups=launch_config.security_groups,
block_device_mappings=[launch_config.block_device_mappings],
instance_type=launch_config.instance_type,
instance_monitoring=launch_config.instance_monitoring,
instance_profile_name=launch_config.instance_profile_name,
ebs_optimized=launch_config.ebs_optimized,
user_data=launch_config.user_data,
associate_public_ip_address=launch_config.associate_public_ip_address)
def update_snapshot(self, hostclass, snapshot_id, snapshot_size):
'''Updates an existing autoscaling group to use a different snapshot'''
launch_config = self.get_launch_config_for_hostclass(hostclass)
if not launch_config:
raise Exception("Can't locate hostclass {0}".format(hostclass))
snapshot_bdm = launch_config.block_device_mappings[
DiscoAutoscale._get_snapshot_dev(launch_config, hostclass)]
if snapshot_bdm.snapshot_id != snapshot_id:
old_snapshot_id = snapshot_bdm.snapshot_id
snapshot_bdm.snapshot_id = snapshot_id
snapshot_bdm.size = snapshot_size
self.update_group(self.get_existing_group(hostclass),
self._create_new_launchconfig(hostclass, launch_config).name)
logging.info(
"Updating %s group's snapshot from %s to %s", hostclass, old_snapshot_id, snapshot_id)
else:
logging.debug(
"Autoscaling group %s is already referencing latest snapshot %s", hostclass, snapshot_id)
def update_elb(self, hostclass, elb_names):
'''Updates an existing autoscaling group to use a different set of load balancers'''
group = self.get_existing_group(hostclass)
if not group:
logging.warning("Auto Scaling group %s does not exist. Cannot change %s ELB(s)",
hostclass, ', '.join(elb_names))
return (set(), set())
new_lbs = set(elb_names) - set(group.load_balancers)
extras = set(group.load_balancers) - set(elb_names)
if new_lbs or extras:
logging.info("Updating %s group's elb from [%s] to [%s]",
hostclass, ", ".join(group.load_balancers), ", ".join(elb_names))
if new_lbs:
throttled_call(self.boto3_autoscale.attach_load_balancers,
AutoScalingGroupName=group.name,
LoadBalancerNames=list(new_lbs))
if extras:
throttled_call(self.boto3_autoscale.detach_load_balancers,
AutoScalingGroupName=group.name,
LoadBalancerNames=list(extras))
return (new_lbs, extras)
| [
"boto3.client",
"logging.debug",
"random.randrange",
"boto.ec2.autoscale.launchconfig.LaunchConfiguration",
"boto.ec2.autoscale.Tag",
"boto.ec2.autoscale.AutoScaleConnection",
"logging.info",
"boto.ec2.autoscale.policy.ScalingPolicy"
] | [((3822, 3923), 'boto.ec2.autoscale.launchconfig.LaunchConfiguration', 'boto.ec2.autoscale.launchconfig.LaunchConfiguration', (['*args'], {'connection': 'self.connection'}), '(*args, connection=self.\n connection, **kwargs)\n', (3873, 3923), False, 'import boto\n'), ((11526, 11668), 'boto.ec2.autoscale.policy.ScalingPolicy', 'ScalingPolicy', ([], {'name': 'policy_name', 'adjustment_type': '"""ChangeInCapacity"""', 'as_name': 'group_name', 'scaling_adjustment': 'adjustment', 'cooldown': 'cooldown'}), "(name=policy_name, adjustment_type='ChangeInCapacity', as_name\n =group_name, scaling_adjustment=adjustment, cooldown=cooldown)\n", (11539, 11668), False, 'from boto.ec2.autoscale.policy import ScalingPolicy\n'), ((723, 790), 'boto.ec2.autoscale.AutoScaleConnection', 'boto.ec2.autoscale.AutoScaleConnection', ([], {'use_block_device_types': '(True)'}), '(use_block_device_types=True)\n', (761, 790), False, 'import boto\n'), ((876, 903), 'boto3.client', 'boto3.client', (['"""autoscaling"""'], {}), "('autoscaling')\n", (888, 903), False, 'import boto3\n'), ((15409, 15512), 'logging.info', 'logging.info', (['"""Updating %s group\'s snapshot from %s to %s"""', 'hostclass', 'old_snapshot_id', 'snapshot_id'], {}), '("Updating %s group\'s snapshot from %s to %s", hostclass,\n old_snapshot_id, snapshot_id)\n', (15421, 15512), False, 'import logging\n'), ((15552, 15659), 'logging.debug', 'logging.debug', (['"""Autoscaling group %s is already referencing latest snapshot %s"""', 'hostclass', 'snapshot_id'], {}), "('Autoscaling group %s is already referencing latest snapshot %s',\n hostclass, snapshot_id)\n", (15565, 15659), False, 'import logging\n'), ((5932, 6030), 'boto.ec2.autoscale.Tag', 'boto.ec2.autoscale.Tag', ([], {'key': 'key', 'value': 'value', 'resource_id': 'group_name', 'propagate_at_launch': '(True)'}), '(key=key, value=value, resource_id=group_name,\n propagate_at_launch=True)\n', (5954, 6030), False, 'import boto\n'), ((13942, 13970), 'random.randrange', 
'random.randrange', (['(0)', '(9999999)'], {}), '(0, 9999999)\n', (13958, 13970), False, 'import random\n')] |
""" RealDolos' funky volafile upload tool"""
# pylint: disable=broad-except
import math
import re
import sys
# pylint: disable=no-name-in-module
try:
from os import posix_fadvise, POSIX_FADV_WILLNEED
except ImportError:
def posix_fadvise(*args, **kw):
"""Mock implementation for systems not supporting it"""
args, kw = args, kw
POSIX_FADV_WILLNEED = 0
# pylint: enable=no-name-in-module
def natsort(val):
"""Returns a tuple from a string that can be used as a sort key for
natural sorting."""
return [int(i) if i.isdigit() else i for i in re.split(r"(\d+)", val)]
def to_name(file):
"""Sortkey by-name"""
return natsort(file.name.casefold()), natsort(file.parent)
def to_path(file):
"""Sortkey by-path"""
return natsort(file.casefold())
def to_size(file):
"""Sortkey by-size"""
return file.size
SORTING = dict(name=to_name,
path=to_path,
size=to_size)
def try_unlink(file):
"""Attempt to unlink a file, or else print an error"""
try:
file.unlink()
except Exception as ex:
print("Failed to delete file after upload: {}, {}".
format(file, ex),
file=sys.stderr, flush=True)
def try_advise(file, offset, length):
"""Try to advise the OS on what file data is needed next"""
try:
if hasattr(file, "fileno"):
posix_fadvise(file.fileno(),
offset,
length,
POSIX_FADV_WILLNEED)
except Exception as ex:
print(ex, file=sys.stderr, flush=True)
def shorten(string, length):
"""Shorten a string to a specific length, cropping in the middle"""
len2 = length // 2
len3 = length - len2 - 1
lens = len(string) + 2
if lens > length:
return ("[\033[32m{}…{}\033[0m]".
format(string[:len2], string[lens - len3:]))
return ("[\033[32m{}\033[0m]{}".
format(string, " " * (length - lens)))
def progressbar(cur, tot, length):
"""Generate a progress bar"""
per = math.floor(cur * float(length) / tot)
return "[{}{}]".format("#" * per, " " * (length - per))
def format_time(secs):
"""Format times for Kokytos"""
m, s = divmod(int(secs), 60)
h, m = divmod(m, 60)
d, h = divmod(h, 24)
if d:
# Yes, vola is this shit :*(
return "{}::{:02}:{:02}:{:02}".format(d, h, m, s)
if h:
return "{}:{:02}:{:02}".format(h, m, s)
if m:
return "{:02}:{:02}".format(m, s)
return "{}s".format(s)
| [
"re.split"
] | [((585, 608), 're.split', 're.split', (['"""(\\\\d+)"""', 'val'], {}), "('(\\\\d+)', val)\n", (593, 608), False, 'import re\n')] |
import os
from os import path, environ
from configparser import ConfigParser
from collections import OrderedDict
class Config(object):
"""
This class will take care of ConfigParser and writing / reading the
configuration.
TODO: What to do when there are more variables to be configured? Should we
overwrite the users config file with the updated variables if the file is
lacking?
"""
config = None
filename = ""
defaults = None
def __init__(self):
default_path = path.join(path.expanduser("~"), ".config", "tmc.ini")
config_filepath = environ.get("TMC_CONFIGFILE", default_path)
super().__setattr__('filename', config_filepath)
super().__setattr__('config', ConfigParser())
self._update_defaults()
self.config["CONFIGURATION"] = {}
for i in self.defaults:
self.config["CONFIGURATION"][i] = str(self.defaults[i])
if self._exists():
self._load()
self._write()
def _update_defaults(self):
defaults = OrderedDict()
if os.name == "nt":
defaults["use_unicode_characters"] = False
defaults["use_ansi_colors"] = False
else:
defaults["use_unicode_characters"] = True
defaults["use_ansi_colors"] = True
defaults["tests_show_trace"] = False
defaults["tests_show_partial_trace"] = False
defaults["tests_show_time"] = True
defaults["tests_show_successful"] = True
super().__setattr__('defaults', defaults)
def _exists(self):
return path.isfile(self.filename)
def _write(self):
d = os.path.dirname(self.filename)
if not os.path.exists(d):
os.makedirs(d)
with open(self.filename, "w") as fp:
self.config.write(fp)
def _load(self):
with open(self.filename, "r") as fp:
self.config.read_file(fp)
for i in self.config["CONFIGURATION"]:
if i not in self.defaults:
print("Warning: unknown configuration option: " + i)
def __getattr__(self, name):
if isinstance(self.defaults.get(name), bool):
return self.config["CONFIGURATION"].getboolean(name)
return self.config["CONFIGURATION"].get(name)
def __setattr__(self, name, value):
self.config["CONFIGURATION"][name] = str(value)
| [
"os.path.exists",
"collections.OrderedDict",
"configparser.ConfigParser",
"os.makedirs",
"os.environ.get",
"os.path.isfile",
"os.path.dirname",
"os.path.expanduser"
] | [((601, 644), 'os.environ.get', 'environ.get', (['"""TMC_CONFIGFILE"""', 'default_path'], {}), "('TMC_CONFIGFILE', default_path)\n", (612, 644), False, 'from os import path, environ\n'), ((1059, 1072), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1070, 1072), False, 'from collections import OrderedDict\n'), ((1598, 1624), 'os.path.isfile', 'path.isfile', (['self.filename'], {}), '(self.filename)\n', (1609, 1624), False, 'from os import path, environ\n'), ((1660, 1690), 'os.path.dirname', 'os.path.dirname', (['self.filename'], {}), '(self.filename)\n', (1675, 1690), False, 'import os\n'), ((531, 551), 'os.path.expanduser', 'path.expanduser', (['"""~"""'], {}), "('~')\n", (546, 551), False, 'from os import path, environ\n'), ((740, 754), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (752, 754), False, 'from configparser import ConfigParser\n'), ((1706, 1723), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (1720, 1723), False, 'import os\n'), ((1737, 1751), 'os.makedirs', 'os.makedirs', (['d'], {}), '(d)\n', (1748, 1751), False, 'import os\n')] |
import h5py
import numpy as np
from code.model import UNetClassifier
def load_dataset(covid_file_path, normal_file_path):
covid = h5py.File(covid_file_path, 'r')['covid']
normal = h5py.File(normal_file_path, 'r')['normal']
all_images = np.expand_dims(np.concatenate([covid, normal]), axis=3)
all_labels = np.concatenate([[1]*covid.shape[0], [0]*normal.shape[0]])
shuffled_indices = np.random.permutation(np.arange(all_images.shape[0]))
all_images = all_images[shuffled_indices]
all_labels = all_labels[shuffled_indices]
return all_images, all_labels
if __name__ == '__main__':
model = Classifier((512, 512, 1), 2, True)
all_images, all_labels = load_dataset()
print(all_images.shape, all_labels.shape)
model.train(all_images, all_labels, 15, 16, 0.2)
| [
"numpy.arange",
"numpy.concatenate",
"h5py.File"
] | [((325, 386), 'numpy.concatenate', 'np.concatenate', (['[[1] * covid.shape[0], [0] * normal.shape[0]]'], {}), '([[1] * covid.shape[0], [0] * normal.shape[0]])\n', (339, 386), True, 'import numpy as np\n'), ((137, 168), 'h5py.File', 'h5py.File', (['covid_file_path', '"""r"""'], {}), "(covid_file_path, 'r')\n", (146, 168), False, 'import h5py\n'), ((191, 223), 'h5py.File', 'h5py.File', (['normal_file_path', '"""r"""'], {}), "(normal_file_path, 'r')\n", (200, 223), False, 'import h5py\n'), ((267, 298), 'numpy.concatenate', 'np.concatenate', (['[covid, normal]'], {}), '([covid, normal])\n', (281, 298), True, 'import numpy as np\n'), ((429, 459), 'numpy.arange', 'np.arange', (['all_images.shape[0]'], {}), '(all_images.shape[0])\n', (438, 459), True, 'import numpy as np\n')] |
from app import app
from flask import render_template, flash, redirect, url_for
from app.forms import LoginForm
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/contato', methods=['GET','POST'])
def contato():
form = LoginForm()
if form.validate_on_submit():
mensagem = flash('A mensagem foi enviada com sucesso.')
return redirect('/index')
return render_template('contato.html', form=form)
@app.route('/features')
def features():
return render_template('features.html')
| [
"flask.render_template",
"app.forms.LoginForm",
"flask.flash",
"flask.redirect",
"app.app.route"
] | [((114, 128), 'app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (123, 128), False, 'from app import app\n'), ((130, 149), 'app.app.route', 'app.route', (['"""/index"""'], {}), "('/index')\n", (139, 149), False, 'from app import app\n'), ((206, 252), 'app.app.route', 'app.route', (['"""/contato"""'], {'methods': "['GET', 'POST']"}), "('/contato', methods=['GET', 'POST'])\n", (215, 252), False, 'from app import app\n'), ((487, 509), 'app.app.route', 'app.route', (['"""/features"""'], {}), "('/features')\n", (496, 509), False, 'from app import app\n'), ((174, 203), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (189, 203), False, 'from flask import render_template, flash, redirect, url_for\n'), ((278, 289), 'app.forms.LoginForm', 'LoginForm', ([], {}), '()\n', (287, 289), False, 'from app.forms import LoginForm\n'), ((433, 475), 'flask.render_template', 'render_template', (['"""contato.html"""'], {'form': 'form'}), "('contato.html', form=form)\n", (448, 475), False, 'from flask import render_template, flash, redirect, url_for\n'), ((537, 569), 'flask.render_template', 'render_template', (['"""features.html"""'], {}), "('features.html')\n", (552, 569), False, 'from flask import render_template, flash, redirect, url_for\n'), ((343, 387), 'flask.flash', 'flash', (['"""A mensagem foi enviada com sucesso."""'], {}), "('A mensagem foi enviada com sucesso.')\n", (348, 387), False, 'from flask import render_template, flash, redirect, url_for\n'), ((403, 421), 'flask.redirect', 'redirect', (['"""/index"""'], {}), "('/index')\n", (411, 421), False, 'from flask import render_template, flash, redirect, url_for\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['MrScalarArgs', 'MrScalar']
@pulumi.input_type
class MrScalarArgs:
def __init__(__self__, *,
strategy: pulumi.Input[str],
additional_info: Optional[pulumi.Input[str]] = None,
additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
applications: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]] = None,
availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]] = None,
core_desired_capacity: Optional[pulumi.Input[int]] = None,
core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]] = None,
core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
core_lifecycle: Optional[pulumi.Input[str]] = None,
core_max_size: Optional[pulumi.Input[int]] = None,
core_min_size: Optional[pulumi.Input[int]] = None,
core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]] = None,
core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]] = None,
core_unit: Optional[pulumi.Input[str]] = None,
custom_ami_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
ec2_key_name: Optional[pulumi.Input[str]] = None,
expose_cluster_id: Optional[pulumi.Input[bool]] = None,
instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]] = None,
job_flow_role: Optional[pulumi.Input[str]] = None,
keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
log_uri: Optional[pulumi.Input[str]] = None,
managed_primary_security_group: Optional[pulumi.Input[str]] = None,
managed_replica_security_group: Optional[pulumi.Input[str]] = None,
master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]] = None,
master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_lifecycle: Optional[pulumi.Input[str]] = None,
master_target: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_timeout: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']] = None,
region: Optional[pulumi.Input[str]] = None,
release_label: Optional[pulumi.Input[str]] = None,
repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
retries: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]] = None,
security_config: Optional[pulumi.Input[str]] = None,
service_access_security_group: Optional[pulumi.Input[str]] = None,
service_role: Optional[pulumi.Input[str]] = None,
steps_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]] = None,
task_desired_capacity: Optional[pulumi.Input[int]] = None,
task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]] = None,
task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
task_lifecycle: Optional[pulumi.Input[str]] = None,
task_max_size: Optional[pulumi.Input[int]] = None,
task_min_size: Optional[pulumi.Input[int]] = None,
task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]] = None,
task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]] = None,
task_unit: Optional[pulumi.Input[str]] = None,
termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]] = None,
termination_protected: Optional[pulumi.Input[bool]] = None,
visible_to_all_users: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a MrScalar resource.
:param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
:param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
:param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[str] cluster_id: The MrScaler cluster id.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
:param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
:param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
:param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
:param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
:param pulumi.Input[str] description: The MrScaler description.
:param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
:param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
:param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
:param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
:param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
:param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
:param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
:param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
:param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] master_target: Number of instances in the master group.
:param pulumi.Input[str] name: The application name.
:param pulumi.Input[str] region: The MrScaler region.
:param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
:param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]] scheduled_tasks: An array of scheduled tasks.
:param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
:param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
:param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]] steps_files: Steps from S3.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]] tags: A list of tags to assign to the resource. You may define multiple tags.
:param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
:param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
:param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
:param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
:param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
:param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
pulumi.set(__self__, "strategy", strategy)
if additional_info is not None:
pulumi.set(__self__, "additional_info", additional_info)
if additional_primary_security_groups is not None:
pulumi.set(__self__, "additional_primary_security_groups", additional_primary_security_groups)
if additional_replica_security_groups is not None:
pulumi.set(__self__, "additional_replica_security_groups", additional_replica_security_groups)
if applications is not None:
pulumi.set(__self__, "applications", applications)
if availability_zones is not None:
pulumi.set(__self__, "availability_zones", availability_zones)
if bootstrap_actions_files is not None:
pulumi.set(__self__, "bootstrap_actions_files", bootstrap_actions_files)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if configurations_files is not None:
pulumi.set(__self__, "configurations_files", configurations_files)
if core_desired_capacity is not None:
pulumi.set(__self__, "core_desired_capacity", core_desired_capacity)
if core_ebs_block_devices is not None:
pulumi.set(__self__, "core_ebs_block_devices", core_ebs_block_devices)
if core_ebs_optimized is not None:
pulumi.set(__self__, "core_ebs_optimized", core_ebs_optimized)
if core_instance_types is not None:
pulumi.set(__self__, "core_instance_types", core_instance_types)
if core_lifecycle is not None:
pulumi.set(__self__, "core_lifecycle", core_lifecycle)
if core_max_size is not None:
pulumi.set(__self__, "core_max_size", core_max_size)
if core_min_size is not None:
pulumi.set(__self__, "core_min_size", core_min_size)
if core_scaling_down_policies is not None:
pulumi.set(__self__, "core_scaling_down_policies", core_scaling_down_policies)
if core_scaling_up_policies is not None:
pulumi.set(__self__, "core_scaling_up_policies", core_scaling_up_policies)
if core_unit is not None:
pulumi.set(__self__, "core_unit", core_unit)
if custom_ami_id is not None:
pulumi.set(__self__, "custom_ami_id", custom_ami_id)
if description is not None:
pulumi.set(__self__, "description", description)
if ebs_root_volume_size is not None:
pulumi.set(__self__, "ebs_root_volume_size", ebs_root_volume_size)
if ec2_key_name is not None:
pulumi.set(__self__, "ec2_key_name", ec2_key_name)
if expose_cluster_id is not None:
pulumi.set(__self__, "expose_cluster_id", expose_cluster_id)
if instance_weights is not None:
pulumi.set(__self__, "instance_weights", instance_weights)
if job_flow_role is not None:
pulumi.set(__self__, "job_flow_role", job_flow_role)
if keep_job_flow_alive is not None:
pulumi.set(__self__, "keep_job_flow_alive", keep_job_flow_alive)
if log_uri is not None:
pulumi.set(__self__, "log_uri", log_uri)
if managed_primary_security_group is not None:
pulumi.set(__self__, "managed_primary_security_group", managed_primary_security_group)
if managed_replica_security_group is not None:
pulumi.set(__self__, "managed_replica_security_group", managed_replica_security_group)
if master_ebs_block_devices is not None:
pulumi.set(__self__, "master_ebs_block_devices", master_ebs_block_devices)
if master_ebs_optimized is not None:
pulumi.set(__self__, "master_ebs_optimized", master_ebs_optimized)
if master_instance_types is not None:
pulumi.set(__self__, "master_instance_types", master_instance_types)
if master_lifecycle is not None:
pulumi.set(__self__, "master_lifecycle", master_lifecycle)
if master_target is not None:
pulumi.set(__self__, "master_target", master_target)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_timeout is not None:
pulumi.set(__self__, "provisioning_timeout", provisioning_timeout)
if region is not None:
pulumi.set(__self__, "region", region)
if release_label is not None:
pulumi.set(__self__, "release_label", release_label)
if repo_upgrade_on_boot is not None:
pulumi.set(__self__, "repo_upgrade_on_boot", repo_upgrade_on_boot)
if retries is not None:
pulumi.set(__self__, "retries", retries)
if scheduled_tasks is not None:
pulumi.set(__self__, "scheduled_tasks", scheduled_tasks)
if security_config is not None:
pulumi.set(__self__, "security_config", security_config)
if service_access_security_group is not None:
pulumi.set(__self__, "service_access_security_group", service_access_security_group)
if service_role is not None:
pulumi.set(__self__, "service_role", service_role)
if steps_files is not None:
pulumi.set(__self__, "steps_files", steps_files)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if task_desired_capacity is not None:
pulumi.set(__self__, "task_desired_capacity", task_desired_capacity)
if task_ebs_block_devices is not None:
pulumi.set(__self__, "task_ebs_block_devices", task_ebs_block_devices)
if task_ebs_optimized is not None:
pulumi.set(__self__, "task_ebs_optimized", task_ebs_optimized)
if task_instance_types is not None:
pulumi.set(__self__, "task_instance_types", task_instance_types)
if task_lifecycle is not None:
pulumi.set(__self__, "task_lifecycle", task_lifecycle)
if task_max_size is not None:
pulumi.set(__self__, "task_max_size", task_max_size)
if task_min_size is not None:
pulumi.set(__self__, "task_min_size", task_min_size)
if task_scaling_down_policies is not None:
pulumi.set(__self__, "task_scaling_down_policies", task_scaling_down_policies)
if task_scaling_up_policies is not None:
pulumi.set(__self__, "task_scaling_up_policies", task_scaling_up_policies)
if task_unit is not None:
pulumi.set(__self__, "task_unit", task_unit)
if termination_policies is not None:
pulumi.set(__self__, "termination_policies", termination_policies)
if termination_protected is not None:
pulumi.set(__self__, "termination_protected", termination_protected)
if visible_to_all_users is not None:
warnings.warn("""This field has been removed from our API and is no longer functional.""", DeprecationWarning)
pulumi.log.warn("""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.""")
if visible_to_all_users is not None:
pulumi.set(__self__, "visible_to_all_users", visible_to_all_users)
    @property
    @pulumi.getter
    def strategy(self) -> pulumi.Input[str]:
        """
        The MrScaler strategy. Allowed values are `new`, `clone` and `wrap`.
        """
        return pulumi.get(self, "strategy")
    @strategy.setter
    def strategy(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "strategy", value)
    @property
    @pulumi.getter(name="additionalInfo")
    def additional_info(self) -> Optional[pulumi.Input[str]]:
        """
        This is meta information about third-party applications that third-party vendors use for testing purposes.
        """
        return pulumi.get(self, "additional_info")
    @additional_info.setter
    def additional_info(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "additional_info", value)
    @property
    @pulumi.getter(name="additionalPrimarySecurityGroups")
    def additional_primary_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of additional Amazon EC2 security group IDs for the master node.
        """
        return pulumi.get(self, "additional_primary_security_groups")
    @additional_primary_security_groups.setter
    def additional_primary_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "additional_primary_security_groups", value)
    @property
    @pulumi.getter(name="additionalReplicaSecurityGroups")
    def additional_replica_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of additional Amazon EC2 security group IDs for the core and task nodes.
        """
        return pulumi.get(self, "additional_replica_security_groups")
    @additional_replica_security_groups.setter
    def additional_replica_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "additional_replica_security_groups", value)
    @property
    @pulumi.getter
    def applications(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]:
        """
        A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster.
        """
        return pulumi.get(self, "applications")
    @applications.setter
    def applications(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]) -> None:
        pulumi.set(self, "applications", value)
    @property
    @pulumi.getter(name="availabilityZones")
    def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of AZs and their subnet Ids. See example above for usage.
        """
        return pulumi.get(self, "availability_zones")
    @availability_zones.setter
    def availability_zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "availability_zones", value)
    @property
    @pulumi.getter(name="bootstrapActionsFiles")
    def bootstrap_actions_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]:
        """
        Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
        """
        return pulumi.get(self, "bootstrap_actions_files")
    @bootstrap_actions_files.setter
    def bootstrap_actions_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]) -> None:
        pulumi.set(self, "bootstrap_actions_files", value)
    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler cluster id.
        """
        return pulumi.get(self, "cluster_id")
    @cluster_id.setter
    def cluster_id(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "cluster_id", value)
    @property
    @pulumi.getter(name="configurationsFiles")
    def configurations_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]:
        """
        Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
        """
        return pulumi.get(self, "configurations_files")
    @configurations_files.setter
    def configurations_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]) -> None:
        pulumi.set(self, "configurations_files", value)
    @property
    @pulumi.getter(name="coreDesiredCapacity")
    def core_desired_capacity(self) -> Optional[pulumi.Input[int]]:
        """
        Amount of instances in core group.
        """
        return pulumi.get(self, "core_desired_capacity")
    @core_desired_capacity.setter
    def core_desired_capacity(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "core_desired_capacity", value)
    @property
    @pulumi.getter(name="coreEbsBlockDevices")
    def core_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]:
        """
        This determines the ebs configuration for your core group instances. Only a single block is allowed.
        """
        return pulumi.get(self, "core_ebs_block_devices")
    @core_ebs_block_devices.setter
    def core_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]) -> None:
        pulumi.set(self, "core_ebs_block_devices", value)
    @property
    @pulumi.getter(name="coreEbsOptimized")
    def core_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
        """
        EBS Optimization setting for instances in group.
        """
        return pulumi.get(self, "core_ebs_optimized")
    @core_ebs_optimized.setter
    def core_ebs_optimized(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "core_ebs_optimized", value)
    @property
    @pulumi.getter(name="coreInstanceTypes")
    def core_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The MrScaler instance types for the core nodes.
        """
        return pulumi.get(self, "core_instance_types")
    @core_instance_types.setter
    def core_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "core_instance_types", value)
    @property
    @pulumi.getter(name="coreLifecycle")
    def core_lifecycle(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        """
        return pulumi.get(self, "core_lifecycle")
    @core_lifecycle.setter
    def core_lifecycle(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "core_lifecycle", value)
    @property
    @pulumi.getter(name="coreMaxSize")
    def core_max_size(self) -> Optional[pulumi.Input[int]]:
        """
        Maximal amount of instances in core group.
        """
        return pulumi.get(self, "core_max_size")
    @core_max_size.setter
    def core_max_size(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "core_max_size", value)
    @property
    @pulumi.getter(name="coreMinSize")
    def core_min_size(self) -> Optional[pulumi.Input[int]]:
        """
        The minimal amount of instances in core group.
        """
        return pulumi.get(self, "core_min_size")
    @core_min_size.setter
    def core_min_size(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "core_min_size", value)
    @property
    @pulumi.getter(name="coreScalingDownPolicies")
    def core_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]:
        """
        Scaling-down policies for the core group.
        """
        return pulumi.get(self, "core_scaling_down_policies")
    @core_scaling_down_policies.setter
    def core_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]) -> None:
        pulumi.set(self, "core_scaling_down_policies", value)
    @property
    @pulumi.getter(name="coreScalingUpPolicies")
    def core_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]:
        """
        Scaling-up policies for the core group.
        """
        return pulumi.get(self, "core_scaling_up_policies")
    @core_scaling_up_policies.setter
    def core_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]) -> None:
        pulumi.set(self, "core_scaling_up_policies", value)
    @property
    @pulumi.getter(name="coreUnit")
    def core_unit(self) -> Optional[pulumi.Input[str]]:
        """
        Unit of core group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
        """
        return pulumi.get(self, "core_unit")
    @core_unit.setter
    def core_unit(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "core_unit", value)
    @property
    @pulumi.getter(name="customAmiId")
    def custom_ami_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
        """
        return pulumi.get(self, "custom_ami_id")
    @custom_ami_id.setter
    def custom_ami_id(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "custom_ami_id", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler description.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="ebsRootVolumeSize")
    def ebs_root_volume_size(self) -> Optional[pulumi.Input[int]]:
        """
        Size of the EBS root device volume. (Presumably GiB, per EMR convention — confirm against provider docs.)
        """
        return pulumi.get(self, "ebs_root_volume_size")
    @ebs_root_volume_size.setter
    def ebs_root_volume_size(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "ebs_root_volume_size", value)
    @property
    @pulumi.getter(name="ec2KeyName")
    def ec2_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of an Amazon EC2 key pair that can be used to ssh to the master node.
        """
        return pulumi.get(self, "ec2_key_name")
    @ec2_key_name.setter
    def ec2_key_name(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "ec2_key_name", value)
    @property
    @pulumi.getter(name="exposeClusterId")
    def expose_cluster_id(self) -> Optional[pulumi.Input[bool]]:
        """
        Allow the `cluster_id` to set a provider output variable.
        """
        return pulumi.get(self, "expose_cluster_id")
    @expose_cluster_id.setter
    def expose_cluster_id(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "expose_cluster_id", value)
    @property
    @pulumi.getter(name="instanceWeights")
    def instance_weights(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]:
        """
        Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
        """
        return pulumi.get(self, "instance_weights")
    @instance_weights.setter
    def instance_weights(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]) -> None:
        pulumi.set(self, "instance_weights", value)
    @property
    @pulumi.getter(name="jobFlowRole")
    def job_flow_role(self) -> Optional[pulumi.Input[str]]:
        """
        The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
        """
        return pulumi.get(self, "job_flow_role")
    @job_flow_role.setter
    def job_flow_role(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "job_flow_role", value)
    @property
    @pulumi.getter(name="keepJobFlowAlive")
    def keep_job_flow_alive(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether the cluster should remain available after completing all steps.
        """
        return pulumi.get(self, "keep_job_flow_alive")
    @keep_job_flow_alive.setter
    def keep_job_flow_alive(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "keep_job_flow_alive", value)
    @property
    @pulumi.getter(name="logUri")
    def log_uri(self) -> Optional[pulumi.Input[str]]:
        """
        The path to the Amazon S3 location where logs for this cluster are stored.
        """
        return pulumi.get(self, "log_uri")
    @log_uri.setter
    def log_uri(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "log_uri", value)
    @property
    @pulumi.getter(name="managedPrimarySecurityGroup")
    def managed_primary_security_group(self) -> Optional[pulumi.Input[str]]:
        """
        EMR Managed Security group that will be set to the primary instance group.
        """
        return pulumi.get(self, "managed_primary_security_group")
    @managed_primary_security_group.setter
    def managed_primary_security_group(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "managed_primary_security_group", value)
    @property
    @pulumi.getter(name="managedReplicaSecurityGroup")
    def managed_replica_security_group(self) -> Optional[pulumi.Input[str]]:
        """
        EMR Managed Security group that will be set to the replica instance group.
        """
        return pulumi.get(self, "managed_replica_security_group")
    @managed_replica_security_group.setter
    def managed_replica_security_group(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "managed_replica_security_group", value)
    @property
    @pulumi.getter(name="masterEbsBlockDevices")
    def master_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]:
        """
        This determines the ebs configuration for your master group instances. Only a single block is allowed.
        """
        return pulumi.get(self, "master_ebs_block_devices")
    @master_ebs_block_devices.setter
    def master_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]) -> None:
        pulumi.set(self, "master_ebs_block_devices", value)
    @property
    @pulumi.getter(name="masterEbsOptimized")
    def master_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
        """
        EBS Optimization setting for instances in group.
        """
        return pulumi.get(self, "master_ebs_optimized")
    @master_ebs_optimized.setter
    def master_ebs_optimized(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "master_ebs_optimized", value)
    @property
    @pulumi.getter(name="masterInstanceTypes")
    def master_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The MrScaler instance types for the master nodes.
        """
        return pulumi.get(self, "master_instance_types")
    @master_instance_types.setter
    def master_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "master_instance_types", value)
    @property
    @pulumi.getter(name="masterLifecycle")
    def master_lifecycle(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        """
        return pulumi.get(self, "master_lifecycle")
    @master_lifecycle.setter
    def master_lifecycle(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "master_lifecycle", value)
    @property
    @pulumi.getter(name="masterTarget")
    def master_target(self) -> Optional[pulumi.Input[int]]:
        """
        Number of instances in the master group.
        """
        return pulumi.get(self, "master_target")
    @master_target.setter
    def master_target(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "master_target", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The application name.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="provisioningTimeout")
    def provisioning_timeout(self) -> Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]:
        """
        Timeout configuration for cluster capacity provisioning.
        """
        return pulumi.get(self, "provisioning_timeout")
    @provisioning_timeout.setter
    def provisioning_timeout(self, value: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]) -> None:
        pulumi.set(self, "provisioning_timeout", value)
    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler region.
        """
        return pulumi.get(self, "region")
    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "region", value)
    @property
    @pulumi.getter(name="releaseLabel")
    def release_label(self) -> Optional[pulumi.Input[str]]:
        """
        The EMR release label of the cluster.
        """
        return pulumi.get(self, "release_label")
    @release_label.setter
    def release_label(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "release_label", value)
    @property
    @pulumi.getter(name="repoUpgradeOnBoot")
    def repo_upgrade_on_boot(self) -> Optional[pulumi.Input[str]]:
        """
        Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
        """
        return pulumi.get(self, "repo_upgrade_on_boot")
    @repo_upgrade_on_boot.setter
    def repo_upgrade_on_boot(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "repo_upgrade_on_boot", value)
    @property
    @pulumi.getter
    def retries(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
        """
        return pulumi.get(self, "retries")
    @retries.setter
    def retries(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "retries", value)
    @property
    @pulumi.getter(name="scheduledTasks")
    def scheduled_tasks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]:
        """
        An array of scheduled tasks.
        """
        return pulumi.get(self, "scheduled_tasks")
    @scheduled_tasks.setter
    def scheduled_tasks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]) -> None:
        pulumi.set(self, "scheduled_tasks", value)
    @property
    @pulumi.getter(name="securityConfig")
    def security_config(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the security configuration applied to the cluster.
        """
        return pulumi.get(self, "security_config")
    @security_config.setter
    def security_config(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "security_config", value)
    @property
    @pulumi.getter(name="serviceAccessSecurityGroup")
    def service_access_security_group(self) -> Optional[pulumi.Input[str]]:
        """
        The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
        """
        return pulumi.get(self, "service_access_security_group")
    @service_access_security_group.setter
    def service_access_security_group(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "service_access_security_group", value)
    @property
    @pulumi.getter(name="serviceRole")
    def service_role(self) -> Optional[pulumi.Input[str]]:
        """
        The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
        """
        return pulumi.get(self, "service_role")
    @service_role.setter
    def service_role(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "service_role", value)
    @property
    @pulumi.getter(name="stepsFiles")
    def steps_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]:
        """
        Steps from S3.
        """
        return pulumi.get(self, "steps_files")
    @steps_files.setter
    def steps_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]) -> None:
        pulumi.set(self, "steps_files", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]:
        """
        A list of tags to assign to the resource. You may define multiple tags.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]) -> None:
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="taskDesiredCapacity")
    def task_desired_capacity(self) -> Optional[pulumi.Input[int]]:
        """
        Amount of instances in task group.
        """
        return pulumi.get(self, "task_desired_capacity")
    @task_desired_capacity.setter
    def task_desired_capacity(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "task_desired_capacity", value)
    @property
    @pulumi.getter(name="taskEbsBlockDevices")
    def task_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]:
        """
        This determines the ebs configuration for your task group instances. Only a single block is allowed.
        """
        return pulumi.get(self, "task_ebs_block_devices")
    @task_ebs_block_devices.setter
    def task_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]) -> None:
        pulumi.set(self, "task_ebs_block_devices", value)
    @property
    @pulumi.getter(name="taskEbsOptimized")
    def task_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
        """
        EBS Optimization setting for instances in group.
        """
        return pulumi.get(self, "task_ebs_optimized")
    @task_ebs_optimized.setter
    def task_ebs_optimized(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "task_ebs_optimized", value)
    @property
    @pulumi.getter(name="taskInstanceTypes")
    def task_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The MrScaler instance types for the task nodes.
        """
        return pulumi.get(self, "task_instance_types")
    @task_instance_types.setter
    def task_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "task_instance_types", value)
    @property
    @pulumi.getter(name="taskLifecycle")
    def task_lifecycle(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        """
        return pulumi.get(self, "task_lifecycle")
    @task_lifecycle.setter
    def task_lifecycle(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "task_lifecycle", value)
    @property
    @pulumi.getter(name="taskMaxSize")
    def task_max_size(self) -> Optional[pulumi.Input[int]]:
        """
        Maximal amount of instances in task group.
        """
        return pulumi.get(self, "task_max_size")
    @task_max_size.setter
    def task_max_size(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "task_max_size", value)
    @property
    @pulumi.getter(name="taskMinSize")
    def task_min_size(self) -> Optional[pulumi.Input[int]]:
        """
        The minimal amount of instances in task group.
        """
        return pulumi.get(self, "task_min_size")
    @task_min_size.setter
    def task_min_size(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "task_min_size", value)
    @property
    @pulumi.getter(name="taskScalingDownPolicies")
    def task_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]:
        """
        Scaling-down policies for the task group.
        """
        return pulumi.get(self, "task_scaling_down_policies")
    @task_scaling_down_policies.setter
    def task_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]) -> None:
        pulumi.set(self, "task_scaling_down_policies", value)
    @property
    @pulumi.getter(name="taskScalingUpPolicies")
    def task_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]:
        """
        Scaling-up policies for the task group.
        """
        return pulumi.get(self, "task_scaling_up_policies")
    @task_scaling_up_policies.setter
    def task_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]) -> None:
        pulumi.set(self, "task_scaling_up_policies", value)
    @property
    @pulumi.getter(name="taskUnit")
    def task_unit(self) -> Optional[pulumi.Input[str]]:
        """
        Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
        """
        return pulumi.get(self, "task_unit")
    @task_unit.setter
    def task_unit(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "task_unit", value)
    @property
    @pulumi.getter(name="terminationPolicies")
    def termination_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]:
        """
        Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
        """
        return pulumi.get(self, "termination_policies")
    @termination_policies.setter
    def termination_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]) -> None:
        pulumi.set(self, "termination_policies", value)
    @property
    @pulumi.getter(name="terminationProtected")
    def termination_protected(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
        """
        return pulumi.get(self, "termination_protected")
    @termination_protected.setter
    def termination_protected(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "termination_protected", value)
    @property
    @pulumi.getter(name="visibleToAllUsers")
    def visible_to_all_users(self) -> Optional[pulumi.Input[bool]]:
        """
        Deprecated: this field has been removed from the API and is no longer functional.
        """
        return pulumi.get(self, "visible_to_all_users")
    @visible_to_all_users.setter
    def visible_to_all_users(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "visible_to_all_users", value)
@pulumi.input_type
class _MrScalarState:
def __init__(__self__, *,
additional_info: Optional[pulumi.Input[str]] = None,
additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
applications: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]] = None,
availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]] = None,
core_desired_capacity: Optional[pulumi.Input[int]] = None,
core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]] = None,
core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
core_lifecycle: Optional[pulumi.Input[str]] = None,
core_max_size: Optional[pulumi.Input[int]] = None,
core_min_size: Optional[pulumi.Input[int]] = None,
core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]] = None,
core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]] = None,
core_unit: Optional[pulumi.Input[str]] = None,
custom_ami_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
ec2_key_name: Optional[pulumi.Input[str]] = None,
expose_cluster_id: Optional[pulumi.Input[bool]] = None,
instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]] = None,
job_flow_role: Optional[pulumi.Input[str]] = None,
keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
log_uri: Optional[pulumi.Input[str]] = None,
managed_primary_security_group: Optional[pulumi.Input[str]] = None,
managed_replica_security_group: Optional[pulumi.Input[str]] = None,
master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]] = None,
master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_lifecycle: Optional[pulumi.Input[str]] = None,
master_target: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
output_cluster_id: Optional[pulumi.Input[str]] = None,
provisioning_timeout: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']] = None,
region: Optional[pulumi.Input[str]] = None,
release_label: Optional[pulumi.Input[str]] = None,
repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
retries: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]] = None,
security_config: Optional[pulumi.Input[str]] = None,
service_access_security_group: Optional[pulumi.Input[str]] = None,
service_role: Optional[pulumi.Input[str]] = None,
steps_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]] = None,
strategy: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]] = None,
task_desired_capacity: Optional[pulumi.Input[int]] = None,
task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]] = None,
task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
task_lifecycle: Optional[pulumi.Input[str]] = None,
task_max_size: Optional[pulumi.Input[int]] = None,
task_min_size: Optional[pulumi.Input[int]] = None,
task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]] = None,
task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]] = None,
task_unit: Optional[pulumi.Input[str]] = None,
termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]] = None,
termination_protected: Optional[pulumi.Input[bool]] = None,
visible_to_all_users: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering MrScalar resources.
:param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
:param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[str] cluster_id: The MrScaler cluster id.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
:param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
:param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
:param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
:param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
:param pulumi.Input[str] description: The MrScaler description.
:param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
:param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
:param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
:param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
:param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
:param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
:param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
:param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
:param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] master_target: Number of instances in the master group.
:param pulumi.Input[str] name: The application name.
:param pulumi.Input[str] region: The MrScaler region.
:param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
:param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]] scheduled_tasks: An array of scheduled tasks.
:param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
:param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
:param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]] steps_files: Steps from S3.
:param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]] tags: A list of tags to assign to the resource. You may define multiple tags.
:param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
:param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
:param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
:param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
:param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
:param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
if additional_info is not None:
pulumi.set(__self__, "additional_info", additional_info)
if additional_primary_security_groups is not None:
pulumi.set(__self__, "additional_primary_security_groups", additional_primary_security_groups)
if additional_replica_security_groups is not None:
pulumi.set(__self__, "additional_replica_security_groups", additional_replica_security_groups)
if applications is not None:
pulumi.set(__self__, "applications", applications)
if availability_zones is not None:
pulumi.set(__self__, "availability_zones", availability_zones)
if bootstrap_actions_files is not None:
pulumi.set(__self__, "bootstrap_actions_files", bootstrap_actions_files)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if configurations_files is not None:
pulumi.set(__self__, "configurations_files", configurations_files)
if core_desired_capacity is not None:
pulumi.set(__self__, "core_desired_capacity", core_desired_capacity)
if core_ebs_block_devices is not None:
pulumi.set(__self__, "core_ebs_block_devices", core_ebs_block_devices)
if core_ebs_optimized is not None:
pulumi.set(__self__, "core_ebs_optimized", core_ebs_optimized)
if core_instance_types is not None:
pulumi.set(__self__, "core_instance_types", core_instance_types)
if core_lifecycle is not None:
pulumi.set(__self__, "core_lifecycle", core_lifecycle)
if core_max_size is not None:
pulumi.set(__self__, "core_max_size", core_max_size)
if core_min_size is not None:
pulumi.set(__self__, "core_min_size", core_min_size)
if core_scaling_down_policies is not None:
pulumi.set(__self__, "core_scaling_down_policies", core_scaling_down_policies)
if core_scaling_up_policies is not None:
pulumi.set(__self__, "core_scaling_up_policies", core_scaling_up_policies)
if core_unit is not None:
pulumi.set(__self__, "core_unit", core_unit)
if custom_ami_id is not None:
pulumi.set(__self__, "custom_ami_id", custom_ami_id)
if description is not None:
pulumi.set(__self__, "description", description)
if ebs_root_volume_size is not None:
pulumi.set(__self__, "ebs_root_volume_size", ebs_root_volume_size)
if ec2_key_name is not None:
pulumi.set(__self__, "ec2_key_name", ec2_key_name)
if expose_cluster_id is not None:
pulumi.set(__self__, "expose_cluster_id", expose_cluster_id)
if instance_weights is not None:
pulumi.set(__self__, "instance_weights", instance_weights)
if job_flow_role is not None:
pulumi.set(__self__, "job_flow_role", job_flow_role)
if keep_job_flow_alive is not None:
pulumi.set(__self__, "keep_job_flow_alive", keep_job_flow_alive)
if log_uri is not None:
pulumi.set(__self__, "log_uri", log_uri)
if managed_primary_security_group is not None:
pulumi.set(__self__, "managed_primary_security_group", managed_primary_security_group)
if managed_replica_security_group is not None:
pulumi.set(__self__, "managed_replica_security_group", managed_replica_security_group)
if master_ebs_block_devices is not None:
pulumi.set(__self__, "master_ebs_block_devices", master_ebs_block_devices)
if master_ebs_optimized is not None:
pulumi.set(__self__, "master_ebs_optimized", master_ebs_optimized)
if master_instance_types is not None:
pulumi.set(__self__, "master_instance_types", master_instance_types)
if master_lifecycle is not None:
pulumi.set(__self__, "master_lifecycle", master_lifecycle)
if master_target is not None:
pulumi.set(__self__, "master_target", master_target)
if name is not None:
pulumi.set(__self__, "name", name)
if output_cluster_id is not None:
pulumi.set(__self__, "output_cluster_id", output_cluster_id)
if provisioning_timeout is not None:
pulumi.set(__self__, "provisioning_timeout", provisioning_timeout)
if region is not None:
pulumi.set(__self__, "region", region)
if release_label is not None:
pulumi.set(__self__, "release_label", release_label)
if repo_upgrade_on_boot is not None:
pulumi.set(__self__, "repo_upgrade_on_boot", repo_upgrade_on_boot)
if retries is not None:
pulumi.set(__self__, "retries", retries)
if scheduled_tasks is not None:
pulumi.set(__self__, "scheduled_tasks", scheduled_tasks)
if security_config is not None:
pulumi.set(__self__, "security_config", security_config)
if service_access_security_group is not None:
pulumi.set(__self__, "service_access_security_group", service_access_security_group)
if service_role is not None:
pulumi.set(__self__, "service_role", service_role)
if steps_files is not None:
pulumi.set(__self__, "steps_files", steps_files)
if strategy is not None:
pulumi.set(__self__, "strategy", strategy)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if task_desired_capacity is not None:
pulumi.set(__self__, "task_desired_capacity", task_desired_capacity)
if task_ebs_block_devices is not None:
pulumi.set(__self__, "task_ebs_block_devices", task_ebs_block_devices)
if task_ebs_optimized is not None:
pulumi.set(__self__, "task_ebs_optimized", task_ebs_optimized)
if task_instance_types is not None:
pulumi.set(__self__, "task_instance_types", task_instance_types)
if task_lifecycle is not None:
pulumi.set(__self__, "task_lifecycle", task_lifecycle)
if task_max_size is not None:
pulumi.set(__self__, "task_max_size", task_max_size)
if task_min_size is not None:
pulumi.set(__self__, "task_min_size", task_min_size)
if task_scaling_down_policies is not None:
pulumi.set(__self__, "task_scaling_down_policies", task_scaling_down_policies)
if task_scaling_up_policies is not None:
pulumi.set(__self__, "task_scaling_up_policies", task_scaling_up_policies)
if task_unit is not None:
pulumi.set(__self__, "task_unit", task_unit)
if termination_policies is not None:
pulumi.set(__self__, "termination_policies", termination_policies)
if termination_protected is not None:
pulumi.set(__self__, "termination_protected", termination_protected)
if visible_to_all_users is not None:
warnings.warn("""This field has been removed from our API and is no longer functional.""", DeprecationWarning)
pulumi.log.warn("""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.""")
if visible_to_all_users is not None:
pulumi.set(__self__, "visible_to_all_users", visible_to_all_users)
@property
@pulumi.getter(name="additionalInfo")
def additional_info(self) -> Optional[pulumi.Input[str]]:
"""
This is meta information about third-party applications that third-party vendors use for testing purposes.
"""
return pulumi.get(self, "additional_info")
@additional_info.setter
def additional_info(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "additional_info", value)
@property
@pulumi.getter(name="additionalPrimarySecurityGroups")
def additional_primary_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of additional Amazon EC2 security group IDs for the master node.
"""
return pulumi.get(self, "additional_primary_security_groups")
@additional_primary_security_groups.setter
def additional_primary_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_primary_security_groups", value)
@property
@pulumi.getter(name="additionalReplicaSecurityGroups")
def additional_replica_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of additional Amazon EC2 security group IDs for the core and task nodes.
"""
return pulumi.get(self, "additional_replica_security_groups")
@additional_replica_security_groups.setter
def additional_replica_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_replica_security_groups", value)
@property
@pulumi.getter
def applications(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]:
"""
A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
"""
return pulumi.get(self, "applications")
@applications.setter
def applications(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]):
pulumi.set(self, "applications", value)
@property
@pulumi.getter(name="availabilityZones")
def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of AZs and their subnet Ids. See example above for usage.
"""
return pulumi.get(self, "availability_zones")
@availability_zones.setter
def availability_zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "availability_zones", value)
@property
@pulumi.getter(name="bootstrapActionsFiles")
def bootstrap_actions_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]:
"""
Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "bootstrap_actions_files")
@bootstrap_actions_files.setter
def bootstrap_actions_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]):
pulumi.set(self, "bootstrap_actions_files", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler cluster id.
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="configurationsFiles")
def configurations_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]:
"""
Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "configurations_files")
@configurations_files.setter
def configurations_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]):
pulumi.set(self, "configurations_files", value)
@property
@pulumi.getter(name="coreDesiredCapacity")
def core_desired_capacity(self) -> Optional[pulumi.Input[int]]:
"""
amount of instances in core group.
"""
return pulumi.get(self, "core_desired_capacity")
@core_desired_capacity.setter
def core_desired_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_desired_capacity", value)
@property
@pulumi.getter(name="coreEbsBlockDevices")
def core_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]:
"""
This determines the ebs configuration for your core group instances. Only a single block is allowed.
"""
return pulumi.get(self, "core_ebs_block_devices")
@core_ebs_block_devices.setter
def core_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]):
pulumi.set(self, "core_ebs_block_devices", value)
@property
@pulumi.getter(name="coreEbsOptimized")
def core_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "core_ebs_optimized")
@core_ebs_optimized.setter
def core_ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "core_ebs_optimized", value)
@property
@pulumi.getter(name="coreInstanceTypes")
def core_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The MrScaler instance types for the core nodes.
"""
return pulumi.get(self, "core_instance_types")
@core_instance_types.setter
def core_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "core_instance_types", value)
@property
@pulumi.getter(name="coreLifecycle")
def core_lifecycle(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "core_lifecycle")
@core_lifecycle.setter
def core_lifecycle(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "core_lifecycle", value)
@property
@pulumi.getter(name="coreMaxSize")
def core_max_size(self) -> Optional[pulumi.Input[int]]:
"""
maximal amount of instances in core group.
"""
return pulumi.get(self, "core_max_size")
@core_max_size.setter
def core_max_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_max_size", value)
@property
@pulumi.getter(name="coreMinSize")
def core_min_size(self) -> Optional[pulumi.Input[int]]:
"""
The minimal amount of instances in core group.
"""
return pulumi.get(self, "core_min_size")
@core_min_size.setter
def core_min_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_min_size", value)
    @property
    @pulumi.getter(name="coreScalingDownPolicies")
    def core_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]:
        """
        Down-scaling policies for the core group (name-derived; the upstream
        schema provides no description for this field).
        """
        return pulumi.get(self, "core_scaling_down_policies")
    @core_scaling_down_policies.setter
    def core_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]):
        pulumi.set(self, "core_scaling_down_policies", value)
    @property
    @pulumi.getter(name="coreScalingUpPolicies")
    def core_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]:
        """
        Up-scaling policies for the core group (name-derived; the upstream
        schema provides no description for this field).
        """
        return pulumi.get(self, "core_scaling_up_policies")
    @core_scaling_up_policies.setter
    def core_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]):
        pulumi.set(self, "core_scaling_up_policies", value)
@property
@pulumi.getter(name="coreUnit")
def core_unit(self) -> Optional[pulumi.Input[str]]:
"""
Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
"""
return pulumi.get(self, "core_unit")
@core_unit.setter
def core_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "core_unit", value)
@property
@pulumi.getter(name="customAmiId")
def custom_ami_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
"""
return pulumi.get(self, "custom_ami_id")
@custom_ami_id.setter
def custom_ami_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_ami_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler description.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="ebsRootVolumeSize")
    def ebs_root_volume_size(self) -> Optional[pulumi.Input[int]]:
        """
        Size of the EBS root volume (presumably in GiB, matching the EMR API
        -- not documented in the upstream schema; confirm before relying on it).
        """
        return pulumi.get(self, "ebs_root_volume_size")
    @ebs_root_volume_size.setter
    def ebs_root_volume_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ebs_root_volume_size", value)
@property
@pulumi.getter(name="ec2KeyName")
def ec2_key_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of an Amazon EC2 key pair that can be used to ssh to the master node.
"""
return pulumi.get(self, "ec2_key_name")
@ec2_key_name.setter
def ec2_key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ec2_key_name", value)
@property
@pulumi.getter(name="exposeClusterId")
def expose_cluster_id(self) -> Optional[pulumi.Input[bool]]:
"""
Allow the `cluster_id` to set a provider output variable.
"""
return pulumi.get(self, "expose_cluster_id")
@expose_cluster_id.setter
def expose_cluster_id(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "expose_cluster_id", value)
@property
@pulumi.getter(name="instanceWeights")
def instance_weights(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]:
"""
Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
"""
return pulumi.get(self, "instance_weights")
@instance_weights.setter
def instance_weights(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]):
pulumi.set(self, "instance_weights", value)
@property
@pulumi.getter(name="jobFlowRole")
def job_flow_role(self) -> Optional[pulumi.Input[str]]:
"""
The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
"""
return pulumi.get(self, "job_flow_role")
@job_flow_role.setter
def job_flow_role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "job_flow_role", value)
@property
@pulumi.getter(name="keepJobFlowAlive")
def keep_job_flow_alive(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether the cluster should remain available after completing all steps.
"""
return pulumi.get(self, "keep_job_flow_alive")
@keep_job_flow_alive.setter
def keep_job_flow_alive(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "keep_job_flow_alive", value)
@property
@pulumi.getter(name="logUri")
def log_uri(self) -> Optional[pulumi.Input[str]]:
"""
The path to the Amazon S3 location where logs for this cluster are stored.
"""
return pulumi.get(self, "log_uri")
@log_uri.setter
def log_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "log_uri", value)
@property
@pulumi.getter(name="managedPrimarySecurityGroup")
def managed_primary_security_group(self) -> Optional[pulumi.Input[str]]:
"""
EMR Managed Security group that will be set to the primary instance group.
"""
return pulumi.get(self, "managed_primary_security_group")
@managed_primary_security_group.setter
def managed_primary_security_group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "managed_primary_security_group", value)
@property
@pulumi.getter(name="managedReplicaSecurityGroup")
def managed_replica_security_group(self) -> Optional[pulumi.Input[str]]:
"""
EMR Managed Security group that will be set to the replica instance group.
"""
return pulumi.get(self, "managed_replica_security_group")
@managed_replica_security_group.setter
def managed_replica_security_group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "managed_replica_security_group", value)
@property
@pulumi.getter(name="masterEbsBlockDevices")
def master_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]:
"""
This determines the ebs configuration for your master group instances. Only a single block is allowed.
"""
return pulumi.get(self, "master_ebs_block_devices")
    # Generated-style property accessors: each getter/setter pair simply
    # forwards to the underlying Pulumi args store via pulumi.get/pulumi.set.
    @master_ebs_block_devices.setter
    def master_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]):
        pulumi.set(self, "master_ebs_block_devices", value)
    @property
    @pulumi.getter(name="masterEbsOptimized")
    def master_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
        """
        EBS Optimization setting for instances in group.
        """
        return pulumi.get(self, "master_ebs_optimized")
    @master_ebs_optimized.setter
    def master_ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "master_ebs_optimized", value)
    @property
    @pulumi.getter(name="masterInstanceTypes")
    def master_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The MrScaler instance types for the master nodes.
        """
        return pulumi.get(self, "master_instance_types")
    @master_instance_types.setter
    def master_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "master_instance_types", value)
    @property
    @pulumi.getter(name="masterLifecycle")
    def master_lifecycle(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        """
        return pulumi.get(self, "master_lifecycle")
    @master_lifecycle.setter
    def master_lifecycle(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "master_lifecycle", value)
    @property
    @pulumi.getter(name="masterTarget")
    def master_target(self) -> Optional[pulumi.Input[int]]:
        """
        Number of instances in the master group.
        """
        return pulumi.get(self, "master_target")
    @master_target.setter
    def master_target(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "master_target", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The application name.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="outputClusterId")
    def output_cluster_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the cluster created by this MrScaler, exposed as an output
        when `expose_cluster_id` is enabled.
        """
        return pulumi.get(self, "output_cluster_id")
    @output_cluster_id.setter
    def output_cluster_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "output_cluster_id", value)
    @property
    @pulumi.getter(name="provisioningTimeout")
    def provisioning_timeout(self) -> Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]:
        """
        Timeout settings for cluster provisioning (see `MrScalarProvisioningTimeoutArgs`).
        """
        return pulumi.get(self, "provisioning_timeout")
    @provisioning_timeout.setter
    def provisioning_timeout(self, value: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]):
        pulumi.set(self, "provisioning_timeout", value)
    @property
    @pulumi.getter
    def region(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler region.
        """
        return pulumi.get(self, "region")
    @region.setter
    def region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "region", value)
    @property
    @pulumi.getter(name="releaseLabel")
    def release_label(self) -> Optional[pulumi.Input[str]]:
        """
        The EMR release label (e.g. `emr-5.17.0`).
        """
        return pulumi.get(self, "release_label")
    @release_label.setter
    def release_label(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "release_label", value)
    @property
    @pulumi.getter(name="repoUpgradeOnBoot")
    def repo_upgrade_on_boot(self) -> Optional[pulumi.Input[str]]:
        """
        Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
        """
        return pulumi.get(self, "repo_upgrade_on_boot")
    @repo_upgrade_on_boot.setter
    def repo_upgrade_on_boot(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "repo_upgrade_on_boot", value)
    @property
    @pulumi.getter
    def retries(self) -> Optional[pulumi.Input[int]]:
        """
        Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
        """
        return pulumi.get(self, "retries")
    @retries.setter
    def retries(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "retries", value)
    @property
    @pulumi.getter(name="scheduledTasks")
    def scheduled_tasks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]:
        """
        An array of scheduled tasks.
        """
        return pulumi.get(self, "scheduled_tasks")
    @scheduled_tasks.setter
    def scheduled_tasks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]):
        pulumi.set(self, "scheduled_tasks", value)
    @property
    @pulumi.getter(name="securityConfig")
    def security_config(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the security configuration applied to the cluster.
        """
        return pulumi.get(self, "security_config")
    @security_config.setter
    def security_config(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_config", value)
    @property
    @pulumi.getter(name="serviceAccessSecurityGroup")
    def service_access_security_group(self) -> Optional[pulumi.Input[str]]:
        """
        The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
        """
        return pulumi.get(self, "service_access_security_group")
    @service_access_security_group.setter
    def service_access_security_group(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_access_security_group", value)
    @property
    @pulumi.getter(name="serviceRole")
    def service_role(self) -> Optional[pulumi.Input[str]]:
        """
        The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
        """
        return pulumi.get(self, "service_role")
    @service_role.setter
    def service_role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_role", value)
    @property
    @pulumi.getter(name="stepsFiles")
    def steps_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]:
        """
        Steps from S3.
        """
        return pulumi.get(self, "steps_files")
    @steps_files.setter
    def steps_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]):
        pulumi.set(self, "steps_files", value)
    @property
    @pulumi.getter
    def strategy(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
        """
        return pulumi.get(self, "strategy")
    @strategy.setter
    def strategy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "strategy", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]:
        """
        A list of tags to assign to the resource. You may define multiple tags.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="taskDesiredCapacity")
    def task_desired_capacity(self) -> Optional[pulumi.Input[int]]:
        """
        amount of instances in task group.
        """
        return pulumi.get(self, "task_desired_capacity")
    @task_desired_capacity.setter
    def task_desired_capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "task_desired_capacity", value)
    @property
    @pulumi.getter(name="taskEbsBlockDevices")
    def task_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]:
        """
        This determines the ebs configuration for your task group instances. Only a single block is allowed.
        """
        return pulumi.get(self, "task_ebs_block_devices")
    @task_ebs_block_devices.setter
    def task_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]):
        pulumi.set(self, "task_ebs_block_devices", value)
    @property
    @pulumi.getter(name="taskEbsOptimized")
    def task_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
        """
        EBS Optimization setting for instances in group.
        """
        return pulumi.get(self, "task_ebs_optimized")
    @task_ebs_optimized.setter
    def task_ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "task_ebs_optimized", value)
    @property
    @pulumi.getter(name="taskInstanceTypes")
    def task_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The MrScaler instance types for the task nodes.
        """
        return pulumi.get(self, "task_instance_types")
    @task_instance_types.setter
    def task_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "task_instance_types", value)
    @property
    @pulumi.getter(name="taskLifecycle")
    def task_lifecycle(self) -> Optional[pulumi.Input[str]]:
        """
        The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        """
        return pulumi.get(self, "task_lifecycle")
    @task_lifecycle.setter
    def task_lifecycle(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "task_lifecycle", value)
    @property
    @pulumi.getter(name="taskMaxSize")
    def task_max_size(self) -> Optional[pulumi.Input[int]]:
        """
        maximal amount of instances in task group.
        """
        return pulumi.get(self, "task_max_size")
    @task_max_size.setter
    def task_max_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "task_max_size", value)
    @property
    @pulumi.getter(name="taskMinSize")
    def task_min_size(self) -> Optional[pulumi.Input[int]]:
        """
        The minimal amount of instances in task group.
        """
        return pulumi.get(self, "task_min_size")
    @task_min_size.setter
    def task_min_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "task_min_size", value)
    @property
    @pulumi.getter(name="taskScalingDownPolicies")
    def task_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]:
        """
        Scaling-down policies for the task group (see `MrScalarTaskScalingDownPolicyArgs`).
        """
        return pulumi.get(self, "task_scaling_down_policies")
    @task_scaling_down_policies.setter
    def task_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]):
        pulumi.set(self, "task_scaling_down_policies", value)
    @property
    @pulumi.getter(name="taskScalingUpPolicies")
    def task_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]:
        """
        Scaling-up policies for the task group (see `MrScalarTaskScalingUpPolicyArgs`).
        """
        return pulumi.get(self, "task_scaling_up_policies")
    @task_scaling_up_policies.setter
    def task_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]):
        pulumi.set(self, "task_scaling_up_policies", value)
    @property
    @pulumi.getter(name="taskUnit")
    def task_unit(self) -> Optional[pulumi.Input[str]]:
        """
        Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
        """
        return pulumi.get(self, "task_unit")
    @task_unit.setter
    def task_unit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "task_unit", value)
    @property
    @pulumi.getter(name="terminationPolicies")
    def termination_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]:
        """
        Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
        """
        return pulumi.get(self, "termination_policies")
    @termination_policies.setter
    def termination_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]):
        pulumi.set(self, "termination_policies", value)
    @property
    @pulumi.getter(name="terminationProtected")
    def termination_protected(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
        """
        return pulumi.get(self, "termination_protected")
    @termination_protected.setter
    def termination_protected(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "termination_protected", value)
    @property
    @pulumi.getter(name="visibleToAllUsers")
    # NOTE(review): maps to the EMR "VisibleToAllUsers" flag judging by the
    # wire name — presumably controls cluster visibility to all IAM users;
    # confirm against the provider documentation.
    def visible_to_all_users(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "visible_to_all_users")
    @visible_to_all_users.setter
    def visible_to_all_users(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "visible_to_all_users", value)
class MrScalar(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 additional_info: Optional[pulumi.Input[str]] = None,
                 additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 applications: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]]] = None,
                 availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]]] = None,
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]]] = None,
                 core_desired_capacity: Optional[pulumi.Input[int]] = None,
                 core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]]] = None,
                 core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
                 core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 core_lifecycle: Optional[pulumi.Input[str]] = None,
                 core_max_size: Optional[pulumi.Input[int]] = None,
                 core_min_size: Optional[pulumi.Input[int]] = None,
                 core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingDownPolicyArgs']]]]] = None,
                 core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingUpPolicyArgs']]]]] = None,
                 core_unit: Optional[pulumi.Input[str]] = None,
                 custom_ami_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
                 ec2_key_name: Optional[pulumi.Input[str]] = None,
                 expose_cluster_id: Optional[pulumi.Input[bool]] = None,
                 instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]]] = None,
                 job_flow_role: Optional[pulumi.Input[str]] = None,
                 keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
                 log_uri: Optional[pulumi.Input[str]] = None,
                 managed_primary_security_group: Optional[pulumi.Input[str]] = None,
                 managed_replica_security_group: Optional[pulumi.Input[str]] = None,
                 master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]]] = None,
                 master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
                 master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 master_lifecycle: Optional[pulumi.Input[str]] = None,
                 master_target: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 provisioning_timeout: Optional[pulumi.Input[pulumi.InputType['MrScalarProvisioningTimeoutArgs']]] = None,
                 region: Optional[pulumi.Input[str]] = None,
                 release_label: Optional[pulumi.Input[str]] = None,
                 repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
                 retries: Optional[pulumi.Input[int]] = None,
                 scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]]] = None,
                 security_config: Optional[pulumi.Input[str]] = None,
                 service_access_security_group: Optional[pulumi.Input[str]] = None,
                 service_role: Optional[pulumi.Input[str]] = None,
                 steps_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]]] = None,
                 strategy: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]]] = None,
                 task_desired_capacity: Optional[pulumi.Input[int]] = None,
                 task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]]] = None,
                 task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
                 task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 task_lifecycle: Optional[pulumi.Input[str]] = None,
                 task_max_size: Optional[pulumi.Input[int]] = None,
                 task_min_size: Optional[pulumi.Input[int]] = None,
                 task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingDownPolicyArgs']]]]] = None,
                 task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingUpPolicyArgs']]]]] = None,
                 task_unit: Optional[pulumi.Input[str]] = None,
                 termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]]] = None,
                 termination_protected: Optional[pulumi.Input[bool]] = None,
                 visible_to_all_users: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        Provides a Spotinst AWS MrScaler resource.
        ## Example Usage
        ### New Strategy
        ```python
        import pulumi
        import pulumi_spotinst as spotinst
        sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
            additional_info="{'test':'more information'}",
            additional_primary_security_groups=["sg-456321"],
            additional_replica_security_groups=["sg-123654"],
            applications=[
                spotinst.aws.MrScalarApplicationArgs(
                    name="Ganglia",
                    version="1.0",
                ),
                spotinst.aws.MrScalarApplicationArgs(
                    name="Hadoop",
                ),
                spotinst.aws.MrScalarApplicationArgs(
                    args=[
                        "fake",
                        "args",
                    ],
                    name="Pig",
                ),
            ],
            availability_zones=["us-west-2a:subnet-123456"],
            bootstrap_actions_files=[spotinst.aws.MrScalarBootstrapActionsFileArgs(
                bucket="sample-emr-test",
                key="bootstrap-actions.json",
            )],
            configurations_files=[spotinst.aws.MrScalarConfigurationsFileArgs(
                bucket="example-bucket",
                key="configurations.json",
            )],
            core_desired_capacity=1,
            core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            core_ebs_optimized=False,
            core_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            core_lifecycle="ON_DEMAND",
            core_max_size=1,
            core_min_size=1,
            core_unit="instance",
            custom_ami_id="ami-123456",
            description="Testing MrScaler creation",
            ec2_key_name="test-key",
            instance_weights=[
                spotinst.aws.MrScalarInstanceWeightArgs(
                    instance_type="t2.small",
                    weighted_capacity=10,
                ),
                spotinst.aws.MrScalarInstanceWeightArgs(
                    instance_type="t2.medium",
                    weighted_capacity=90,
                ),
            ],
            job_flow_role="EMR_EC2_ExampleRole",
            keep_job_flow_alive=True,
            log_uri="s3://example-logs",
            managed_primary_security_group="sg-123456",
            managed_replica_security_group="sg-987654",
            master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
                size_in_gb=30,
                volume_type="gp2",
                volumes_per_instance=1,
            )],
            master_ebs_optimized=True,
            master_instance_types=["c3.xlarge"],
            master_lifecycle="SPOT",
            master_target=1,
            provisioning_timeout=spotinst.aws.MrScalarProvisioningTimeoutArgs(
                timeout=15,
                timeout_action="terminateAndRetry",
            ),
            region="us-west-2",
            release_label="emr-5.17.0",
            repo_upgrade_on_boot="NONE",
            retries=2,
            security_config="example-config",
            service_access_security_group="access-example",
            service_role="example-role",
            steps_files=[spotinst.aws.MrScalarStepsFileArgs(
                bucket="example-bucket",
                key="steps.json",
            )],
            strategy="new",
            tags=[spotinst.aws.MrScalarTagArgs(
                key="Creator",
                value="Pulumi",
            )],
            task_desired_capacity=1,
            task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            task_ebs_optimized=False,
            task_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            task_lifecycle="SPOT",
            task_max_size=30,
            task_min_size=0,
            task_unit="instance",
            termination_protected=False)
        ```
        ### Clone Strategy
        ```python
        import pulumi
        import pulumi_spotinst as spotinst
        sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
            availability_zones=["us-west-2a:subnet-12345678"],
            cluster_id="j-123456789",
            core_desired_capacity=1,
            core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            core_ebs_optimized=False,
            core_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            core_lifecycle="ON_DEMAND",
            core_max_size=1,
            core_min_size=1,
            core_unit="instance",
            description="Testing MrScaler creation",
            expose_cluster_id=True,
            master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
                size_in_gb=30,
                volume_type="gp2",
                volumes_per_instance=1,
            )],
            master_ebs_optimized=True,
            master_instance_types=["c3.xlarge"],
            master_lifecycle="SPOT",
            master_target=1,
            region="us-west-2",
            strategy="clone",
            tags=[spotinst.aws.MrScalarTagArgs(
                key="Creator",
                value="Pulumi",
            )],
            task_desired_capacity=1,
            task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            task_ebs_optimized=False,
            task_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            task_lifecycle="SPOT",
            task_max_size=30,
            task_min_size=0,
            task_scaling_down_policies=[spotinst.aws.MrScalarTaskScalingDownPolicyArgs(
                action_type="",
                adjustment="1",
                cooldown=60,
                dimensions={
                    "name": "name-1",
                    "value": "value-1",
                },
                evaluation_periods=10,
                max_target_capacity="1",
                maximum="10",
                metric_name="CPUUtilization",
                minimum="0",
                namespace="AWS/EC2",
                operator="gt",
                period=60,
                policy_name="policy-name",
                statistic="average",
                target="5",
                threshold=10,
                unit="",
            )],
            task_unit="instance")
        pulumi.export("mrscaler-name", sample__mr_scaler_01.name)
        pulumi.export("mrscaler-created-cluster-id", sample__mr_scaler_01.output_cluster_id)
        ```
        ### Wrap Strategy
        ```python
        import pulumi
        import pulumi_spotinst as spotinst
        example_scaler_2 = spotinst.aws.MrScalar("example-scaler-2",
            cluster_id="j-27UVDEHXL4OQM",
            description="created by Pulumi",
            region="us-west-2",
            strategy="wrap",
            task_desired_capacity=2,
            task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
                size_in_gb=20,
                volume_type="gp2",
                volumes_per_instance=1,
            )],
            task_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            task_lifecycle="SPOT",
            task_max_size=4,
            task_min_size=0,
            task_unit="instance")
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
        :param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
        :param pulumi.Input[str] cluster_id: The MrScaler cluster id.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
        :param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
        :param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
        :param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        :param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
        :param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
        :param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
        :param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
        :param pulumi.Input[str] description: The MrScaler description.
        :param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
        :param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
        :param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
        :param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
        :param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
        :param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
        :param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
        :param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
        :param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        :param pulumi.Input[int] master_target: Number of instances in the master group.
        :param pulumi.Input[str] name: The application name.
        :param pulumi.Input[str] region: The MrScaler region.
        :param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
        :param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]] scheduled_tasks: An array of scheduled tasks.
        :param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
        :param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
        :param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]] steps_files: Steps from S3.
        :param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]] tags: A list of tags to assign to the resource. You may define multiple tags.
        :param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
        :param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
        :param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        :param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
        :param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
        :param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
        :param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
        """
        # typing.overload stub: signature/documentation only — the real
        # initialization logic lives in _internal_init.
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: MrScalarArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Spotinst AWS MrScaler resource.
        ## Example Usage
        ### New Strategy
        ```python
        import pulumi
        import pulumi_spotinst as spotinst
        sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
            additional_info="{'test':'more information'}",
            additional_primary_security_groups=["sg-456321"],
            additional_replica_security_groups=["sg-123654"],
            applications=[
                spotinst.aws.MrScalarApplicationArgs(
                    name="Ganglia",
                    version="1.0",
                ),
                spotinst.aws.MrScalarApplicationArgs(
                    name="Hadoop",
                ),
                spotinst.aws.MrScalarApplicationArgs(
                    args=[
                        "fake",
                        "args",
                    ],
                    name="Pig",
                ),
            ],
            availability_zones=["us-west-2a:subnet-123456"],
            bootstrap_actions_files=[spotinst.aws.MrScalarBootstrapActionsFileArgs(
                bucket="sample-emr-test",
                key="bootstrap-actions.json",
            )],
            configurations_files=[spotinst.aws.MrScalarConfigurationsFileArgs(
                bucket="example-bucket",
                key="configurations.json",
            )],
            core_desired_capacity=1,
            core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            core_ebs_optimized=False,
            core_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            core_lifecycle="ON_DEMAND",
            core_max_size=1,
            core_min_size=1,
            core_unit="instance",
            custom_ami_id="ami-123456",
            description="Testing MrScaler creation",
            ec2_key_name="test-key",
            instance_weights=[
                spotinst.aws.MrScalarInstanceWeightArgs(
                    instance_type="t2.small",
                    weighted_capacity=10,
                ),
                spotinst.aws.MrScalarInstanceWeightArgs(
                    instance_type="t2.medium",
                    weighted_capacity=90,
                ),
            ],
            job_flow_role="EMR_EC2_ExampleRole",
            keep_job_flow_alive=True,
            log_uri="s3://example-logs",
            managed_primary_security_group="sg-123456",
            managed_replica_security_group="sg-987654",
            master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
                size_in_gb=30,
                volume_type="gp2",
                volumes_per_instance=1,
            )],
            master_ebs_optimized=True,
            master_instance_types=["c3.xlarge"],
            master_lifecycle="SPOT",
            master_target=1,
            provisioning_timeout=spotinst.aws.MrScalarProvisioningTimeoutArgs(
                timeout=15,
                timeout_action="terminateAndRetry",
            ),
            region="us-west-2",
            release_label="emr-5.17.0",
            repo_upgrade_on_boot="NONE",
            retries=2,
            security_config="example-config",
            service_access_security_group="access-example",
            service_role="example-role",
            steps_files=[spotinst.aws.MrScalarStepsFileArgs(
                bucket="example-bucket",
                key="steps.json",
            )],
            strategy="new",
            tags=[spotinst.aws.MrScalarTagArgs(
                key="Creator",
                value="Pulumi",
            )],
            task_desired_capacity=1,
            task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            task_ebs_optimized=False,
            task_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            task_lifecycle="SPOT",
            task_max_size=30,
            task_min_size=0,
            task_unit="instance",
            termination_protected=False)
        ```
        ### Clone Strategy
        ```python
        import pulumi
        import pulumi_spotinst as spotinst
        sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
            availability_zones=["us-west-2a:subnet-12345678"],
            cluster_id="j-123456789",
            core_desired_capacity=1,
            core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            core_ebs_optimized=False,
            core_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            core_lifecycle="ON_DEMAND",
            core_max_size=1,
            core_min_size=1,
            core_unit="instance",
            description="Testing MrScaler creation",
            expose_cluster_id=True,
            master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
                size_in_gb=30,
                volume_type="gp2",
                volumes_per_instance=1,
            )],
            master_ebs_optimized=True,
            master_instance_types=["c3.xlarge"],
            master_lifecycle="SPOT",
            master_target=1,
            region="us-west-2",
            strategy="clone",
            tags=[spotinst.aws.MrScalarTagArgs(
                key="Creator",
                value="Pulumi",
            )],
            task_desired_capacity=1,
            task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
                size_in_gb=40,
                volume_type="gp2",
                volumes_per_instance=2,
            )],
            task_ebs_optimized=False,
            task_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            task_lifecycle="SPOT",
            task_max_size=30,
            task_min_size=0,
            task_scaling_down_policies=[spotinst.aws.MrScalarTaskScalingDownPolicyArgs(
                action_type="",
                adjustment="1",
                cooldown=60,
                dimensions={
                    "name": "name-1",
                    "value": "value-1",
                },
                evaluation_periods=10,
                max_target_capacity="1",
                maximum="10",
                metric_name="CPUUtilization",
                minimum="0",
                namespace="AWS/EC2",
                operator="gt",
                period=60,
                policy_name="policy-name",
                statistic="average",
                target="5",
                threshold=10,
                unit="",
            )],
            task_unit="instance")
        pulumi.export("mrscaler-name", sample__mr_scaler_01.name)
        pulumi.export("mrscaler-created-cluster-id", sample__mr_scaler_01.output_cluster_id)
        ```
        ### Wrap Strategy
        ```python
        import pulumi
        import pulumi_spotinst as spotinst
        example_scaler_2 = spotinst.aws.MrScalar("example-scaler-2",
            cluster_id="j-27UVDEHXL4OQM",
            description="created by Pulumi",
            region="us-west-2",
            strategy="wrap",
            task_desired_capacity=2,
            task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
                size_in_gb=20,
                volume_type="gp2",
                volumes_per_instance=1,
            )],
            task_instance_types=[
                "c3.xlarge",
                "c4.xlarge",
            ],
            task_lifecycle="SPOT",
            task_max_size=4,
            task_min_size=0,
            task_unit="instance")
        ```
        :param str resource_name: The name of the resource.
        :param MrScalarArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # typing.overload stub: signature/documentation only — the real
        # initialization logic lives in _internal_init.
        ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(MrScalarArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_info: Optional[pulumi.Input[str]] = None,
additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
applications: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]]] = None,
availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]]] = None,
core_desired_capacity: Optional[pulumi.Input[int]] = None,
core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]]] = None,
core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
core_lifecycle: Optional[pulumi.Input[str]] = None,
core_max_size: Optional[pulumi.Input[int]] = None,
core_min_size: Optional[pulumi.Input[int]] = None,
core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingDownPolicyArgs']]]]] = None,
core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingUpPolicyArgs']]]]] = None,
core_unit: Optional[pulumi.Input[str]] = None,
custom_ami_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
ec2_key_name: Optional[pulumi.Input[str]] = None,
expose_cluster_id: Optional[pulumi.Input[bool]] = None,
instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]]] = None,
job_flow_role: Optional[pulumi.Input[str]] = None,
keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
log_uri: Optional[pulumi.Input[str]] = None,
managed_primary_security_group: Optional[pulumi.Input[str]] = None,
managed_replica_security_group: Optional[pulumi.Input[str]] = None,
master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]]] = None,
master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_lifecycle: Optional[pulumi.Input[str]] = None,
master_target: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_timeout: Optional[pulumi.Input[pulumi.InputType['MrScalarProvisioningTimeoutArgs']]] = None,
region: Optional[pulumi.Input[str]] = None,
release_label: Optional[pulumi.Input[str]] = None,
repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
retries: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]]] = None,
security_config: Optional[pulumi.Input[str]] = None,
service_access_security_group: Optional[pulumi.Input[str]] = None,
service_role: Optional[pulumi.Input[str]] = None,
steps_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]]] = None,
strategy: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]]] = None,
task_desired_capacity: Optional[pulumi.Input[int]] = None,
task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]]] = None,
task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
task_lifecycle: Optional[pulumi.Input[str]] = None,
task_max_size: Optional[pulumi.Input[int]] = None,
task_min_size: Optional[pulumi.Input[int]] = None,
task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingDownPolicyArgs']]]]] = None,
task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingUpPolicyArgs']]]]] = None,
task_unit: Optional[pulumi.Input[str]] = None,
termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]]] = None,
termination_protected: Optional[pulumi.Input[bool]] = None,
visible_to_all_users: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = MrScalarArgs.__new__(MrScalarArgs)
__props__.__dict__["additional_info"] = additional_info
__props__.__dict__["additional_primary_security_groups"] = additional_primary_security_groups
__props__.__dict__["additional_replica_security_groups"] = additional_replica_security_groups
__props__.__dict__["applications"] = applications
__props__.__dict__["availability_zones"] = availability_zones
__props__.__dict__["bootstrap_actions_files"] = bootstrap_actions_files
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["configurations_files"] = configurations_files
__props__.__dict__["core_desired_capacity"] = core_desired_capacity
__props__.__dict__["core_ebs_block_devices"] = core_ebs_block_devices
__props__.__dict__["core_ebs_optimized"] = core_ebs_optimized
__props__.__dict__["core_instance_types"] = core_instance_types
__props__.__dict__["core_lifecycle"] = core_lifecycle
__props__.__dict__["core_max_size"] = core_max_size
__props__.__dict__["core_min_size"] = core_min_size
__props__.__dict__["core_scaling_down_policies"] = core_scaling_down_policies
__props__.__dict__["core_scaling_up_policies"] = core_scaling_up_policies
__props__.__dict__["core_unit"] = core_unit
__props__.__dict__["custom_ami_id"] = custom_ami_id
__props__.__dict__["description"] = description
__props__.__dict__["ebs_root_volume_size"] = ebs_root_volume_size
__props__.__dict__["ec2_key_name"] = ec2_key_name
__props__.__dict__["expose_cluster_id"] = expose_cluster_id
__props__.__dict__["instance_weights"] = instance_weights
__props__.__dict__["job_flow_role"] = job_flow_role
__props__.__dict__["keep_job_flow_alive"] = keep_job_flow_alive
__props__.__dict__["log_uri"] = log_uri
__props__.__dict__["managed_primary_security_group"] = managed_primary_security_group
__props__.__dict__["managed_replica_security_group"] = managed_replica_security_group
__props__.__dict__["master_ebs_block_devices"] = master_ebs_block_devices
__props__.__dict__["master_ebs_optimized"] = master_ebs_optimized
__props__.__dict__["master_instance_types"] = master_instance_types
__props__.__dict__["master_lifecycle"] = master_lifecycle
__props__.__dict__["master_target"] = master_target
__props__.__dict__["name"] = name
__props__.__dict__["provisioning_timeout"] = provisioning_timeout
__props__.__dict__["region"] = region
__props__.__dict__["release_label"] = release_label
__props__.__dict__["repo_upgrade_on_boot"] = repo_upgrade_on_boot
__props__.__dict__["retries"] = retries
__props__.__dict__["scheduled_tasks"] = scheduled_tasks
__props__.__dict__["security_config"] = security_config
__props__.__dict__["service_access_security_group"] = service_access_security_group
__props__.__dict__["service_role"] = service_role
__props__.__dict__["steps_files"] = steps_files
if strategy is None and not opts.urn:
raise TypeError("Missing required property 'strategy'")
__props__.__dict__["strategy"] = strategy
__props__.__dict__["tags"] = tags
__props__.__dict__["task_desired_capacity"] = task_desired_capacity
__props__.__dict__["task_ebs_block_devices"] = task_ebs_block_devices
__props__.__dict__["task_ebs_optimized"] = task_ebs_optimized
__props__.__dict__["task_instance_types"] = task_instance_types
__props__.__dict__["task_lifecycle"] = task_lifecycle
__props__.__dict__["task_max_size"] = task_max_size
__props__.__dict__["task_min_size"] = task_min_size
__props__.__dict__["task_scaling_down_policies"] = task_scaling_down_policies
__props__.__dict__["task_scaling_up_policies"] = task_scaling_up_policies
__props__.__dict__["task_unit"] = task_unit
__props__.__dict__["termination_policies"] = termination_policies
__props__.__dict__["termination_protected"] = termination_protected
if visible_to_all_users is not None and not opts.urn:
warnings.warn("""This field has been removed from our API and is no longer functional.""", DeprecationWarning)
pulumi.log.warn("""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.""")
__props__.__dict__["visible_to_all_users"] = visible_to_all_users
__props__.__dict__["output_cluster_id"] = None
super(MrScalar, __self__).__init__(
'spotinst:aws/mrScalar:MrScalar',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
additional_info: Optional[pulumi.Input[str]] = None,
additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
applications: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]]] = None,
availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]]] = None,
core_desired_capacity: Optional[pulumi.Input[int]] = None,
core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]]] = None,
core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
core_lifecycle: Optional[pulumi.Input[str]] = None,
core_max_size: Optional[pulumi.Input[int]] = None,
core_min_size: Optional[pulumi.Input[int]] = None,
core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingDownPolicyArgs']]]]] = None,
core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingUpPolicyArgs']]]]] = None,
core_unit: Optional[pulumi.Input[str]] = None,
custom_ami_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
ec2_key_name: Optional[pulumi.Input[str]] = None,
expose_cluster_id: Optional[pulumi.Input[bool]] = None,
instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]]] = None,
job_flow_role: Optional[pulumi.Input[str]] = None,
keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
log_uri: Optional[pulumi.Input[str]] = None,
managed_primary_security_group: Optional[pulumi.Input[str]] = None,
managed_replica_security_group: Optional[pulumi.Input[str]] = None,
master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]]] = None,
master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_lifecycle: Optional[pulumi.Input[str]] = None,
master_target: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
output_cluster_id: Optional[pulumi.Input[str]] = None,
provisioning_timeout: Optional[pulumi.Input[pulumi.InputType['MrScalarProvisioningTimeoutArgs']]] = None,
region: Optional[pulumi.Input[str]] = None,
release_label: Optional[pulumi.Input[str]] = None,
repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
retries: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]]] = None,
security_config: Optional[pulumi.Input[str]] = None,
service_access_security_group: Optional[pulumi.Input[str]] = None,
service_role: Optional[pulumi.Input[str]] = None,
steps_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]]] = None,
strategy: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]]] = None,
task_desired_capacity: Optional[pulumi.Input[int]] = None,
task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]]] = None,
task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
task_lifecycle: Optional[pulumi.Input[str]] = None,
task_max_size: Optional[pulumi.Input[int]] = None,
task_min_size: Optional[pulumi.Input[int]] = None,
task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingDownPolicyArgs']]]]] = None,
task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingUpPolicyArgs']]]]] = None,
task_unit: Optional[pulumi.Input[str]] = None,
termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]]] = None,
termination_protected: Optional[pulumi.Input[bool]] = None,
visible_to_all_users: Optional[pulumi.Input[bool]] = None) -> 'MrScalar':
"""
Get an existing MrScalar resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
:param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[str] cluster_id: The MrScaler cluster id.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
:param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
:param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
:param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
:param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
:param pulumi.Input[str] description: The MrScaler description.
:param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
:param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
:param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
:param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
:param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
:param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
:param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
:param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
:param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] master_target: Number of instances in the master group.
:param pulumi.Input[str] name: The application name.
:param pulumi.Input[str] region: The MrScaler region.
:param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
:param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]] scheduled_tasks: An array of scheduled tasks.
:param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
:param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
:param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]] steps_files: Steps from S3.
:param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]] tags: A list of tags to assign to the resource. You may define multiple tags.
:param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
:param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
:param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
:param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
:param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
:param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _MrScalarState.__new__(_MrScalarState)
__props__.__dict__["additional_info"] = additional_info
__props__.__dict__["additional_primary_security_groups"] = additional_primary_security_groups
__props__.__dict__["additional_replica_security_groups"] = additional_replica_security_groups
__props__.__dict__["applications"] = applications
__props__.__dict__["availability_zones"] = availability_zones
__props__.__dict__["bootstrap_actions_files"] = bootstrap_actions_files
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["configurations_files"] = configurations_files
__props__.__dict__["core_desired_capacity"] = core_desired_capacity
__props__.__dict__["core_ebs_block_devices"] = core_ebs_block_devices
__props__.__dict__["core_ebs_optimized"] = core_ebs_optimized
__props__.__dict__["core_instance_types"] = core_instance_types
__props__.__dict__["core_lifecycle"] = core_lifecycle
__props__.__dict__["core_max_size"] = core_max_size
__props__.__dict__["core_min_size"] = core_min_size
__props__.__dict__["core_scaling_down_policies"] = core_scaling_down_policies
__props__.__dict__["core_scaling_up_policies"] = core_scaling_up_policies
__props__.__dict__["core_unit"] = core_unit
__props__.__dict__["custom_ami_id"] = custom_ami_id
__props__.__dict__["description"] = description
__props__.__dict__["ebs_root_volume_size"] = ebs_root_volume_size
__props__.__dict__["ec2_key_name"] = ec2_key_name
__props__.__dict__["expose_cluster_id"] = expose_cluster_id
__props__.__dict__["instance_weights"] = instance_weights
__props__.__dict__["job_flow_role"] = job_flow_role
__props__.__dict__["keep_job_flow_alive"] = keep_job_flow_alive
__props__.__dict__["log_uri"] = log_uri
__props__.__dict__["managed_primary_security_group"] = managed_primary_security_group
__props__.__dict__["managed_replica_security_group"] = managed_replica_security_group
__props__.__dict__["master_ebs_block_devices"] = master_ebs_block_devices
__props__.__dict__["master_ebs_optimized"] = master_ebs_optimized
__props__.__dict__["master_instance_types"] = master_instance_types
__props__.__dict__["master_lifecycle"] = master_lifecycle
__props__.__dict__["master_target"] = master_target
__props__.__dict__["name"] = name
__props__.__dict__["output_cluster_id"] = output_cluster_id
__props__.__dict__["provisioning_timeout"] = provisioning_timeout
__props__.__dict__["region"] = region
__props__.__dict__["release_label"] = release_label
__props__.__dict__["repo_upgrade_on_boot"] = repo_upgrade_on_boot
__props__.__dict__["retries"] = retries
__props__.__dict__["scheduled_tasks"] = scheduled_tasks
__props__.__dict__["security_config"] = security_config
__props__.__dict__["service_access_security_group"] = service_access_security_group
__props__.__dict__["service_role"] = service_role
__props__.__dict__["steps_files"] = steps_files
__props__.__dict__["strategy"] = strategy
__props__.__dict__["tags"] = tags
__props__.__dict__["task_desired_capacity"] = task_desired_capacity
__props__.__dict__["task_ebs_block_devices"] = task_ebs_block_devices
__props__.__dict__["task_ebs_optimized"] = task_ebs_optimized
__props__.__dict__["task_instance_types"] = task_instance_types
__props__.__dict__["task_lifecycle"] = task_lifecycle
__props__.__dict__["task_max_size"] = task_max_size
__props__.__dict__["task_min_size"] = task_min_size
__props__.__dict__["task_scaling_down_policies"] = task_scaling_down_policies
__props__.__dict__["task_scaling_up_policies"] = task_scaling_up_policies
__props__.__dict__["task_unit"] = task_unit
__props__.__dict__["termination_policies"] = termination_policies
__props__.__dict__["termination_protected"] = termination_protected
__props__.__dict__["visible_to_all_users"] = visible_to_all_users
return MrScalar(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="additionalInfo")
def additional_info(self) -> pulumi.Output[Optional[str]]:
"""
This is meta information about third-party applications that third-party vendors use for testing purposes.
"""
return pulumi.get(self, "additional_info")
@property
@pulumi.getter(name="additionalPrimarySecurityGroups")
def additional_primary_security_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of additional Amazon EC2 security group IDs for the master node.
"""
return pulumi.get(self, "additional_primary_security_groups")
@property
@pulumi.getter(name="additionalReplicaSecurityGroups")
def additional_replica_security_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of additional Amazon EC2 security group IDs for the core and task nodes.
"""
return pulumi.get(self, "additional_replica_security_groups")
@property
@pulumi.getter
def applications(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarApplication']]]:
"""
A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
"""
return pulumi.get(self, "applications")
@property
@pulumi.getter(name="availabilityZones")
def availability_zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of AZs and their subnet Ids. See example above for usage.
"""
return pulumi.get(self, "availability_zones")
@property
@pulumi.getter(name="bootstrapActionsFiles")
def bootstrap_actions_files(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarBootstrapActionsFile']]]:
"""
Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "bootstrap_actions_files")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler cluster id.
"""
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="configurationsFiles")
def configurations_files(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarConfigurationsFile']]]:
"""
Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "configurations_files")
@property
@pulumi.getter(name="coreDesiredCapacity")
def core_desired_capacity(self) -> pulumi.Output[Optional[int]]:
"""
amount of instances in core group.
"""
return pulumi.get(self, "core_desired_capacity")
@property
@pulumi.getter(name="coreEbsBlockDevices")
def core_ebs_block_devices(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarCoreEbsBlockDevice']]]:
"""
This determines the ebs configuration for your core group instances. Only a single block is allowed.
"""
return pulumi.get(self, "core_ebs_block_devices")
@property
@pulumi.getter(name="coreEbsOptimized")
def core_ebs_optimized(self) -> pulumi.Output[Optional[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "core_ebs_optimized")
@property
@pulumi.getter(name="coreInstanceTypes")
def core_instance_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The MrScaler instance types for the core nodes.
"""
return pulumi.get(self, "core_instance_types")
@property
@pulumi.getter(name="coreLifecycle")
def core_lifecycle(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "core_lifecycle")
@property
@pulumi.getter(name="coreMaxSize")
def core_max_size(self) -> pulumi.Output[Optional[int]]:
"""
maximal amount of instances in core group.
"""
return pulumi.get(self, "core_max_size")
@property
@pulumi.getter(name="coreMinSize")
def core_min_size(self) -> pulumi.Output[Optional[int]]:
"""
The minimal amount of instances in core group.
"""
return pulumi.get(self, "core_min_size")
    @property
    @pulumi.getter(name="coreScalingDownPolicies")
    def core_scaling_down_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarCoreScalingDownPolicy']]]:
        """Scaling-down policies applied to the core group."""
        return pulumi.get(self, "core_scaling_down_policies")
    @property
    @pulumi.getter(name="coreScalingUpPolicies")
    def core_scaling_up_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarCoreScalingUpPolicy']]]:
        """Scaling-up policies applied to the core group."""
        return pulumi.get(self, "core_scaling_up_policies")
@property
@pulumi.getter(name="coreUnit")
def core_unit(self) -> pulumi.Output[Optional[str]]:
"""
Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
"""
return pulumi.get(self, "core_unit")
@property
@pulumi.getter(name="customAmiId")
def custom_ami_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
"""
return pulumi.get(self, "custom_ami_id")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler description.
"""
return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="ebsRootVolumeSize")
    def ebs_root_volume_size(self) -> pulumi.Output[Optional[int]]:
        """Size of the EBS root device volume (presumably GiB, matching EMR's ``EbsRootVolumeSize`` -- TODO confirm)."""
        return pulumi.get(self, "ebs_root_volume_size")
@property
@pulumi.getter(name="ec2KeyName")
def ec2_key_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of an Amazon EC2 key pair that can be used to ssh to the master node.
"""
return pulumi.get(self, "ec2_key_name")
@property
@pulumi.getter(name="exposeClusterId")
def expose_cluster_id(self) -> pulumi.Output[Optional[bool]]:
"""
Allow the `cluster_id` to set a provider output variable.
"""
return pulumi.get(self, "expose_cluster_id")
@property
@pulumi.getter(name="instanceWeights")
def instance_weights(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarInstanceWeight']]]:
"""
Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
"""
return pulumi.get(self, "instance_weights")
@property
@pulumi.getter(name="jobFlowRole")
def job_flow_role(self) -> pulumi.Output[Optional[str]]:
"""
The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
"""
return pulumi.get(self, "job_flow_role")
@property
@pulumi.getter(name="keepJobFlowAlive")
def keep_job_flow_alive(self) -> pulumi.Output[Optional[bool]]:
"""
Specifies whether the cluster should remain available after completing all steps.
"""
return pulumi.get(self, "keep_job_flow_alive")
@property
@pulumi.getter(name="logUri")
def log_uri(self) -> pulumi.Output[Optional[str]]:
"""
The path to the Amazon S3 location where logs for this cluster are stored.
"""
return pulumi.get(self, "log_uri")
@property
@pulumi.getter(name="managedPrimarySecurityGroup")
def managed_primary_security_group(self) -> pulumi.Output[Optional[str]]:
"""
EMR Managed Security group that will be set to the primary instance group.
"""
return pulumi.get(self, "managed_primary_security_group")
@property
@pulumi.getter(name="managedReplicaSecurityGroup")
def managed_replica_security_group(self) -> pulumi.Output[Optional[str]]:
"""
EMR Managed Security group that will be set to the replica instance group.
"""
return pulumi.get(self, "managed_replica_security_group")
@property
@pulumi.getter(name="masterEbsBlockDevices")
def master_ebs_block_devices(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarMasterEbsBlockDevice']]]:
"""
This determines the ebs configuration for your master group instances. Only a single block is allowed.
"""
return pulumi.get(self, "master_ebs_block_devices")
@property
@pulumi.getter(name="masterEbsOptimized")
def master_ebs_optimized(self) -> pulumi.Output[Optional[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "master_ebs_optimized")
@property
@pulumi.getter(name="masterInstanceTypes")
def master_instance_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The MrScaler instance types for the master nodes.
"""
return pulumi.get(self, "master_instance_types")
@property
@pulumi.getter(name="masterLifecycle")
def master_lifecycle(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "master_lifecycle")
@property
@pulumi.getter(name="masterTarget")
def master_target(self) -> pulumi.Output[Optional[int]]:
"""
Number of instances in the master group.
"""
return pulumi.get(self, "master_target")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The application name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outputClusterId")
def output_cluster_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "output_cluster_id")
@property
@pulumi.getter(name="provisioningTimeout")
def provisioning_timeout(self) -> pulumi.Output[Optional['outputs.MrScalarProvisioningTimeout']]:
return pulumi.get(self, "provisioning_timeout")
@property
@pulumi.getter
def region(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler region.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="releaseLabel")
def release_label(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "release_label")
@property
@pulumi.getter(name="repoUpgradeOnBoot")
def repo_upgrade_on_boot(self) -> pulumi.Output[Optional[str]]:
"""
Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
"""
return pulumi.get(self, "repo_upgrade_on_boot")
@property
@pulumi.getter
def retries(self) -> pulumi.Output[Optional[int]]:
"""
Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
"""
return pulumi.get(self, "retries")
@property
@pulumi.getter(name="scheduledTasks")
def scheduled_tasks(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarScheduledTask']]]:
"""
An array of scheduled tasks.
"""
return pulumi.get(self, "scheduled_tasks")
@property
@pulumi.getter(name="securityConfig")
def security_config(self) -> pulumi.Output[Optional[str]]:
"""
The name of the security configuration applied to the cluster.
"""
return pulumi.get(self, "security_config")
@property
@pulumi.getter(name="serviceAccessSecurityGroup")
def service_access_security_group(self) -> pulumi.Output[Optional[str]]:
"""
The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
"""
return pulumi.get(self, "service_access_security_group")
@property
@pulumi.getter(name="serviceRole")
def service_role(self) -> pulumi.Output[Optional[str]]:
"""
The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
"""
return pulumi.get(self, "service_role")
@property
@pulumi.getter(name="stepsFiles")
def steps_files(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarStepsFile']]]:
"""
Steps from S3.
"""
return pulumi.get(self, "steps_files")
@property
@pulumi.getter
def strategy(self) -> pulumi.Output[str]:
"""
The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
"""
return pulumi.get(self, "strategy")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTag']]]:
"""
A list of tags to assign to the resource. You may define multiple tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="taskDesiredCapacity")
def task_desired_capacity(self) -> pulumi.Output[Optional[int]]:
"""
amount of instances in task group.
"""
return pulumi.get(self, "task_desired_capacity")
@property
@pulumi.getter(name="taskEbsBlockDevices")
def task_ebs_block_devices(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTaskEbsBlockDevice']]]:
"""
This determines the ebs configuration for your task group instances. Only a single block is allowed.
"""
return pulumi.get(self, "task_ebs_block_devices")
@property
@pulumi.getter(name="taskEbsOptimized")
def task_ebs_optimized(self) -> pulumi.Output[Optional[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "task_ebs_optimized")
@property
@pulumi.getter(name="taskInstanceTypes")
def task_instance_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The MrScaler instance types for the task nodes.
"""
return pulumi.get(self, "task_instance_types")
@property
@pulumi.getter(name="taskLifecycle")
def task_lifecycle(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "task_lifecycle")
@property
@pulumi.getter(name="taskMaxSize")
def task_max_size(self) -> pulumi.Output[Optional[int]]:
"""
maximal amount of instances in task group.
"""
return pulumi.get(self, "task_max_size")
@property
@pulumi.getter(name="taskMinSize")
def task_min_size(self) -> pulumi.Output[Optional[int]]:
"""
The minimal amount of instances in task group.
"""
return pulumi.get(self, "task_min_size")
@property
@pulumi.getter(name="taskScalingDownPolicies")
def task_scaling_down_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTaskScalingDownPolicy']]]:
return pulumi.get(self, "task_scaling_down_policies")
@property
@pulumi.getter(name="taskScalingUpPolicies")
def task_scaling_up_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTaskScalingUpPolicy']]]:
return pulumi.get(self, "task_scaling_up_policies")
@property
@pulumi.getter(name="taskUnit")
def task_unit(self) -> pulumi.Output[Optional[str]]:
"""
Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
"""
return pulumi.get(self, "task_unit")
@property
@pulumi.getter(name="terminationPolicies")
def termination_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTerminationPolicy']]]:
"""
Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
"""
return pulumi.get(self, "termination_policies")
@property
@pulumi.getter(name="terminationProtected")
def termination_protected(self) -> pulumi.Output[Optional[bool]]:
"""
Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
return pulumi.get(self, "termination_protected")
@property
@pulumi.getter(name="visibleToAllUsers")
def visible_to_all_users(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "visible_to_all_users")
| [
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"warnings.warn",
"pulumi.log.warn",
"pulumi.ResourceOptions"
] | [((21095, 21131), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalInfo"""'}), "(name='additionalInfo')\n", (21108, 21131), False, 'import pulumi\n'), ((21551, 21604), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalPrimarySecurityGroups"""'}), "(name='additionalPrimarySecurityGroups')\n", (21564, 21604), False, 'import pulumi\n'), ((22132, 22185), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalReplicaSecurityGroups"""'}), "(name='additionalReplicaSecurityGroups')\n", (22145, 22185), False, 'import pulumi\n'), ((23231, 23270), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""availabilityZones"""'}), "(name='availabilityZones')\n", (23244, 23270), False, 'import pulumi\n'), ((23709, 23752), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""bootstrapActionsFiles"""'}), "(name='bootstrapActionsFiles')\n", (23722, 23752), False, 'import pulumi\n'), ((24423, 24454), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterId"""'}), "(name='clusterId')\n", (24436, 24454), False, 'import pulumi\n'), ((24767, 24808), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""configurationsFiles"""'}), "(name='configurationsFiles')\n", (24780, 24808), False, 'import pulumi\n'), ((25457, 25498), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreDesiredCapacity"""'}), "(name='coreDesiredCapacity')\n", (25470, 25498), False, 'import pulumi\n'), ((25876, 25917), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreEbsBlockDevices"""'}), "(name='coreEbsBlockDevices')\n", (25889, 25917), False, 'import pulumi\n'), ((26472, 26510), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreEbsOptimized"""'}), "(name='coreEbsOptimized')\n", (26485, 26510), False, 'import pulumi\n'), ((26889, 26928), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreInstanceTypes"""'}), "(name='coreInstanceTypes')\n", (26902, 26928), False, 'import pulumi\n'), ((27357, 27392), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""coreLifecycle"""'}), "(name='coreLifecycle')\n", (27370, 27392), False, 'import pulumi\n'), ((27795, 27828), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreMaxSize"""'}), "(name='coreMaxSize')\n", (27808, 27828), False, 'import pulumi\n'), ((28174, 28207), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreMinSize"""'}), "(name='coreMinSize')\n", (28187, 28207), False, 'import pulumi\n'), ((28557, 28602), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreScalingDownPolicies"""'}), "(name='coreScalingDownPolicies')\n", (28570, 28602), False, 'import pulumi\n'), ((29050, 29093), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreScalingUpPolicies"""'}), "(name='coreScalingUpPolicies')\n", (29063, 29093), False, 'import pulumi\n'), ((29527, 29557), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreUnit"""'}), "(name='coreUnit')\n", (29540, 29557), False, 'import pulumi\n'), ((29983, 30016), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""customAmiId"""'}), "(name='customAmiId')\n", (29996, 30016), False, 'import pulumi\n'), ((30732, 30771), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ebsRootVolumeSize"""'}), "(name='ebsRootVolumeSize')\n", (30745, 30771), False, 'import pulumi\n'), ((31077, 31109), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ec2KeyName"""'}), "(name='ec2KeyName')\n", (31090, 31109), False, 'import pulumi\n'), ((31486, 31523), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""exposeClusterId"""'}), "(name='exposeClusterId')\n", (31499, 31523), False, 'import pulumi\n'), ((31906, 31943), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceWeights"""'}), "(name='instanceWeights')\n", (31919, 31943), False, 'import pulumi\n'), ((32568, 32601), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""jobFlowRole"""'}), "(name='jobFlowRole')\n", (32581, 32601), False, 'import pulumi\n'), ((33020, 33058), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""keepJobFlowAlive"""'}), 
"(name='keepJobFlowAlive')\n", (33033, 33058), False, 'import pulumi\n'), ((33475, 33503), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""logUri"""'}), "(name='logUri')\n", (33488, 33503), False, 'import pulumi\n'), ((33851, 33900), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""managedPrimarySecurityGroup"""'}), "(name='managedPrimarySecurityGroup')\n", (33864, 33900), False, 'import pulumi\n'), ((34363, 34412), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""managedReplicaSecurityGroup"""'}), "(name='managedReplicaSecurityGroup')\n", (34376, 34412), False, 'import pulumi\n'), ((34875, 34918), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterEbsBlockDevices"""'}), "(name='masterEbsBlockDevices')\n", (34888, 34918), False, 'import pulumi\n'), ((35489, 35529), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterEbsOptimized"""'}), "(name='masterEbsOptimized')\n", (35502, 35529), False, 'import pulumi\n'), ((35918, 35959), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterInstanceTypes"""'}), "(name='masterInstanceTypes')\n", (35931, 35959), False, 'import pulumi\n'), ((36400, 36437), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterLifecycle"""'}), "(name='masterLifecycle')\n", (36413, 36437), False, 'import pulumi\n'), ((36852, 36886), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterTarget"""'}), "(name='masterTarget')\n", (36865, 36886), False, 'import pulumi\n'), ((37523, 37564), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningTimeout"""'}), "(name='provisioningTimeout')\n", (37536, 37564), False, 'import pulumi\n'), ((38232, 38266), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""releaseLabel"""'}), "(name='releaseLabel')\n", (38245, 38266), False, 'import pulumi\n'), ((38537, 38576), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""repoUpgradeOnBoot"""'}), "(name='repoUpgradeOnBoot')\n", (38550, 38576), False, 'import pulumi\n'), ((39562, 39598), 'pulumi.getter', 'pulumi.getter', 
([], {'name': '"""scheduledTasks"""'}), "(name='scheduledTasks')\n", (39575, 39598), False, 'import pulumi\n'), ((40036, 40072), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityConfig"""'}), "(name='securityConfig')\n", (40049, 40072), False, 'import pulumi\n'), ((40448, 40496), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceAccessSecurityGroup"""'}), "(name='serviceAccessSecurityGroup')\n", (40461, 40496), False, 'import pulumi\n'), ((40997, 41030), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceRole"""'}), "(name='serviceRole')\n", (41010, 41030), False, 'import pulumi\n'), ((41428, 41460), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""stepsFiles"""'}), "(name='stepsFiles')\n", (41441, 41460), False, 'import pulumi\n'), ((42275, 42316), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskDesiredCapacity"""'}), "(name='taskDesiredCapacity')\n", (42288, 42316), False, 'import pulumi\n'), ((42694, 42735), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskEbsBlockDevices"""'}), "(name='taskEbsBlockDevices')\n", (42707, 42735), False, 'import pulumi\n'), ((43290, 43328), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskEbsOptimized"""'}), "(name='taskEbsOptimized')\n", (43303, 43328), False, 'import pulumi\n'), ((43707, 43746), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskInstanceTypes"""'}), "(name='taskInstanceTypes')\n", (43720, 43746), False, 'import pulumi\n'), ((44175, 44210), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskLifecycle"""'}), "(name='taskLifecycle')\n", (44188, 44210), False, 'import pulumi\n'), ((44613, 44646), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskMaxSize"""'}), "(name='taskMaxSize')\n", (44626, 44646), False, 'import pulumi\n'), ((44992, 45025), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskMinSize"""'}), "(name='taskMinSize')\n", (45005, 45025), False, 'import pulumi\n'), ((45375, 45420), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""taskScalingDownPolicies"""'}), "(name='taskScalingDownPolicies')\n", (45388, 45420), False, 'import pulumi\n'), ((45868, 45911), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskScalingUpPolicies"""'}), "(name='taskScalingUpPolicies')\n", (45881, 45911), False, 'import pulumi\n'), ((46345, 46375), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskUnit"""'}), "(name='taskUnit')\n", (46358, 46375), False, 'import pulumi\n'), ((46801, 46842), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""terminationPolicies"""'}), "(name='terminationPolicies')\n", (46814, 46842), False, 'import pulumi\n'), ((47367, 47409), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""terminationProtected"""'}), "(name='terminationProtected')\n", (47380, 47409), False, 'import pulumi\n'), ((47913, 47952), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""visibleToAllUsers"""'}), "(name='visibleToAllUsers')\n", (47926, 47952), False, 'import pulumi\n'), ((68835, 68871), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalInfo"""'}), "(name='additionalInfo')\n", (68848, 68871), False, 'import pulumi\n'), ((69291, 69344), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalPrimarySecurityGroups"""'}), "(name='additionalPrimarySecurityGroups')\n", (69304, 69344), False, 'import pulumi\n'), ((69872, 69925), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalReplicaSecurityGroups"""'}), "(name='additionalReplicaSecurityGroups')\n", (69885, 69925), False, 'import pulumi\n'), ((70971, 71010), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""availabilityZones"""'}), "(name='availabilityZones')\n", (70984, 71010), False, 'import pulumi\n'), ((71449, 71492), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""bootstrapActionsFiles"""'}), "(name='bootstrapActionsFiles')\n", (71462, 71492), False, 'import pulumi\n'), ((72163, 72194), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterId"""'}), "(name='clusterId')\n", (72176, 72194), False, 
'import pulumi\n'), ((72507, 72548), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""configurationsFiles"""'}), "(name='configurationsFiles')\n", (72520, 72548), False, 'import pulumi\n'), ((73197, 73238), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreDesiredCapacity"""'}), "(name='coreDesiredCapacity')\n", (73210, 73238), False, 'import pulumi\n'), ((73616, 73657), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreEbsBlockDevices"""'}), "(name='coreEbsBlockDevices')\n", (73629, 73657), False, 'import pulumi\n'), ((74212, 74250), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreEbsOptimized"""'}), "(name='coreEbsOptimized')\n", (74225, 74250), False, 'import pulumi\n'), ((74629, 74668), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreInstanceTypes"""'}), "(name='coreInstanceTypes')\n", (74642, 74668), False, 'import pulumi\n'), ((75097, 75132), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreLifecycle"""'}), "(name='coreLifecycle')\n", (75110, 75132), False, 'import pulumi\n'), ((75535, 75568), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreMaxSize"""'}), "(name='coreMaxSize')\n", (75548, 75568), False, 'import pulumi\n'), ((75914, 75947), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreMinSize"""'}), "(name='coreMinSize')\n", (75927, 75947), False, 'import pulumi\n'), ((76297, 76342), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreScalingDownPolicies"""'}), "(name='coreScalingDownPolicies')\n", (76310, 76342), False, 'import pulumi\n'), ((76790, 76833), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreScalingUpPolicies"""'}), "(name='coreScalingUpPolicies')\n", (76803, 76833), False, 'import pulumi\n'), ((77267, 77297), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreUnit"""'}), "(name='coreUnit')\n", (77280, 77297), False, 'import pulumi\n'), ((77723, 77756), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""customAmiId"""'}), "(name='customAmiId')\n", (77736, 77756), False, 
'import pulumi\n'), ((78472, 78511), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ebsRootVolumeSize"""'}), "(name='ebsRootVolumeSize')\n", (78485, 78511), False, 'import pulumi\n'), ((78817, 78849), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ec2KeyName"""'}), "(name='ec2KeyName')\n", (78830, 78849), False, 'import pulumi\n'), ((79226, 79263), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""exposeClusterId"""'}), "(name='exposeClusterId')\n", (79239, 79263), False, 'import pulumi\n'), ((79646, 79683), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceWeights"""'}), "(name='instanceWeights')\n", (79659, 79683), False, 'import pulumi\n'), ((80308, 80341), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""jobFlowRole"""'}), "(name='jobFlowRole')\n", (80321, 80341), False, 'import pulumi\n'), ((80760, 80798), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""keepJobFlowAlive"""'}), "(name='keepJobFlowAlive')\n", (80773, 80798), False, 'import pulumi\n'), ((81215, 81243), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""logUri"""'}), "(name='logUri')\n", (81228, 81243), False, 'import pulumi\n'), ((81591, 81640), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""managedPrimarySecurityGroup"""'}), "(name='managedPrimarySecurityGroup')\n", (81604, 81640), False, 'import pulumi\n'), ((82103, 82152), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""managedReplicaSecurityGroup"""'}), "(name='managedReplicaSecurityGroup')\n", (82116, 82152), False, 'import pulumi\n'), ((82615, 82658), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterEbsBlockDevices"""'}), "(name='masterEbsBlockDevices')\n", (82628, 82658), False, 'import pulumi\n'), ((83229, 83269), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterEbsOptimized"""'}), "(name='masterEbsOptimized')\n", (83242, 83269), False, 'import pulumi\n'), ((83658, 83699), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterInstanceTypes"""'}), 
"(name='masterInstanceTypes')\n", (83671, 83699), False, 'import pulumi\n'), ((84140, 84177), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterLifecycle"""'}), "(name='masterLifecycle')\n", (84153, 84177), False, 'import pulumi\n'), ((84592, 84626), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterTarget"""'}), "(name='masterTarget')\n", (84605, 84626), False, 'import pulumi\n'), ((85263, 85300), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""outputClusterId"""'}), "(name='outputClusterId')\n", (85276, 85300), False, 'import pulumi\n'), ((85591, 85632), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningTimeout"""'}), "(name='provisioningTimeout')\n", (85604, 85632), False, 'import pulumi\n'), ((86300, 86334), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""releaseLabel"""'}), "(name='releaseLabel')\n", (86313, 86334), False, 'import pulumi\n'), ((86605, 86644), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""repoUpgradeOnBoot"""'}), "(name='repoUpgradeOnBoot')\n", (86618, 86644), False, 'import pulumi\n'), ((87630, 87666), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scheduledTasks"""'}), "(name='scheduledTasks')\n", (87643, 87666), False, 'import pulumi\n'), ((88104, 88140), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityConfig"""'}), "(name='securityConfig')\n", (88117, 88140), False, 'import pulumi\n'), ((88516, 88564), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceAccessSecurityGroup"""'}), "(name='serviceAccessSecurityGroup')\n", (88529, 88564), False, 'import pulumi\n'), ((89065, 89098), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceRole"""'}), "(name='serviceRole')\n", (89078, 89098), False, 'import pulumi\n'), ((89496, 89528), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""stepsFiles"""'}), "(name='stepsFiles')\n", (89509, 89528), False, 'import pulumi\n'), ((90702, 90743), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskDesiredCapacity"""'}), 
"(name='taskDesiredCapacity')\n", (90715, 90743), False, 'import pulumi\n'), ((91121, 91162), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskEbsBlockDevices"""'}), "(name='taskEbsBlockDevices')\n", (91134, 91162), False, 'import pulumi\n'), ((91717, 91755), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskEbsOptimized"""'}), "(name='taskEbsOptimized')\n", (91730, 91755), False, 'import pulumi\n'), ((92134, 92173), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskInstanceTypes"""'}), "(name='taskInstanceTypes')\n", (92147, 92173), False, 'import pulumi\n'), ((92602, 92637), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskLifecycle"""'}), "(name='taskLifecycle')\n", (92615, 92637), False, 'import pulumi\n'), ((93040, 93073), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskMaxSize"""'}), "(name='taskMaxSize')\n", (93053, 93073), False, 'import pulumi\n'), ((93419, 93452), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskMinSize"""'}), "(name='taskMinSize')\n", (93432, 93452), False, 'import pulumi\n'), ((93802, 93847), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskScalingDownPolicies"""'}), "(name='taskScalingDownPolicies')\n", (93815, 93847), False, 'import pulumi\n'), ((94295, 94338), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskScalingUpPolicies"""'}), "(name='taskScalingUpPolicies')\n", (94308, 94338), False, 'import pulumi\n'), ((94772, 94802), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskUnit"""'}), "(name='taskUnit')\n", (94785, 94802), False, 'import pulumi\n'), ((95228, 95269), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""terminationPolicies"""'}), "(name='terminationPolicies')\n", (95241, 95269), False, 'import pulumi\n'), ((95794, 95836), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""terminationProtected"""'}), "(name='terminationProtected')\n", (95807, 95836), False, 'import pulumi\n'), ((96340, 96379), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""visibleToAllUsers"""'}), "(name='visibleToAllUsers')\n", (96353, 96379), False, 'import pulumi\n'), ((156930, 156966), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalInfo"""'}), "(name='additionalInfo')\n", (156943, 156966), False, 'import pulumi\n'), ((157240, 157293), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalPrimarySecurityGroups"""'}), "(name='additionalPrimarySecurityGroups')\n", (157253, 157293), False, 'import pulumi\n'), ((157580, 157633), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""additionalReplicaSecurityGroups"""'}), "(name='additionalReplicaSecurityGroups')\n", (157593, 157633), False, 'import pulumi\n'), ((158245, 158284), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""availabilityZones"""'}), "(name='availabilityZones')\n", (158258, 158284), False, 'import pulumi\n'), ((158530, 158573), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""bootstrapActionsFiles"""'}), "(name='bootstrapActionsFiles')\n", (158543, 158573), False, 'import pulumi\n'), ((159009, 159040), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterId"""'}), "(name='clusterId')\n", (159022, 159040), False, 'import pulumi\n'), ((159222, 159263), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""configurationsFiles"""'}), "(name='configurationsFiles')\n", (159235, 159263), False, 'import pulumi\n'), ((159688, 159729), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreDesiredCapacity"""'}), "(name='coreDesiredCapacity')\n", (159701, 159729), False, 'import pulumi\n'), ((159943, 159984), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreEbsBlockDevices"""'}), "(name='coreEbsBlockDevices')\n", (159956, 159984), False, 'import pulumi\n'), ((160309, 160347), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreEbsOptimized"""'}), "(name='coreEbsOptimized')\n", (160322, 160347), False, 'import pulumi\n'), ((160570, 160609), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreInstanceTypes"""'}), 
"(name='coreInstanceTypes')\n", (160583, 160609), False, 'import pulumi\n'), ((160842, 160877), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreLifecycle"""'}), "(name='coreLifecycle')\n", (160855, 160877), False, 'import pulumi\n'), ((161137, 161170), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreMaxSize"""'}), "(name='coreMaxSize')\n", (161150, 161170), False, 'import pulumi\n'), ((161376, 161409), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreMinSize"""'}), "(name='coreMinSize')\n", (161389, 161409), False, 'import pulumi\n'), ((161619, 161664), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreScalingDownPolicies"""'}), "(name='coreScalingDownPolicies')\n", (161632, 161664), False, 'import pulumi\n'), ((161867, 161910), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreScalingUpPolicies"""'}), "(name='coreScalingUpPolicies')\n", (161880, 161910), False, 'import pulumi\n'), ((162107, 162137), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""coreUnit"""'}), "(name='coreUnit')\n", (162120, 162137), False, 'import pulumi\n'), ((162435, 162468), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""customAmiId"""'}), "(name='customAmiId')\n", (162448, 162468), False, 'import pulumi\n'), ((162910, 162949), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ebsRootVolumeSize"""'}), "(name='ebsRootVolumeSize')\n", (162923, 162949), False, 'import pulumi\n'), ((163094, 163126), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ec2KeyName"""'}), "(name='ec2KeyName')\n", (163107, 163126), False, 'import pulumi\n'), ((163366, 163403), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""exposeClusterId"""'}), "(name='exposeClusterId')\n", (163379, 163403), False, 'import pulumi\n'), ((163633, 163670), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""instanceWeights"""'}), "(name='instanceWeights')\n", (163646, 163670), False, 'import pulumi\n'), ((164087, 164120), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""jobFlowRole"""'}), "(name='jobFlowRole')\n", (164100, 164120), False, 'import pulumi\n'), ((164399, 164437), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""keepJobFlowAlive"""'}), "(name='keepJobFlowAlive')\n", (164412, 164437), False, 'import pulumi\n'), ((164695, 164723), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""logUri"""'}), "(name='logUri')\n", (164708, 164723), False, 'import pulumi\n'), ((164949, 164998), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""managedPrimarySecurityGroup"""'}), "(name='managedPrimarySecurityGroup')\n", (164962, 164998), False, 'import pulumi\n'), ((165270, 165319), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""managedReplicaSecurityGroup"""'}), "(name='managedReplicaSecurityGroup')\n", (165283, 165319), False, 'import pulumi\n'), ((165591, 165634), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterEbsBlockDevices"""'}), "(name='masterEbsBlockDevices')\n", (165604, 165634), False, 'import pulumi\n'), ((165967, 166007), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterEbsOptimized"""'}), "(name='masterEbsOptimized')\n", (165980, 166007), False, 'import pulumi\n'), ((166234, 166275), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterInstanceTypes"""'}), "(name='masterInstanceTypes')\n", (166247, 166275), False, 'import pulumi\n'), ((166514, 166551), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterLifecycle"""'}), "(name='masterLifecycle')\n", (166527, 166551), False, 'import pulumi\n'), ((166817, 166851), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""masterTarget"""'}), "(name='masterTarget')\n", (166830, 166851), False, 'import pulumi\n'), ((167225, 167262), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""outputClusterId"""'}), "(name='outputClusterId')\n", (167238, 167262), False, 'import pulumi\n'), ((167391, 167432), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningTimeout"""'}), "(name='provisioningTimeout')\n", (167404, 167432), False, 
'import pulumi\n'), ((167794, 167828), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""releaseLabel"""'}), "(name='releaseLabel')\n", (167807, 167828), False, 'import pulumi\n'), ((167959, 167998), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""repoUpgradeOnBoot"""'}), "(name='repoUpgradeOnBoot')\n", (167972, 167998), False, 'import pulumi\n'), ((168701, 168737), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scheduledTasks"""'}), "(name='scheduledTasks')\n", (168714, 168737), False, 'import pulumi\n'), ((168971, 169007), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""securityConfig"""'}), "(name='securityConfig')\n", (168984, 169007), False, 'import pulumi\n'), ((169237, 169285), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceAccessSecurityGroup"""'}), "(name='serviceAccessSecurityGroup')\n", (169250, 169285), False, 'import pulumi\n'), ((169598, 169631), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceRole"""'}), "(name='serviceRole')\n", (169611, 169631), False, 'import pulumi\n'), ((169892, 169924), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""stepsFiles"""'}), "(name='stepsFiles')\n", (169905, 169924), False, 'import pulumi\n'), ((170614, 170655), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskDesiredCapacity"""'}), "(name='taskDesiredCapacity')\n", (170627, 170655), False, 'import pulumi\n'), ((170869, 170910), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskEbsBlockDevices"""'}), "(name='taskEbsBlockDevices')\n", (170882, 170910), False, 'import pulumi\n'), ((171235, 171273), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskEbsOptimized"""'}), "(name='taskEbsOptimized')\n", (171248, 171273), False, 'import pulumi\n'), ((171496, 171535), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskInstanceTypes"""'}), "(name='taskInstanceTypes')\n", (171509, 171535), False, 'import pulumi\n'), ((171768, 171803), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskLifecycle"""'}), 
"(name='taskLifecycle')\n", (171781, 171803), False, 'import pulumi\n'), ((172063, 172096), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskMaxSize"""'}), "(name='taskMaxSize')\n", (172076, 172096), False, 'import pulumi\n'), ((172302, 172335), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskMinSize"""'}), "(name='taskMinSize')\n", (172315, 172335), False, 'import pulumi\n'), ((172545, 172590), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskScalingDownPolicies"""'}), "(name='taskScalingDownPolicies')\n", (172558, 172590), False, 'import pulumi\n'), ((172793, 172836), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskScalingUpPolicies"""'}), "(name='taskScalingUpPolicies')\n", (172806, 172836), False, 'import pulumi\n'), ((173033, 173063), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""taskUnit"""'}), "(name='taskUnit')\n", (173046, 173063), False, 'import pulumi\n'), ((173361, 173402), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""terminationPolicies"""'}), "(name='terminationPolicies')\n", (173374, 173402), False, 'import pulumi\n'), ((173704, 173746), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""terminationProtected"""'}), "(name='terminationProtected')\n", (173717, 173746), False, 'import pulumi\n'), ((174085, 174124), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""visibleToAllUsers"""'}), "(name='visibleToAllUsers')\n", (174098, 174124), False, 'import pulumi\n'), ((13539, 13581), 'pulumi.set', 'pulumi.set', (['__self__', '"""strategy"""', 'strategy'], {}), "(__self__, 'strategy', strategy)\n", (13549, 13581), False, 'import pulumi\n'), ((20930, 20958), 'pulumi.get', 'pulumi.get', (['self', '"""strategy"""'], {}), "(self, 'strategy')\n", (20940, 20958), False, 'import pulumi\n'), ((21039, 21074), 'pulumi.set', 'pulumi.set', (['self', '"""strategy"""', 'value'], {}), "(self, 'strategy', value)\n", (21049, 21074), False, 'import pulumi\n'), ((21348, 21383), 'pulumi.get', 'pulumi.get', (['self', 
'"""additional_info"""'], {}), "(self, 'additional_info')\n", (21358, 21383), False, 'import pulumi\n'), ((21488, 21530), 'pulumi.set', 'pulumi.set', (['self', '"""additional_info"""', 'value'], {}), "(self, 'additional_info', value)\n", (21498, 21530), False, 'import pulumi\n'), ((21829, 21883), 'pulumi.get', 'pulumi.get', (['self', '"""additional_primary_security_groups"""'], {}), "(self, 'additional_primary_security_groups')\n", (21839, 21883), False, 'import pulumi\n'), ((22050, 22111), 'pulumi.set', 'pulumi.set', (['self', '"""additional_primary_security_groups"""', 'value'], {}), "(self, 'additional_primary_security_groups', value)\n", (22060, 22111), False, 'import pulumi\n'), ((22418, 22472), 'pulumi.get', 'pulumi.get', (['self', '"""additional_replica_security_groups"""'], {}), "(self, 'additional_replica_security_groups')\n", (22428, 22472), False, 'import pulumi\n'), ((22639, 22700), 'pulumi.set', 'pulumi.set', (['self', '"""additional_replica_security_groups"""', 'value'], {}), "(self, 'additional_replica_security_groups', value)\n", (22649, 22700), False, 'import pulumi\n'), ((22994, 23026), 'pulumi.get', 'pulumi.get', (['self', '"""applications"""'], {}), "(self, 'applications')\n", (23004, 23026), False, 'import pulumi\n'), ((23171, 23210), 'pulumi.set', 'pulumi.set', (['self', '"""applications"""', 'value'], {}), "(self, 'applications', value)\n", (23181, 23210), False, 'import pulumi\n'), ((23470, 23508), 'pulumi.get', 'pulumi.get', (['self', '"""availability_zones"""'], {}), "(self, 'availability_zones')\n", (23480, 23508), False, 'import pulumi\n'), ((23643, 23688), 'pulumi.set', 'pulumi.set', (['self', '"""availability_zones"""', 'value'], {}), "(self, 'availability_zones', value)\n", (23653, 23688), False, 'import pulumi\n'), ((24133, 24176), 'pulumi.get', 'pulumi.get', (['self', '"""bootstrap_actions_files"""'], {}), "(self, 'bootstrap_actions_files')\n", (24143, 24176), False, 'import pulumi\n'), ((24352, 24402), 'pulumi.set', 'pulumi.set', 
(['self', '"""bootstrap_actions_files"""', 'value'], {}), "(self, 'bootstrap_actions_files', value)\n", (24362, 24402), False, 'import pulumi\n'), ((24584, 24614), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_id"""'], {}), "(self, 'cluster_id')\n", (24594, 24614), False, 'import pulumi\n'), ((24709, 24746), 'pulumi.set', 'pulumi.set', (['self', '"""cluster_id"""', 'value'], {}), "(self, 'cluster_id', value)\n", (24719, 24746), False, 'import pulumi\n'), ((25181, 25221), 'pulumi.get', 'pulumi.get', (['self', '"""configurations_files"""'], {}), "(self, 'configurations_files')\n", (25191, 25221), False, 'import pulumi\n'), ((25389, 25436), 'pulumi.set', 'pulumi.set', (['self', '"""configurations_files"""', 'value'], {}), "(self, 'configurations_files', value)\n", (25399, 25436), False, 'import pulumi\n'), ((25649, 25690), 'pulumi.get', 'pulumi.get', (['self', '"""core_desired_capacity"""'], {}), "(self, 'core_desired_capacity')\n", (25659, 25690), False, 'import pulumi\n'), ((25807, 25855), 'pulumi.set', 'pulumi.set', (['self', '"""core_desired_capacity"""', 'value'], {}), "(self, 'core_desired_capacity', value)\n", (25817, 25855), False, 'import pulumi\n'), ((26188, 26230), 'pulumi.get', 'pulumi.get', (['self', '"""core_ebs_block_devices"""'], {}), "(self, 'core_ebs_block_devices')\n", (26198, 26230), False, 'import pulumi\n'), ((26402, 26451), 'pulumi.set', 'pulumi.set', (['self', '"""core_ebs_block_devices"""', 'value'], {}), "(self, 'core_ebs_block_devices', value)\n", (26412, 26451), False, 'import pulumi\n'), ((26673, 26711), 'pulumi.get', 'pulumi.get', (['self', '"""core_ebs_optimized"""'], {}), "(self, 'core_ebs_optimized')\n", (26683, 26711), False, 'import pulumi\n'), ((26823, 26868), 'pulumi.set', 'pulumi.set', (['self', '"""core_ebs_optimized"""', 'value'], {}), "(self, 'core_ebs_optimized', value)\n", (26833, 26868), False, 'import pulumi\n'), ((27114, 27153), 'pulumi.get', 'pulumi.get', (['self', '"""core_instance_types"""'], {}), "(self, 
'core_instance_types')\n", (27124, 27153), False, 'import pulumi\n'), ((27290, 27336), 'pulumi.set', 'pulumi.set', (['self', '"""core_instance_types"""', 'value'], {}), "(self, 'core_instance_types', value)\n", (27300, 27336), False, 'import pulumi\n'), ((27596, 27630), 'pulumi.get', 'pulumi.get', (['self', '"""core_lifecycle"""'], {}), "(self, 'core_lifecycle')\n", (27606, 27630), False, 'import pulumi\n'), ((27733, 27774), 'pulumi.set', 'pulumi.set', (['self', '"""core_lifecycle"""', 'value'], {}), "(self, 'core_lifecycle', value)\n", (27743, 27774), False, 'import pulumi\n'), ((27979, 28012), 'pulumi.get', 'pulumi.get', (['self', '"""core_max_size"""'], {}), "(self, 'core_max_size')\n", (27989, 28012), False, 'import pulumi\n'), ((28113, 28153), 'pulumi.set', 'pulumi.set', (['self', '"""core_max_size"""', 'value'], {}), "(self, 'core_max_size', value)\n", (28123, 28153), False, 'import pulumi\n'), ((28362, 28395), 'pulumi.get', 'pulumi.get', (['self', '"""core_min_size"""'], {}), "(self, 'core_min_size')\n", (28372, 28395), False, 'import pulumi\n'), ((28496, 28536), 'pulumi.set', 'pulumi.set', (['self', '"""core_min_size"""', 'value'], {}), "(self, 'core_min_size', value)\n", (28506, 28536), False, 'import pulumi\n'), ((28747, 28793), 'pulumi.get', 'pulumi.get', (['self', '"""core_scaling_down_policies"""'], {}), "(self, 'core_scaling_down_policies')\n", (28757, 28793), False, 'import pulumi\n'), ((28976, 29029), 'pulumi.set', 'pulumi.set', (['self', '"""core_scaling_down_policies"""', 'value'], {}), "(self, 'core_scaling_down_policies', value)\n", (28986, 29029), False, 'import pulumi\n'), ((29234, 29278), 'pulumi.get', 'pulumi.get', (['self', '"""core_scaling_up_policies"""'], {}), "(self, 'core_scaling_up_policies')\n", (29244, 29278), False, 'import pulumi\n'), ((29455, 29506), 'pulumi.set', 'pulumi.set', (['self', '"""core_scaling_up_policies"""', 'value'], {}), "(self, 'core_scaling_up_policies', value)\n", (29465, 29506), False, 'import pulumi\n'), 
((29804, 29833), 'pulumi.get', 'pulumi.get', (['self', '"""core_unit"""'], {}), "(self, 'core_unit')\n", (29814, 29833), False, 'import pulumi\n'), ((29926, 29962), 'pulumi.set', 'pulumi.set', (['self', '"""core_unit"""', 'value'], {}), "(self, 'core_unit', value)\n", (29936, 29962), False, 'import pulumi\n'), ((30205, 30238), 'pulumi.get', 'pulumi.get', (['self', '"""custom_ami_id"""'], {}), "(self, 'custom_ami_id')\n", (30215, 30238), False, 'import pulumi\n'), ((30339, 30379), 'pulumi.set', 'pulumi.set', (['self', '"""custom_ami_id"""', 'value'], {}), "(self, 'custom_ami_id', value)\n", (30349, 30379), False, 'import pulumi\n'), ((30545, 30576), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (30555, 30576), False, 'import pulumi\n'), ((30673, 30711), 'pulumi.set', 'pulumi.set', (['self', '"""description"""', 'value'], {}), "(self, 'description', value)\n", (30683, 30711), False, 'import pulumi\n'), ((30854, 30894), 'pulumi.get', 'pulumi.get', (['self', '"""ebs_root_volume_size"""'], {}), "(self, 'ebs_root_volume_size')\n", (30864, 30894), False, 'import pulumi\n'), ((31009, 31056), 'pulumi.set', 'pulumi.set', (['self', '"""ebs_root_volume_size"""', 'value'], {}), "(self, 'ebs_root_volume_size', value)\n", (31019, 31056), False, 'import pulumi\n'), ((31295, 31327), 'pulumi.get', 'pulumi.get', (['self', '"""ec2_key_name"""'], {}), "(self, 'ec2_key_name')\n", (31305, 31327), False, 'import pulumi\n'), ((31426, 31465), 'pulumi.set', 'pulumi.set', (['self', '"""ec2_key_name"""', 'value'], {}), "(self, 'ec2_key_name', value)\n", (31436, 31465), False, 'import pulumi\n'), ((31694, 31731), 'pulumi.get', 'pulumi.get', (['self', '"""expose_cluster_id"""'], {}), "(self, 'expose_cluster_id')\n", (31704, 31731), False, 'import pulumi\n'), ((31841, 31885), 'pulumi.set', 'pulumi.set', (['self', '"""expose_cluster_id"""', 'value'], {}), "(self, 'expose_cluster_id', value)\n", (31851, 31885), False, 'import pulumi\n'), ((32312, 
32348), 'pulumi.get', 'pulumi.get', (['self', '"""instance_weights"""'], {}), "(self, 'instance_weights')\n", (32322, 32348), False, 'import pulumi\n'), ((32504, 32547), 'pulumi.set', 'pulumi.set', (['self', '"""instance_weights"""', 'value'], {}), "(self, 'instance_weights', value)\n", (32514, 32547), False, 'import pulumi\n'), ((32825, 32858), 'pulumi.get', 'pulumi.get', (['self', '"""job_flow_role"""'], {}), "(self, 'job_flow_role')\n", (32835, 32858), False, 'import pulumi\n'), ((32959, 32999), 'pulumi.set', 'pulumi.set', (['self', '"""job_flow_role"""', 'value'], {}), "(self, 'job_flow_role', value)\n", (32969, 32999), False, 'import pulumi\n'), ((33255, 33294), 'pulumi.get', 'pulumi.get', (['self', '"""keep_job_flow_alive"""'], {}), "(self, 'keep_job_flow_alive')\n", (33265, 33294), False, 'import pulumi\n'), ((33408, 33454), 'pulumi.set', 'pulumi.set', (['self', '"""keep_job_flow_alive"""', 'value'], {}), "(self, 'keep_job_flow_alive', value)\n", (33418, 33454), False, 'import pulumi\n'), ((33680, 33707), 'pulumi.get', 'pulumi.get', (['self', '"""log_uri"""'], {}), "(self, 'log_uri')\n", (33690, 33707), False, 'import pulumi\n'), ((33796, 33830), 'pulumi.set', 'pulumi.set', (['self', '"""log_uri"""', 'value'], {}), "(self, 'log_uri', value)\n", (33806, 33830), False, 'import pulumi\n'), ((34100, 34150), 'pulumi.get', 'pulumi.get', (['self', '"""managed_primary_security_group"""'], {}), "(self, 'managed_primary_security_group')\n", (34110, 34150), False, 'import pulumi\n'), ((34285, 34342), 'pulumi.set', 'pulumi.set', (['self', '"""managed_primary_security_group"""', 'value'], {}), "(self, 'managed_primary_security_group', value)\n", (34295, 34342), False, 'import pulumi\n'), ((34612, 34662), 'pulumi.get', 'pulumi.get', (['self', '"""managed_replica_security_group"""'], {}), "(self, 'managed_replica_security_group')\n", (34622, 34662), False, 'import pulumi\n'), ((34797, 34854), 'pulumi.set', 'pulumi.set', (['self', '"""managed_replica_security_group"""', 
'value'], {}), "(self, 'managed_replica_security_group', value)\n", (34807, 34854), False, 'import pulumi\n'), ((35195, 35239), 'pulumi.get', 'pulumi.get', (['self', '"""master_ebs_block_devices"""'], {}), "(self, 'master_ebs_block_devices')\n", (35205, 35239), False, 'import pulumi\n'), ((35417, 35468), 'pulumi.set', 'pulumi.set', (['self', '"""master_ebs_block_devices"""', 'value'], {}), "(self, 'master_ebs_block_devices', value)\n", (35427, 35468), False, 'import pulumi\n'), ((35694, 35734), 'pulumi.get', 'pulumi.get', (['self', '"""master_ebs_optimized"""'], {}), "(self, 'master_ebs_optimized')\n", (35704, 35734), False, 'import pulumi\n'), ((35850, 35897), 'pulumi.set', 'pulumi.set', (['self', '"""master_ebs_optimized"""', 'value'], {}), "(self, 'master_ebs_optimized', value)\n", (35860, 35897), False, 'import pulumi\n'), ((36149, 36190), 'pulumi.get', 'pulumi.get', (['self', '"""master_instance_types"""'], {}), "(self, 'master_instance_types')\n", (36159, 36190), False, 'import pulumi\n'), ((36331, 36379), 'pulumi.set', 'pulumi.set', (['self', '"""master_instance_types"""', 'value'], {}), "(self, 'master_instance_types', value)\n", (36341, 36379), False, 'import pulumi\n'), ((36645, 36681), 'pulumi.get', 'pulumi.get', (['self', '"""master_lifecycle"""'], {}), "(self, 'master_lifecycle')\n", (36655, 36681), False, 'import pulumi\n'), ((36788, 36831), 'pulumi.set', 'pulumi.set', (['self', '"""master_lifecycle"""', 'value'], {}), "(self, 'master_lifecycle', value)\n", (36798, 36831), False, 'import pulumi\n'), ((37035, 37068), 'pulumi.get', 'pulumi.get', (['self', '"""master_target"""'], {}), "(self, 'master_target')\n", (37045, 37068), False, 'import pulumi\n'), ((37169, 37209), 'pulumi.set', 'pulumi.set', (['self', '"""master_target"""', 'value'], {}), "(self, 'master_target', value)\n", (37179, 37209), False, 'import pulumi\n'), ((37364, 37388), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (37374, 37388), False, 'import 
pulumi\n'), ((37471, 37502), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (37481, 37502), False, 'import pulumi\n'), ((37677, 37717), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_timeout"""'], {}), "(self, 'provisioning_timeout')\n", (37687, 37717), False, 'import pulumi\n'), ((37862, 37909), 'pulumi.set', 'pulumi.set', (['self', '"""provisioning_timeout"""', 'value'], {}), "(self, 'provisioning_timeout', value)\n", (37872, 37909), False, 'import pulumi\n'), ((38065, 38091), 'pulumi.get', 'pulumi.get', (['self', '"""region"""'], {}), "(self, 'region')\n", (38075, 38091), False, 'import pulumi\n'), ((38178, 38211), 'pulumi.set', 'pulumi.set', (['self', '"""region"""', 'value'], {}), "(self, 'region', value)\n", (38188, 38211), False, 'import pulumi\n'), ((38342, 38375), 'pulumi.get', 'pulumi.get', (['self', '"""release_label"""'], {}), "(self, 'release_label')\n", (38352, 38375), False, 'import pulumi\n'), ((38476, 38516), 'pulumi.set', 'pulumi.set', (['self', '"""release_label"""', 'value'], {}), "(self, 'release_label', value)\n", (38486, 38516), False, 'import pulumi\n'), ((38911, 38951), 'pulumi.get', 'pulumi.get', (['self', '"""repo_upgrade_on_boot"""'], {}), "(self, 'repo_upgrade_on_boot')\n", (38921, 38951), False, 'import pulumi\n'), ((39066, 39113), 'pulumi.set', 'pulumi.set', (['self', '"""repo_upgrade_on_boot"""', 'value'], {}), "(self, 'repo_upgrade_on_boot', value)\n", (39076, 39113), False, 'import pulumi\n'), ((39391, 39418), 'pulumi.get', 'pulumi.get', (['self', '"""retries"""'], {}), "(self, 'retries')\n", (39401, 39418), False, 'import pulumi\n'), ((39507, 39541), 'pulumi.set', 'pulumi.set', (['self', '"""retries"""', 'value'], {}), "(self, 'retries', value)\n", (39517, 39541), False, 'import pulumi\n'), ((39785, 39820), 'pulumi.get', 'pulumi.get', (['self', '"""scheduled_tasks"""'], {}), "(self, 'scheduled_tasks')\n", (39795, 39820), False, 'import pulumi\n'), ((39973, 40015), 
'pulumi.set', 'pulumi.set', (['self', '"""scheduled_tasks"""', 'value'], {}), "(self, 'scheduled_tasks', value)\n", (39983, 40015), False, 'import pulumi\n'), ((40245, 40280), 'pulumi.get', 'pulumi.get', (['self', '"""security_config"""'], {}), "(self, 'security_config')\n", (40255, 40280), False, 'import pulumi\n'), ((40385, 40427), 'pulumi.set', 'pulumi.set', (['self', '"""security_config"""', 'value'], {}), "(self, 'security_config', value)\n", (40395, 40427), False, 'import pulumi\n'), ((40738, 40787), 'pulumi.get', 'pulumi.get', (['self', '"""service_access_security_group"""'], {}), "(self, 'service_access_security_group')\n", (40748, 40787), False, 'import pulumi\n'), ((40920, 40976), 'pulumi.set', 'pulumi.set', (['self', '"""service_access_security_group"""', 'value'], {}), "(self, 'service_access_security_group', value)\n", (40930, 40976), False, 'import pulumi\n'), ((41237, 41269), 'pulumi.get', 'pulumi.get', (['self', '"""service_role"""'], {}), "(self, 'service_role')\n", (41247, 41269), False, 'import pulumi\n'), ((41368, 41407), 'pulumi.set', 'pulumi.set', (['self', '"""service_role"""', 'value'], {}), "(self, 'service_role', value)\n", (41378, 41407), False, 'import pulumi\n'), ((41625, 41656), 'pulumi.get', 'pulumi.get', (['self', '"""steps_files"""'], {}), "(self, 'steps_files')\n", (41635, 41656), False, 'import pulumi\n'), ((41797, 41835), 'pulumi.set', 'pulumi.set', (['self', '"""steps_files"""', 'value'], {}), "(self, 'steps_files', value)\n", (41807, 41835), False, 'import pulumi\n'), ((42078, 42102), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (42088, 42102), False, 'import pulumi\n'), ((42223, 42254), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (42233, 42254), False, 'import pulumi\n'), ((42467, 42508), 'pulumi.get', 'pulumi.get', (['self', '"""task_desired_capacity"""'], {}), "(self, 'task_desired_capacity')\n", (42477, 42508), False, 'import pulumi\n'), 
((42625, 42673), 'pulumi.set', 'pulumi.set', (['self', '"""task_desired_capacity"""', 'value'], {}), "(self, 'task_desired_capacity', value)\n", (42635, 42673), False, 'import pulumi\n'), ((43006, 43048), 'pulumi.get', 'pulumi.get', (['self', '"""task_ebs_block_devices"""'], {}), "(self, 'task_ebs_block_devices')\n", (43016, 43048), False, 'import pulumi\n'), ((43220, 43269), 'pulumi.set', 'pulumi.set', (['self', '"""task_ebs_block_devices"""', 'value'], {}), "(self, 'task_ebs_block_devices', value)\n", (43230, 43269), False, 'import pulumi\n'), ((43491, 43529), 'pulumi.get', 'pulumi.get', (['self', '"""task_ebs_optimized"""'], {}), "(self, 'task_ebs_optimized')\n", (43501, 43529), False, 'import pulumi\n'), ((43641, 43686), 'pulumi.set', 'pulumi.set', (['self', '"""task_ebs_optimized"""', 'value'], {}), "(self, 'task_ebs_optimized', value)\n", (43651, 43686), False, 'import pulumi\n'), ((43932, 43971), 'pulumi.get', 'pulumi.get', (['self', '"""task_instance_types"""'], {}), "(self, 'task_instance_types')\n", (43942, 43971), False, 'import pulumi\n'), ((44108, 44154), 'pulumi.set', 'pulumi.set', (['self', '"""task_instance_types"""', 'value'], {}), "(self, 'task_instance_types', value)\n", (44118, 44154), False, 'import pulumi\n'), ((44414, 44448), 'pulumi.get', 'pulumi.get', (['self', '"""task_lifecycle"""'], {}), "(self, 'task_lifecycle')\n", (44424, 44448), False, 'import pulumi\n'), ((44551, 44592), 'pulumi.set', 'pulumi.set', (['self', '"""task_lifecycle"""', 'value'], {}), "(self, 'task_lifecycle', value)\n", (44561, 44592), False, 'import pulumi\n'), ((44797, 44830), 'pulumi.get', 'pulumi.get', (['self', '"""task_max_size"""'], {}), "(self, 'task_max_size')\n", (44807, 44830), False, 'import pulumi\n'), ((44931, 44971), 'pulumi.set', 'pulumi.set', (['self', '"""task_max_size"""', 'value'], {}), "(self, 'task_max_size', value)\n", (44941, 44971), False, 'import pulumi\n'), ((45180, 45213), 'pulumi.get', 'pulumi.get', (['self', '"""task_min_size"""'], {}), 
"(self, 'task_min_size')\n", (45190, 45213), False, 'import pulumi\n'), ((45314, 45354), 'pulumi.set', 'pulumi.set', (['self', '"""task_min_size"""', 'value'], {}), "(self, 'task_min_size', value)\n", (45324, 45354), False, 'import pulumi\n'), ((45565, 45611), 'pulumi.get', 'pulumi.get', (['self', '"""task_scaling_down_policies"""'], {}), "(self, 'task_scaling_down_policies')\n", (45575, 45611), False, 'import pulumi\n'), ((45794, 45847), 'pulumi.set', 'pulumi.set', (['self', '"""task_scaling_down_policies"""', 'value'], {}), "(self, 'task_scaling_down_policies', value)\n", (45804, 45847), False, 'import pulumi\n'), ((46052, 46096), 'pulumi.get', 'pulumi.get', (['self', '"""task_scaling_up_policies"""'], {}), "(self, 'task_scaling_up_policies')\n", (46062, 46096), False, 'import pulumi\n'), ((46273, 46324), 'pulumi.set', 'pulumi.set', (['self', '"""task_scaling_up_policies"""', 'value'], {}), "(self, 'task_scaling_up_policies', value)\n", (46283, 46324), False, 'import pulumi\n'), ((46622, 46651), 'pulumi.get', 'pulumi.get', (['self', '"""task_unit"""'], {}), "(self, 'task_unit')\n", (46632, 46651), False, 'import pulumi\n'), ((46744, 46780), 'pulumi.set', 'pulumi.set', (['self', '"""task_unit"""', 'value'], {}), "(self, 'task_unit', value)\n", (46754, 46780), False, 'import pulumi\n'), ((47092, 47132), 'pulumi.get', 'pulumi.get', (['self', '"""termination_policies"""'], {}), "(self, 'termination_policies')\n", (47102, 47132), False, 'import pulumi\n'), ((47299, 47346), 'pulumi.set', 'pulumi.set', (['self', '"""termination_policies"""', 'value'], {}), "(self, 'termination_policies', value)\n", (47309, 47346), False, 'import pulumi\n'), ((47685, 47726), 'pulumi.get', 'pulumi.get', (['self', '"""termination_protected"""'], {}), "(self, 'termination_protected')\n", (47695, 47726), False, 'import pulumi\n'), ((47844, 47892), 'pulumi.set', 'pulumi.set', (['self', '"""termination_protected"""', 'value'], {}), "(self, 'termination_protected', value)\n", (47854, 47892), 
False, 'import pulumi\n'), ((48036, 48076), 'pulumi.get', 'pulumi.get', (['self', '"""visible_to_all_users"""'], {}), "(self, 'visible_to_all_users')\n", (48046, 48076), False, 'import pulumi\n'), ((48192, 48239), 'pulumi.set', 'pulumi.set', (['self', '"""visible_to_all_users"""', 'value'], {}), "(self, 'visible_to_all_users', value)\n", (48202, 48239), False, 'import pulumi\n'), ((69088, 69123), 'pulumi.get', 'pulumi.get', (['self', '"""additional_info"""'], {}), "(self, 'additional_info')\n", (69098, 69123), False, 'import pulumi\n'), ((69228, 69270), 'pulumi.set', 'pulumi.set', (['self', '"""additional_info"""', 'value'], {}), "(self, 'additional_info', value)\n", (69238, 69270), False, 'import pulumi\n'), ((69569, 69623), 'pulumi.get', 'pulumi.get', (['self', '"""additional_primary_security_groups"""'], {}), "(self, 'additional_primary_security_groups')\n", (69579, 69623), False, 'import pulumi\n'), ((69790, 69851), 'pulumi.set', 'pulumi.set', (['self', '"""additional_primary_security_groups"""', 'value'], {}), "(self, 'additional_primary_security_groups', value)\n", (69800, 69851), False, 'import pulumi\n'), ((70158, 70212), 'pulumi.get', 'pulumi.get', (['self', '"""additional_replica_security_groups"""'], {}), "(self, 'additional_replica_security_groups')\n", (70168, 70212), False, 'import pulumi\n'), ((70379, 70440), 'pulumi.set', 'pulumi.set', (['self', '"""additional_replica_security_groups"""', 'value'], {}), "(self, 'additional_replica_security_groups', value)\n", (70389, 70440), False, 'import pulumi\n'), ((70734, 70766), 'pulumi.get', 'pulumi.get', (['self', '"""applications"""'], {}), "(self, 'applications')\n", (70744, 70766), False, 'import pulumi\n'), ((70911, 70950), 'pulumi.set', 'pulumi.set', (['self', '"""applications"""', 'value'], {}), "(self, 'applications', value)\n", (70921, 70950), False, 'import pulumi\n'), ((71210, 71248), 'pulumi.get', 'pulumi.get', (['self', '"""availability_zones"""'], {}), "(self, 'availability_zones')\n", (71220, 
71248), False, 'import pulumi\n'), ((71383, 71428), 'pulumi.set', 'pulumi.set', (['self', '"""availability_zones"""', 'value'], {}), "(self, 'availability_zones', value)\n", (71393, 71428), False, 'import pulumi\n'), ((71873, 71916), 'pulumi.get', 'pulumi.get', (['self', '"""bootstrap_actions_files"""'], {}), "(self, 'bootstrap_actions_files')\n", (71883, 71916), False, 'import pulumi\n'), ((72092, 72142), 'pulumi.set', 'pulumi.set', (['self', '"""bootstrap_actions_files"""', 'value'], {}), "(self, 'bootstrap_actions_files', value)\n", (72102, 72142), False, 'import pulumi\n'), ((72324, 72354), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_id"""'], {}), "(self, 'cluster_id')\n", (72334, 72354), False, 'import pulumi\n'), ((72449, 72486), 'pulumi.set', 'pulumi.set', (['self', '"""cluster_id"""', 'value'], {}), "(self, 'cluster_id', value)\n", (72459, 72486), False, 'import pulumi\n'), ((72921, 72961), 'pulumi.get', 'pulumi.get', (['self', '"""configurations_files"""'], {}), "(self, 'configurations_files')\n", (72931, 72961), False, 'import pulumi\n'), ((73129, 73176), 'pulumi.set', 'pulumi.set', (['self', '"""configurations_files"""', 'value'], {}), "(self, 'configurations_files', value)\n", (73139, 73176), False, 'import pulumi\n'), ((73389, 73430), 'pulumi.get', 'pulumi.get', (['self', '"""core_desired_capacity"""'], {}), "(self, 'core_desired_capacity')\n", (73399, 73430), False, 'import pulumi\n'), ((73547, 73595), 'pulumi.set', 'pulumi.set', (['self', '"""core_desired_capacity"""', 'value'], {}), "(self, 'core_desired_capacity', value)\n", (73557, 73595), False, 'import pulumi\n'), ((73928, 73970), 'pulumi.get', 'pulumi.get', (['self', '"""core_ebs_block_devices"""'], {}), "(self, 'core_ebs_block_devices')\n", (73938, 73970), False, 'import pulumi\n'), ((74142, 74191), 'pulumi.set', 'pulumi.set', (['self', '"""core_ebs_block_devices"""', 'value'], {}), "(self, 'core_ebs_block_devices', value)\n", (74152, 74191), False, 'import pulumi\n'), ((74413, 74451), 
'pulumi.get', 'pulumi.get', (['self', '"""core_ebs_optimized"""'], {}), "(self, 'core_ebs_optimized')\n", (74423, 74451), False, 'import pulumi\n'), ((74563, 74608), 'pulumi.set', 'pulumi.set', (['self', '"""core_ebs_optimized"""', 'value'], {}), "(self, 'core_ebs_optimized', value)\n", (74573, 74608), False, 'import pulumi\n'), ((74854, 74893), 'pulumi.get', 'pulumi.get', (['self', '"""core_instance_types"""'], {}), "(self, 'core_instance_types')\n", (74864, 74893), False, 'import pulumi\n'), ((75030, 75076), 'pulumi.set', 'pulumi.set', (['self', '"""core_instance_types"""', 'value'], {}), "(self, 'core_instance_types', value)\n", (75040, 75076), False, 'import pulumi\n'), ((75336, 75370), 'pulumi.get', 'pulumi.get', (['self', '"""core_lifecycle"""'], {}), "(self, 'core_lifecycle')\n", (75346, 75370), False, 'import pulumi\n'), ((75473, 75514), 'pulumi.set', 'pulumi.set', (['self', '"""core_lifecycle"""', 'value'], {}), "(self, 'core_lifecycle', value)\n", (75483, 75514), False, 'import pulumi\n'), ((75719, 75752), 'pulumi.get', 'pulumi.get', (['self', '"""core_max_size"""'], {}), "(self, 'core_max_size')\n", (75729, 75752), False, 'import pulumi\n'), ((75853, 75893), 'pulumi.set', 'pulumi.set', (['self', '"""core_max_size"""', 'value'], {}), "(self, 'core_max_size', value)\n", (75863, 75893), False, 'import pulumi\n'), ((76102, 76135), 'pulumi.get', 'pulumi.get', (['self', '"""core_min_size"""'], {}), "(self, 'core_min_size')\n", (76112, 76135), False, 'import pulumi\n'), ((76236, 76276), 'pulumi.set', 'pulumi.set', (['self', '"""core_min_size"""', 'value'], {}), "(self, 'core_min_size', value)\n", (76246, 76276), False, 'import pulumi\n'), ((76487, 76533), 'pulumi.get', 'pulumi.get', (['self', '"""core_scaling_down_policies"""'], {}), "(self, 'core_scaling_down_policies')\n", (76497, 76533), False, 'import pulumi\n'), ((76716, 76769), 'pulumi.set', 'pulumi.set', (['self', '"""core_scaling_down_policies"""', 'value'], {}), "(self, 'core_scaling_down_policies', 
value)\n", (76726, 76769), False, 'import pulumi\n'), ((76974, 77018), 'pulumi.get', 'pulumi.get', (['self', '"""core_scaling_up_policies"""'], {}), "(self, 'core_scaling_up_policies')\n", (76984, 77018), False, 'import pulumi\n'), ((77195, 77246), 'pulumi.set', 'pulumi.set', (['self', '"""core_scaling_up_policies"""', 'value'], {}), "(self, 'core_scaling_up_policies', value)\n", (77205, 77246), False, 'import pulumi\n'), ((77544, 77573), 'pulumi.get', 'pulumi.get', (['self', '"""core_unit"""'], {}), "(self, 'core_unit')\n", (77554, 77573), False, 'import pulumi\n'), ((77666, 77702), 'pulumi.set', 'pulumi.set', (['self', '"""core_unit"""', 'value'], {}), "(self, 'core_unit', value)\n", (77676, 77702), False, 'import pulumi\n'), ((77945, 77978), 'pulumi.get', 'pulumi.get', (['self', '"""custom_ami_id"""'], {}), "(self, 'custom_ami_id')\n", (77955, 77978), False, 'import pulumi\n'), ((78079, 78119), 'pulumi.set', 'pulumi.set', (['self', '"""custom_ami_id"""', 'value'], {}), "(self, 'custom_ami_id', value)\n", (78089, 78119), False, 'import pulumi\n'), ((78285, 78316), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (78295, 78316), False, 'import pulumi\n'), ((78413, 78451), 'pulumi.set', 'pulumi.set', (['self', '"""description"""', 'value'], {}), "(self, 'description', value)\n", (78423, 78451), False, 'import pulumi\n'), ((78594, 78634), 'pulumi.get', 'pulumi.get', (['self', '"""ebs_root_volume_size"""'], {}), "(self, 'ebs_root_volume_size')\n", (78604, 78634), False, 'import pulumi\n'), ((78749, 78796), 'pulumi.set', 'pulumi.set', (['self', '"""ebs_root_volume_size"""', 'value'], {}), "(self, 'ebs_root_volume_size', value)\n", (78759, 78796), False, 'import pulumi\n'), ((79035, 79067), 'pulumi.get', 'pulumi.get', (['self', '"""ec2_key_name"""'], {}), "(self, 'ec2_key_name')\n", (79045, 79067), False, 'import pulumi\n'), ((79166, 79205), 'pulumi.set', 'pulumi.set', (['self', '"""ec2_key_name"""', 'value'], {}), "(self, 
'ec2_key_name', value)\n", (79176, 79205), False, 'import pulumi\n'), ((79434, 79471), 'pulumi.get', 'pulumi.get', (['self', '"""expose_cluster_id"""'], {}), "(self, 'expose_cluster_id')\n", (79444, 79471), False, 'import pulumi\n'), ((79581, 79625), 'pulumi.set', 'pulumi.set', (['self', '"""expose_cluster_id"""', 'value'], {}), "(self, 'expose_cluster_id', value)\n", (79591, 79625), False, 'import pulumi\n'), ((80052, 80088), 'pulumi.get', 'pulumi.get', (['self', '"""instance_weights"""'], {}), "(self, 'instance_weights')\n", (80062, 80088), False, 'import pulumi\n'), ((80244, 80287), 'pulumi.set', 'pulumi.set', (['self', '"""instance_weights"""', 'value'], {}), "(self, 'instance_weights', value)\n", (80254, 80287), False, 'import pulumi\n'), ((80565, 80598), 'pulumi.get', 'pulumi.get', (['self', '"""job_flow_role"""'], {}), "(self, 'job_flow_role')\n", (80575, 80598), False, 'import pulumi\n'), ((80699, 80739), 'pulumi.set', 'pulumi.set', (['self', '"""job_flow_role"""', 'value'], {}), "(self, 'job_flow_role', value)\n", (80709, 80739), False, 'import pulumi\n'), ((80995, 81034), 'pulumi.get', 'pulumi.get', (['self', '"""keep_job_flow_alive"""'], {}), "(self, 'keep_job_flow_alive')\n", (81005, 81034), False, 'import pulumi\n'), ((81148, 81194), 'pulumi.set', 'pulumi.set', (['self', '"""keep_job_flow_alive"""', 'value'], {}), "(self, 'keep_job_flow_alive', value)\n", (81158, 81194), False, 'import pulumi\n'), ((81420, 81447), 'pulumi.get', 'pulumi.get', (['self', '"""log_uri"""'], {}), "(self, 'log_uri')\n", (81430, 81447), False, 'import pulumi\n'), ((81536, 81570), 'pulumi.set', 'pulumi.set', (['self', '"""log_uri"""', 'value'], {}), "(self, 'log_uri', value)\n", (81546, 81570), False, 'import pulumi\n'), ((81840, 81890), 'pulumi.get', 'pulumi.get', (['self', '"""managed_primary_security_group"""'], {}), "(self, 'managed_primary_security_group')\n", (81850, 81890), False, 'import pulumi\n'), ((82025, 82082), 'pulumi.set', 'pulumi.set', (['self', 
'"""managed_primary_security_group"""', 'value'], {}), "(self, 'managed_primary_security_group', value)\n", (82035, 82082), False, 'import pulumi\n'), ((82352, 82402), 'pulumi.get', 'pulumi.get', (['self', '"""managed_replica_security_group"""'], {}), "(self, 'managed_replica_security_group')\n", (82362, 82402), False, 'import pulumi\n'), ((82537, 82594), 'pulumi.set', 'pulumi.set', (['self', '"""managed_replica_security_group"""', 'value'], {}), "(self, 'managed_replica_security_group', value)\n", (82547, 82594), False, 'import pulumi\n'), ((82935, 82979), 'pulumi.get', 'pulumi.get', (['self', '"""master_ebs_block_devices"""'], {}), "(self, 'master_ebs_block_devices')\n", (82945, 82979), False, 'import pulumi\n'), ((83157, 83208), 'pulumi.set', 'pulumi.set', (['self', '"""master_ebs_block_devices"""', 'value'], {}), "(self, 'master_ebs_block_devices', value)\n", (83167, 83208), False, 'import pulumi\n'), ((83434, 83474), 'pulumi.get', 'pulumi.get', (['self', '"""master_ebs_optimized"""'], {}), "(self, 'master_ebs_optimized')\n", (83444, 83474), False, 'import pulumi\n'), ((83590, 83637), 'pulumi.set', 'pulumi.set', (['self', '"""master_ebs_optimized"""', 'value'], {}), "(self, 'master_ebs_optimized', value)\n", (83600, 83637), False, 'import pulumi\n'), ((83889, 83930), 'pulumi.get', 'pulumi.get', (['self', '"""master_instance_types"""'], {}), "(self, 'master_instance_types')\n", (83899, 83930), False, 'import pulumi\n'), ((84071, 84119), 'pulumi.set', 'pulumi.set', (['self', '"""master_instance_types"""', 'value'], {}), "(self, 'master_instance_types', value)\n", (84081, 84119), False, 'import pulumi\n'), ((84385, 84421), 'pulumi.get', 'pulumi.get', (['self', '"""master_lifecycle"""'], {}), "(self, 'master_lifecycle')\n", (84395, 84421), False, 'import pulumi\n'), ((84528, 84571), 'pulumi.set', 'pulumi.set', (['self', '"""master_lifecycle"""', 'value'], {}), "(self, 'master_lifecycle', value)\n", (84538, 84571), False, 'import pulumi\n'), ((84775, 84808), 
'pulumi.get', 'pulumi.get', (['self', '"""master_target"""'], {}), "(self, 'master_target')\n", (84785, 84808), False, 'import pulumi\n'), ((84909, 84949), 'pulumi.set', 'pulumi.set', (['self', '"""master_target"""', 'value'], {}), "(self, 'master_target', value)\n", (84919, 84949), False, 'import pulumi\n'), ((85104, 85128), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (85114, 85128), False, 'import pulumi\n'), ((85211, 85242), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (85221, 85242), False, 'import pulumi\n'), ((85380, 85417), 'pulumi.get', 'pulumi.get', (['self', '"""output_cluster_id"""'], {}), "(self, 'output_cluster_id')\n", (85390, 85417), False, 'import pulumi\n'), ((85526, 85570), 'pulumi.set', 'pulumi.set', (['self', '"""output_cluster_id"""', 'value'], {}), "(self, 'output_cluster_id', value)\n", (85536, 85570), False, 'import pulumi\n'), ((85745, 85785), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_timeout"""'], {}), "(self, 'provisioning_timeout')\n", (85755, 85785), False, 'import pulumi\n'), ((85930, 85977), 'pulumi.set', 'pulumi.set', (['self', '"""provisioning_timeout"""', 'value'], {}), "(self, 'provisioning_timeout', value)\n", (85940, 85977), False, 'import pulumi\n'), ((86133, 86159), 'pulumi.get', 'pulumi.get', (['self', '"""region"""'], {}), "(self, 'region')\n", (86143, 86159), False, 'import pulumi\n'), ((86246, 86279), 'pulumi.set', 'pulumi.set', (['self', '"""region"""', 'value'], {}), "(self, 'region', value)\n", (86256, 86279), False, 'import pulumi\n'), ((86410, 86443), 'pulumi.get', 'pulumi.get', (['self', '"""release_label"""'], {}), "(self, 'release_label')\n", (86420, 86443), False, 'import pulumi\n'), ((86544, 86584), 'pulumi.set', 'pulumi.set', (['self', '"""release_label"""', 'value'], {}), "(self, 'release_label', value)\n", (86554, 86584), False, 'import pulumi\n'), ((86979, 87019), 'pulumi.get', 'pulumi.get', (['self', 
'"""repo_upgrade_on_boot"""'], {}), "(self, 'repo_upgrade_on_boot')\n", (86989, 87019), False, 'import pulumi\n'), ((87134, 87181), 'pulumi.set', 'pulumi.set', (['self', '"""repo_upgrade_on_boot"""', 'value'], {}), "(self, 'repo_upgrade_on_boot', value)\n", (87144, 87181), False, 'import pulumi\n'), ((87459, 87486), 'pulumi.get', 'pulumi.get', (['self', '"""retries"""'], {}), "(self, 'retries')\n", (87469, 87486), False, 'import pulumi\n'), ((87575, 87609), 'pulumi.set', 'pulumi.set', (['self', '"""retries"""', 'value'], {}), "(self, 'retries', value)\n", (87585, 87609), False, 'import pulumi\n'), ((87853, 87888), 'pulumi.get', 'pulumi.get', (['self', '"""scheduled_tasks"""'], {}), "(self, 'scheduled_tasks')\n", (87863, 87888), False, 'import pulumi\n'), ((88041, 88083), 'pulumi.set', 'pulumi.set', (['self', '"""scheduled_tasks"""', 'value'], {}), "(self, 'scheduled_tasks', value)\n", (88051, 88083), False, 'import pulumi\n'), ((88313, 88348), 'pulumi.get', 'pulumi.get', (['self', '"""security_config"""'], {}), "(self, 'security_config')\n", (88323, 88348), False, 'import pulumi\n'), ((88453, 88495), 'pulumi.set', 'pulumi.set', (['self', '"""security_config"""', 'value'], {}), "(self, 'security_config', value)\n", (88463, 88495), False, 'import pulumi\n'), ((88806, 88855), 'pulumi.get', 'pulumi.get', (['self', '"""service_access_security_group"""'], {}), "(self, 'service_access_security_group')\n", (88816, 88855), False, 'import pulumi\n'), ((88988, 89044), 'pulumi.set', 'pulumi.set', (['self', '"""service_access_security_group"""', 'value'], {}), "(self, 'service_access_security_group', value)\n", (88998, 89044), False, 'import pulumi\n'), ((89305, 89337), 'pulumi.get', 'pulumi.get', (['self', '"""service_role"""'], {}), "(self, 'service_role')\n", (89315, 89337), False, 'import pulumi\n'), ((89436, 89475), 'pulumi.set', 'pulumi.set', (['self', '"""service_role"""', 'value'], {}), "(self, 'service_role', value)\n", (89446, 89475), False, 'import pulumi\n'), 
((89693, 89724), 'pulumi.get', 'pulumi.get', (['self', '"""steps_files"""'], {}), "(self, 'steps_files')\n", (89703, 89724), False, 'import pulumi\n'), ((89865, 89903), 'pulumi.set', 'pulumi.set', (['self', '"""steps_files"""', 'value'], {}), "(self, 'steps_files', value)\n", (89875, 89903), False, 'import pulumi\n'), ((90108, 90136), 'pulumi.get', 'pulumi.get', (['self', '"""strategy"""'], {}), "(self, 'strategy')\n", (90118, 90136), False, 'import pulumi\n'), ((90227, 90262), 'pulumi.set', 'pulumi.set', (['self', '"""strategy"""', 'value'], {}), "(self, 'strategy', value)\n", (90237, 90262), False, 'import pulumi\n'), ((90505, 90529), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (90515, 90529), False, 'import pulumi\n'), ((90650, 90681), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (90660, 90681), False, 'import pulumi\n'), ((90894, 90935), 'pulumi.get', 'pulumi.get', (['self', '"""task_desired_capacity"""'], {}), "(self, 'task_desired_capacity')\n", (90904, 90935), False, 'import pulumi\n'), ((91052, 91100), 'pulumi.set', 'pulumi.set', (['self', '"""task_desired_capacity"""', 'value'], {}), "(self, 'task_desired_capacity', value)\n", (91062, 91100), False, 'import pulumi\n'), ((91433, 91475), 'pulumi.get', 'pulumi.get', (['self', '"""task_ebs_block_devices"""'], {}), "(self, 'task_ebs_block_devices')\n", (91443, 91475), False, 'import pulumi\n'), ((91647, 91696), 'pulumi.set', 'pulumi.set', (['self', '"""task_ebs_block_devices"""', 'value'], {}), "(self, 'task_ebs_block_devices', value)\n", (91657, 91696), False, 'import pulumi\n'), ((91918, 91956), 'pulumi.get', 'pulumi.get', (['self', '"""task_ebs_optimized"""'], {}), "(self, 'task_ebs_optimized')\n", (91928, 91956), False, 'import pulumi\n'), ((92068, 92113), 'pulumi.set', 'pulumi.set', (['self', '"""task_ebs_optimized"""', 'value'], {}), "(self, 'task_ebs_optimized', value)\n", (92078, 92113), False, 'import pulumi\n'), 
((92359, 92398), 'pulumi.get', 'pulumi.get', (['self', '"""task_instance_types"""'], {}), "(self, 'task_instance_types')\n", (92369, 92398), False, 'import pulumi\n'), ((92535, 92581), 'pulumi.set', 'pulumi.set', (['self', '"""task_instance_types"""', 'value'], {}), "(self, 'task_instance_types', value)\n", (92545, 92581), False, 'import pulumi\n'), ((92841, 92875), 'pulumi.get', 'pulumi.get', (['self', '"""task_lifecycle"""'], {}), "(self, 'task_lifecycle')\n", (92851, 92875), False, 'import pulumi\n'), ((92978, 93019), 'pulumi.set', 'pulumi.set', (['self', '"""task_lifecycle"""', 'value'], {}), "(self, 'task_lifecycle', value)\n", (92988, 93019), False, 'import pulumi\n'), ((93224, 93257), 'pulumi.get', 'pulumi.get', (['self', '"""task_max_size"""'], {}), "(self, 'task_max_size')\n", (93234, 93257), False, 'import pulumi\n'), ((93358, 93398), 'pulumi.set', 'pulumi.set', (['self', '"""task_max_size"""', 'value'], {}), "(self, 'task_max_size', value)\n", (93368, 93398), False, 'import pulumi\n'), ((93607, 93640), 'pulumi.get', 'pulumi.get', (['self', '"""task_min_size"""'], {}), "(self, 'task_min_size')\n", (93617, 93640), False, 'import pulumi\n'), ((93741, 93781), 'pulumi.set', 'pulumi.set', (['self', '"""task_min_size"""', 'value'], {}), "(self, 'task_min_size', value)\n", (93751, 93781), False, 'import pulumi\n'), ((93992, 94038), 'pulumi.get', 'pulumi.get', (['self', '"""task_scaling_down_policies"""'], {}), "(self, 'task_scaling_down_policies')\n", (94002, 94038), False, 'import pulumi\n'), ((94221, 94274), 'pulumi.set', 'pulumi.set', (['self', '"""task_scaling_down_policies"""', 'value'], {}), "(self, 'task_scaling_down_policies', value)\n", (94231, 94274), False, 'import pulumi\n'), ((94479, 94523), 'pulumi.get', 'pulumi.get', (['self', '"""task_scaling_up_policies"""'], {}), "(self, 'task_scaling_up_policies')\n", (94489, 94523), False, 'import pulumi\n'), ((94700, 94751), 'pulumi.set', 'pulumi.set', (['self', '"""task_scaling_up_policies"""', 'value'], 
{}), "(self, 'task_scaling_up_policies', value)\n", (94710, 94751), False, 'import pulumi\n'), ((95049, 95078), 'pulumi.get', 'pulumi.get', (['self', '"""task_unit"""'], {}), "(self, 'task_unit')\n", (95059, 95078), False, 'import pulumi\n'), ((95171, 95207), 'pulumi.set', 'pulumi.set', (['self', '"""task_unit"""', 'value'], {}), "(self, 'task_unit', value)\n", (95181, 95207), False, 'import pulumi\n'), ((95519, 95559), 'pulumi.get', 'pulumi.get', (['self', '"""termination_policies"""'], {}), "(self, 'termination_policies')\n", (95529, 95559), False, 'import pulumi\n'), ((95726, 95773), 'pulumi.set', 'pulumi.set', (['self', '"""termination_policies"""', 'value'], {}), "(self, 'termination_policies', value)\n", (95736, 95773), False, 'import pulumi\n'), ((96112, 96153), 'pulumi.get', 'pulumi.get', (['self', '"""termination_protected"""'], {}), "(self, 'termination_protected')\n", (96122, 96153), False, 'import pulumi\n'), ((96271, 96319), 'pulumi.set', 'pulumi.set', (['self', '"""termination_protected"""', 'value'], {}), "(self, 'termination_protected', value)\n", (96281, 96319), False, 'import pulumi\n'), ((96463, 96503), 'pulumi.get', 'pulumi.get', (['self', '"""visible_to_all_users"""'], {}), "(self, 'visible_to_all_users')\n", (96473, 96503), False, 'import pulumi\n'), ((96619, 96666), 'pulumi.set', 'pulumi.set', (['self', '"""visible_to_all_users"""', 'value'], {}), "(self, 'visible_to_all_users', value)\n", (96629, 96666), False, 'import pulumi\n'), ((157184, 157219), 'pulumi.get', 'pulumi.get', (['self', '"""additional_info"""'], {}), "(self, 'additional_info')\n", (157194, 157219), False, 'import pulumi\n'), ((157505, 157559), 'pulumi.get', 'pulumi.get', (['self', '"""additional_primary_security_groups"""'], {}), "(self, 'additional_primary_security_groups')\n", (157515, 157559), False, 'import pulumi\n'), ((157853, 157907), 'pulumi.get', 'pulumi.get', (['self', '"""additional_replica_security_groups"""'], {}), "(self, 
'additional_replica_security_groups')\n", (157863, 157907), False, 'import pulumi\n'), ((158192, 158224), 'pulumi.get', 'pulumi.get', (['self', '"""applications"""'], {}), "(self, 'applications')\n", (158202, 158224), False, 'import pulumi\n'), ((158471, 158509), 'pulumi.get', 'pulumi.get', (['self', '"""availability_zones"""'], {}), "(self, 'availability_zones')\n", (158481, 158509), False, 'import pulumi\n'), ((158945, 158988), 'pulumi.get', 'pulumi.get', (['self', '"""bootstrap_actions_files"""'], {}), "(self, 'bootstrap_actions_files')\n", (158955, 158988), False, 'import pulumi\n'), ((159171, 159201), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_id"""'], {}), "(self, 'cluster_id')\n", (159181, 159201), False, 'import pulumi\n'), ((159627, 159667), 'pulumi.get', 'pulumi.get', (['self', '"""configurations_files"""'], {}), "(self, 'configurations_files')\n", (159637, 159667), False, 'import pulumi\n'), ((159881, 159922), 'pulumi.get', 'pulumi.get', (['self', '"""core_desired_capacity"""'], {}), "(self, 'core_desired_capacity')\n", (159891, 159922), False, 'import pulumi\n'), ((160246, 160288), 'pulumi.get', 'pulumi.get', (['self', '"""core_ebs_block_devices"""'], {}), "(self, 'core_ebs_block_devices')\n", (160256, 160288), False, 'import pulumi\n'), ((160511, 160549), 'pulumi.get', 'pulumi.get', (['self', '"""core_ebs_optimized"""'], {}), "(self, 'core_ebs_optimized')\n", (160521, 160549), False, 'import pulumi\n'), ((160782, 160821), 'pulumi.get', 'pulumi.get', (['self', '"""core_instance_types"""'], {}), "(self, 'core_instance_types')\n", (160792, 160821), False, 'import pulumi\n'), ((161082, 161116), 'pulumi.get', 'pulumi.get', (['self', '"""core_lifecycle"""'], {}), "(self, 'core_lifecycle')\n", (161092, 161116), False, 'import pulumi\n'), ((161322, 161355), 'pulumi.get', 'pulumi.get', (['self', '"""core_max_size"""'], {}), "(self, 'core_max_size')\n", (161332, 161355), False, 'import pulumi\n'), ((161565, 161598), 'pulumi.get', 'pulumi.get', (['self', 
'"""core_min_size"""'], {}), "(self, 'core_min_size')\n", (161575, 161598), False, 'import pulumi\n'), ((161800, 161846), 'pulumi.get', 'pulumi.get', (['self', '"""core_scaling_down_policies"""'], {}), "(self, 'core_scaling_down_policies')\n", (161810, 161846), False, 'import pulumi\n'), ((162042, 162086), 'pulumi.get', 'pulumi.get', (['self', '"""core_scaling_up_policies"""'], {}), "(self, 'core_scaling_up_policies')\n", (162052, 162086), False, 'import pulumi\n'), ((162385, 162414), 'pulumi.get', 'pulumi.get', (['self', '"""core_unit"""'], {}), "(self, 'core_unit')\n", (162395, 162414), False, 'import pulumi\n'), ((162658, 162691), 'pulumi.get', 'pulumi.get', (['self', '"""custom_ami_id"""'], {}), "(self, 'custom_ami_id')\n", (162668, 162691), False, 'import pulumi\n'), ((162858, 162889), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (162868, 162889), False, 'import pulumi\n'), ((163033, 163073), 'pulumi.get', 'pulumi.get', (['self', '"""ebs_root_volume_size"""'], {}), "(self, 'ebs_root_volume_size')\n", (163043, 163073), False, 'import pulumi\n'), ((163313, 163345), 'pulumi.get', 'pulumi.get', (['self', '"""ec2_key_name"""'], {}), "(self, 'ec2_key_name')\n", (163323, 163345), False, 'import pulumi\n'), ((163575, 163612), 'pulumi.get', 'pulumi.get', (['self', '"""expose_cluster_id"""'], {}), "(self, 'expose_cluster_id')\n", (163585, 163612), False, 'import pulumi\n'), ((164030, 164066), 'pulumi.get', 'pulumi.get', (['self', '"""instance_weights"""'], {}), "(self, 'instance_weights')\n", (164040, 164066), False, 'import pulumi\n'), ((164345, 164378), 'pulumi.get', 'pulumi.get', (['self', '"""job_flow_role"""'], {}), "(self, 'job_flow_role')\n", (164355, 164378), False, 'import pulumi\n'), ((164635, 164674), 'pulumi.get', 'pulumi.get', (['self', '"""keep_job_flow_alive"""'], {}), "(self, 'keep_job_flow_alive')\n", (164645, 164674), False, 'import pulumi\n'), ((164901, 164928), 'pulumi.get', 'pulumi.get', (['self', 
'"""log_uri"""'], {}), "(self, 'log_uri')\n", (164911, 164928), False, 'import pulumi\n'), ((165199, 165249), 'pulumi.get', 'pulumi.get', (['self', '"""managed_primary_security_group"""'], {}), "(self, 'managed_primary_security_group')\n", (165209, 165249), False, 'import pulumi\n'), ((165520, 165570), 'pulumi.get', 'pulumi.get', (['self', '"""managed_replica_security_group"""'], {}), "(self, 'managed_replica_security_group')\n", (165530, 165570), False, 'import pulumi\n'), ((165902, 165946), 'pulumi.get', 'pulumi.get', (['self', '"""master_ebs_block_devices"""'], {}), "(self, 'master_ebs_block_devices')\n", (165912, 165946), False, 'import pulumi\n'), ((166173, 166213), 'pulumi.get', 'pulumi.get', (['self', '"""master_ebs_optimized"""'], {}), "(self, 'master_ebs_optimized')\n", (166183, 166213), False, 'import pulumi\n'), ((166452, 166493), 'pulumi.get', 'pulumi.get', (['self', '"""master_instance_types"""'], {}), "(self, 'master_instance_types')\n", (166462, 166493), False, 'import pulumi\n'), ((166760, 166796), 'pulumi.get', 'pulumi.get', (['self', '"""master_lifecycle"""'], {}), "(self, 'master_lifecycle')\n", (166770, 166796), False, 'import pulumi\n'), ((167001, 167034), 'pulumi.get', 'pulumi.get', (['self', '"""master_target"""'], {}), "(self, 'master_target')\n", (167011, 167034), False, 'import pulumi\n'), ((167180, 167204), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (167190, 167204), False, 'import pulumi\n'), ((167333, 167370), 'pulumi.get', 'pulumi.get', (['self', '"""output_cluster_id"""'], {}), "(self, 'output_cluster_id')\n", (167343, 167370), False, 'import pulumi\n'), ((167550, 167590), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_timeout"""'], {}), "(self, 'provisioning_timeout')\n", (167560, 167590), False, 'import pulumi\n'), ((167747, 167773), 'pulumi.get', 'pulumi.get', (['self', '"""region"""'], {}), "(self, 'region')\n", (167757, 167773), False, 'import pulumi\n'), ((167905, 167938), 'pulumi.get', 
'pulumi.get', (['self', '"""release_label"""'], {}), "(self, 'release_label')\n", (167915, 167938), False, 'import pulumi\n'), ((168334, 168374), 'pulumi.get', 'pulumi.get', (['self', '"""repo_upgrade_on_boot"""'], {}), "(self, 'repo_upgrade_on_boot')\n", (168344, 168374), False, 'import pulumi\n'), ((168653, 168680), 'pulumi.get', 'pulumi.get', (['self', '"""retries"""'], {}), "(self, 'retries')\n", (168663, 168680), False, 'import pulumi\n'), ((168915, 168950), 'pulumi.get', 'pulumi.get', (['self', '"""scheduled_tasks"""'], {}), "(self, 'scheduled_tasks')\n", (168925, 168950), False, 'import pulumi\n'), ((169181, 169216), 'pulumi.get', 'pulumi.get', (['self', '"""security_config"""'], {}), "(self, 'security_config')\n", (169191, 169216), False, 'import pulumi\n'), ((169528, 169577), 'pulumi.get', 'pulumi.get', (['self', '"""service_access_security_group"""'], {}), "(self, 'service_access_security_group')\n", (169538, 169577), False, 'import pulumi\n'), ((169839, 169871), 'pulumi.get', 'pulumi.get', (['self', '"""service_role"""'], {}), "(self, 'service_role')\n", (169849, 169871), False, 'import pulumi\n'), ((170080, 170111), 'pulumi.get', 'pulumi.get', (['self', '"""steps_files"""'], {}), "(self, 'steps_files')\n", (170090, 170111), False, 'import pulumi\n'), ((170307, 170335), 'pulumi.get', 'pulumi.get', (['self', '"""strategy"""'], {}), "(self, 'strategy')\n", (170317, 170335), False, 'import pulumi\n'), ((170569, 170593), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (170579, 170593), False, 'import pulumi\n'), ((170807, 170848), 'pulumi.get', 'pulumi.get', (['self', '"""task_desired_capacity"""'], {}), "(self, 'task_desired_capacity')\n", (170817, 170848), False, 'import pulumi\n'), ((171172, 171214), 'pulumi.get', 'pulumi.get', (['self', '"""task_ebs_block_devices"""'], {}), "(self, 'task_ebs_block_devices')\n", (171182, 171214), False, 'import pulumi\n'), ((171437, 171475), 'pulumi.get', 'pulumi.get', (['self', 
'"""task_ebs_optimized"""'], {}), "(self, 'task_ebs_optimized')\n", (171447, 171475), False, 'import pulumi\n'), ((171708, 171747), 'pulumi.get', 'pulumi.get', (['self', '"""task_instance_types"""'], {}), "(self, 'task_instance_types')\n", (171718, 171747), False, 'import pulumi\n'), ((172008, 172042), 'pulumi.get', 'pulumi.get', (['self', '"""task_lifecycle"""'], {}), "(self, 'task_lifecycle')\n", (172018, 172042), False, 'import pulumi\n'), ((172248, 172281), 'pulumi.get', 'pulumi.get', (['self', '"""task_max_size"""'], {}), "(self, 'task_max_size')\n", (172258, 172281), False, 'import pulumi\n'), ((172491, 172524), 'pulumi.get', 'pulumi.get', (['self', '"""task_min_size"""'], {}), "(self, 'task_min_size')\n", (172501, 172524), False, 'import pulumi\n'), ((172726, 172772), 'pulumi.get', 'pulumi.get', (['self', '"""task_scaling_down_policies"""'], {}), "(self, 'task_scaling_down_policies')\n", (172736, 172772), False, 'import pulumi\n'), ((172968, 173012), 'pulumi.get', 'pulumi.get', (['self', '"""task_scaling_up_policies"""'], {}), "(self, 'task_scaling_up_policies')\n", (172978, 173012), False, 'import pulumi\n'), ((173311, 173340), 'pulumi.get', 'pulumi.get', (['self', '"""task_unit"""'], {}), "(self, 'task_unit')\n", (173321, 173340), False, 'import pulumi\n'), ((173643, 173683), 'pulumi.get', 'pulumi.get', (['self', '"""termination_policies"""'], {}), "(self, 'termination_policies')\n", (173653, 173683), False, 'import pulumi\n'), ((174023, 174064), 'pulumi.get', 'pulumi.get', (['self', '"""termination_protected"""'], {}), "(self, 'termination_protected')\n", (174033, 174064), False, 'import pulumi\n'), ((174209, 174249), 'pulumi.get', 'pulumi.get', (['self', '"""visible_to_all_users"""'], {}), "(self, 'visible_to_all_users')\n", (174219, 174249), False, 'import pulumi\n'), ((13634, 13690), 'pulumi.set', 'pulumi.set', (['__self__', '"""additional_info"""', 'additional_info'], {}), "(__self__, 'additional_info', additional_info)\n", (13644, 13690), False, 
'import pulumi\n'), ((13762, 13860), 'pulumi.set', 'pulumi.set', (['__self__', '"""additional_primary_security_groups"""', 'additional_primary_security_groups'], {}), "(__self__, 'additional_primary_security_groups',\n additional_primary_security_groups)\n", (13772, 13860), False, 'import pulumi\n'), ((13928, 14026), 'pulumi.set', 'pulumi.set', (['__self__', '"""additional_replica_security_groups"""', 'additional_replica_security_groups'], {}), "(__self__, 'additional_replica_security_groups',\n additional_replica_security_groups)\n", (13938, 14026), False, 'import pulumi\n'), ((14072, 14122), 'pulumi.set', 'pulumi.set', (['__self__', '"""applications"""', 'applications'], {}), "(__self__, 'applications', applications)\n", (14082, 14122), False, 'import pulumi\n'), ((14178, 14240), 'pulumi.set', 'pulumi.set', (['__self__', '"""availability_zones"""', 'availability_zones'], {}), "(__self__, 'availability_zones', availability_zones)\n", (14188, 14240), False, 'import pulumi\n'), ((14301, 14373), 'pulumi.set', 'pulumi.set', (['__self__', '"""bootstrap_actions_files"""', 'bootstrap_actions_files'], {}), "(__self__, 'bootstrap_actions_files', bootstrap_actions_files)\n", (14311, 14373), False, 'import pulumi\n'), ((14421, 14467), 'pulumi.set', 'pulumi.set', (['__self__', '"""cluster_id"""', 'cluster_id'], {}), "(__self__, 'cluster_id', cluster_id)\n", (14431, 14467), False, 'import pulumi\n'), ((14525, 14591), 'pulumi.set', 'pulumi.set', (['__self__', '"""configurations_files"""', 'configurations_files'], {}), "(__self__, 'configurations_files', configurations_files)\n", (14535, 14591), False, 'import pulumi\n'), ((14650, 14718), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_desired_capacity"""', 'core_desired_capacity'], {}), "(__self__, 'core_desired_capacity', core_desired_capacity)\n", (14660, 14718), False, 'import pulumi\n'), ((14778, 14848), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_ebs_block_devices"""', 'core_ebs_block_devices'], {}), "(__self__, 
'core_ebs_block_devices', core_ebs_block_devices)\n", (14788, 14848), False, 'import pulumi\n'), ((14904, 14966), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_ebs_optimized"""', 'core_ebs_optimized'], {}), "(__self__, 'core_ebs_optimized', core_ebs_optimized)\n", (14914, 14966), False, 'import pulumi\n'), ((15023, 15087), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_instance_types"""', 'core_instance_types'], {}), "(__self__, 'core_instance_types', core_instance_types)\n", (15033, 15087), False, 'import pulumi\n'), ((15139, 15193), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_lifecycle"""', 'core_lifecycle'], {}), "(__self__, 'core_lifecycle', core_lifecycle)\n", (15149, 15193), False, 'import pulumi\n'), ((15244, 15296), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_max_size"""', 'core_max_size'], {}), "(__self__, 'core_max_size', core_max_size)\n", (15254, 15296), False, 'import pulumi\n'), ((15347, 15399), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_min_size"""', 'core_min_size'], {}), "(__self__, 'core_min_size', core_min_size)\n", (15357, 15399), False, 'import pulumi\n'), ((15463, 15541), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_scaling_down_policies"""', 'core_scaling_down_policies'], {}), "(__self__, 'core_scaling_down_policies', core_scaling_down_policies)\n", (15473, 15541), False, 'import pulumi\n'), ((15603, 15677), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_scaling_up_policies"""', 'core_scaling_up_policies'], {}), "(__self__, 'core_scaling_up_policies', core_scaling_up_policies)\n", (15613, 15677), False, 'import pulumi\n'), ((15724, 15768), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_unit"""', 'core_unit'], {}), "(__self__, 'core_unit', core_unit)\n", (15734, 15768), False, 'import pulumi\n'), ((15819, 15871), 'pulumi.set', 'pulumi.set', (['__self__', '"""custom_ami_id"""', 'custom_ami_id'], {}), "(__self__, 'custom_ami_id', custom_ami_id)\n", (15829, 15871), False, 'import pulumi\n'), ((15920, 
15968), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 'description'], {}), "(__self__, 'description', description)\n", (15930, 15968), False, 'import pulumi\n'), ((16026, 16092), 'pulumi.set', 'pulumi.set', (['__self__', '"""ebs_root_volume_size"""', 'ebs_root_volume_size'], {}), "(__self__, 'ebs_root_volume_size', ebs_root_volume_size)\n", (16036, 16092), False, 'import pulumi\n'), ((16142, 16192), 'pulumi.set', 'pulumi.set', (['__self__', '"""ec2_key_name"""', 'ec2_key_name'], {}), "(__self__, 'ec2_key_name', ec2_key_name)\n", (16152, 16192), False, 'import pulumi\n'), ((16247, 16307), 'pulumi.set', 'pulumi.set', (['__self__', '"""expose_cluster_id"""', 'expose_cluster_id'], {}), "(__self__, 'expose_cluster_id', expose_cluster_id)\n", (16257, 16307), False, 'import pulumi\n'), ((16361, 16419), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_weights"""', 'instance_weights'], {}), "(__self__, 'instance_weights', instance_weights)\n", (16371, 16419), False, 'import pulumi\n'), ((16470, 16522), 'pulumi.set', 'pulumi.set', (['__self__', '"""job_flow_role"""', 'job_flow_role'], {}), "(__self__, 'job_flow_role', job_flow_role)\n", (16480, 16522), False, 'import pulumi\n'), ((16579, 16643), 'pulumi.set', 'pulumi.set', (['__self__', '"""keep_job_flow_alive"""', 'keep_job_flow_alive'], {}), "(__self__, 'keep_job_flow_alive', keep_job_flow_alive)\n", (16589, 16643), False, 'import pulumi\n'), ((16688, 16728), 'pulumi.set', 'pulumi.set', (['__self__', '"""log_uri"""', 'log_uri'], {}), "(__self__, 'log_uri', log_uri)\n", (16698, 16728), False, 'import pulumi\n'), ((16796, 16886), 'pulumi.set', 'pulumi.set', (['__self__', '"""managed_primary_security_group"""', 'managed_primary_security_group'], {}), "(__self__, 'managed_primary_security_group',\n managed_primary_security_group)\n", (16806, 16886), False, 'import pulumi\n'), ((16950, 17040), 'pulumi.set', 'pulumi.set', (['__self__', '"""managed_replica_security_group"""', 
'managed_replica_security_group'], {}), "(__self__, 'managed_replica_security_group',\n managed_replica_security_group)\n", (16960, 17040), False, 'import pulumi\n'), ((17098, 17172), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_ebs_block_devices"""', 'master_ebs_block_devices'], {}), "(__self__, 'master_ebs_block_devices', master_ebs_block_devices)\n", (17108, 17172), False, 'import pulumi\n'), ((17230, 17296), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_ebs_optimized"""', 'master_ebs_optimized'], {}), "(__self__, 'master_ebs_optimized', master_ebs_optimized)\n", (17240, 17296), False, 'import pulumi\n'), ((17355, 17423), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_instance_types"""', 'master_instance_types'], {}), "(__self__, 'master_instance_types', master_instance_types)\n", (17365, 17423), False, 'import pulumi\n'), ((17477, 17535), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_lifecycle"""', 'master_lifecycle'], {}), "(__self__, 'master_lifecycle', master_lifecycle)\n", (17487, 17535), False, 'import pulumi\n'), ((17586, 17638), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_target"""', 'master_target'], {}), "(__self__, 'master_target', master_target)\n", (17596, 17638), False, 'import pulumi\n'), ((17680, 17714), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (17690, 17714), False, 'import pulumi\n'), ((17772, 17838), 'pulumi.set', 'pulumi.set', (['__self__', '"""provisioning_timeout"""', 'provisioning_timeout'], {}), "(__self__, 'provisioning_timeout', provisioning_timeout)\n", (17782, 17838), False, 'import pulumi\n'), ((17882, 17920), 'pulumi.set', 'pulumi.set', (['__self__', '"""region"""', 'region'], {}), "(__self__, 'region', region)\n", (17892, 17920), False, 'import pulumi\n'), ((17971, 18023), 'pulumi.set', 'pulumi.set', (['__self__', '"""release_label"""', 'release_label'], {}), "(__self__, 'release_label', release_label)\n", (17981, 18023), False, 'import 
pulumi\n'), ((18081, 18147), 'pulumi.set', 'pulumi.set', (['__self__', '"""repo_upgrade_on_boot"""', 'repo_upgrade_on_boot'], {}), "(__self__, 'repo_upgrade_on_boot', repo_upgrade_on_boot)\n", (18091, 18147), False, 'import pulumi\n'), ((18192, 18232), 'pulumi.set', 'pulumi.set', (['__self__', '"""retries"""', 'retries'], {}), "(__self__, 'retries', retries)\n", (18202, 18232), False, 'import pulumi\n'), ((18285, 18341), 'pulumi.set', 'pulumi.set', (['__self__', '"""scheduled_tasks"""', 'scheduled_tasks'], {}), "(__self__, 'scheduled_tasks', scheduled_tasks)\n", (18295, 18341), False, 'import pulumi\n'), ((18394, 18450), 'pulumi.set', 'pulumi.set', (['__self__', '"""security_config"""', 'security_config'], {}), "(__self__, 'security_config', security_config)\n", (18404, 18450), False, 'import pulumi\n'), ((18517, 18605), 'pulumi.set', 'pulumi.set', (['__self__', '"""service_access_security_group"""', 'service_access_security_group'], {}), "(__self__, 'service_access_security_group',\n service_access_security_group)\n", (18527, 18605), False, 'import pulumi\n'), ((18651, 18701), 'pulumi.set', 'pulumi.set', (['__self__', '"""service_role"""', 'service_role'], {}), "(__self__, 'service_role', service_role)\n", (18661, 18701), False, 'import pulumi\n'), ((18750, 18798), 'pulumi.set', 'pulumi.set', (['__self__', '"""steps_files"""', 'steps_files'], {}), "(__self__, 'steps_files', steps_files)\n", (18760, 18798), False, 'import pulumi\n'), ((18840, 18874), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (18850, 18874), False, 'import pulumi\n'), ((18933, 19001), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_desired_capacity"""', 'task_desired_capacity'], {}), "(__self__, 'task_desired_capacity', task_desired_capacity)\n", (18943, 19001), False, 'import pulumi\n'), ((19061, 19131), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_ebs_block_devices"""', 'task_ebs_block_devices'], {}), "(__self__, 
'task_ebs_block_devices', task_ebs_block_devices)\n", (19071, 19131), False, 'import pulumi\n'), ((19187, 19249), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_ebs_optimized"""', 'task_ebs_optimized'], {}), "(__self__, 'task_ebs_optimized', task_ebs_optimized)\n", (19197, 19249), False, 'import pulumi\n'), ((19306, 19370), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_instance_types"""', 'task_instance_types'], {}), "(__self__, 'task_instance_types', task_instance_types)\n", (19316, 19370), False, 'import pulumi\n'), ((19422, 19476), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_lifecycle"""', 'task_lifecycle'], {}), "(__self__, 'task_lifecycle', task_lifecycle)\n", (19432, 19476), False, 'import pulumi\n'), ((19527, 19579), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_max_size"""', 'task_max_size'], {}), "(__self__, 'task_max_size', task_max_size)\n", (19537, 19579), False, 'import pulumi\n'), ((19630, 19682), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_min_size"""', 'task_min_size'], {}), "(__self__, 'task_min_size', task_min_size)\n", (19640, 19682), False, 'import pulumi\n'), ((19746, 19824), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_scaling_down_policies"""', 'task_scaling_down_policies'], {}), "(__self__, 'task_scaling_down_policies', task_scaling_down_policies)\n", (19756, 19824), False, 'import pulumi\n'), ((19886, 19960), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_scaling_up_policies"""', 'task_scaling_up_policies'], {}), "(__self__, 'task_scaling_up_policies', task_scaling_up_policies)\n", (19896, 19960), False, 'import pulumi\n'), ((20007, 20051), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_unit"""', 'task_unit'], {}), "(__self__, 'task_unit', task_unit)\n", (20017, 20051), False, 'import pulumi\n'), ((20109, 20175), 'pulumi.set', 'pulumi.set', (['__self__', '"""termination_policies"""', 'termination_policies'], {}), "(__self__, 'termination_policies', termination_policies)\n", (20119, 20175), False, 
'import pulumi\n'), ((20234, 20302), 'pulumi.set', 'pulumi.set', (['__self__', '"""termination_protected"""', 'termination_protected'], {}), "(__self__, 'termination_protected', termination_protected)\n", (20244, 20302), False, 'import pulumi\n'), ((20360, 20475), 'warnings.warn', 'warnings.warn', (['"""This field has been removed from our API and is no longer functional."""', 'DeprecationWarning'], {}), "(\n 'This field has been removed from our API and is no longer functional.',\n DeprecationWarning)\n", (20373, 20475), False, 'import warnings\n'), ((20483, 20617), 'pulumi.log.warn', 'pulumi.log.warn', (['"""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional."""'], {}), "(\n 'visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.'\n )\n", (20498, 20617), False, 'import pulumi\n'), ((20669, 20735), 'pulumi.set', 'pulumi.set', (['__self__', '"""visible_to_all_users"""', 'visible_to_all_users'], {}), "(__self__, 'visible_to_all_users', visible_to_all_users)\n", (20679, 20735), False, 'import pulumi\n'), ((61510, 61566), 'pulumi.set', 'pulumi.set', (['__self__', '"""additional_info"""', 'additional_info'], {}), "(__self__, 'additional_info', additional_info)\n", (61520, 61566), False, 'import pulumi\n'), ((61638, 61736), 'pulumi.set', 'pulumi.set', (['__self__', '"""additional_primary_security_groups"""', 'additional_primary_security_groups'], {}), "(__self__, 'additional_primary_security_groups',\n additional_primary_security_groups)\n", (61648, 61736), False, 'import pulumi\n'), ((61804, 61902), 'pulumi.set', 'pulumi.set', (['__self__', '"""additional_replica_security_groups"""', 'additional_replica_security_groups'], {}), "(__self__, 'additional_replica_security_groups',\n additional_replica_security_groups)\n", (61814, 61902), False, 'import pulumi\n'), ((61948, 61998), 'pulumi.set', 'pulumi.set', (['__self__', '"""applications"""', 'applications'], {}), 
"(__self__, 'applications', applications)\n", (61958, 61998), False, 'import pulumi\n'), ((62054, 62116), 'pulumi.set', 'pulumi.set', (['__self__', '"""availability_zones"""', 'availability_zones'], {}), "(__self__, 'availability_zones', availability_zones)\n", (62064, 62116), False, 'import pulumi\n'), ((62177, 62249), 'pulumi.set', 'pulumi.set', (['__self__', '"""bootstrap_actions_files"""', 'bootstrap_actions_files'], {}), "(__self__, 'bootstrap_actions_files', bootstrap_actions_files)\n", (62187, 62249), False, 'import pulumi\n'), ((62297, 62343), 'pulumi.set', 'pulumi.set', (['__self__', '"""cluster_id"""', 'cluster_id'], {}), "(__self__, 'cluster_id', cluster_id)\n", (62307, 62343), False, 'import pulumi\n'), ((62401, 62467), 'pulumi.set', 'pulumi.set', (['__self__', '"""configurations_files"""', 'configurations_files'], {}), "(__self__, 'configurations_files', configurations_files)\n", (62411, 62467), False, 'import pulumi\n'), ((62526, 62594), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_desired_capacity"""', 'core_desired_capacity'], {}), "(__self__, 'core_desired_capacity', core_desired_capacity)\n", (62536, 62594), False, 'import pulumi\n'), ((62654, 62724), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_ebs_block_devices"""', 'core_ebs_block_devices'], {}), "(__self__, 'core_ebs_block_devices', core_ebs_block_devices)\n", (62664, 62724), False, 'import pulumi\n'), ((62780, 62842), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_ebs_optimized"""', 'core_ebs_optimized'], {}), "(__self__, 'core_ebs_optimized', core_ebs_optimized)\n", (62790, 62842), False, 'import pulumi\n'), ((62899, 62963), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_instance_types"""', 'core_instance_types'], {}), "(__self__, 'core_instance_types', core_instance_types)\n", (62909, 62963), False, 'import pulumi\n'), ((63015, 63069), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_lifecycle"""', 'core_lifecycle'], {}), "(__self__, 'core_lifecycle', 
core_lifecycle)\n", (63025, 63069), False, 'import pulumi\n'), ((63120, 63172), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_max_size"""', 'core_max_size'], {}), "(__self__, 'core_max_size', core_max_size)\n", (63130, 63172), False, 'import pulumi\n'), ((63223, 63275), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_min_size"""', 'core_min_size'], {}), "(__self__, 'core_min_size', core_min_size)\n", (63233, 63275), False, 'import pulumi\n'), ((63339, 63417), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_scaling_down_policies"""', 'core_scaling_down_policies'], {}), "(__self__, 'core_scaling_down_policies', core_scaling_down_policies)\n", (63349, 63417), False, 'import pulumi\n'), ((63479, 63553), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_scaling_up_policies"""', 'core_scaling_up_policies'], {}), "(__self__, 'core_scaling_up_policies', core_scaling_up_policies)\n", (63489, 63553), False, 'import pulumi\n'), ((63600, 63644), 'pulumi.set', 'pulumi.set', (['__self__', '"""core_unit"""', 'core_unit'], {}), "(__self__, 'core_unit', core_unit)\n", (63610, 63644), False, 'import pulumi\n'), ((63695, 63747), 'pulumi.set', 'pulumi.set', (['__self__', '"""custom_ami_id"""', 'custom_ami_id'], {}), "(__self__, 'custom_ami_id', custom_ami_id)\n", (63705, 63747), False, 'import pulumi\n'), ((63796, 63844), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 'description'], {}), "(__self__, 'description', description)\n", (63806, 63844), False, 'import pulumi\n'), ((63902, 63968), 'pulumi.set', 'pulumi.set', (['__self__', '"""ebs_root_volume_size"""', 'ebs_root_volume_size'], {}), "(__self__, 'ebs_root_volume_size', ebs_root_volume_size)\n", (63912, 63968), False, 'import pulumi\n'), ((64018, 64068), 'pulumi.set', 'pulumi.set', (['__self__', '"""ec2_key_name"""', 'ec2_key_name'], {}), "(__self__, 'ec2_key_name', ec2_key_name)\n", (64028, 64068), False, 'import pulumi\n'), ((64123, 64183), 'pulumi.set', 'pulumi.set', (['__self__', 
'"""expose_cluster_id"""', 'expose_cluster_id'], {}), "(__self__, 'expose_cluster_id', expose_cluster_id)\n", (64133, 64183), False, 'import pulumi\n'), ((64237, 64295), 'pulumi.set', 'pulumi.set', (['__self__', '"""instance_weights"""', 'instance_weights'], {}), "(__self__, 'instance_weights', instance_weights)\n", (64247, 64295), False, 'import pulumi\n'), ((64346, 64398), 'pulumi.set', 'pulumi.set', (['__self__', '"""job_flow_role"""', 'job_flow_role'], {}), "(__self__, 'job_flow_role', job_flow_role)\n", (64356, 64398), False, 'import pulumi\n'), ((64455, 64519), 'pulumi.set', 'pulumi.set', (['__self__', '"""keep_job_flow_alive"""', 'keep_job_flow_alive'], {}), "(__self__, 'keep_job_flow_alive', keep_job_flow_alive)\n", (64465, 64519), False, 'import pulumi\n'), ((64564, 64604), 'pulumi.set', 'pulumi.set', (['__self__', '"""log_uri"""', 'log_uri'], {}), "(__self__, 'log_uri', log_uri)\n", (64574, 64604), False, 'import pulumi\n'), ((64672, 64762), 'pulumi.set', 'pulumi.set', (['__self__', '"""managed_primary_security_group"""', 'managed_primary_security_group'], {}), "(__self__, 'managed_primary_security_group',\n managed_primary_security_group)\n", (64682, 64762), False, 'import pulumi\n'), ((64826, 64916), 'pulumi.set', 'pulumi.set', (['__self__', '"""managed_replica_security_group"""', 'managed_replica_security_group'], {}), "(__self__, 'managed_replica_security_group',\n managed_replica_security_group)\n", (64836, 64916), False, 'import pulumi\n'), ((64974, 65048), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_ebs_block_devices"""', 'master_ebs_block_devices'], {}), "(__self__, 'master_ebs_block_devices', master_ebs_block_devices)\n", (64984, 65048), False, 'import pulumi\n'), ((65106, 65172), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_ebs_optimized"""', 'master_ebs_optimized'], {}), "(__self__, 'master_ebs_optimized', master_ebs_optimized)\n", (65116, 65172), False, 'import pulumi\n'), ((65231, 65299), 'pulumi.set', 'pulumi.set', 
(['__self__', '"""master_instance_types"""', 'master_instance_types'], {}), "(__self__, 'master_instance_types', master_instance_types)\n", (65241, 65299), False, 'import pulumi\n'), ((65353, 65411), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_lifecycle"""', 'master_lifecycle'], {}), "(__self__, 'master_lifecycle', master_lifecycle)\n", (65363, 65411), False, 'import pulumi\n'), ((65462, 65514), 'pulumi.set', 'pulumi.set', (['__self__', '"""master_target"""', 'master_target'], {}), "(__self__, 'master_target', master_target)\n", (65472, 65514), False, 'import pulumi\n'), ((65556, 65590), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (65566, 65590), False, 'import pulumi\n'), ((65645, 65705), 'pulumi.set', 'pulumi.set', (['__self__', '"""output_cluster_id"""', 'output_cluster_id'], {}), "(__self__, 'output_cluster_id', output_cluster_id)\n", (65655, 65705), False, 'import pulumi\n'), ((65763, 65829), 'pulumi.set', 'pulumi.set', (['__self__', '"""provisioning_timeout"""', 'provisioning_timeout'], {}), "(__self__, 'provisioning_timeout', provisioning_timeout)\n", (65773, 65829), False, 'import pulumi\n'), ((65873, 65911), 'pulumi.set', 'pulumi.set', (['__self__', '"""region"""', 'region'], {}), "(__self__, 'region', region)\n", (65883, 65911), False, 'import pulumi\n'), ((65962, 66014), 'pulumi.set', 'pulumi.set', (['__self__', '"""release_label"""', 'release_label'], {}), "(__self__, 'release_label', release_label)\n", (65972, 66014), False, 'import pulumi\n'), ((66072, 66138), 'pulumi.set', 'pulumi.set', (['__self__', '"""repo_upgrade_on_boot"""', 'repo_upgrade_on_boot'], {}), "(__self__, 'repo_upgrade_on_boot', repo_upgrade_on_boot)\n", (66082, 66138), False, 'import pulumi\n'), ((66183, 66223), 'pulumi.set', 'pulumi.set', (['__self__', '"""retries"""', 'retries'], {}), "(__self__, 'retries', retries)\n", (66193, 66223), False, 'import pulumi\n'), ((66276, 66332), 'pulumi.set', 'pulumi.set', 
(['__self__', '"""scheduled_tasks"""', 'scheduled_tasks'], {}), "(__self__, 'scheduled_tasks', scheduled_tasks)\n", (66286, 66332), False, 'import pulumi\n'), ((66385, 66441), 'pulumi.set', 'pulumi.set', (['__self__', '"""security_config"""', 'security_config'], {}), "(__self__, 'security_config', security_config)\n", (66395, 66441), False, 'import pulumi\n'), ((66508, 66596), 'pulumi.set', 'pulumi.set', (['__self__', '"""service_access_security_group"""', 'service_access_security_group'], {}), "(__self__, 'service_access_security_group',\n service_access_security_group)\n", (66518, 66596), False, 'import pulumi\n'), ((66642, 66692), 'pulumi.set', 'pulumi.set', (['__self__', '"""service_role"""', 'service_role'], {}), "(__self__, 'service_role', service_role)\n", (66652, 66692), False, 'import pulumi\n'), ((66741, 66789), 'pulumi.set', 'pulumi.set', (['__self__', '"""steps_files"""', 'steps_files'], {}), "(__self__, 'steps_files', steps_files)\n", (66751, 66789), False, 'import pulumi\n'), ((66835, 66877), 'pulumi.set', 'pulumi.set', (['__self__', '"""strategy"""', 'strategy'], {}), "(__self__, 'strategy', strategy)\n", (66845, 66877), False, 'import pulumi\n'), ((66919, 66953), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (66929, 66953), False, 'import pulumi\n'), ((67012, 67080), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_desired_capacity"""', 'task_desired_capacity'], {}), "(__self__, 'task_desired_capacity', task_desired_capacity)\n", (67022, 67080), False, 'import pulumi\n'), ((67140, 67210), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_ebs_block_devices"""', 'task_ebs_block_devices'], {}), "(__self__, 'task_ebs_block_devices', task_ebs_block_devices)\n", (67150, 67210), False, 'import pulumi\n'), ((67266, 67328), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_ebs_optimized"""', 'task_ebs_optimized'], {}), "(__self__, 'task_ebs_optimized', task_ebs_optimized)\n", (67276, 67328), False, 
'import pulumi\n'), ((67385, 67449), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_instance_types"""', 'task_instance_types'], {}), "(__self__, 'task_instance_types', task_instance_types)\n", (67395, 67449), False, 'import pulumi\n'), ((67501, 67555), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_lifecycle"""', 'task_lifecycle'], {}), "(__self__, 'task_lifecycle', task_lifecycle)\n", (67511, 67555), False, 'import pulumi\n'), ((67606, 67658), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_max_size"""', 'task_max_size'], {}), "(__self__, 'task_max_size', task_max_size)\n", (67616, 67658), False, 'import pulumi\n'), ((67709, 67761), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_min_size"""', 'task_min_size'], {}), "(__self__, 'task_min_size', task_min_size)\n", (67719, 67761), False, 'import pulumi\n'), ((67825, 67903), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_scaling_down_policies"""', 'task_scaling_down_policies'], {}), "(__self__, 'task_scaling_down_policies', task_scaling_down_policies)\n", (67835, 67903), False, 'import pulumi\n'), ((67965, 68039), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_scaling_up_policies"""', 'task_scaling_up_policies'], {}), "(__self__, 'task_scaling_up_policies', task_scaling_up_policies)\n", (67975, 68039), False, 'import pulumi\n'), ((68086, 68130), 'pulumi.set', 'pulumi.set', (['__self__', '"""task_unit"""', 'task_unit'], {}), "(__self__, 'task_unit', task_unit)\n", (68096, 68130), False, 'import pulumi\n'), ((68188, 68254), 'pulumi.set', 'pulumi.set', (['__self__', '"""termination_policies"""', 'termination_policies'], {}), "(__self__, 'termination_policies', termination_policies)\n", (68198, 68254), False, 'import pulumi\n'), ((68313, 68381), 'pulumi.set', 'pulumi.set', (['__self__', '"""termination_protected"""', 'termination_protected'], {}), "(__self__, 'termination_protected', termination_protected)\n", (68323, 68381), False, 'import pulumi\n'), ((68439, 68554), 'warnings.warn', 
'warnings.warn', (['"""This field has been removed from our API and is no longer functional."""', 'DeprecationWarning'], {}), "(\n 'This field has been removed from our API and is no longer functional.',\n DeprecationWarning)\n", (68452, 68554), False, 'import warnings\n'), ((68562, 68696), 'pulumi.log.warn', 'pulumi.log.warn', (['"""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional."""'], {}), "(\n 'visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.'\n )\n", (68577, 68696), False, 'import pulumi\n'), ((68748, 68814), 'pulumi.set', 'pulumi.set', (['__self__', '"""visible_to_all_users"""', 'visible_to_all_users'], {}), "(__self__, 'visible_to_all_users', visible_to_all_users)\n", (68758, 68814), False, 'import pulumi\n'), ((133208, 133232), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (133230, 133232), False, 'import pulumi\n'), ((152577, 152606), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (152599, 152606), False, 'import pulumi\n'), ((138194, 138309), 'warnings.warn', 'warnings.warn', (['"""This field has been removed from our API and is no longer functional."""', 'DeprecationWarning'], {}), "(\n 'This field has been removed from our API and is no longer functional.',\n DeprecationWarning)\n", (138207, 138309), False, 'import warnings\n'), ((138321, 138455), 'pulumi.log.warn', 'pulumi.log.warn', (['"""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional."""'], {}), "(\n 'visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.'\n )\n", (138336, 138455), False, 'import pulumi\n')] |
# MIT License
#
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import random
import re
from collections import defaultdict
from toposort import toposort, CircularDependencyError
from elit.components.amr.amr_parser.data import REL
# Numeric attribute values: optional sign, digits, optional decimal part.
number_regexp = re.compile(r'^-?(\d)+(\.\d+)?$')
# Abstract-concept tokens of the form WORD_WORD_..._<digits> (all caps).
abstract_regexp0 = re.compile(r'^([A-Z]+_)+\d+$')
# Abstract numeric tokens: a single digit followed by zero or more zeros.
abstract_regexp1 = re.compile(r'^\d0*$')
# Auto-generated node names "n", "n1", "n2", ... used for empty :name values.
discard_regexp = re.compile(r'^n(\d+)?$')
# Constant attribute values (polarity / mode markers) that never get a node instance.
attr_value_set = set(['-', '+', 'interrogative', 'imperative', 'expressive'])
def _is_attr_form(x):
    """Return True if *x* is a constant attribute value: a known marker,
    a sense-tagged token ending in '_', or a numeric literal."""
    if x in attr_value_set:
        return True
    if x.endswith('_'):
        return True
    return number_regexp.match(x) is not None
def _is_abs_form(x):
    """Return True if *x* matches either abstract-concept pattern."""
    hit = abstract_regexp0.match(x)
    if hit is None:
        hit = abstract_regexp1.match(x)
    return hit is not None
def is_attr_or_abs_form(x):
    """Return True if *x* is an attribute value or an abstract concept."""
    if _is_attr_form(x):
        return True
    return _is_abs_form(x)
def need_an_instance(x):
    """A concept needs an explicit instance node unless it is a plain
    attribute value; abstract tokens matching regexp0 always need one."""
    if abstract_regexp0.match(x) is not None:
        return True
    return not _is_attr_form(x)
class AMRGraph(object):
    """Graph view of an AMR parsed with smatch.

    Stores concept nodes (name -> concept string) together with directed,
    reversed and undirected adjacency maps, and provides linearizations:
    a root-centered breadth-first order and a Levi-graph topological order.
    """
    def __init__(self, smatch_amr):
        # transform amr from original smatch format into our own data structure
        instance_triple, attribute_triple, relation_triple = smatch_amr.get_triples()
        self.root = smatch_amr.root
        self.nodes = set()
        # edges: src -> [(rel, des)]; reversed_edges: des -> [(rel, src)];
        # undirected_edges carries both directions, the reverse one with a
        # '_reverse_' suffix on the relation label.
        self.edges = dict()
        self.reversed_edges = dict()
        self.undirected_edges = dict()
        self.name2concept = dict()
        # will do some adjustments
        self.abstract_concepts = dict()
        # Register every instance triple as a node; remember abstract concepts.
        for _, name, concept in instance_triple:
            if is_attr_or_abs_form(concept):
                if _is_abs_form(concept):
                    self.abstract_concepts[name] = concept
                else:
                    # print('bad concept', _, name, concept)
                    pass
            self.name2concept[name] = concept
            self.nodes.add(name)
        # Turn attribute triples into synthetic nodes attached by an edge.
        for rel, concept, value in attribute_triple:
            if rel == 'TOP':
                continue
            # discard some empty names
            if rel == 'name' and discard_regexp.match(value):
                continue
            # abstract concept can't have an attribute
            if concept in self.abstract_concepts:
                # print(rel, self.abstract_concepts[concept], value, "abstract concept cannot have an attribute")
                continue
            # Synthetic node name is unique: value plus running node count.
            name = "%s_attr_%d" % (value, len(self.name2concept))
            if not _is_attr_form(value):
                if _is_abs_form(value):
                    self.abstract_concepts[name] = value
                else:
                    # print('bad attribute', rel, concept, value)
                    continue
            self.name2concept[name] = value
            self._add_edge(rel, concept, name)
        for rel, head, tail in relation_triple:
            self._add_edge(rel, head, tail)
        # lower concept
        # Abstract concepts keep their original casing; everything else is lowered.
        for name in self.name2concept:
            v = self.name2concept[name]
            if not _is_abs_form(v):
                v = v.lower()
            self.name2concept[name] = v

    def __len__(self):
        # Number of concept nodes (including synthetic attribute nodes).
        return len(self.name2concept)

    def _add_edge(self, rel, src, des):
        """Record a src --rel--> des edge in all three adjacency maps."""
        self.nodes.add(src)
        self.nodes.add(des)
        self.edges[src] = self.edges.get(src, []) + [(rel, des)]
        self.reversed_edges[des] = self.reversed_edges.get(des, []) + [(rel, src)]
        self.undirected_edges[src] = self.undirected_edges.get(src, []) + [(rel, des)]
        self.undirected_edges[des] = self.undirected_edges.get(des, []) + [(rel + '_reverse_', src)]

    def root_centered_sort(self, rel_order=None, shuffle=True):
        """BFS linearization from the root over the undirected graph.

        rel_order: optional callable mapping a relation label to a priority;
        shuffle: randomize sibling order (data augmentation).
        Returns (concept list, edge list of (pos_x, pos_y, rel), not_connected).
        """
        queue = [self.root]
        visited = set(queue)
        step = 0
        while len(queue) > step:
            src = queue[step]
            step += 1
            if src not in self.undirected_edges:
                continue
            if shuffle:
                random.shuffle(self.undirected_edges[src])
            if rel_order is not None:
                # Do some random thing here for performance enhancement
                # NOTE(review): with prob. 0.5 only snt*/op* relations are ordered,
                # otherwise all relations are — presumably a training-time
                # augmentation; confirm against the training recipe.
                if shuffle and random.random() < 0.5:
                    self.undirected_edges[src].sort(
                        key=lambda x: -rel_order(x[0]) if (x[0].startswith('snt') or x[0].startswith('op')) else -1)
                else:
                    self.undirected_edges[src].sort(key=lambda x: -rel_order(x[0]))
            for rel, des in self.undirected_edges[src]:
                if des in visited:
                    continue
                else:
                    queue.append(des)
                    visited.add(des)
        # Graph must be connected from the root, otherwise the assert fires.
        not_connected = len(queue) != len(self.nodes)
        assert (not not_connected)
        name2pos = dict(zip(queue, range(len(queue))))
        visited = set()
        edge = []
        # Emit each undirected edge once, flipping the label direction when
        # the partner node was already visited.
        for x in queue:
            if x not in self.undirected_edges:
                continue
            for r, y in self.undirected_edges[x]:
                if y in visited:
                    r = r[:-9] if r.endswith('_reverse_') else r + '_reverse_'
                edge.append((name2pos[x], name2pos[y], r))  # x -> y: r
            visited.add(x)
        return [self.name2concept[x] for x in queue], edge, not_connected

    def to_levi(self, rel_order=None, shuffle=True):
        """Levi-graph linearization: relations become nodes of their own.

        Each edge u --r--> v is replaced by two dependencies v <- rel <- u so
        that a topological sort yields leaves first. Returns
        (instance list, edge list of (ind_v, ind_u, '')).
        """
        dependencies = defaultdict(set)
        name2instance = dict()
        name2instance.update(self.name2concept)
        for u, rs in self.edges.items():
            for r, v in rs:
                # u --r--> v
                r = REL + r
                r_name = f'rel_{len(name2instance)}'
                name2instance[r_name] = r
                dependencies[v].add(r_name)
                dependencies[r_name].add(u)
        gs = []
        try:
            # toposort yields sets of mutually independent names, level by level.
            for g in toposort(dependencies):
                gs.append(g)
        except CircularDependencyError:
            # NOTE(review): cycles silently truncate the ordering here —
            # any remaining nodes are dropped from node_seq.
            pass
        node_seq = []
        for g in gs:
            g = list(g)
            if rel_order:
                if shuffle:
                    # Same stochastic ordering trick as root_centered_sort.
                    if random.random() < 0.5:
                        g = sorted(g, key=lambda x: -rel_order(name2instance[x]) if (
                            name2instance[x].startswith('snt') or name2instance[x].startswith('op')) else -1)
                    else:
                        random.shuffle(g)
                else:
                    g = sorted(g, key=lambda x: -rel_order(name2instance[x]))
            node_seq += g
        # ind: name -> position in the linearized sequence.
        ind = dict(map(reversed, enumerate(node_seq)))
        edge = []
        for v, us in dependencies.items():
            if v not in ind:
                continue
            for u in us:
                if u not in ind:
                    continue
                edge.append((ind[v], ind[u], ''))
        return [name2instance[x] for x in node_seq], edge
| [
"random.shuffle",
"re.compile",
"collections.defaultdict",
"random.random",
"toposort.toposort"
] | [((1288, 1322), 're.compile', 're.compile', (['"""^-?(\\\\d)+(\\\\.\\\\d+)?$"""'], {}), "('^-?(\\\\d)+(\\\\.\\\\d+)?$')\n", (1298, 1322), False, 'import re\n'), ((1340, 1370), 're.compile', 're.compile', (['"""^([A-Z]+_)+\\\\d+$"""'], {}), "('^([A-Z]+_)+\\\\d+$')\n", (1350, 1370), False, 'import re\n'), ((1390, 1411), 're.compile', 're.compile', (['"""^\\\\d0*$"""'], {}), "('^\\\\d0*$')\n", (1400, 1411), False, 'import re\n'), ((1429, 1453), 're.compile', 're.compile', (['"""^n(\\\\d+)?$"""'], {}), "('^n(\\\\d+)?$')\n", (1439, 1453), False, 'import re\n'), ((6255, 6271), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (6266, 6271), False, 'from collections import defaultdict\n'), ((6710, 6732), 'toposort.toposort', 'toposort', (['dependencies'], {}), '(dependencies)\n', (6718, 6732), False, 'from toposort import toposort, CircularDependencyError\n'), ((4856, 4898), 'random.shuffle', 'random.shuffle', (['self.undirected_edges[src]'], {}), '(self.undirected_edges[src])\n', (4870, 4898), False, 'import random\n'), ((5040, 5055), 'random.random', 'random.random', ([], {}), '()\n', (5053, 5055), False, 'import random\n'), ((6964, 6979), 'random.random', 'random.random', ([], {}), '()\n', (6977, 6979), False, 'import random\n'), ((7237, 7254), 'random.shuffle', 'random.shuffle', (['g'], {}), '(g)\n', (7251, 7254), False, 'import random\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import string
from collections import Counter
import numpy as np
import theano
import theano.tensor as T
# Token separators: all ASCII punctuation plus whitespace controls and the
# common "smart" quote characters that string.punctuation does not include.
punctuation = set(string.punctuation)
punctuation.add('\n')
punctuation.add('\t')
punctuation.add(u'’')
punctuation.add(u'‘')
punctuation.add(u'“')
punctuation.add(u'”')
punctuation.add(u'´')
# NOTE(review): adding '' makes `'' in punctuation` true — presumably
# intentional to absorb empty tokens; confirm before removing.
punctuation.add('')
def one_hot(X, n=None, negative_class=0.):
    """Encode integer labels as a (len(X), n) one-hot float matrix.

    Non-hot entries are filled with *negative_class*. When *n* is omitted
    it is inferred as max(X) + 1.
    """
    labels = np.asarray(X).flatten()
    if n is None:
        n = np.max(labels) + 1
    encoded = np.full((len(labels), n), negative_class, dtype=float)
    encoded[np.arange(len(labels)), labels] = 1.
    return encoded
def flatten(l):
    """Concatenate a list of lists into one flat list."""
    out = []
    for sub in l:
        out.extend(sub)
    return out
def lbf(l, b):
    """List-by-filter: keep l[i] wherever b[i] is truthy."""
    kept = []
    for item, keep in zip(l, b):
        if keep:
            kept.append(item)
    return kept
def list_index(l, idxs):
    """Select the elements of *l* at the given positions, in order."""
    return list(map(l.__getitem__, idxs))
def tokenize(text):
    """Split *text* into word and punctuation tokens.

    Each punctuation character becomes its own token; spaces merely
    separate words. Empty fragments produced at boundaries are dropped.
    """
    pieces = []
    current = ''
    for ch in text:
        if ch in punctuation:
            pieces.append(current)
            pieces.append(ch)
            current = ''
        elif ch == ' ':
            pieces.append(current)
            current = ''
        else:
            current += ch
    if current != '':
        pieces.append(current)
    return [tok for tok in pieces if tok]
def token_encoder(texts, max_features=9997, min_df=10):
    """Build a token -> integer-id vocabulary from tokenized *texts*.

    Tokens are ranked by document frequency (descending); tokens appearing
    in fewer than *min_df* documents are dropped and at most *max_features*
    tokens are kept. Ids start at 3 — 0/1/2 are reserved for PAD/END/UNK
    by the Tokenizer wrapper.
    """
    # Document frequency: each token counted at most once per document.
    df = Counter()
    for text in texts:
        df.update(set(text))
    # Fix: the original did np.asarray(df.values()) on a Python 3 dict view,
    # which yields a 0-d object array and breaks the `>= min_df` comparison.
    # Materialize keys/counts as real lists first.
    k = [token for token, count in df.items() if count >= min_df]
    v = np.asarray([df[token] for token in k])
    # Most frequent first, capped at max_features.
    sort_mask = np.argsort(v)[::-1]
    k = [k[i] for i in sort_mask][:max_features]
    xtoi = dict(zip(k, range(3, len(k) + 3)))
    return xtoi
def standardize_targets(Y, cost):
Y = np.asarray(Y)
ndim = len(Y.shape)
if ndim == 1:
Y = Y.reshape(-1, 1)
if Y.shape[1] == 1 and cost.__name__ == 'CategoricalCrossEntropy':
Y = one_hot(Y, negative_class=0.)
if Y.shape[1] == 1 and 'Hinge' in cost.__name__:
if len(np.unique(Y)) > 2:
Y = one_hot(Y, negative_class=-1.)
else:
Y[Y==0] -= 1
return Y
class Tokenizer(object):
    """
    For converting lists of text into tokens used by Passage models.
    max_features sets the maximum number of tokens (all others are mapped to UNK)
    min_df sets the minimum number of documents a token must appear in to not get mapped to UNK
    lowercase controls whether the text is lowercased or not
    character sets whether the tokenizer works on a character or word level
    Usage:
    >>> from passage.preprocessing import Tokenizer
    >>> example_text = ['This. is.', 'Example TEXT', 'is text']
    >>> tokenizer = Tokenizer(min_df=1, lowercase=True, character=False)
    >>> tokenized = tokenizer.fit_transform(example_text)
    >>> tokenized
    [[7, 5, 3, 5], [6, 4], [3, 4]]
    >>> tokenizer.inverse_transform(tokenized)
    ['this . is .', 'example text', 'is text']
    """
    def __init__(self, max_features=9997, min_df=10, lowercase=True, character=False):
        self.max_features = max_features
        self.min_df = min_df
        self.lowercase = lowercase
        self.character = character

    def fit(self, texts):
        """Learn the token vocabulary from *texts*; returns self."""
        if self.lowercase:
            texts = [text.lower() for text in texts]
        if self.character:
            tokens = [list(text) for text in texts]
        else:
            tokens = [tokenize(text) for text in texts]
        # Reserve 3 vocabulary slots for the special tokens added below.
        self.encoder = token_encoder(tokens, max_features=self.max_features-3, min_df=self.min_df)
        self.encoder['PAD'] = 0
        self.encoder['END'] = 1
        self.encoder['UNK'] = 2
        # decoder: id -> token, inverse of encoder.
        self.decoder = dict(zip(self.encoder.values(), self.encoder.keys()))
        self.n_features = len(self.encoder)
        return self

    def transform(self, texts):
        """Map each text to a list of integer token ids (2 == UNK)."""
        if self.lowercase:
            texts = [text.lower() for text in texts]
        if self.character:
            texts = [list(text) for text in texts]
        else:
            texts = [tokenize(text) for text in texts]
        tokens = [[self.encoder.get(token, 2) for token in text] for text in texts]
        return tokens

    def fit_transform(self, texts):
        """Convenience: fit on *texts*, then transform the same *texts*."""
        self.fit(texts)
        tokens = self.transform(texts)
        return tokens

    def inverse_transform(self, codes):
        """Map id sequences back to text (characters joined directly,
        words joined with single spaces)."""
        if self.character:
            joiner = ''
        else:
            joiner = ' '
        return [joiner.join([self.decoder[token] for token in code]) for code in codes]
class LenFilter(object):
    """Drops samples whose (first-sequence) length exceeds a cap derived
    from the given percentile of observed lengths, clipped to
    [min_max_len, max_len]. A percentile <= 0 uses max_len directly."""
    def __init__(self, max_len=1000, min_max_len=100, percentile=99):
        self.max_len = max_len
        self.percentile = percentile
        self.min_max_len = min_max_len

    def filter(self, *data):
        """Filter one or more parallel sequences by the length of data[0].

        Returns a single list when one sequence is given, otherwise a
        tuple of filtered sequences (kept in lockstep).
        """
        lens = [len(seq) for seq in data[0]]
        if self.percentile > 0:
            max_len = np.percentile(lens, self.percentile)
            max_len = np.clip(max_len, self.min_max_len, self.max_len)
        else:
            max_len = self.max_len
        valid_idxs = [i for i, l in enumerate(lens) if l <= max_len]
        if len(data) == 1:
            return list_index(data[0], valid_idxs)
        else:
            return tuple([list_index(d, valid_idxs) for d in data])
| [
"numpy.clip",
"numpy.unique",
"numpy.asarray",
"numpy.max",
"numpy.argsort",
"numpy.percentile"
] | [((1789, 1802), 'numpy.asarray', 'np.asarray', (['Y'], {}), '(Y)\n', (1799, 1802), True, 'import numpy as np\n'), ((1582, 1595), 'numpy.argsort', 'np.argsort', (['v'], {}), '(v)\n', (1592, 1595), True, 'import numpy as np\n'), ((417, 430), 'numpy.asarray', 'np.asarray', (['X'], {}), '(X)\n', (427, 430), True, 'import numpy as np\n'), ((471, 480), 'numpy.max', 'np.max', (['X'], {}), '(X)\n', (477, 480), True, 'import numpy as np\n'), ((4865, 4901), 'numpy.percentile', 'np.percentile', (['lens', 'self.percentile'], {}), '(lens, self.percentile)\n', (4878, 4901), True, 'import numpy as np\n'), ((4924, 4972), 'numpy.clip', 'np.clip', (['max_len', 'self.min_max_len', 'self.max_len'], {}), '(max_len, self.min_max_len, self.max_len)\n', (4931, 4972), True, 'import numpy as np\n'), ((2055, 2067), 'numpy.unique', 'np.unique', (['Y'], {}), '(Y)\n', (2064, 2067), True, 'import numpy as np\n')] |
import pickle
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_array_equal
import pytest
from oddt.scoring.models import classifiers, regressors
@pytest.mark.filterwarnings('ignore:Stochastic Optimizer')
@pytest.mark.parametrize('cls',
                         [classifiers.svm(probability=True),
                          classifiers.neuralnetwork(random_state=42)])
def test_classifiers(cls):
    """Each classifier must fit a trivially separable toy set perfectly,
    expose consistent probability/log-probability outputs, and survive a
    pickle round trip."""
    # toy data
    X = np.concatenate((np.zeros((5, 2)), np.ones((5, 2))))
    Y = np.concatenate((np.ones(5), np.zeros(5)))
    np.random.seed(42)
    cls.fit(X, Y)
    assert_array_equal(cls.predict(X), Y)
    assert cls.score(X, Y) == 1.0
    # Probabilities should be near-certain for this separable data.
    prob = cls.predict_proba(X)
    assert_array_almost_equal(prob, [[0, 1]] * 5 + [[1, 0]] * 5, decimal=1)
    # log-proba must agree with log of proba.
    log_prob = cls.predict_log_proba(X)
    assert_array_almost_equal(np.log(prob), log_prob)
    # Pickle round trip preserves predictions.
    pickled = pickle.dumps(cls)
    reloaded = pickle.loads(pickled)
    prob_reloaded = reloaded.predict_proba(X)
    assert_array_almost_equal(prob, prob_reloaded)
@pytest.mark.parametrize('reg',
                         [regressors.svm(C=10),
                          regressors.randomforest(random_state=42),
                          regressors.neuralnetwork(solver='lbfgs',
                                                   random_state=42,
                                                   hidden_layer_sizes=(20, 20)),
                          regressors.mlr()])
def test_regressors(reg):
    """Each regressor must fit a small linear toy problem to within one
    unit of error and survive a pickle round trip."""
    # Two linearly related features vs. targets 0..9.
    X = np.vstack((np.arange(30, 10, -2, dtype='float64'),
                   np.arange(100, 90, -1, dtype='float64'))).T
    Y = np.arange(10, dtype='float64')
    np.random.seed(42)
    reg.fit(X, Y)
    pred = reg.predict(X)
    assert (np.abs(pred.flatten() - Y) < 1).all()
    assert reg.score(X, Y) > 0.9
    # Pickle round trip preserves predictions exactly.
    pickled = pickle.dumps(reg)
    reloaded = pickle.loads(pickled)
    pred_reloaded = reloaded.predict(X)
    assert_array_almost_equal(pred, pred_reloaded)
| [
"oddt.scoring.models.regressors.randomforest",
"numpy.testing.assert_array_almost_equal",
"pytest.mark.filterwarnings",
"numpy.ones",
"oddt.scoring.models.regressors.neuralnetwork",
"pickle.dumps",
"numpy.log",
"oddt.scoring.models.classifiers.neuralnetwork",
"oddt.scoring.models.regressors.svm",
... | [((180, 237), 'pytest.mark.filterwarnings', 'pytest.mark.filterwarnings', (['"""ignore:Stochastic Optimizer"""'], {}), "('ignore:Stochastic Optimizer')\n", (206, 237), False, 'import pytest\n'), ((559, 577), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (573, 577), True, 'import numpy as np\n'), ((711, 782), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['prob', '([[0, 1]] * 5 + [[1, 0]] * 5)'], {'decimal': '(1)'}), '(prob, [[0, 1]] * 5 + [[1, 0]] * 5, decimal=1)\n', (736, 782), False, 'from numpy.testing import assert_array_almost_equal, assert_array_equal\n'), ((892, 909), 'pickle.dumps', 'pickle.dumps', (['cls'], {}), '(cls)\n', (904, 909), False, 'import pickle\n'), ((925, 946), 'pickle.loads', 'pickle.loads', (['pickled'], {}), '(pickled)\n', (937, 946), False, 'import pickle\n'), ((997, 1043), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['prob', 'prob_reloaded'], {}), '(prob, prob_reloaded)\n', (1022, 1043), False, 'from numpy.testing import assert_array_almost_equal, assert_array_equal\n'), ((1612, 1642), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': '"""float64"""'}), "(10, dtype='float64')\n", (1621, 1642), True, 'import numpy as np\n'), ((1648, 1666), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (1662, 1666), True, 'import numpy as np\n'), ((1811, 1828), 'pickle.dumps', 'pickle.dumps', (['reg'], {}), '(reg)\n', (1823, 1828), False, 'import pickle\n'), ((1844, 1865), 'pickle.loads', 'pickle.loads', (['pickled'], {}), '(pickled)\n', (1856, 1865), False, 'import pickle\n'), ((1910, 1956), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['pred', 'pred_reloaded'], {}), '(pred, pred_reloaded)\n', (1935, 1956), False, 'from numpy.testing import assert_array_almost_equal, assert_array_equal\n'), ((853, 865), 'numpy.log', 'np.log', (['prob'], {}), '(prob)\n', (859, 865), True, 'import numpy as np\n'), ((296, 329), 
'oddt.scoring.models.classifiers.svm', 'classifiers.svm', ([], {'probability': '(True)'}), '(probability=True)\n', (311, 329), False, 'from oddt.scoring.models import classifiers, regressors\n'), ((357, 399), 'oddt.scoring.models.classifiers.neuralnetwork', 'classifiers.neuralnetwork', ([], {'random_state': '(42)'}), '(random_state=42)\n', (382, 399), False, 'from oddt.scoring.models import classifiers, regressors\n'), ((1104, 1124), 'oddt.scoring.models.regressors.svm', 'regressors.svm', ([], {'C': '(10)'}), '(C=10)\n', (1118, 1124), False, 'from oddt.scoring.models import classifiers, regressors\n'), ((1152, 1192), 'oddt.scoring.models.regressors.randomforest', 'regressors.randomforest', ([], {'random_state': '(42)'}), '(random_state=42)\n', (1175, 1192), False, 'from oddt.scoring.models import classifiers, regressors\n'), ((1220, 1310), 'oddt.scoring.models.regressors.neuralnetwork', 'regressors.neuralnetwork', ([], {'solver': '"""lbfgs"""', 'random_state': '(42)', 'hidden_layer_sizes': '(20, 20)'}), "(solver='lbfgs', random_state=42,\n hidden_layer_sizes=(20, 20))\n", (1244, 1310), False, 'from oddt.scoring.models import classifiers, regressors\n'), ((1436, 1452), 'oddt.scoring.models.regressors.mlr', 'regressors.mlr', ([], {}), '()\n', (1450, 1452), False, 'from oddt.scoring.models import classifiers, regressors\n'), ((468, 484), 'numpy.zeros', 'np.zeros', (['(5, 2)'], {}), '((5, 2))\n', (476, 484), True, 'import numpy as np\n'), ((486, 501), 'numpy.ones', 'np.ones', (['(5, 2)'], {}), '((5, 2))\n', (493, 501), True, 'import numpy as np\n'), ((528, 538), 'numpy.ones', 'np.ones', (['(5)'], {}), '(5)\n', (535, 538), True, 'import numpy as np\n'), ((540, 551), 'numpy.zeros', 'np.zeros', (['(5)'], {}), '(5)\n', (548, 551), True, 'import numpy as np\n'), ((1500, 1538), 'numpy.arange', 'np.arange', (['(30)', '(10)', '(-2)'], {'dtype': '"""float64"""'}), "(30, 10, -2, dtype='float64')\n", (1509, 1538), True, 'import numpy as np\n'), ((1559, 1598), 'numpy.arange', 
'np.arange', (['(100)', '(90)', '(-1)'], {'dtype': '"""float64"""'}), "(100, 90, -1, dtype='float64')\n", (1568, 1598), True, 'import numpy as np\n')] |
from google.appengine.ext import ndb
class Collaborator(ndb.Model):
    """
    Represents collab relationship at events
    Notifications will only be sent if both the
    sender and receiver have shared with each other
    """
    # User id of the side that initiated the share.
    srcUserId = ndb.StringProperty(required=True)
    # User id of the side being shared with.
    dstUserId = ndb.StringProperty(required=True)
    # presumably set True once both directions of the share exist — verify
    # against the code that flips it.
    mutual = ndb.BooleanProperty(default=False)
    # Key of the event this relationship belongs to.
    eventKey = ndb.StringProperty(required=True)
    created = ndb.DateTimeProperty(auto_now_add=True)
    updated = ndb.DateTimeProperty(auto_now=True, indexed=False)
| [
"google.appengine.ext.ndb.DateTimeProperty",
"google.appengine.ext.ndb.BooleanProperty",
"google.appengine.ext.ndb.StringProperty"
] | [((248, 281), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)'}), '(required=True)\n', (266, 281), False, 'from google.appengine.ext import ndb\n'), ((298, 331), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)'}), '(required=True)\n', (316, 331), False, 'from google.appengine.ext import ndb\n'), ((345, 379), 'google.appengine.ext.ndb.BooleanProperty', 'ndb.BooleanProperty', ([], {'default': '(False)'}), '(default=False)\n', (364, 379), False, 'from google.appengine.ext import ndb\n'), ((395, 428), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)'}), '(required=True)\n', (413, 428), False, 'from google.appengine.ext import ndb\n'), ((444, 483), 'google.appengine.ext.ndb.DateTimeProperty', 'ndb.DateTimeProperty', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (464, 483), False, 'from google.appengine.ext import ndb\n'), ((498, 548), 'google.appengine.ext.ndb.DateTimeProperty', 'ndb.DateTimeProperty', ([], {'auto_now': '(True)', 'indexed': '(False)'}), '(auto_now=True, indexed=False)\n', (518, 548), False, 'from google.appengine.ext import ndb\n')] |
from graphene import Int
from .decorators import require_authenication
class PrimaryKeyMixin(object):
    # Exposes the model's primary key as an integer field named "pk".
    pk = Int(source='pk')
class LoginRequiredMixin(object):
    # Restricts node resolution to authenticated users.
    @classmethod
    @require_authenication(info_position=1)
    def get_node(cls, info, id):
        # `info` sits at positional index 1 for the decorator's check;
        # once authenticated, delegate to the parent resolver.
        return super(LoginRequiredMixin, cls).get_node(info, id)
| [
"graphene.Int"
] | [((114, 130), 'graphene.Int', 'Int', ([], {'source': '"""pk"""'}), "(source='pk')\n", (117, 130), False, 'from graphene import Int\n')] |
#Tests that blocks can't have multiple verification packets for the same transaction.
from typing import Dict, Any
import json
from pytest import raises
from e2e.Libs.Minisketch import Sketch
from e2e.Classes.Transactions.Data import Data
from e2e.Classes.Consensus.VerificationPacket import VerificationPacket
from e2e.Classes.Merit.Blockchain import Block, Blockchain
from e2e.Meros.Meros import MessageType
from e2e.Meros.RPC import RPC
from e2e.Meros.Liver import Liver
from e2e.Tests.Errors import TestError, SuccessError
def MultiplePacketsTest(
  rpc: RPC
) -> None:
  """Verify Meros rejects a Block containing multiple Verification
  packets for the same Transaction (vectors from MultiplePackets.json)."""
  #Spawn a Blockchain just to set the RandomX key.
  _: Blockchain = Blockchain()
  vectors: Dict[str, Any]
  with open("e2e/Vectors/Merit/MultiplePackets.json", "r") as file:
    vectors = json.loads(file.read())
  data: Data = Data.fromJSON(vectors["data"])
  block: Block = Block.fromJSON(vectors["blockchain"][-1])
  def sendDataAndBlock() -> None:
    #Send the Data.
    if rpc.meros.liveTransaction(data) != rpc.meros.live.recv():
      raise TestError("Meros didn't send back the Data.")
    #Send the malformed Block and serve its body on request.
    rpc.meros.liveBlockHeader(block.header)
    rpc.meros.handleBlockBody(block)
    msg: bytes = rpc.meros.sync.recv()
    if MessageType(msg[0]) != MessageType.SketchHashRequests:
      raise TestError("Meros didn't request the packets for this Block.")
    #Index our packets by their sketch hash so we can answer requests.
    packets: Dict[int, VerificationPacket] = {}
    for packet in block.body.packets:
      packets[Sketch.hash(block.header.sketchSalt, packet)] = packet
    #Look up each requested packet and respond accordingly.
    #Bytes 33-36 hold the request count; each hash is 8 little-endian bytes.
    for h in range(int.from_bytes(msg[33 : 37], byteorder="little")):
      sketchHash: int = int.from_bytes(msg[37 + (h * 8) : 45 + (h * 8)], byteorder="little")
      if sketchHash not in packets:
        raise TestError("Meros asked for a non-existent Sketch Hash.")
      rpc.meros.packet(packets[sketchHash])
    #If Meros broadcasts the header, it wrongly accepted the Block; if the
    #recv fails instead (any other exception), Meros rejected it — success.
    #NOTE(review): a raised TestError with the exact message is caught and
    #not re-raised here; the test then fails via the raises() check below.
    try:
      if MessageType(rpc.meros.live.recv()[0]) == MessageType.BlockHeader:
        raise TestError("Meros added the Block.")
    except Exception as e:
      if str(e) != "Meros added the Block.":
        raise SuccessError()
  with raises(SuccessError):
    Liver(
      rpc,
      vectors["blockchain"],
      callbacks={
        2: sendDataAndBlock
      }
    ).live()
| [
"e2e.Meros.Liver.Liver",
"e2e.Libs.Minisketch.Sketch.hash",
"e2e.Tests.Errors.SuccessError",
"e2e.Classes.Merit.Blockchain.Blockchain",
"e2e.Classes.Merit.Blockchain.Block.fromJSON",
"pytest.raises",
"e2e.Classes.Transactions.Data.Data.fromJSON",
"e2e.Tests.Errors.TestError",
"e2e.Meros.Meros.Messag... | [((649, 661), 'e2e.Classes.Merit.Blockchain.Blockchain', 'Blockchain', ([], {}), '()\n', (659, 661), False, 'from e2e.Classes.Merit.Blockchain import Block, Blockchain\n'), ((811, 841), 'e2e.Classes.Transactions.Data.Data.fromJSON', 'Data.fromJSON', (["vectors['data']"], {}), "(vectors['data'])\n", (824, 841), False, 'from e2e.Classes.Transactions.Data import Data\n'), ((859, 900), 'e2e.Classes.Merit.Blockchain.Block.fromJSON', 'Block.fromJSON', (["vectors['blockchain'][-1]"], {}), "(vectors['blockchain'][-1])\n", (873, 900), False, 'from e2e.Classes.Merit.Blockchain import Block, Blockchain\n'), ((2111, 2131), 'pytest.raises', 'raises', (['SuccessError'], {}), '(SuccessError)\n', (2117, 2131), False, 'from pytest import raises\n'), ((1033, 1078), 'e2e.Tests.Errors.TestError', 'TestError', (['"""Meros didn\'t send back the Data."""'], {}), '("Meros didn\'t send back the Data.")\n', (1042, 1078), False, 'from e2e.Tests.Errors import TestError, SuccessError\n'), ((1207, 1226), 'e2e.Meros.Meros.MessageType', 'MessageType', (['msg[0]'], {}), '(msg[0])\n', (1218, 1226), False, 'from e2e.Meros.Meros import MessageType\n'), ((1274, 1335), 'e2e.Tests.Errors.TestError', 'TestError', (['"""Meros didn\'t request the packets for this Block."""'], {}), '("Meros didn\'t request the packets for this Block.")\n', (1283, 1335), False, 'from e2e.Tests.Errors import TestError, SuccessError\n'), ((1437, 1481), 'e2e.Libs.Minisketch.Sketch.hash', 'Sketch.hash', (['block.header.sketchSalt', 'packet'], {}), '(block.header.sketchSalt, packet)\n', (1448, 1481), False, 'from e2e.Libs.Minisketch import Sketch\n'), ((1766, 1822), 'e2e.Tests.Errors.TestError', 'TestError', (['"""Meros asked for a non-existent Sketch Hash."""'], {}), "('Meros asked for a non-existent Sketch Hash.')\n", (1775, 1822), False, 'from e2e.Tests.Errors import TestError, SuccessError\n'), ((1966, 2001), 'e2e.Tests.Errors.TestError', 'TestError', (['"""Meros added the Block."""'], {}), 
"('Meros added the Block.')\n", (1975, 2001), False, 'from e2e.Tests.Errors import TestError, SuccessError\n'), ((2137, 2205), 'e2e.Meros.Liver.Liver', 'Liver', (['rpc', "vectors['blockchain']"], {'callbacks': '{(2): sendDataAndBlock}'}), "(rpc, vectors['blockchain'], callbacks={(2): sendDataAndBlock})\n", (2142, 2205), False, 'from e2e.Meros.Liver import Liver\n'), ((2088, 2102), 'e2e.Tests.Errors.SuccessError', 'SuccessError', ([], {}), '()\n', (2100, 2102), False, 'from e2e.Tests.Errors import TestError, SuccessError\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2018年4月17日 @author: encodingl
'''
from django.shortcuts import render
#from dwebsocket.decorators import accept_websocket, require_websocket
from django.http import HttpResponse
import paramiko
from django.contrib.auth.decorators import login_required
from skaccounts.permission import permission_verify
import subprocess
from django.shortcuts import render
from django.template import RequestContext
from channels.layers import get_channel_layer
from asgiref.sync import async_to_sync
def web_send(group_name, msg):
    """Broadcast *msg* to every consumer subscribed to the channel-layer
    group *group_name*.

    ``channel_layer.group_send`` is a coroutine, so it is wrapped with
    ``async_to_sync`` to be callable from synchronous Django code.
    (Leftover numeric debug prints were removed.)
    """
    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(
        group_name,
        {
            # 'type' routes the event to the consumer's
            # show_in_windows() handler method.
            'type': 'show_in_windows',
            'message': msg,
        }
    )
class TestWebSend():
    """Small helper that stores a target group and a message, and pushes the
    message through :func:`web_send` on demand.

    (Leftover debug prints in the constructor and sendmsg were removed.)
    """

    def __init__(self, group_name, msg):
        # Message payload and target channel-layer group name.
        self.msg = msg
        self.group_name = group_name

    def sendmsg(self):
        """Broadcast the stored message to the stored group."""
        web_send(self.group_name, self.msg)
@login_required()
@permission_verify()
def websocket_index(request):
    """Render the websocket demo page (login and permission required)."""
    # Header template name; exposed to the template through locals().
    temp_name = "skworkorders/skworkorders-header.html"
    return render(request,'skworkorders/websocket.html', locals())
def exec_command(comm, hostname='172.28.28.127', username='root', password='<PASSWORD>'):
    """Run *comm* on a remote host over SSH and return its stdout.

    Parameters
    ----------
    comm : str
        Shell command to execute remotely.
    hostname, username, password : str, optional
        Connection settings.  Defaults keep the historical hard-coded
        values for backward compatibility.
        NOTE(security): credentials should come from configuration or a
        secrets store, not source code.

    Returns
    -------
    bytes
        Raw stdout of the remote command.
    """
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(hostname=hostname, username=username, password=password)
        stdin, stdout, stderr = ssh.exec_command(comm)
        result = stdout.read()
    finally:
        # Always release the connection, even if connect/exec raises
        # (the original leaked the client on any exception).
        ssh.close()
    return result
#@accept_websocket
def echo(request):
    """Execute shell commands streamed over a websocket.

    Plain HTTP GET requests simply echo the ``message`` parameter (or fall
    back to the websocket demo page).  Websocket messages are run as shell
    commands and every output line is pushed back to the client, followed
    by a Chinese success/failure status string.
    """
    temp_name = "skworkorders/skworkorders-header.html"
    if not request.is_websocket():  # not a websocket connection -> plain HTTP
        try:  # ordinary HTTP request: echo the GET parameter back
            message = request.GET['message']
            return HttpResponse(message)
        except:
            return render(request,'skworkorders/websocket.html', locals())
    else:
        for message in request.websocket:
            cmd = message
            print(cmd)
            # request.websocket.send(exec_command(cmd))
            # NOTE(security): the raw websocket payload is executed with
            # shell=True -- arbitrary command execution for any client that
            # can open this socket.
            pcmd = subprocess.Popen(cmd,stdout=subprocess.PIPE,stderr=subprocess.STDOUT,shell=True)
            while True:
                line = pcmd.stdout.readline().strip() # read one output line
                print(line)
                if line:
                    request.websocket.send(line)
                else:
                    break
            retcode=pcmd.wait()
            if retcode==0:
                ret_message="执行成功"
            else:
                ret_message="执行失败"
            request.websocket.send(ret_message)  # final status message to the client
# request.websocket.send(exec_command(cmd))#发送消息到客户端 | [
"skaccounts.permission.permission_verify",
"paramiko.AutoAddPolicy",
"django.http.HttpResponse",
"subprocess.Popen",
"django.contrib.auth.decorators.login_required",
"channels.layers.get_channel_layer",
"paramiko.SSHClient",
"asgiref.sync.async_to_sync"
] | [((1162, 1178), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {}), '()\n', (1176, 1178), False, 'from django.contrib.auth.decorators import login_required\n'), ((1180, 1199), 'skaccounts.permission.permission_verify', 'permission_verify', ([], {}), '()\n', (1197, 1199), False, 'from skaccounts.permission import permission_verify\n'), ((622, 641), 'channels.layers.get_channel_layer', 'get_channel_layer', ([], {}), '()\n', (639, 641), False, 'from channels.layers import get_channel_layer\n'), ((1477, 1497), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (1495, 1497), False, 'import paramiko\n'), ((684, 723), 'asgiref.sync.async_to_sync', 'async_to_sync', (['channel_layer.group_send'], {}), '(channel_layer.group_send)\n', (697, 723), False, 'from asgiref.sync import async_to_sync\n'), ((1534, 1558), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (1556, 1558), False, 'import paramiko\n'), ((1992, 2013), 'django.http.HttpResponse', 'HttpResponse', (['message'], {}), '(message)\n', (2004, 2013), False, 'from django.http import HttpResponse\n'), ((2289, 2376), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'shell': '(True)'}), '(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,\n shell=True)\n', (2305, 2376), False, 'import subprocess\n')] |
from __future__ import print_function
from django.shortcuts import render
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.contrib.auth import get_user_model
import os
from django.core.mail import send_mail
import nltk
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk.stem import PorterStemmer
from nltk.corpus import stopwords
import string
from .models import Meeting, MeetingAttendee, Team, upload_audio_path,get_filename_ext
from .FrequencySummarizer import FrequencySummarizer
import json,io
from os.path import join, dirname
from watson_developer_cloud import SpeechToTextV1
from watson_developer_cloud.websocket import RecognizeCallback, AudioSource
import threading
import math
from django.template import Template, Context
from django.http import HttpResponse
# Make it work for Python 2+3 and with Unicode
try:
    to_unicode = unicode  # Python 2: the builtin unicode type
except NameError:
    to_unicode = str  # Python 3: str is already unicode
# Keyword lists used by nltk() to score each decision per team:
# index 0 -> frontend, 1 -> backend, 2 -> management.
keywords=[['frontend','front-end','responsive','color','theme','scheme','CSS','HTML','JS','javascript'],#frontend
          ['script','backend','back-end','database','query','object','script','python'],#backend
          ['people','business','analyse']]#management
def sttxt(request,filename,textfilepath,textfilename):
    """Transcribe an audio file with IBM Watson Speech-to-Text, split the
    transcript by speaker, write it to
    ``<textfilepath>/transcripts/<textfilename>/<textfilename>.txt``,
    summarize it, extract decisions, stash everything in the session and
    render the transcript page.

    NOTE(security): Watson credentials are hard-coded below -- they should
    come from configuration.
    """
    kl = []  # accumulates "Speaker N: ..." lines for the template/session
    service = SpeechToTextV1(
        username='80a593b1-5a21-4ea4-adb1-e7218fb5a9fa',
        password='<PASSWORD>',
        url='https://stream.watsonplatform.net/speech-to-text/api')
    models = service.list_models().get_result()
    #print(json.dumps(models, indent=2))
    model = service.get_model('en-US_NarrowbandModel').get_result()
    #print(json.dumps(model, indent=2))
    # with open(join(dirname(__file__), filename),'rb') as audio_file:
    print(filename)
    # Recognize the audio with speaker diarization and dump the raw Watson
    # response to data.json (working file in the current directory).
    with open(filename,'rb') as audio_file:
        with io.open('data.json', 'w', encoding='utf8') as outfile:
            str_ = json.dumps(service.recognize(audio=audio_file,content_type='audio/mp3',speaker_labels=True).get_result(),indent=2)
            outfile.write(to_unicode(str_))
            outfile.close()
    # Read JSON file
    with open('data.json') as data_file:
        data_loaded = json.load(data_file)
    spea = []  # words attributed to the current speaker
    l=0        # id of the speaker currently being collected
    # Walk the speaker segments; while the speaker stays the same, collect
    # the words whose timestamps fall inside the segment.  When the speaker
    # changes, flush the collected words as one "Speaker N: ..." line.
    for i in data_loaded['speaker_labels']:
        temp = ""
        if l == int(i['speaker']):
            for z in range(math.floor(i['from']),math.ceil(i['to'])):
                for v in data_loaded['results']:
                    for m in v['alternatives']:
                        for n in m['timestamps']:
                            # n is [word, start_time, end_time]
                            if n[1] >= i['from'] and n[2] <= i['to']:
                                # NOTE(review): `is not` compares identity,
                                # not equality -- equal-but-distinct strings
                                # pass this check; verify intent.
                                if temp is not n[0]:
                                    spea.append(n[0])
                                    temp = n[0]
            #print(spea)
        else:
            # Speaker changed: append the accumulated utterance to the
            # transcript file and start collecting for the new speaker.
            str1 = ' '.join(spea)
            print(textfilepath+'transcripts/'+textfilename+'/'+textfilename+".txt")
            with io.open(textfilepath+'transcripts/'+textfilename+'/'+textfilename+".txt", 'a', encoding='utf8') as outfile:
            # print("Speaker "+str(l)+": "+str1+"\n")
                str_ = outfile.write(" Speaker "+str(l)+": "+str1+"\n")
            kl.append("Speaker "+str(l)+": "+str1+"\n")
            outfile.close()
            l = i['speaker']
            del spea[0:len(spea)-1]
    # Flush the final speaker's words.
    str1 = ' '.join(spea)
    with io.open(textfilepath+'transcripts/'+textfilename+'/'+textfilename+".txt", 'a', encoding='utf8') as outfile:
    # print("Speaker "+str(l)+": "+str1+"\n")
        str_ = outfile.write(" Speaker "+str(l)+": "+str1+"\n")
    kl.append("Speaker "+str(l)+": "+str1+"\n")
    outfile.close()
    # Summarize the transcript and extract team decisions from it.
    u = summary_function(textfilepath+'transcripts/'+textfilename+'/'+textfilename+".txt")
    print('vvvvvvvvvvvvvvvvvvv summarize VVVVVVVVVVVVVVVv')
    print(u)
    print('------------------- decisions ------------------------------------')
    decision=nltk(textfilepath+'transcripts/'+textfilename+'/'+textfilename+".txt")
    print(decision)
    # Persist results in the session so transcript() can re-render them.
    request.session['summ'] = u
    request.session['trans1'] = kl
    request.session['deci'] = decision
    context={
        'summarize':u,
        'trans':kl,
    }
    return render(request,'Analyse/transcript.html',context)
    #return render(request,'Analyse/transcript.html',context)
def transcript(request):
    """Render the transcript page from values stashed in the session by sttxt()."""
    session = request.session
    return render(
        request,
        'Analyse/transcript.html',
        {
            'summarize': session['summ'],
            'trans': session['trans1'],
            'deci': session['deci'],
        },
    )
def summary_function(textfilepathfinal):
    """Read the transcript file at *textfilepathfinal* and return a
    two-sentence frequency-based summary of it."""
    with open(textfilepathfinal, 'r') as transcript_file:
        contents = transcript_file.read()
    flattened = contents.replace('\n', '')
    return FrequencySummarizer().summarize(str(flattened), 2)
def nltk(textfilepathfinal):
    """Extract "decisions" from a transcript file and assign each one to a
    team via keyword stemming.

    A decision is the text between a spoken "decide" and "decision end".
    Each decision's words are stemmed and matched against the module-level
    ``keywords`` lists; the team with the highest score is notified.

    NOTE: this function shadows the imported ``nltk`` module name.
    NOTE(review): ``scores`` is never reset between decisions, so later
    decisions inherit earlier counts -- confirm whether that is intended.

    Returns
    -------
    list
        Flat list alternating [decision_string, team_name, ...].
    """
# def nltk(request):
    # BASE_DIR / MEDIA_ROOT are computed but unused in this function.
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn", "media_root")
    with open(textfilepathfinal, 'r') as myfile:
        text=myfile.read().replace('\n','')
    # text="Decide, the frontend team needs to make the website mobile reponsive decision end"
    print(text)
    datas=word_tokenize(text)
    decision_string = str('')
    decision=[]
    frontend_score=0
    backend_score=0
    management_score=0
    scores=[['Front-End Team', 0],
        ['Back-End Team', 0],
        ['Management Team', 0]]
    flag=False # to see if 'Decide' word was said
    ps=PorterStemmer() # variable for stemming
    final_decisions=[]
    # final_decisions=[[0 for x in range(100)] for y in range(100)]
    z=0
    for i in range(len(datas)):
        # print(datas[i]+","+str(flag))
        if datas[i].lower() == 'decide':
            flag=True
        # "decision end" terminates the current decision and scores it.
        if flag==True and datas[i].lower() == 'decision' and datas[i+1].lower() == "end":
            # print("hie")
            flag=False
            decision_string=decision_string.strip(' ')
            print(decision_string)
            # now doing the keyword matching using stemming
            decision=word_tokenize(decision_string)
            print(decision)
            for j in range(len(decision)):
                if decision[j] not in string.punctuation:
                    # stemmed_word=ps.stem(decision[j])
                    # print(stemmed_word)
                    # now checking if the stemmed word is in any of the keywords ka list and appropriately assigning scores
                    for x in range(len(keywords)):
                        for y in range(len(keywords[x])):
                            # print(str(x)+","+str(y))
                            if ps.stem(decision[j]).lower() == ps.stem(keywords[x][y]) :
                                scores[x][1] = scores[x][1]+1
            print(scores)
            # The team with the highest score is the one to notify.
            score=[]
            score.append(scores[0][1])
            score.append(scores[1][1])
            score.append(scores[2][1])
            notify=score.index(max(score))
            notify_team=scores[notify][0]
            # final_decisions[z][0]=decision_string
            # final_decisions[z][1]=notify_team
            final_decisions.append(decision_string)
            final_decisions.append(notify_team)
            z=z+1
            print(notify_team)
            decision_string=str('')
        # While inside a decision, accumulate tokens -- skipping speaker
        # labels ("Speaker N:"-style artifacts) and repeated tokens.
        if flag==True and datas[i].lower() != 'speaker' and i!=0:
            # i=i+1
            if datas[i] in string.punctuation:
                # if not any(p in datas[i] for p in string.punctuation):
                # print(datas[i])
                if datas[i] == ":" and datas[i-1].isdigit():
                    print("in")
                else:
                    decision_string = decision_string + datas[i]
            else:
                if (datas[i].isdigit() and datas[i+1]== ":") or (i < len(datas) and datas[i] == datas[i+1]):
                    print("in")
                else:
                    decision_string = decision_string + ' ' + datas[i]
    context={
        'datas':'hello'
    }
    # return render(request, "Analyse/nltk.html", context)
    # print(final_decisions)
    return final_decisions
User=get_user_model()
def handle_uploaded_file(file, filename, foldername):
    """Save an uploaded audio file under the media root and ensure the
    per-recording transcript folder exists.

    Parameters
    ----------
    file : UploadedFile
        Django uploaded file; streamed to disk chunk by chunk so large
        uploads are never held in memory at once.
    filename : str
        File name (relative to MEDIA_ROOT) for the audio blob.
    foldername : str
        Transcript sub-folder to create under MEDIA_ROOT/transcripts/.
    """
    base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    media_root = os.path.join(os.path.dirname(base_dir), "static_cdn", "media_root")
    transcript_dir = os.path.join(media_root, 'transcripts', foldername)
    # makedirs creates missing parents (e.g. transcripts/ itself) and is
    # race-safe, unlike the previous exists()+mkdir() pair which raised
    # FileNotFoundError when the parent directory was absent.
    os.makedirs(transcript_dir, exist_ok=True)
    with open(os.path.join(media_root, filename), 'wb+') as destination:
        for chunk in file.chunks():
            destination.write(chunk)
def meeting(request, *args, **kwargs):
    """Meetings overview view.

    On POST: stores the uploaded recording on disk, attaches its path to a
    Meeting, and kicks off speech-to-text processing via sttxt().
    Always: renders the meetings page with the attendees of all meetings
    conducted by the current user.
    """
    print("hi")
    if request.method == "POST":
        print("haha")
        print(request.FILES['recording'])
        # Build the on-disk path/name for the uploaded recording.
        recording=upload_audio_path(request,str(request.FILES['recording']))
        print(recording)
        folder_name, ext=get_filename_ext(recording)
        print(folder_name)
        handle_uploaded_file(request.FILES['recording'], recording, folder_name)
        BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn", "media_root")
        filepath=MEDIA_ROOT+'/'+recording
        newfilepath=MEDIA_ROOT+'/'
        print(filepath)
        # NOTE(review): the recording is always attached to Meeting id=1 --
        # this hard-coded id looks like a placeholder; confirm.
        m=Meeting.objects.get(id=1)
        m.recording = filepath # change field
        m.save() # this will update only
        # Transcribe, summarize and extract decisions from the recording.
        sttxt(request, filepath,newfilepath,folder_name)
    print("hagre")
    user=request.user
    meeting=Meeting.objects.filter(conductor=request.user)
    # print(meeting)
    users=User.objects.exclude(username=request.user.username)
    # print(users)
    # Collect the attendees of every meeting conducted by this user.
    ma=[]
    for i in meeting:
        meetatten=MeetingAttendee.objects.filter(meeting=i)
        for j in meetatten:
            ma.append(j)
    # print(ma)
    context={
        'datas':'hello',
        'meetatten':ma,
    }
    return render(request, "Analyse/meetings.html", context)
def calenda(request):
if method == 'POST':
agenda = request.POST['agenda']
print(agenda) | [
"django.shortcuts.render",
"django.contrib.auth.get_user_model",
"os.path.exists",
"math.ceil",
"nltk",
"math.floor",
"watson_developer_cloud.SpeechToTextV1",
"nltk.stem.PorterStemmer",
"io.open",
"nltk.tokenize.word_tokenize",
"os.path.dirname",
"os.mkdir",
"json.load",
"os.path.abspath"
... | [((7613, 7629), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (7627, 7629), False, 'from django.contrib.auth import get_user_model\n'), ((1278, 1429), 'watson_developer_cloud.SpeechToTextV1', 'SpeechToTextV1', ([], {'username': '"""80a593b1-5a21-4ea4-adb1-e7218fb5a9fa"""', 'password': '"""<PASSWORD>"""', 'url': '"""https://stream.watsonplatform.net/speech-to-text/api"""'}), "(username='80a593b1-5a21-4ea4-adb1-e7218fb5a9fa', password=\n '<PASSWORD>', url='https://stream.watsonplatform.net/speech-to-text/api')\n", (1292, 1429), False, 'from watson_developer_cloud import SpeechToTextV1\n'), ((4028, 4113), 'nltk', 'nltk', (["(textfilepath + 'transcripts/' + textfilename + '/' + textfilename + '.txt')"], {}), "(textfilepath + 'transcripts/' + textfilename + '/' + textfilename + '.txt'\n )\n", (4032, 4113), False, 'import nltk\n'), ((4297, 4348), 'django.shortcuts.render', 'render', (['request', '"""Analyse/transcript.html"""', 'context'], {}), "(request, 'Analyse/transcript.html', context)\n", (4303, 4348), False, 'from django.shortcuts import render\n'), ((4591, 4642), 'django.shortcuts.render', 'render', (['request', '"""Analyse/transcript.html"""', 'context'], {}), "(request, 'Analyse/transcript.html', context)\n", (4597, 4642), False, 'from django.shortcuts import render\n'), ((5247, 5266), 'nltk.tokenize.word_tokenize', 'word_tokenize', (['text'], {}), '(text)\n', (5260, 5266), False, 'from nltk.tokenize import sent_tokenize, word_tokenize\n'), ((5503, 5518), 'nltk.stem.PorterStemmer', 'PorterStemmer', ([], {}), '()\n', (5516, 5518), False, 'from nltk.stem import PorterStemmer\n'), ((9333, 9382), 'django.shortcuts.render', 'render', (['request', '"""Analyse/meetings.html"""', 'context'], {}), "(request, 'Analyse/meetings.html', context)\n", (9339, 9382), False, 'from django.shortcuts import render\n'), ((2149, 2169), 'json.load', 'json.load', (['data_file'], {}), '(data_file)\n', (2158, 2169), False, 'import json, io\n'), ((3477, 
3586), 'io.open', 'io.open', (["(textfilepath + 'transcripts/' + textfilename + '/' + textfilename + '.txt')", '"""a"""'], {'encoding': '"""utf8"""'}), "(textfilepath + 'transcripts/' + textfilename + '/' + textfilename +\n '.txt', 'a', encoding='utf8')\n", (3484, 3586), False, 'import json, io\n'), ((4991, 5016), 'os.path.dirname', 'os.path.dirname', (['BASE_DIR'], {}), '(BASE_DIR)\n', (5006, 5016), False, 'import os\n'), ((7838, 7863), 'os.path.dirname', 'os.path.dirname', (['BASE_DIR'], {}), '(BASE_DIR)\n', (7853, 7863), False, 'import os\n'), ((7959, 7985), 'os.path.exists', 'os.path.exists', (['foldername'], {}), '(foldername)\n', (7973, 7985), False, 'import os\n'), ((8017, 8037), 'os.mkdir', 'os.mkdir', (['foldername'], {}), '(foldername)\n', (8025, 8037), False, 'import os\n'), ((1797, 1839), 'io.open', 'io.open', (['"""data.json"""', '"""w"""'], {'encoding': '"""utf8"""'}), "('data.json', 'w', encoding='utf8')\n", (1804, 1839), False, 'import json, io\n'), ((4936, 4961), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (4951, 4961), False, 'import os\n'), ((6001, 6031), 'nltk.tokenize.word_tokenize', 'word_tokenize', (['decision_string'], {}), '(decision_string)\n', (6014, 6031), False, 'from nltk.tokenize import sent_tokenize, word_tokenize\n'), ((7780, 7805), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (7795, 7805), False, 'import os\n'), ((8650, 8675), 'os.path.dirname', 'os.path.dirname', (['BASE_DIR'], {}), '(BASE_DIR)\n', (8665, 8675), False, 'import os\n'), ((2317, 2338), 'math.floor', 'math.floor', (["i['from']"], {}), "(i['from'])\n", (2327, 2338), False, 'import math\n'), ((2339, 2357), 'math.ceil', 'math.ceil', (["i['to']"], {}), "(i['to'])\n", (2348, 2357), False, 'import math\n'), ((3018, 3127), 'io.open', 'io.open', (["(textfilepath + 'transcripts/' + textfilename + '/' + textfilename + '.txt')", '"""a"""'], {'encoding': '"""utf8"""'}), "(textfilepath + 'transcripts/' + textfilename + '/' 
+ textfilename +\n '.txt', 'a', encoding='utf8')\n", (3025, 3127), False, 'import json, io\n'), ((8594, 8619), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (8609, 8619), False, 'import os\n')] |
"""
Test the fits-module by loading a dumped rtfits result and performing
all actions again
"""
import unittest
import numpy as np
import cloudpickle
import matplotlib.pyplot as plt
import copy
import os
class TestDUMPS(unittest.TestCase):
    """Load previously dumped rtfits results (dB and linear sig0 variants)
    and re-run fitting plus every interactive plot method against them."""

    def setUp(self):
        # Paths of the two dumped fit objects shipped next to this test file.
        self.sig0_dB_path = os.path.dirname(__file__) + os.sep + "sig0_dB.dump"
        self.sig0_linear_path = os.path.dirname(__file__) + os.sep + "sig0_linear.dump"

    def load_data(self, path):
        """Deserialize and return a dumped fit object from *path*."""
        with open(path, 'rb') as file:
            fit = cloudpickle.load(file)
        return fit

    # self.assertTrue(
    #     err < errdict[key],
    #     msg='derived error' + str(err) + 'too high for ' + str(key))

    def test_rtplots(self):
        """Exercise every public method of fit.plot for both dumped fits."""
        for path, msg in zip([self.sig0_dB_path, self.sig0_linear_path],
                             ['dB', 'linear']):
            print(f'testing plotfunctions for {msg} fit')
            fit = self.load_data(path)
            # call performfit to re-initialize _fnevals functions
            # and evaluate intermediate results
            # (they might have been removed if symeninge has been used)
            fit.lsq_kwargs['verbose'] = 0
            fit.performfit(intermediate_results=True,
                           print_progress=True)
            # get list of available plot-methods
            method_list = [func for func in dir(fit.plot) if
                           callable(getattr(fit.plot, func)) and not func.startswith("__")]
            for function_name in method_list:
                print(f'... {function_name}')
                if function_name == 'printsig0analysis':
                    # check 'dataset' index slider
                    f, s1, s2 = fit.plot.__getattribute__(function_name)(
                        range2=2, range1=1, use_index='dataset')
                    # check update functions
                    s1.set_val(1)
                    s2.set_val(1)
                    plt.close(f)
                    # check 'groups' index slider
                    f, s1, s2 = fit.plot.__getattribute__(function_name)(
                        range2=2, range1=1, use_index='groups')
                    # check update functions
                    s1.set_val(1)
                    s2.set_val(1)
                    plt.close(f)
                elif function_name == 'analyzemodel':
                    f, sliders, txt_but = fit.plot.__getattribute__(
                        function_name)()
                    # check update functions
                    for key, s in sliders.items():
                        s.set_val((s.valmax - s.valmin)/2.)
                    for key, b in txt_but.items():
                        if key == 'buttons':
                            # the initial status is ALL OFF
                            stat = b.get_status()
                            for i in range(len(stat)):
                                b.set_active(i)
                            # now all should be ON
                            self.assertTrue(np.all(b.get_status()))
                            for i in range(len(stat)):
                                b.set_active(i)
                            # now all should be OFF again
                            # (~ on a numpy bool acts as logical not)
                            self.assertTrue(~np.all(b.get_status()))
                        else:
                            # set the boundaries of the parameters
                            if 'min' in key:
                                b.set_val(0.02)
                            if 'max' in key:
                                b.set_val(0.99)
                    plt.close(f)
                elif function_name == 'intermediate_residuals':
                    # check default (e.g. pandas datetime-offset)
                    f = fit.plot.__getattribute__(function_name)(fmt='%d.%b %Y')
                    plt.close(f)
                    # check grouping with respect to incidence angles and
                    # convert the labels to degrees
                    f = fit.plot.__getattribute__(function_name)(
                        grp=('inc', 10),
                        label_formatter=lambda x,y:round(np.rad2deg(x),2))
                    plt.close(f)
                    # check grouping with respect to datetimes
                    f = fit.plot.__getattribute__(function_name)(grp='groups')
                    plt.close(f)
                    # check grouping with respect to the dataset index
                    f = fit.plot.__getattribute__(function_name)(
                        grp='dataset', plottype='2D', fmt='%Y %b %d (%H:%M)')
                    plt.close(f)
                else:
                    # remaining plot methods are called with defaults only
                    f = fit.plot.__getattribute__(function_name)()
                    plt.close(f)

    def test_performfit(self):
        """Re-run performfit and check the results match the dumped ones."""
        for path, msg in zip([self.sig0_dB_path, self.sig0_linear_path],
                             ['dB', 'linear']):
            print(f'testing plotfunctions for {msg} fit')
            fit = self.load_data(path)
            old_results = fit.res_dict
            # print model definition
            fit.model_definition
            print('testing performfit')
            fit.lsq_kwargs['verbose'] = 0
            fit.performfit(intermediate_results=True,
                           print_progress=True)
            # call _cache_info() to make coveralls happy
            fit._cache_info()
            fit.R._cache_info()
            # try to dump the file again (without fit-details)
            fit.dump(os.path.join(os.path.dirname(__file__), 'testdump1.dump'),
                     mini=True)
            # try to dump the file again (with fit-details)
            fit.dump(os.path.join(os.path.dirname(__file__), 'testdump2.dump'),
                     mini=False)
            # re-fitted parameters must agree with the dumped reference
            for key, val in old_results.items():
                self.assertTrue(np.allclose(fit.res_dict[key],
                                            old_results[key], atol=1e-4, rtol=1e-4),
                                msg=f'fitted values for {msg} fit of {key} ' +
                                    f'differ by {np.subtract(fit.res_dict[key], old_results[key]).mean()}')
if __name__ == "__main__":
unittest.main() | [
"cloudpickle.load",
"numpy.allclose",
"numpy.subtract",
"matplotlib.pyplot.close",
"os.path.dirname",
"unittest.main",
"numpy.rad2deg"
] | [((6174, 6189), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6187, 6189), False, 'import unittest\n'), ((521, 543), 'cloudpickle.load', 'cloudpickle.load', (['file'], {}), '(file)\n', (537, 543), False, 'import cloudpickle\n'), ((292, 317), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (307, 317), False, 'import os\n'), ((376, 401), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (391, 401), False, 'import os\n'), ((1958, 1970), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), '(f)\n', (1967, 1970), True, 'import matplotlib.pyplot as plt\n'), ((2297, 2309), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), '(f)\n', (2306, 2309), True, 'import matplotlib.pyplot as plt\n'), ((5503, 5528), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (5518, 5528), False, 'import os\n'), ((5675, 5700), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (5690, 5700), False, 'import os\n'), ((5836, 5910), 'numpy.allclose', 'np.allclose', (['fit.res_dict[key]', 'old_results[key]'], {'atol': '(0.0001)', 'rtol': '(0.0001)'}), '(fit.res_dict[key], old_results[key], atol=0.0001, rtol=0.0001)\n', (5847, 5910), True, 'import numpy as np\n'), ((3595, 3607), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), '(f)\n', (3604, 3607), True, 'import matplotlib.pyplot as plt\n'), ((3840, 3852), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), '(f)\n', (3849, 3852), True, 'import matplotlib.pyplot as plt\n'), ((4181, 4193), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), '(f)\n', (4190, 4193), True, 'import matplotlib.pyplot as plt\n'), ((4356, 4368), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), '(f)\n', (4365, 4368), True, 'import matplotlib.pyplot as plt\n'), ((4604, 4616), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), '(f)\n', (4613, 4616), True, 'import matplotlib.pyplot as plt\n'), ((4727, 4739), 'matplotlib.pyplot.close', 'plt.close', (['f'], {}), 
'(f)\n', (4736, 4739), True, 'import matplotlib.pyplot as plt\n'), ((4143, 4156), 'numpy.rad2deg', 'np.rad2deg', (['x'], {}), '(x)\n', (4153, 4156), True, 'import numpy as np\n'), ((6081, 6129), 'numpy.subtract', 'np.subtract', (['fit.res_dict[key]', 'old_results[key]'], {}), '(fit.res_dict[key], old_results[key])\n', (6092, 6129), True, 'import numpy as np\n')] |
from __future__ import annotations
# Standard library
import re
from copy import deepcopy
from dataclasses import dataclass
from typing import Callable, Optional
__all__ = ['NamedEntity', 'NamedEntityList']
@dataclass(frozen=True)
class NamedEntity:
    """Immutable record describing one named-entity annotation."""
    # name: label of the entity (used by NamedEntityList.get_unique_names)
    name: str
    # entity: entity category/type -- presumably e.g. PERSON/ORG; verify
    entity: str
    # string: associated text -- presumably the matched surface string; verify
    string: str
    # span: (start, end) character offsets -- TODO confirm inclusivity
    span: tuple[int, int]
class NamedEntityList:
    """List-like container that only accepts :class:`NamedEntity` objects
    through :meth:`append`/:meth:`extend`."""

    def __init__(self, init_list: Optional[list] = None):
        init_list = [] if init_list is None else init_list
        self._list = init_list

    def append(self, entity: NamedEntity):
        """Append *entity*; raise TypeError if it is not a NamedEntity."""
        if not isinstance(entity, NamedEntity):
            raise TypeError(
                f'{self.__class__.__name__} holds {NamedEntity} objects. You gave {type(entity)}.')
        self._list.append(entity)

    def copy(self):
        """Return a deep copy of this list."""
        return deepcopy(self)

    def extend(self, entity_list: NamedEntityList | list[NamedEntity]):
        """Extend list. Similar to the standard python list object, extend takes an iterable as an argument."""
        if not isinstance(entity_list, (NamedEntityList, list)):
            raise TypeError(
                f'Expected object of type {self.__class__.__name__} or list. You gave {type(entity_list)}.'
            )
        for elem in entity_list:
            self.append(elem)

    def get_unique_names(self) -> set[str]:
        """Return set of the unique names in this NamedEntityList."""
        # Set comprehension instead of set([listcomp]) (avoids the
        # intermediate list).
        return {entity.name for entity in self}

    def sort(self, key: Callable, *, reverse: bool = False) -> None:
        """
        Sort the list according to the given key. The sort is executed in-place.

        Parameters
        ----------
        key : callable (e.g., a lambda function)
            Function that defines how the list should be sorted.
        reverse : bool, optional
            If True, sort in descending order.
        """
        self._list.sort(key=key, reverse=reverse)

    def __add__(self, other: NamedEntityList):
        """Concatenating two lists yields a new NamedEntityList."""
        concatenated_list = list(self) + list(other)
        return self.__class__(concatenated_list)

    def __getitem__(self, item):
        """Index access; a list of indices or a slice returns a new NamedEntityList."""
        if isinstance(item, list):
            return self.__class__([self._list[i] for i in item])
        elif isinstance(item, slice):
            return self.__class__(self._list[item])
        else:
            return self._list[item]

    def __iter__(self):
        return iter(self._list)

    def __len__(self):
        return len(self._list)

    def __repr__(self):
        # Local renamed so it no longer shadows the builtin repr();
        # placeholder-less f'([])' replaced with a plain literal.
        body = '\n'.join(f'[{i}] {p!r}' for i, p in enumerate(self))
        body = re.sub(r'^', ' ' * 4, body, flags=re.M)
        body = f'(\n{body}\n)' if len(self) > 0 else '([])'
        return f'{self.__class__.__name__}{body}'
| [
"re.sub",
"dataclasses.dataclass",
"copy.deepcopy"
] | [((213, 235), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (222, 235), False, 'from dataclasses import dataclass\n'), ((916, 930), 'copy.deepcopy', 'deepcopy', (['self'], {}), '(self)\n', (924, 930), False, 'from copy import deepcopy\n'), ((2747, 2785), 're.sub', 're.sub', (['"""^"""', "(' ' * 4)", 'repr'], {'flags': 're.M'}), "('^', ' ' * 4, repr, flags=re.M)\n", (2753, 2785), False, 'import re\n')] |
from django.utils import timezone
from django.utils.translation import ugettext
from mediane.algorithms.enumeration import get_name_from
from mediane.algorithms.lri.BioConsert import BioConsert
from mediane.algorithms.lri.ExactAlgorithm import ExactAlgorithm
from mediane.algorithms.misc.borda_count import BordaCount
from mediane.distances.KendallTauGeneralizedNlogN import KendallTauGeneralizedNlogN
from mediane.distances.enumeration import GENERALIZED_KENDALL_TAU_DISTANCE_WITH_UNIFICATION
from mediane.median_ranking_tools import parse_ranking_with_ties_of_str, dump_ranking_with_ties_to_str
from mediane.normalizations.enumeration import NONE, UNIFICATION, PROJECTION
from mediane.normalizations.unification import Unification
from mediane.normalizations.projection import Projection
MIN_MEASURE_DURATION = 3
def execute_median_rankings_computation_from_rankings(
        rankings,
        algorithm,
        normalization,
        distance,
        precise_time_measurement,
        dataset=None,
        algorithms=None,
):
    """Compute the median ranking of *rankings* with one algorithm (or, if
    *algorithms* is given, recurse once per algorithm on the already
    normalized rankings) and return a result dict with keys
    ``dataset``, ``consensus``, ``distance``, ``duration`` and ``algo``.

    When *precise_time_measurement* is True, the computation is repeated
    until at least MIN_MEASURE_DURATION seconds are spent, and the reported
    duration is the average per run.
    """
    # Normalize the input rankings according to the selected normalization.
    if str(normalization) == "Unification":
        rankings_real = Unification.rankings_to_rankings(rankings)
    elif str(normalization) == "Projection":
        rankings_real = Projection.rankings_to_rankings(rankings)
    else:
        rankings_real = rankings
    if algorithms:
        # Fan out: one result per algorithm, reusing the normalized rankings.
        return [execute_median_rankings_computation_from_rankings(
            rankings=rankings_real,
            algorithm=a,
            normalization=normalization,
            distance=distance,
            precise_time_measurement=precise_time_measurement,
            dataset=dataset,
        ) for a in algorithms]
    iteration = 1
    start_timezone = timezone.now()
    c = algorithm.compute_median_rankings(rankings=rankings_real, distance=distance)
    duration = (timezone.now() - start_timezone).total_seconds()
    # Benchmark loop: scale the iteration count from the measured duration
    # (x1.1 headroom) and re-run until enough wall-clock time was covered.
    while precise_time_measurement and duration < MIN_MEASURE_DURATION:
        # print(iteration, duration)
        iteration = int((iteration / duration) * MIN_MEASURE_DURATION * 1.1)
        rang_iter = range(2, iteration)
        start_timezone = timezone.now()
        for k in rang_iter:
            algorithm.compute_median_rankings(rankings=rankings_real, distance=distance)
        duration = (timezone.now() - start_timezone).total_seconds()
    return dict(
        dataset=dict(
            id=-1,
            name=ugettext('typed'),
        ) if dataset is None else
        dict(
            id=dataset.id,
            name=str(dataset),
        ),
        consensus=c,
        # NOTE: the distance is evaluated against the ORIGINAL (pre-
        # normalization) rankings, not rankings_real.
        distance=KendallTauGeneralizedNlogN(distance).get_distance_to_a_set_of_rankings(
            c[0],
            rankings=rankings,
        )[distance.id_order],
        # Per-run duration, truncated at nanosecond resolution then scaled
        # (appears to be reported in milliseconds -- verify unit downstream).
        duration=(int(duration / iteration * 1000.0 * 1000.0 * 1000.0)) / 1000.0 / 1000.0,
        algo=dict(
            id=algorithm.get_full_name(),
            name=str(get_name_from(algorithm.get_full_name())),
        ),
    )
def execute_median_rankings_computation_from_datasets(
        datasets,
        algorithm,
        normalization,
        distance,
        precise_time_measurement,
        algorithms=None,
):
    """Compute one median-ranking result per (dataset, algorithm) pair.

    *algorithm* (if not None) is added to the list of *algorithms*; for
    each dataset the rankings are normalized first when the dataset is
    not complete, then every algorithm is run on them.

    Returns
    -------
    list of dict
        One result dict (see
        execute_median_rankings_computation_from_rankings) per pair.
    """
    submission_results = []
    # Work on a copy: appending to the caller's list would leak `algorithm`
    # into it and duplicate entries across repeated calls (the original
    # mutated the argument in place).
    algorithms = list(algorithms) if algorithms else []
    if algorithm is not None:
        algorithms.append(algorithm)
    for d in datasets:
        if not d.complete:
            # Incomplete datasets are normalized so every ranking covers
            # the same element set; complete ones are used as-is.
            # NOTE(review): the callee applies the same normalization again
            # -- presumably idempotent; verify.
            if str(normalization) == "Unification":
                rankings_real = Unification.rankings_to_rankings(d.rankings)
            elif str(normalization) == "Projection":
                rankings_real = Projection.rankings_to_rankings(d.rankings)
            else:
                rankings_real = d.rankings
        else:
            rankings_real = d.rankings
        for a in algorithms:
            submission_results.append(
                execute_median_rankings_computation_from_rankings(
                    rankings=rankings_real,
                    algorithm=a,
                    normalization=normalization,
                    distance=distance,
                    precise_time_measurement=precise_time_measurement,
                    dataset=d,
                )
            )
    return submission_results
def create_computation_job(
        datasets,
        normalization,
        distance,
        precise_time_measurement,
        algorithms,
        owner,
):
    """Create a Job plus one pending Result per (dataset, algorithm) pair
    and return the Job with its task counter updated."""
    # Local import kept from the original -- presumably avoids a circular
    # import at module load time.
    from mediane import models
    new_job = models.Job.objects.create(
        owner=owner,
        dist=distance,
        norm=normalization,
        creation=timezone.now(),
        bench=precise_time_measurement,
        identifier=None,
    )
    for dataset in datasets:
        for algo in algorithms:
            result = models.Result.objects.create(algo=algo, dataset=dataset, job=new_job)
            result.mark_as_todo()
    new_job.update_task_count()
    return new_job
def execute_median_rankings_computation_of_result(
        result,
):
    """Run the computation described by *result* and persist consensus,
    distance and duration back onto it."""
    job = result.job
    outcome = execute_median_rankings_computation_from_rankings(
        rankings=result.dataset.rankings,
        algorithm=result.algo.get_instance(),
        normalization=job.norm,
        distance=job.dist,
        precise_time_measurement=job.bench,
        dataset=result.dataset,
    )
    serialized_consensuses = (dump_ranking_with_ties_to_str(c) for c in outcome["consensus"])
    result.consensuses = '\n'.join(serialized_consensuses)
    result.distance_value = outcome["distance"]
    result.duration = outcome["duration"]
    result.save()
def cleanup_dataset(rankings_as_one_str):
    """Normalize a raw multi-ranking string.

    Removes carriage returns, backslash line-continuations and empty
    ranking markers (a ":" immediately before a newline or at the very
    end of the string).

    Parameters
    ----------
    rankings_as_one_str : str or None
        Raw text; ``None`` is treated as empty input.

    Returns
    -------
    str
        The cleaned string (possibly empty).
    """
    if rankings_as_one_str is None:
        return ""
    rankings_as_one_str = rankings_as_one_str.replace("\r", "")
    rankings_as_one_str = rankings_as_one_str.replace("\\\n", "")
    rankings_as_one_str = rankings_as_one_str.replace(":\n", "")
    # endswith() is safe on the empty string; indexing with [-1] raised
    # IndexError when the replacements left nothing behind (e.g. "\r").
    if rankings_as_one_str.endswith(':'):
        rankings_as_one_str = rankings_as_one_str[:-1]
    return rankings_as_one_str
def evaluate_dataset_and_provide_stats(rankings_str):
    """Parse a list of serialized rankings and gather validity statistics.

    Parameters
    ----------
    rankings_str : iterable of str
        One serialized ranking-with-ties per entry.

    Returns
    -------
    dict
        ``rankings`` (parsed rankings, invalid ones as []),
        ``n`` (number of distinct elements over all rankings),
        ``m`` (number of rankings),
        ``complete`` (True when every ranking covers the same element set),
        ``invalid`` / ``invalid_rankings_id`` (parse errors and duplicate
        elements, keyed by ranking index).
    """
    evaluation = {}
    elements = None  # union of all elements seen so far (None until first ranking)
    rankings = []
    complete = True
    invalid_rankings = {}
    cpt = -1  # index of the ranking currently being processed
    for ranking_str in rankings_str:
        cpt += 1
        try:
            ranking = parse_ranking_with_ties_of_str(ranking_str)
        except ValueError as e:
            invalid_rankings[cpt] = e.args if len(e.args) > 1 else e.args[0]
            ranking = []
        rankings.append(ranking)
        ranking_elements = set()
        for bucket in ranking:
            for element in bucket:
                if element in ranking_elements:
                    invalid_rankings[cpt] = "Duplicated element '%s'" % element
                ranking_elements.add(element)
        if elements is None:
            # Copy instead of aliasing the first ranking's element set.
            elements = set(ranking_elements)
        if ranking_elements != elements:
            complete = False
        elements.update(ranking_elements)
    evaluation["complete"] = complete
    # Guard against empty input: `elements` is still None in that case
    # (the original raised TypeError on len(None)).
    evaluation["n"] = 0 if elements is None else len(elements)
    evaluation["m"] = len(rankings)
    evaluation["invalid"] = len(invalid_rankings) > 0
    evaluation["invalid_rankings_id"] = invalid_rankings
    evaluation["rankings"] = rankings
    return evaluation
def compute_consensus_settings_based_on_datasets(
        n,
        m,
        complete,
        rankings,
        user,
        dbdatasets=None,
        algos=None,
):
    """Heuristically pick default consensus settings for the given data.

    :param n: number of distinct elements over all rankings
    :param m: number of rankings
    :param complete: whether every ranking covers the same element set
    :param rankings: the parsed rankings (currently unused here)
    :param user: the user for which we are finding the best settings; should be
        used to not select an algorithm/distance/norm that is not visible by
        the user (not enforced yet)
    :param dbdatasets: datasets already stored in the DB (defaults to [])
    :param algos: candidate algorithms (defaults to [])
    :return: dict with keys ``algo``, ``dist``, ``norm`` (primary keys) and
        the flags ``auto_compute``, ``bench``, ``extended_analysis``
    """
    dbdatasets = [] if dbdatasets is None else dbdatasets
    algos = [] if algos is None else algos
    from mediane.models import Distance, Normalization, Algorithm
    consensus_settings = {}
    # Defaults: BioConsert + generalized Kendall-tau (with unification),
    # no normalization.
    consensus_settings["algo"] = Algorithm.objects.get(key_name=str(BioConsert().get_full_name())).pk
    consensus_settings["dist"] = Distance.objects.get(key_name=GENERALIZED_KENDALL_TAU_DISTANCE_WITH_UNIFICATION).pk
    # consensus_settings["norm"] = Normalization.objects.get(key_name=NONE if complete else UNIFICATION).pk
    consensus_settings["norm"] = Normalization.objects.get(key_name=NONE).pk
    # Small instances: prefer the exact algorithm when it is available;
    # large instances or big batch jobs: fall back to the fast BordaCount.
    if n < 70 and ExactAlgorithm().can_be_executed():
        consensus_settings["algo"] = Algorithm.objects.get(key_name=str(ExactAlgorithm().get_full_name())).pk
    elif n > 100 or len(dbdatasets) * len(algos) > 20:
        consensus_settings["algo"] = Algorithm.objects.get(key_name=str(BordaCount().get_full_name())).pk
    # consensus_settings["auto_compute"] = n < 50 and len(dbdatasets) * len(algos) < 50
    consensus_settings["auto_compute"] = False
    consensus_settings["bench"] = False
    # Large batches get the extended (asynchronous) analysis path.
    consensus_settings["extended_analysis"] = len(dbdatasets) * len(algos) > 50
    # print(consensus_settings)
    return consensus_settings
| [
"mediane.algorithms.lri.BioConsert.BioConsert",
"mediane.algorithms.misc.borda_count.BordaCount",
"mediane.models.Distance.objects.get",
"mediane.models.Normalization.objects.get",
"django.utils.timezone.now",
"mediane.normalizations.unification.Unification.rankings_to_rankings",
"mediane.median_ranking... | [((1681, 1695), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1693, 1695), False, 'from django.utils import timezone\n'), ((1103, 1145), 'mediane.normalizations.unification.Unification.rankings_to_rankings', 'Unification.rankings_to_rankings', (['rankings'], {}), '(rankings)\n', (1135, 1145), False, 'from mediane.normalizations.unification import Unification\n'), ((2097, 2111), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (2109, 2111), False, 'from django.utils import timezone\n'), ((7880, 7965), 'mediane.models.Distance.objects.get', 'Distance.objects.get', ([], {'key_name': 'GENERALIZED_KENDALL_TAU_DISTANCE_WITH_UNIFICATION'}), '(key_name=GENERALIZED_KENDALL_TAU_DISTANCE_WITH_UNIFICATION\n )\n', (7900, 7965), False, 'from mediane.models import Distance, Normalization, Algorithm\n'), ((8105, 8145), 'mediane.models.Normalization.objects.get', 'Normalization.objects.get', ([], {'key_name': 'NONE'}), '(key_name=NONE)\n', (8130, 8145), False, 'from mediane.models import Distance, Normalization, Algorithm\n'), ((1215, 1256), 'mediane.normalizations.projection.Projection.rankings_to_rankings', 'Projection.rankings_to_rankings', (['rankings'], {}), '(rankings)\n', (1246, 1256), False, 'from mediane.normalizations.projection import Projection\n'), ((4464, 4478), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (4476, 4478), False, 'from django.utils import timezone\n'), ((4619, 4675), 'mediane.models.Result.objects.create', 'models.Result.objects.create', ([], {'algo': 'a', 'dataset': 'd', 'job': 'job'}), '(algo=a, dataset=d, job=job)\n', (4647, 4675), False, 'from mediane import models\n'), ((5245, 5277), 'mediane.median_ranking_tools.dump_ranking_with_ties_to_str', 'dump_ranking_with_ties_to_str', (['c'], {}), '(c)\n', (5274, 5277), False, 'from mediane.median_ranking_tools import parse_ranking_with_ties_of_str, dump_ranking_with_ties_to_str\n'), ((6127, 6170), 
'mediane.median_ranking_tools.parse_ranking_with_ties_of_str', 'parse_ranking_with_ties_of_str', (['ranking_str'], {}), '(ranking_str)\n', (6157, 6170), False, 'from mediane.median_ranking_tools import parse_ranking_with_ties_of_str, dump_ranking_with_ties_to_str\n'), ((1797, 1811), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1809, 1811), False, 'from django.utils import timezone\n'), ((3393, 3437), 'mediane.normalizations.unification.Unification.rankings_to_rankings', 'Unification.rankings_to_rankings', (['d.rankings'], {}), '(d.rankings)\n', (3425, 3437), False, 'from mediane.normalizations.unification import Unification\n'), ((8167, 8183), 'mediane.algorithms.lri.ExactAlgorithm.ExactAlgorithm', 'ExactAlgorithm', ([], {}), '()\n', (8181, 8183), False, 'from mediane.algorithms.lri.ExactAlgorithm import ExactAlgorithm\n'), ((2249, 2263), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (2261, 2263), False, 'from django.utils import timezone\n'), ((3523, 3566), 'mediane.normalizations.projection.Projection.rankings_to_rankings', 'Projection.rankings_to_rankings', (['d.rankings'], {}), '(d.rankings)\n', (3554, 3566), False, 'from mediane.normalizations.projection import Projection\n'), ((2374, 2391), 'django.utils.translation.ugettext', 'ugettext', (['"""typed"""'], {}), "('typed')\n", (2382, 2391), False, 'from django.utils.translation import ugettext\n'), ((2548, 2584), 'mediane.distances.KendallTauGeneralizedNlogN.KendallTauGeneralizedNlogN', 'KendallTauGeneralizedNlogN', (['distance'], {}), '(distance)\n', (2574, 2584), False, 'from mediane.distances.KendallTauGeneralizedNlogN import KendallTauGeneralizedNlogN\n'), ((7813, 7825), 'mediane.algorithms.lri.BioConsert.BioConsert', 'BioConsert', ([], {}), '()\n', (7823, 7825), False, 'from mediane.algorithms.lri.BioConsert import BioConsert\n'), ((8275, 8291), 'mediane.algorithms.lri.ExactAlgorithm.ExactAlgorithm', 'ExactAlgorithm', ([], {}), '()\n', (8289, 8291), False, 'from 
mediane.algorithms.lri.ExactAlgorithm import ExactAlgorithm\n'), ((8440, 8452), 'mediane.algorithms.misc.borda_count.BordaCount', 'BordaCount', ([], {}), '()\n', (8450, 8452), False, 'from mediane.algorithms.misc.borda_count import BordaCount\n')] |
# Generated by Django 2.2.2 on 2020-05-08 12:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: allow ``YourOrder.phone`` to be NULL."""

    dependencies = [
        ('mainapp', '0002_auto_20200508_1115'),
    ]

    operations = [
        migrations.AlterField(
            model_name='yourorder',
            name='phone',
            # max_length is unchanged; null=True lets rows omit a phone.
            field=models.CharField(max_length=10, null=True),
        ),
    ]
| [
"django.db.models.CharField"
] | [((338, 380), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'null': '(True)'}), '(max_length=10, null=True)\n', (354, 380), False, 'from django.db import migrations, models\n')] |
"""JSON Schemas."""
import csv
from collections import defaultdict
from datetime import date
from os.path import dirname, join, realpath
from flask import current_app
from marshmallow import Schema, fields
from cd2h_repo_project.modules.records.resource_type import ResourceType
class DataCiteResourceTypeMap(object):
    """Lookup from (group, name) resource-type pairs to DataCite terms.

    The mapping is read once from
    ``records/data/resource_type_mapping.csv`` relative to this package.

    TODO: If we extract this module out, make this class a configuration
    setting.
    """

    def __init__(self):
        """Load the CSV mapping file into ``self.map``."""
        package_root = dirname(dirname(realpath(__file__)))
        self.filename = join(
            package_root, 'records', 'data', 'resource_type_mapping.csv'
        )

        with open(self.filename) as csv_file:
            rows = csv.DictReader(csv_file)
            self.map = {}
            for row in rows:
                composite_key = (row['Group'].lower(), row['Name'].lower())
                self.map[composite_key] = row['DataCite'].strip()

    def get(self, key, default=None):
        """Return the DataCite term for ``key``, or ``default`` if unmapped.

        ``key`` is (<general resource type>, <specific resource type>).
        """
        return self.map.get(key, default)
class DataCiteResourceTypeSchemaV4(Schema):
    """Serializer of a record's resource type into DataCite's vocabulary."""

    resourceTypeGeneral = fields.Method('get_general_resource_type')
    resourceType = fields.Method('get_specific_resource_type')

    def get_general_resource_type(self, resource_type):
        """Map the (general, specific) pair onto DataCite's general type."""
        rt = ResourceType.get(resource_type['general'],
                              resource_type['specific'])
        return rt.map(DataCiteResourceTypeMap())

    def get_specific_resource_type(self, resource_type):
        """Return the title-cased specific resource type."""
        specific = resource_type['specific']
        return specific.title()
class DataCiteTitleSchemaV4(Schema):
    """Title schema: a single DataCite `title` entry."""

    # The record's title string.
    title = fields.Str()
class DataCiteCreatorSchemaV4(Schema):
    """Creator schema.

    Each of these fields are inside the `creator` node; the `attribute`
    kwargs map them onto the record's author name fields.
    """

    creatorName = fields.Str(attribute='full_name')
    # TODO (optional): sub creatorName: nameType
    givenName = fields.Str(attribute='first_name')
    familyName = fields.Str(attribute='last_name')
    # TODO (optional):
    #    nameIdentifier
    #    nameIdentifierScheme
    #    schemeURI
    #    affiliation
class DataCiteSchemaV4(Schema):
    """Schema for the minimal mandatory DataCite metadata.

    Only the required kernel fields are implemented for now; optional
    fields may be added later.  Fields and subfields follow
    schema.datacite.org/meta/kernel-4.1/doc/DataCite-MetadataKernel_v4.1.pdf
    """

    identifier = fields.Method('get_identifier', dump_only=True)
    # NOTE: This auto-magically serializes the `creators` and `creator` nodes.
    creators = fields.List(
        fields.Nested(DataCiteCreatorSchemaV4),
        attribute='metadata.authors',
        dump_only=True)
    titles = fields.List(
        fields.Nested(DataCiteTitleSchemaV4),
        attribute='metadata',
        dump_only=True)
    publisher = fields.Method('get_publisher', dump_only=True)
    publicationYear = fields.Method('get_year', dump_only=True)
    resourceType = fields.Nested(
        DataCiteResourceTypeSchemaV4,
        attribute='metadata.resource_type',
        dump_only=True)

    def get_identifier(self, data):
        """Return the record's main identifier as a DOI descriptor."""
        doi = data.get('metadata', {}).get('doi')
        # Records without a DOI serialize 'DUMMY', which DataCite ignores.
        return {'identifier': doi or 'DUMMY', 'identifierType': 'DOI'}

    def get_publisher(self, data):
        """Return the configured DOI publisher name."""
        publisher = current_app.config['DOI_PUBLISHER']
        return publisher

    def get_year(self, data):
        """Return the publication year (currently: the current year).

        TODO: Revisit when dealing with embargo.
        """
        return date.today().year
| [
"marshmallow.fields.Method",
"csv.DictReader",
"marshmallow.fields.Nested",
"marshmallow.fields.Str",
"cd2h_repo_project.modules.records.resource_type.ResourceType.get",
"os.path.realpath",
"datetime.date.today"
] | [((1219, 1261), 'marshmallow.fields.Method', 'fields.Method', (['"""get_general_resource_type"""'], {}), "('get_general_resource_type')\n", (1232, 1261), False, 'from marshmallow import Schema, fields\n'), ((1281, 1324), 'marshmallow.fields.Method', 'fields.Method', (['"""get_specific_resource_type"""'], {}), "('get_specific_resource_type')\n", (1294, 1324), False, 'from marshmallow import Schema, fields\n'), ((1882, 1894), 'marshmallow.fields.Str', 'fields.Str', ([], {}), '()\n', (1892, 1894), False, 'from marshmallow import Schema, fields\n'), ((2043, 2076), 'marshmallow.fields.Str', 'fields.Str', ([], {'attribute': '"""full_name"""'}), "(attribute='full_name')\n", (2053, 2076), False, 'from marshmallow import Schema, fields\n'), ((2142, 2176), 'marshmallow.fields.Str', 'fields.Str', ([], {'attribute': '"""first_name"""'}), "(attribute='first_name')\n", (2152, 2176), False, 'from marshmallow import Schema, fields\n'), ((2194, 2227), 'marshmallow.fields.Str', 'fields.Str', ([], {'attribute': '"""last_name"""'}), "(attribute='last_name')\n", (2204, 2227), False, 'from marshmallow import Schema, fields\n'), ((2681, 2728), 'marshmallow.fields.Method', 'fields.Method', (['"""get_identifier"""'], {'dump_only': '(True)'}), "('get_identifier', dump_only=True)\n", (2694, 2728), False, 'from marshmallow import Schema, fields\n'), ((3088, 3134), 'marshmallow.fields.Method', 'fields.Method', (['"""get_publisher"""'], {'dump_only': '(True)'}), "('get_publisher', dump_only=True)\n", (3101, 3134), False, 'from marshmallow import Schema, fields\n'), ((3157, 3198), 'marshmallow.fields.Method', 'fields.Method', (['"""get_year"""'], {'dump_only': '(True)'}), "('get_year', dump_only=True)\n", (3170, 3198), False, 'from marshmallow import Schema, fields\n'), ((3218, 3318), 'marshmallow.fields.Nested', 'fields.Nested', (['DataCiteResourceTypeSchemaV4'], {'attribute': '"""metadata.resource_type"""', 'dump_only': '(True)'}), "(DataCiteResourceTypeSchemaV4, attribute=\n 
'metadata.resource_type', dump_only=True)\n", (3231, 3318), False, 'from marshmallow import Schema, fields\n'), ((1487, 1556), 'cd2h_repo_project.modules.records.resource_type.ResourceType.get', 'ResourceType.get', (["resource_type['general']", "resource_type['specific']"], {}), "(resource_type['general'], resource_type['specific'])\n", (1503, 1556), False, 'from cd2h_repo_project.modules.records.resource_type import ResourceType\n'), ((2844, 2882), 'marshmallow.fields.Nested', 'fields.Nested', (['DataCiteCreatorSchemaV4'], {}), '(DataCiteCreatorSchemaV4)\n', (2857, 2882), False, 'from marshmallow import Schema, fields\n'), ((2980, 3016), 'marshmallow.fields.Nested', 'fields.Nested', (['DataCiteTitleSchemaV4'], {}), '(DataCiteTitleSchemaV4)\n', (2993, 3016), False, 'from marshmallow import Schema, fields\n'), ((723, 740), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (737, 740), False, 'import csv\n'), ((3917, 3929), 'datetime.date.today', 'date.today', ([], {}), '()\n', (3927, 3929), False, 'from datetime import date\n'), ((572, 590), 'os.path.realpath', 'realpath', (['__file__'], {}), '(__file__)\n', (580, 590), False, 'from os.path import dirname, join, realpath\n')] |
import sys
from copy import deepcopy
# Integer mass -> amino-acid letter lookup, built once at import time from
# the Rosalind-style table file (one "<letter> <mass>" pair per line).
# Fix: use a context manager so the file handle is closed (the original
# left it open for the life of the process).
mass_table = {}
with open('integer_mass_table.txt') as mass_file:
    for line in mass_file:
        aa, mass = line.rstrip().split(' ')
        mass_table[int(mass)] = aa
# mass_table[4] = 'X'
# mass_table[5] = 'Z'
def PeptideSequencing(spectral_vector):
    """Decode the highest-scoring peptide from a spectral vector.

    A DAG is built whose nodes are positions 0..len(spectral_vector)
    (a 0-scored source is prepended) and whose edges i -> j exist when
    j - i equals the integer mass of an amino acid.  Dynamic programming
    finds the maximum-score path from the source to the sink (the last
    position), and the traversed edges spell the peptide.

    Fixes vs. the original version:
    * removed the dead ``adj_list`` that was built but never used;
    * nodes are relaxed in increasing index order, which is a topological
      order of this DAG (every edge goes from a smaller to a larger
      index) -- the previous dict-insertion order could read a
      predecessor's score before it was finalized;
    * the sink is now explicitly ``len(spectral_vector) - 1`` instead of
      the insertion-order-dependent ``list(scores.keys())[-1]``.
    """
    spectral_vector = [0] + spectral_vector
    size = len(spectral_vector)

    # adj_dict[j] = list of [parent_index, amino_acid] incoming edges.
    adj_dict = {}
    for i in range(size):
        for j in range(i, size):
            if (j - i) in mass_table:
                adj_dict.setdefault(j, []).append([i, mass_table[j - i]])

    # scores[node] = [best score reaching node, chosen incoming edge].
    # Every non-source node starts effectively unreachable (-1e6).
    scores = {0: [0, '-']}
    for node, parents in adj_dict.items():
        scores.setdefault(node, [-1e6, '-'])
        for parent, _ in parents:
            if parent != 0:
                scores.setdefault(parent, [-1e6, '-'])

    # Relax nodes in topological (increasing-index) order.
    for node in sorted(adj_dict):
        max_score = -1e6
        bold_edge = '-'
        for parent in adj_dict[node]:
            score = scores[parent[0]][0]
            if score > max_score:
                max_score = score
                bold_edge = parent
        scores[node] = [max_score + spectral_vector[node], bold_edge]

    # Backtrack from the sink, collecting the amino acids on the best path.
    node = size - 1
    peptide = ''
    while node != 0:
        peptide = scores[node][1][1] + peptide
        node = scores[node][1][0]
    return peptide
if __name__ == "__main__":
    # Read a space-separated spectral vector from stdin and print the
    # highest-scoring peptide decoded from it.
    spectral_vector = [int(x) for x in sys.stdin.read().rstrip().split(' ')]
    # print(spectral_vector)
    print(PeptideSequencing(spectral_vector))
| [
"sys.stdin.read"
] | [((1658, 1674), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (1672, 1674), False, 'import sys\n')] |
from django import forms
from django.contrib.auth.forms import UserCreationForm,AuthenticationForm
from django.contrib.auth.models import User
from .models import Profile,Project,Review
class RegForm(UserCreationForm):
    """Registration form: ``UserCreationForm`` plus a required email field.

    Fix: ``Meta.fields`` previously listed the placeholder string
    '<PASSWORD>' twice; the password fields declared by
    ``UserCreationForm`` are named ``password1`` and ``password2``.
    """
    email = forms.EmailField()

    class Meta:
        model = User
        fields = ('username', 'email', 'password1', 'password2')
class LoginForm(AuthenticationForm):
    """Login form; redeclares the default fields to set explicit labels."""
    username = forms.CharField(label='Username', max_length=254)
    password = forms.CharField(label='Password',widget=forms.PasswordInput)
class ProfileForm(forms.ModelForm):
    """Edit form for a user's Profile (picture and bio)."""
    class Meta:
        model = Profile
        fields = ('profile_pic','bio')
class ProjectForm(forms.ModelForm):
    """Submission form for a Project (title, description, image, link)."""
    class Meta:
        model = Project
        fields = ('title','description','project_pic','project_link')
class RatingForm(forms.ModelForm):
    """Review form collecting the three project rating scores."""
    class Meta:
        model = Review
        fields =('design','usability','content')
"django.forms.EmailField",
"django.forms.CharField"
] | [((233, 251), 'django.forms.EmailField', 'forms.EmailField', ([], {}), '()\n', (249, 251), False, 'from django import forms\n'), ((420, 469), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Username"""', 'max_length': '(254)'}), "(label='Username', max_length=254)\n", (435, 469), False, 'from django import forms\n'), ((486, 547), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Password"""', 'widget': 'forms.PasswordInput'}), "(label='Password', widget=forms.PasswordInput)\n", (501, 547), False, 'from django import forms\n')] |
#!/usr/bin/python
#coding = utf-8
import numpy as np
import pandas as pd
import mysql.connector
class mysqlTool():
    """
    This is the API to connect with mysql database.

    NOTE(review): every query below is built by string concatenation of the
    caller-supplied table/column/condition strings -- this is safe only when
    those values are trusted (never end-user input).
    """
    def __init__(self,databaseNameString:str,hostAddress:str,userName:str,passWord:str):
        # One connection and one buffered cursor shared by all methods.
        self.targetDB = mysql.connector.connect(
            host = hostAddress,
            user = userName,
            passwd = passWord,
            database = databaseNameString
            # buffered = True
        )
        self.targetCursor = self.targetDB.cursor(buffered=True)
    def getAllTables(self):
        """Return the rows of SHOW TABLES (one tuple per table)."""
        self.targetCursor.execute("SHOW TABLES")
        return [i for i in self.targetCursor]
    def getColNameOfTable(self,tableNameString:str):
        """Return the column names of *tableNameString* (runs a SELECT *)."""
        sql = "SELECT * FROM "+tableNameString
        self.targetCursor.execute(sql)
        return [i for i in self.targetCursor.column_names]
    def selectAllFromTable(self,tableNameString:str):
        """Fetch the whole table into a pandas DataFrame."""
        sql = "SELECT * FROM "+tableNameString
        self.targetCursor.execute(sql)
        result = self.targetCursor.fetchall()
        df = pd.DataFrame(result,columns = self.targetCursor.column_names)
        return df
    def selectDictFromTable(self,tableNameString:str,colNameAsKey:str,colNameAsValue:str):
        """Return {key-column value: value-column value} for every row.

        On any error the exception is printed and {} is returned
        (deliberate best-effort behaviour).
        """
        try:
            sql = "SELECT "+colNameAsKey+","+colNameAsValue+" FROM "+tableNameString
            self.targetCursor.execute(sql)
            result = self.targetCursor.fetchall()
            resultDict = dict(zip([i[0] for i in result],[i[1] for i in result]))
            return resultDict
        except Exception as e:
            print(e)
            return {}
    def selectColFromTable(self,tableNameString:str,colNameList:list):
        """Fetch only the given columns as a DataFrame."""
        # Backtick-quote each column, then drop the trailing comma.
        colNameString = "".join(["`"+i+"`," for i in colNameList]).strip(",")
        sql = "SELECT "+colNameString+" FROM "+tableNameString
        self.targetCursor.execute(sql)
        result = self.targetCursor.fetchall()
        df = pd.DataFrame(result,columns = self.targetCursor.column_names)
        return df
    def selectColFromTableWithCondition(self,tableNameString:str,colNameList:list,conditionString:str):
        """Fetch the given columns filtered by a raw WHERE clause."""
        colNameString = "".join(["`"+i+"`," for i in colNameList]).strip(",")
        sql = "SELECT "+colNameString+" FROM "+tableNameString+" WHERE "+conditionString
        self.targetCursor.execute(sql)
        result = self.targetCursor.fetchall()
        df = pd.DataFrame(result,columns = self.targetCursor.column_names)
        return df
    def selectAllFromTableWithCondition(self,tableNameString:str,conditionString:str):
        """Fetch all columns filtered by a raw WHERE clause."""
        sql = "SELECT * FROM "+tableNameString+" WHERE "+conditionString
        self.targetCursor.execute(sql)
        result = self.targetCursor.fetchall()
        df = pd.DataFrame(result,columns = self.targetCursor.column_names)
        return df
    def insertRowIntoTable(self,tableNameString:str,valuesTuple:tuple):
        """INSERT one row; *valuesTuple* must cover every column in order."""
        # The initial SELECT * only serves to learn the column names.
        sql = "SELECT * FROM "+tableNameString
        self.targetCursor.execute(sql)
        colNameString = "".join(["`"+i+"`," for i in self.targetCursor.column_names]).strip(", ")
        sql = "INSERT INTO "+tableNameString+" ("+colNameString+") VALUES (" + "".join(["%s, " for i in range(len(self.targetCursor.column_names))]).strip(", ")+")"
        val = valuesTuple
        self.targetCursor.execute(sql,val)
        self.targetDB.commit()
        print("Insert Finished")
    def replaceRowsIntoTable(self,tableNameString:str,valuesTupleList:list):
        """REPLACE many rows; each tuple must cover every column in order."""
        sql = "SELECT * FROM "+tableNameString
        self.targetCursor.execute(sql)
        colNameString = "".join(["`"+i+"`," for i in self.targetCursor.column_names]).strip(", ")
        sql = "REPLACE INTO "+tableNameString+" ("+colNameString+") VALUES (" + "".join(["%s, " for i in range(len(self.targetCursor.column_names))]).strip(", ")+")"
        val = valuesTupleList
        self.targetCursor.executemany(sql, val)
        self.targetDB.commit()
        print("Insert Finished")
    def replaceDFIntoTable(self,tableNameString:str,dataFrame:pd.DataFrame):
        """REPLACE the DataFrame's rows into the table.

        Columns are reordered to match the table schema and NaN floats
        become None (SQL NULL).  Errors are printed, not raised.
        """
        try:
            import numpy as np
            DBTableColNameList = self.getColNameOfTable(tableNameString)
            df = dataFrame[DBTableColNameList]
            # convert each row to a tuple (NaN -> None so MySQL stores NULL)
            valuesTapleList = df.apply(lambda x: tuple([None if type(i)==type(np.nan) and np.isnan(i) else i for i in x]),axis=1).to_list()
            sql = "SELECT * FROM "+tableNameString
            self.targetCursor.execute(sql)
            colNameString = "".join(["`"+i+"`," for i in self.targetCursor.column_names]).strip(", ")
            sql = "REPLACE INTO "+tableNameString+" ("+colNameString+") VALUES (" + "".join(["%s, " for i in range(len(self.targetCursor.column_names))]).strip(", ")+")"
            val = valuesTapleList
            self.targetCursor.executemany(sql, val)
            self.targetDB.commit()
            print("Replace Finished")
        except Exception as e:
            print("Replace Failed, Error:",e)
class oracleTool():
    """
    Thin Oracle-database client built on SQLAlchemy + cx_Oracle.
    """
    def __init__(self, databaseNameString:str, hostAddress:str, port:int, userName:str, passWord:str):
        """Open a SQLAlchemy engine for the given Oracle instance."""
        from sqlalchemy import create_engine
        connection_url = (
            f'oracle+cx_oracle://{userName}:{passWord}'
            f'@{hostAddress}:{port}/{databaseNameString}'
        )
        self.engine = create_engine(connection_url)
    def readSql(self, sql:str):
        """Run *sql* and return the result set as a pandas DataFrame."""
        return pd.read_sql(sql, con=self.engine)
class neo4jTool():
    """
    This is the API to connect with neo4j database.
    """
    def __init__(self, hostAddress:str,port:int,userName:str,password:str):
        """Open a py2neo Graph connection to the given server."""
        from py2neo import Graph
        self.engine = Graph(hostAddress+":"+str(port),auth=(userName,password))
    def readCypher(self,cypher:str):
        """Run a raw Cypher query and return the py2neo result cursor."""
        data = self.engine.run(cypher)
        return data
    def convertDataType(self,x):
        """Coerce pandas/numpy values into neo4j-storable ones.

        np.float64 -> float, date-like objects -> 'YYYY-MM-DD' string,
        lists are converted element-wise; anything else passes through.
        """
        if isinstance(x,np.float64):
            return float(x)
        elif hasattr(x,'strftime'):
            return x.strftime("%Y-%m-%d")
        elif isinstance(x,list):
            return [self.convertDataType(i) for i in x]
        else:
            return x
    def updateDFToNode(self,nodeList:list,df:pd.DataFrame,colAsName:str):
        """Copy DataFrame columns onto the nodes whose 'name' appears in *df* (in place)."""
        nameWaitedToBeUpdated = df[colAsName].to_list()
        nameList = [i for i in nodeList if i['name'] in nameWaitedToBeUpdated]
        tmp = df.set_index(colAsName,drop=True)
        # Comprehension used purely for its side effect: node.update per attribute.
        [[node.update({j:self.convertDataType(tmp.loc[node['name']][j])}) for j in tmp.columns if j!= colAsName] for node in nameList]
    def convertDFToNode(self, nodeType:str, df:pd.DataFrame, colAsName:str):
        """Build one py2neo Node per DataFrame row; *colAsName* becomes 'name'."""
        from py2neo import Node
        nodeList = [Node(nodeType, name=df.iloc[i][colAsName]) for i in range(df.shape[0])]
        [[nodeList[i].update({j:self.convertDataType(df.iloc[i][j])}) for j in df.columns if j!=colAsName] for i in range(df.shape[0])]
        return nodeList
    def addNodeFromDF(self, nodeType:str, df:pd.DataFrame, colAsName:str):
        """Create a graph node for every DataFrame row and return the nodes."""
        nodeList = self.convertDFToNode(nodeType, df, colAsName)
        [self.engine.create(i) for i in nodeList]
        return nodeList
    def selectAllLabel(self):
        """Return the first label of every distinct label-set in the graph."""
        labelList = self.readCypher("MATCH (res) RETURN distinct labels(res)")
        return [i[0][0] for i in labelList]
    def selectAllNode(self, nodeType:str):
        """Return every node carrying label *nodeType*."""
        nodeList = self.readCypher(f'''MATCH (res:`{nodeType}`) RETURN res''')
        return [i['res'] for i in nodeList]
    def selectAttrFromNode(self, nodeType:str, attrList:list):
        """Return a DataFrame of the requested attributes of all *nodeType* nodes.

        A single attribute may be passed as a bare string.
        """
        if type(attrList)==type(''):
            attrList = [attrList]
        else:
            pass
        attr = "'],res['".join(attrList)
        nodeList = self.readCypher(f"MATCH (res:`{nodeType}`) RETURN res['"+attr+"']")
        # Rename the "res['attr']" result columns back to the plain attribute names.
        return nodeList.to_data_frame().rename(columns=dict(zip(["res['"+i+"']" for i in attrList],attrList)))
    def selectAllNodeWithCondition(self, nodeType: str, conditionString:str, resultVariableName:str = 'res'):
        """Return all *nodeType* nodes matching a raw Cypher WHERE clause."""
        nodeList = self.readCypher(f'''MATCH ({resultVariableName}:`{nodeType}`) WHERE {conditionString} RETURN {resultVariableName}''')
        return [i[resultVariableName] for i in nodeList]
    def selectAttrFromNodeWithCondition(self, nodeType: str, attrList: list, conditionString:str, resultVariableName:str = 'res'):
        """Like selectAttrFromNode, but filtered by a raw Cypher WHERE clause."""
        if type(attrList) == type(''):
            attrList = [attrList]
        else:
            pass
        attr = "'],res['".join(attrList)
        nodeList = self.readCypher(f"MATCH ({resultVariableName}:`{nodeType}`) WHERE {conditionString} RETURN {resultVariableName}['" + attr + "']")
        return nodeList.to_data_frame().rename(columns=dict(zip([f"{resultVariableName}['" + i + "']" for i in attrList], attrList)))
    def connectNodeByAttr(self, nodeTypeLeft:str, nodeTypeRight:str, attrNameLeft:str, attrNameRight:str, relationName:str):
        """Create *relationName* edges between node pairs with equal attribute values.

        NOTE(review): compares every left node with every right node (O(n*m)).
        """
        from py2neo import Relationship
        leftNode = self.selectAllNode(nodeTypeLeft)
        rightNode = self.selectAllNode(nodeTypeRight)
        pair = [(left,right) for left in leftNode for right in rightNode if left[attrNameLeft]==right[attrNameRight]]
        relation = [Relationship(i[0],relationName,i[1]) for i in pair]
        [self.engine.create(i) for i in relation]
    def replaceNode(self, nodeObj):
        """Push local changes of a bound node back to the graph."""
        self.engine.push(nodeObj)
    def replaceNodeFromDF(self, nodeType:str, df:pd.DataFrame, colAsName:str):
        """Upsert the DataFrame: update existing nodes by name, create the rest."""
        nodeList = self.selectAllNodeWithCondition(nodeType,"res.name IN ['"+"','".join(df[colAsName].to_list())+"']")
        self.updateDFToNode(nodeList,df,colAsName)
        oldNode = [i['name'] for i in nodeList]
        tmp = df[[(i not in oldNode) for i in df[colAsName]]]
        self.addNodeFromDF(nodeType,tmp,colAsName)
        [self.engine.push(i) for i in nodeList]
    def deleteAllNode(self):
        """Delete every node (and relationship) in the graph."""
        self.engine.delete_all()
        print("All Nodes Have Been Deleted")
    def deleteNode(self, nodeObj):
        """Delete a single bound node from the graph."""
        self.engine.delete(nodeObj)
"py2neo.Node",
"sqlalchemy.create_engine",
"numpy.isnan",
"pandas.DataFrame",
"py2neo.Relationship",
"pandas.read_sql"
] | [((946, 1006), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {'columns': 'self.targetCursor.column_names'}), '(result, columns=self.targetCursor.column_names)\n', (958, 1006), True, 'import pandas as pd\n'), ((1689, 1749), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {'columns': 'self.targetCursor.column_names'}), '(result, columns=self.targetCursor.column_names)\n', (1701, 1749), True, 'import pandas as pd\n'), ((2100, 2160), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {'columns': 'self.targetCursor.column_names'}), '(result, columns=self.targetCursor.column_names)\n', (2112, 2160), True, 'import pandas as pd\n'), ((2406, 2466), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {'columns': 'self.targetCursor.column_names'}), '(result, columns=self.targetCursor.column_names)\n', (2418, 2466), True, 'import pandas as pd\n'), ((4703, 4721), 'sqlalchemy.create_engine', 'create_engine', (['uri'], {}), '(uri)\n', (4716, 4721), False, 'from sqlalchemy import create_engine\n'), ((4760, 4793), 'pandas.read_sql', 'pd.read_sql', (['sql'], {'con': 'self.engine'}), '(sql, con=self.engine)\n', (4771, 4793), True, 'import pandas as pd\n'), ((5862, 5904), 'py2neo.Node', 'Node', (['nodeType'], {'name': 'df.iloc[i][colAsName]'}), '(nodeType, name=df.iloc[i][colAsName])\n', (5866, 5904), False, 'from py2neo import Node\n'), ((8097, 8135), 'py2neo.Relationship', 'Relationship', (['i[0]', 'relationName', 'i[1]'], {}), '(i[0], relationName, i[1])\n', (8109, 8135), False, 'from py2neo import Relationship\n'), ((3813, 3824), 'numpy.isnan', 'np.isnan', (['i'], {}), '(i)\n', (3821, 3824), True, 'import numpy as np\n')] |
"""
NOAA/ESRL/PSD Jython functions
"""
def calcMonAnom(monthly, ltm, normalize=0):
    """ Calculate the monthly anomaly from a long term mean.

        monthly   -- time series of monthly grids
        ltm       -- long-term mean; must contain exactly 12 timesteps
        normalize -- when non-zero, each anomaly is additionally centered
                     by subtracting xav(diff) and re-typed to match the
                     output grid

        Returns a clone of *monthly* holding the anomalies.

        NOTE(review): relies on GridUtil, sub and xav being supplied by
        the surrounding IDV/Jython environment -- they are not imported
        here.
    """
    from visad import VisADException
    monAnom = monthly.clone()
    months = len(ltm)
    if (not months == 12):
        raise VisADException("Number of months in ltm must be a 12")
    # Number of (possibly partial) years spanned by the series.
    years = int(len(monthly)/months) +1
    # Month index (0-11) of the first sample, to line anomalies up with ltm.
    startMonth = getStartMonth(GridUtil.getTimeSet(monthly))-1
    #print "Start month = " , startMonth
    index = 0
    for year in range(years):
        for month in range(12):
            # Stop once every timestep of the input has been processed.
            if index > len(monthly) - 1:
                break
            thisMonth = (startMonth+month)%12
            #print thisMonth
            diff = sub(monthly[index],ltm[thisMonth])
            if normalize != 0:
                diff = sub(diff,xav(diff))
                diff = GridUtil.setParamType(diff, GridUtil.getParamType(monAnom))
            monAnom[index] = diff
            index = index + 1
    return monAnom
def getStartMonth(timeSet):
    """ Get the starting month number (1-12) from a timeset.
    """
    from visad.util import DataUtility as du
    from visad import DateTime
    # First time sample of the set, as a VisAD Real.
    r = du.getSample(timeSet, 0).getComponent(0)
    dt = DateTime(r)
    # Format as a two-digit month in the library's configured time zone.
    month = dt.formattedString("MM",DateTime.getFormatTimeZone())
    return int(month)
| [
"visad.util.DataUtility.getSample",
"visad.VisADException",
"visad.DateTime",
"visad.DateTime.getFormatTimeZone"
] | [((1161, 1172), 'visad.DateTime', 'DateTime', (['r'], {}), '(r)\n', (1169, 1172), False, 'from visad import DateTime\n'), ((315, 369), 'visad.VisADException', 'VisADException', (['"""Number of months in ltm must be a 12"""'], {}), "('Number of months in ltm must be a 12')\n", (329, 369), False, 'from visad import VisADException\n'), ((1207, 1235), 'visad.DateTime.getFormatTimeZone', 'DateTime.getFormatTimeZone', ([], {}), '()\n', (1233, 1235), False, 'from visad import DateTime\n'), ((1113, 1137), 'visad.util.DataUtility.getSample', 'du.getSample', (['timeSet', '(0)'], {}), '(timeSet, 0)\n', (1125, 1137), True, 'from visad.util import DataUtility as du\n')] |
import numpy as np
import random
N = 10
def null(a, rtol=1e-5):
    """Return ``(rank, basis)`` for matrix ``a``.

    ``rank`` counts singular values above ``rtol`` times the dominant
    one; ``basis`` is a matrix whose columns span the null space of ``a``.
    """
    _, sing_vals, right_vecs = np.linalg.svd(a)
    numerical_rank = (sing_vals > rtol * sing_vals[0]).sum()
    null_basis = right_vecs[numerical_rank:].T.copy()
    return numerical_rank, null_basis
def gen_data(N, noisy=False):
    """Draw N random 2-D points in [-1, 1]^2 and label them with a random
    linear separator, resampling the separator until no point gets sign 0.

    ``noisy`` is accepted for interface compatibility but unused here.
    Returns (X, y, w): the points, their labels and the separator weights.
    """
    lo, hi = -1, 1
    dim = 2
    span = hi - lo
    X = np.random.rand(dim, N) * span + lo
    while True:
        # Separator defined by the null space of two random homogeneous points.
        sample_pts = np.ones((1, dim))
        rand_pts = np.random.rand(dim, dim) * span + lo
        Xsample = np.concatenate((sample_pts, rand_pts))
        k, w = null(Xsample.T)
        augmented = np.concatenate((np.ones((1, N)), X))
        y = np.sign(w.T.dot(augmented))
        if np.all(y):
            break
    return (X, y, w)
def change_label(y):
    """Flip the sign of N // 10 randomly chosen labels (noise injection).

    Indices are drawn from 1..N-1, so the label at index 0 is never
    flipped.  ``y`` is modified in place and also returned.
    NOTE(review): the fancy indexing ``y[idx]`` assumes ``y`` is a numpy
    array -- TODO confirm against callers.

    Fix: ``N / 10`` is a float under Python 3 and makes ``random.sample``
    raise TypeError; use integer division instead.
    """
    idx = random.sample(range(1, N), N // 10)
    y[idx] = -y[idx]
    return y
if __name__ == '__main__':
    # Smoke test: generate a small labelled sample and show the points.
    X, y, w = gen_data(10)
    print(X)
| [
"numpy.linalg.svd",
"numpy.all",
"numpy.ones",
"numpy.random.rand"
] | [((82, 98), 'numpy.linalg.svd', 'np.linalg.svd', (['a'], {}), '(a)\n', (95, 98), True, 'import numpy as np\n'), ((535, 544), 'numpy.all', 'np.all', (['y'], {}), '(y)\n', (541, 544), True, 'import numpy as np\n'), ((249, 271), 'numpy.random.rand', 'np.random.rand', (['dim', 'N'], {}), '(dim, N)\n', (263, 271), True, 'import numpy as np\n'), ((356, 373), 'numpy.ones', 'np.ones', (['(1, dim)'], {}), '((1, dim))\n', (363, 373), True, 'import numpy as np\n'), ((375, 399), 'numpy.random.rand', 'np.random.rand', (['dim', 'dim'], {}), '(dim, dim)\n', (389, 399), True, 'import numpy as np\n'), ((501, 516), 'numpy.ones', 'np.ones', (['(1, N)'], {}), '((1, N))\n', (508, 516), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
import os
import yaml
from tools.times import timestamp
from config.conf import ELEMENT_PATH, LOCATE_MODE
def inspect_element():
    """Validate every element-locator YAML file under ELEMENT_PATH.

    (Translated from the original Chinese docstring: "check that all
    elements are correct.")  Each YAML value must look like
    "<locator-type>==<value>": the type must be one of LOCATE_MODE, xpath
    values must contain '//', css values must not, and id/name/class
    values must be non-empty.  Raises AttributeError or AssertionError
    (messages kept in Chinese) on the first bad entry.
    """
    start_time = timestamp()
    for i in os.listdir(ELEMENT_PATH):
        _path = os.path.join(ELEMENT_PATH, i)
        if os.path.isfile(_path):
            with open(_path, encoding='utf-8') as f:
                data = yaml.safe_load(f)
            for k in data.values():
                # Each entry has the form "<pattern>==<value>".
                pattern, value = k.split('==')
                if pattern not in LOCATE_MODE:
                    raise AttributeError('【%s】路径中【%s]元素没有指定类型' % (i, k))
                # xpath locators must contain '//'; css locators must not.
                if pattern == 'xpath':
                    assert '//' in value, '【%s】路径中【%s]元素xpath类型与值不配' % (
                        i, k)
                if pattern == 'css':
                    assert '//' not in value, '【%s】路径中【%s]元素css类型与值不配' % (
                        i, k)
                if pattern in ('id', 'name', 'class'):
                    assert value, '【%s】路径中【%s]元素类型与值不匹配' % (i, k)
    end_time = timestamp()
    print("校验元素done!用时%.3f秒!" % (end_time - start_time))
if __name__ == '__main__':
    # Run the locator validation when executed as a script.
    inspect_element()
| [
"os.listdir",
"os.path.join",
"os.path.isfile",
"yaml.safe_load",
"tools.times.timestamp"
] | [((216, 227), 'tools.times.timestamp', 'timestamp', ([], {}), '()\n', (225, 227), False, 'from tools.times import timestamp\n'), ((241, 265), 'os.listdir', 'os.listdir', (['ELEMENT_PATH'], {}), '(ELEMENT_PATH)\n', (251, 265), False, 'import os\n'), ((1112, 1123), 'tools.times.timestamp', 'timestamp', ([], {}), '()\n', (1121, 1123), False, 'from tools.times import timestamp\n'), ((283, 312), 'os.path.join', 'os.path.join', (['ELEMENT_PATH', 'i'], {}), '(ELEMENT_PATH, i)\n', (295, 312), False, 'import os\n'), ((324, 345), 'os.path.isfile', 'os.path.isfile', (['_path'], {}), '(_path)\n', (338, 345), False, 'import os\n'), ((423, 440), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (437, 440), False, 'import yaml\n')] |