| id | content |
|---|---|
175457
|
import json
from json import JSONDecodeError
import pandas as pd
from requests import post
import yaml
# General class for necessary file imports
class FileImport():
def read_app_key_file(self, filename: str = "keys.json") -> tuple:
"""Reads file with consumer key and consumer secret (JSON)
Args:
filename (str, optional): Defaults to "keys.json"
Returns:
Tuple with two strings: (1) being the twitter consumer token and (2) being the
twitter consumer secret
"""
# TODO: change return to dictionary
try:
with open(filename, "r") as f:
self.key_file = json.load(f)
except FileNotFoundError:
raise FileNotFoundError('"keys.json" could not be found')
except JSONDecodeError as e:
print("Bad JSON file. Please check that 'keys.json' is formatted\
correctly and that it is not empty")
raise e
if "consumer_token" not in self.key_file or "consumer_secret" not in self.key_file:
raise KeyError('''"keys.json" does not contain the dictionary keys
"consumer_token" and/or "consumer_secret"''')
if type(self.key_file["consumer_secret"]) is not str or type(
self.key_file["consumer_token"]) is not str:
raise TypeError("Consumer secret is type" +
str(type(self.key_file["consumer_secret"])) +
"and consumer token is type " + str(type(
self.key_file["consumer_token"])) + '''. Both
must be of type str. ''')
return (self.key_file["consumer_token"], self.key_file["consumer_secret"])
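# For reference, read_app_key_file expects a JSON file shaped like the
# (hypothetical) example below -- the key names are exactly the ones checked
# above, the values are placeholders:
# {
#     "consumer_token": "<your twitter consumer token>",
#     "consumer_secret": "<your twitter consumer secret>"
# }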
def read_seed_file(self, filename: str = "seeds.csv") -> pd.DataFrame:
"""Reads file with specified seeds to start from (csv)
Args:
filename (str, optional): Defaults to "seeds.csv"
Returns:
A single column pandas DataFrame with one Twitter ID (seed) each row.
"""
try:
with open("seeds.csv", "r") as f:
self.seeds = pd.read_csv(f, header=None)
except FileNotFoundError:
raise FileNotFoundError('"seeds.csv" could not be found')
except pd.errors.EmptyDataError as e:
print('"seeds.csv" is empty!')
raise e
return self.seeds
def read_token_file(self, filename="tokens.csv"):
"""Reads file with authorized user tokens (csv).
Args:
filename (str, optional): Defaults to "tokens.csv"
Returns:
pandas.DataFrame: With columns `token` and `secret`, one line per user
"""
return pd.read_csv(filename)
# Configuration class. Reads out all information from a given config.yml
class Config():
"""Class that handles the SQL and twitter user details configuration.
Attributes:
config_file (str): Path to configuration file
config_dict (dict): Dictionary containing the config information (in case
the dictionary shall be directly passed instead of read
out of a configuration file).
"""
config_template = "config_template.py"
# Initializes class using config.yml
def __init__(self, config_file="config.yml", config_dict: dict = None):
if config_dict is not None:
self.config = config_dict
else:
self.config_path = config_file
try:
with open(self.config_path, 'r') as f:
self.config = yaml.safe_load(f)
except FileNotFoundError:
raise FileNotFoundError('Could not find "' + self.config_path + '''".\n
Please run "python3 make_config.py" or provide a config.yml''')
# Check if mailgun notifications should be used
if "notifications" not in self.config:
self.use_notifications = False
else:
self.notif_config = self.config["notifications"]
notif_config_items = [value for (key, value) in self.notif_config.items()]
single_items = list(set(notif_config_items))
if len(single_items) == 1 and single_items[0] is None:
self.use_notifications = False
elif None in single_items:
missing = [key for (key, value) in self.notif_config.items() if value is None]
raise ValueError(f"""You have not filled all required fields for the notifications
configuration! Fields missing are {missing}""")
else:
self.use_notifications = True
# Check for necessary database information. If no information is provided,
# stop
if "sql" not in self.config:
print("Config file " + config_file + """ does not contain key 'sql'!
Will use default sqlite configuration.""")
self.config["sql"] = dict(dbtype="sqlite",
dbname="new_database")
self.sql_config = self.config["sql"]
# No db type given in Config
if self.sql_config["dbtype"] is None:
print('''Parameter dbtype not set in the "config.yml". Will create
an sqlite database.''')
self.dbtype = "sqlite"
else:
self.dbtype = self.sql_config["dbtype"].strip()
# DB type is mysql - checking for all parameters
if self.dbtype == "mysql":
try:
self.dbhost = str(self.sql_config["host"])
self.dbuser = str(self.sql_config["user"])
self.dbpwd = str(self.sql_config["passwd"])
if self.dbhost == '':
raise ValueError("dbhost parameter is empty")
if self.dbuser == '':
raise ValueError("dbuser parameter is empty")
if self.dbpwd == '':
raise ValueError("passwd parameter is empty")
except KeyError as e:
raise e
elif self.dbtype == "sqlite":
self.dbhost = None
self.dbuser = None
self.dbpwd = None
else:
raise ValueError('''dbtype parameter is neither "sqlite" nor
"mysql". Please adjust the "config.yml" ''')
# Set db name
if self.sql_config["dbname"] is not None:
self.dbname = self.sql_config["dbname"]
else:
print('''Parameter "dbname" is missing. New database will have the name
"new_database".''')
self.dbname = "new_database"
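# A minimal config.yml consistent with the parsing above might look like the
# sketch below (values are placeholders; the notifications block is optional and
# its fields are the ones read by send_mail further down):
#
# sql:
#   dbtype: mysql        # or "sqlite"
#   dbname: my_database
#   host: localhost
#   user: my_user
#   passwd: my_password
# notifications:
#   mailgun_api_base_url: https://api.mailgun.net/v3/your-domain
#   mailgun_api_key: key-xxxx
#   mailgun_default_smtp_login: postmaster@your-domain
#   email_to_notify: you@example.com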
# Function to send mail if notifications are turned on in config.yml
# TODO: finalize this function
def send_mail(self, message_dict):
'''Sends an email via Mailgun.
Args:
message_dict (dict):
{
"subject": "your_subject"
"text": "message"
}
Uses self.notif_config (dict):
{
"mailgun_api_base_url": "link to mailgun_api_base_url"
"mailgun_api_key": "your mailgun_api_key"
"mailgun_default_smtp_login": "your mailgun_default_smtp_login"
"email_to_notify": "the email_to_notify"
}
Returns:
requests.post to Mailgun API.
'''
api_base_url = self.notif_config["mailgun_api_base_url"] + '/messages'
auth = ('api', self.notif_config["mailgun_api_key"])
data = {
"from": f"SparseTwitter <{self.notif_config['mailgun_default_smtp_login']}>",
"to": self.notif_config["email_to_notify"]
}
data.update(message_dict)
return post(api_base_url, auth=auth, data=data)
# TODO: Add mailgun config
|
175464
|
from .models import print_image_classifiers, image_classifier
from .models import print_image_regression_models, image_regression_model
from .data import show_image, show_random_images, preview_data_aug, get_data_aug
from .data import images_from_folder, images_from_csv, images_from_array, images_from_fname, preprocess_csv
from .predictor import ImagePredictor
__all__ = [
'image_classifier', 'image_regression_model',
'print_image_classifiers', 'print_image_regression_models',
'images_from_folder', 'images_from_csv', 'images_from_array', 'images_from_fname',
'get_data_aug',
'preprocess_csv',
'ImagePredictor',
'show_image',
'show_random_images',
'preview_data_aug'
]
|
175481
|
from .all_pass import all_pass
def positive(x):
return x > 0
def less_than_ten(x):
return x < 10
def all_pass_nocurry_test():
assert all_pass([], "foo")
assert all_pass([positive, less_than_ten], 5)
assert not all_pass([positive, less_than_ten], 10)
assert not all_pass([positive, less_than_ten], 0)
def all_pass_curry_test():
between_zero_and_ten = all_pass([positive, less_than_ten])
assert between_zero_and_ten(5)
assert not between_zero_and_ten(10)
assert not between_zero_and_ten(0)
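# The tests above pin down the expected behaviour of all_pass: with two arguments
# it returns True iff every predicate accepts the value (vacuously True for an
# empty list), and with a single argument it returns a curried checker. A minimal
# sketch consistent with that (not necessarily the real .all_pass implementation):
#
#   def all_pass(predicates, *args):
#       if not args:
#           return lambda value: all(p(value) for p in predicates)
#       return all(p(args[0]) for p in predicates)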
|
175501
|
from copy import deepcopy
from typing import Any, Optional, Union
import pandas as pd
from pycaret.internal.logging import get_logger
from pycaret.internal.Display import Display
from sklearn.base import clone
from sklearn.utils.validation import check_is_fitted
from sklearn.pipeline import Pipeline
def is_sklearn_pipeline(object):
from sklearn.pipeline import Pipeline
return isinstance(object, Pipeline)
def is_sklearn_cv_generator(object):
return not isinstance(object, str) and hasattr(object, "split")
def is_fitted(estimator) -> bool:
try:
check_is_fitted(estimator)
return True
except:
return False
class fit_if_not_fitted(object):
"""
Context which fits an estimator if it's not fitted.
"""
def __init__(
self,
estimator,
X_train: pd.DataFrame,
y_train: pd.DataFrame,
groups=None,
**fit_kwargs,
):
logger = get_logger()
self.estimator = deepcopy(estimator)
if not is_fitted(self.estimator):
try:
self.estimator._carry_over_final_estimator_fit_vars()
except:
pass
if not is_fitted(self.estimator):
logger.info(f"fit_if_not_fitted: {estimator} is not fitted, fitting")
try:
self.estimator.fit(X_train, y_train, groups=groups, **fit_kwargs)
except:
self.estimator.fit(X_train, y_train, **fit_kwargs)
def __enter__(self):
return self.estimator
def __exit__(self, type, value, traceback):
return
def supports_partial_fit(estimator, params: dict = None) -> bool:
# special case for MLP
from sklearn.neural_network import MLPClassifier
if isinstance(estimator, MLPClassifier):
try:
if (
params and "solver" in params and "lbfgs" in list(params["solver"])
) or estimator.solver == "lbfgs":
return False
except:
return False
if isinstance(estimator, Pipeline):
return hasattr(estimator.steps[-1][1], "partial_fit")
return hasattr(estimator, "partial_fit")
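# Rough usage sketch (illustrative, not part of the original module):
#   from sklearn.linear_model import SGDClassifier
#   supports_partial_fit(SGDClassifier())                       # True: has partial_fit
#   supports_partial_fit(MLPClassifier(solver="lbfgs"))         # False: lbfgs special case
#   supports_partial_fit(Pipeline([("clf", SGDClassifier())]))  # checks the final step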
|
175509
|
from requests import Session
from bs4 import BeautifulSoup
import re
class tiktok:
def __init__(self) -> None:
self.request = Session()
self.url = "https://ssstik.io"
self.html = self.request.get(self.url).text
self.key = BeautifulSoup(self.html, "html.parser").find_all("form",attrs={"data-hx-target":"#target"})[0].get("include-vals")
self.post = BeautifulSoup(self.html, "html.parser").find_all("form",attrs={"data-hx-target":"#target"})[0].get("data-hx-post")
self.tt = re.search(r"tt:'(.*?)'", self.key)[1]
self.ts = re.search(r"ts:([0-9]{5,15})", self.key)[1]
self.header = {"content-type": "application/x-www-form-urlencoded; charset=UTF-8","hx-active-element": "submit","hx-current-url": "https://ssstik.io/","hx-request": "true","hx-target": "target","origin": "https://ssstik.io","sec-fetch-dest": "","sec-fetch-mode": "cors","sec-fetch-site": "same-origin","user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36"}
def download(self, url) -> dict:
data = {"id": url,"locale": "en","tt": self.tt,"ts": int(self.ts)}
post = self.request.post(f"{self.url}{self.post}", headers=self.header, data=data)
respon = BeautifulSoup(post.text, "html.parser")
hasil = {"video":[f'{self.url}{respon.find_all("a",class_="pure-button pure-button-primary is-center u-bl dl-button download_link without_watermark")[0].get("href")}',f'{self.url}{respon.find_all("a",class_="pure-button pure-button-primary is-center u-bl dl-button download_link without_watermark_direct")[0].get("href")}'],"music":f'{respon.find_all("a",class_="pure-button pure-button-primary is-center u-bl dl-button download_link music")[0].get("href")}'}
return hasil
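# Illustrative usage (the URL below is a placeholder; the scraped endpoint and CSS
# classes above may break whenever ssstik.io changes its markup):
#
#   result = tiktok().download("https://www.tiktok.com/@user/video/1234567890")
#   result["video"]  # -> two watermark-free download links (server and direct)
#   result["music"]  # -> link to the audio track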
class tiktok2:
def __init__(self, url) -> None:
self.request = Session()
self.url =url
self.header = {"accept": "*/*","accept-language": "en-US,en;q=0.9,id;q=0.8","origin": "https://snaptik.app","referer": "https://snaptik.app/ID","sec-fetch-dest": None,"sec-fetch-mode": "cors","sec-fetch-site": "same-origin","user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.111 Safari/537.36",}
def download(self):
self.request.post("https://snaptik.app/check_token.php", headers=self.header)
self.bs = BeautifulSoup(self.request.post("https://snaptik.app/action-2021.php", headers=self.header, data={"url":self.url}).text, "html.parser")
return [self.request,{"title":self.bs('a', attrs={"title":""})[0].text,"date":self.bs("b", attrs={"class":"blur"})[0].text,"url":list(filter(lambda x:x, map(lambda x:x["href"] if "token" in x["href"] else None, self.bs("a", attrs={"class":"abutton is-success is-fullwidth"}))))}, self.header]
|
175513
|
def less_or_equal(value):
if value <= 25:
    return "25 or less"
elif value <= 75:
    return "75 or less"
else:
return "More than 75"
# Change the value 1 below to experiment with different values
print(less_or_equal(1))
|
175532
|
import os
import json
import argus
from argus.callbacks import MonitorCheckpoint, EarlyStopping, LoggingToFile
from torch.utils.data import DataLoader
from src.dataset import SaltDataset, SaltTestDataset
from src.transforms import SimpleDepthTransform, SaltTransform
from src.lr_scheduler import ReduceLROnPlateau
from src.argus_models import SaltMeanTeacherModel
from src import config
from src.nick_zoo.resnet_blocks import resnet152
EXPERIMENT_NAME = 'flex-fpn-mt-resnet152-001'
TRAIN_BATCH_SIZE = 10
VAL_BATCH_SIZE = 10
UNLABELED_BATCH = 6
IMAGE_SIZE = (128, 128)
OUTPUT_SIZE = (101, 101)
TRAIN_FOLDS_PATH = '/workdir/data/train_folds_148.csv'
TEST_DIR = '/workdir/data/test/images148'
SAVE_DIR = f'/workdir/data/experiments/{EXPERIMENT_NAME}'
FOLDS = list(range(config.N_FOLDS))
PARAMS = {
'nn_module': ('UNetFPNFlexProb', {
'num_classes': 1,
'num_channels': 3,
'blocks': resnet152,
'final': 'sigmoid',
'dropout_2d': 0.2,
'is_deconv': True,
'deflation': 4,
'use_first_pool': False,
'skip_dropout': True,
'pretrain': 'resnet152',
'pretrain_layers': [True for _ in range(5)],
'fpn_layers': [16, 32, 64, 128]
}),
'loss': ('FbBceProbLoss', {
'fb_weight': 0.95,
'fb_beta': 2,
'bce_weight': 0.9,
'prob_weight': 0.85
}),
'prediction_transform': ('ProbOutputTransform', {
'segm_thresh': 0.5,
'prob_thresh': 0.5,
}),
'mean_teacher': {
'alpha': 0.99,
'rampup_length': 10,
'unlabeled_batch': UNLABELED_BATCH,
'consistency_segm_weight': 0.3,
'consistency_prob_weight': 0.3
},
'optimizer': ('Adam', {'lr': 0.0001}),
'device': 'cuda'
}
@argus.callbacks.on_epoch_start
def update_model_epoch(state: argus.engine.State):
state.model.epoch = state.epoch
def train_fold(save_dir, train_folds, val_folds):
depth_trns = SimpleDepthTransform()
train_trns = SaltTransform(IMAGE_SIZE, True, 'crop')
val_trns = SaltTransform(IMAGE_SIZE, False, 'crop')
train_dataset = SaltDataset(TRAIN_FOLDS_PATH, train_folds, train_trns, depth_trns)
val_dataset = SaltDataset(TRAIN_FOLDS_PATH, val_folds, val_trns, depth_trns)
train_loader = DataLoader(train_dataset, batch_size=TRAIN_BATCH_SIZE, shuffle=True,
drop_last=True, num_workers=4)
val_loader = DataLoader(val_dataset, batch_size=VAL_BATCH_SIZE, shuffle=False, num_workers=4)
test_dataset = SaltTestDataset(test_dir=TEST_DIR, transform=val_trns, depth_transform=depth_trns)
model = SaltMeanTeacherModel(PARAMS)
model.test_dataset = test_dataset
callbacks = [
MonitorCheckpoint(save_dir, monitor='val_crop_iout', max_saves=3, copy_last=False),
EarlyStopping(monitor='val_crop_iout', patience=100),
ReduceLROnPlateau(monitor='val_crop_iout', patience=25, factor=0.72, min_lr=1e-8),
LoggingToFile(os.path.join(save_dir, 'log.txt')),
update_model_epoch
]
model.fit(train_loader,
val_loader=val_loader,
max_epochs=700,
callbacks=callbacks,
metrics=['crop_iout'])
if __name__ == "__main__":
if not os.path.exists(SAVE_DIR):
os.makedirs(SAVE_DIR)
else:
print(f"Folder {SAVE_DIR} already exists.")
with open(os.path.join(SAVE_DIR, 'source.py'), 'w') as outfile:
outfile.write(open(__file__).read())
with open(os.path.join(SAVE_DIR, 'params.json'), 'w') as outfile:
json.dump(PARAMS, outfile)
for i in range(len(FOLDS)):
val_folds = [FOLDS[i]]
train_folds = FOLDS[:i] + FOLDS[i + 1:]
save_fold_dir = os.path.join(SAVE_DIR, f'fold_{FOLDS[i]}')
print(f"Val folds: {val_folds}, Train folds: {train_folds}")
print(f"Fold save dir {save_fold_dir}")
train_fold(save_fold_dir, train_folds, val_folds)
|
175549
|
from measurement.measures import Energy
class TestEnergy:
def test_dietary_calories_kwarg(self):
calories = Energy(Calorie=2000)
kilojoules = Energy(kJ=8368)
assert calories.si_value == kilojoules.si_value
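# 1 dietary Calorie (kcal) = 4.184 kJ, so 2000 Calories = 8368 kJ -- hence the two
# quantities above should compare equal on their SI values.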
|
175644
|
from typing import Tuple
import numpy as np
from PyGenetic.crossover import CrossoverDecidor
from PyGenetic.mutation import MutationDecidor
class FactoryPopulation():
def __init__(self):
self.crossover_decidor = CrossoverDecidor(self.crossover_type,
self.n_genes)
self.mutation_decidor = MutationDecidor(self.mutation_type,
self.n_genes,
self.mutation_propability,
self.low_boundery,
self.high_boundery)
def crossover(self, first_chromo: np.ndarray,
second_chromo: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
first_child, second_child = self.crossover_decidor.run(
first_chromo, second_chromo)
return (first_child, second_child)
def mutation(self, chromosome: np.ndarray) -> None:
self.mutation_decidor.run(chromosome)
def parent_selection(self, fitness: np.array) -> None:
fit_idx = np.argsort(fitness)[::-1]
self.parents = self.pool[fit_idx[:self.n_parents]]
def breed_childern(self) -> None:
for i in range(self.n_pool // 2):
first_chromo = self.parents[np.random.choice(range(
self.n_parents))]
second_chromo = self.parents[np.random.choice(range(
self.n_parents))]
first_child, second_child = self.crossover(first_chromo,
second_chromo)
self.mutation(first_child)
self.mutation(second_child)
self.pool[i:i + 2] = [first_child, second_child]
self.pool[-1] = self.parents[0]
|
175689
|
from django.conf.urls import url
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
schema_view = get_schema_view(
openapi.Info(
title="West Oakland Air Quality API",
default_version="v1",
description=(
"West Oakland Air Quality (WOAQ) is a project of OpenOakland "
"focused on building digital advocacy tools around air quality data "
"collected by volunteers and citizen scientists. WOAQ works in partnership "
"with West Oakland Environmental Indicators Project (WOEIP). This API is "
"for storing and updating WOEIP air quality datasets."
),
terms_of_service=None, # Optional URL to terms of service
contact=openapi.Contact(email="<EMAIL>"),
license=openapi.License(
name="MIT", url="https://github.com/openoakland/woeip/blob/master/LICENSE"
),
),
public=False,
)
urlpatterns = [
url(
r"^swagger(?P<format>\.json|\.yaml)$",
schema_view.without_ui(cache_timeout=0),
name="schema-json",
),
url(
r"^swagger/$",
schema_view.with_ui("swagger", cache_timeout=0),
name="schema-swagger-ui",
),
url(
r"^redoc/$", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc",
),
]
|
175693
|
import os
import datetime
import torch
def run(parser, dev):
args = parser.parse_args()
## Set gpu ids
str_ids = args.gpu_ids.split(',')
args.gpu_ids = []
for str_id in str_ids:
gpu_id = int(str_id)
if gpu_id >= 0:
args.gpu_ids.append(gpu_id)
if len(args.gpu_ids) > 0:
torch.cuda.set_device(args.gpu_ids[0])
## Set device
args.device = torch.device('cuda:{}'.format(args.gpu_ids[0])) if args.gpu_ids else torch.device('cpu')
if args.isTrain:
## Set decay steps
str_steps = args.decay_steps.split(',')
args.decay_steps = []
for str_step in str_steps:
str_step = int(str_step)
args.decay_steps.append(str_step)
## Set names
current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
args.name = '{}_{}_{}_{}_{}'.format(args.name, current_time, args.dataset, args.backbone, args.loss_type)
## Print Options
message = ''
message += '----------------- Options ---------------\n'
for k, v in sorted(vars(args).items()):
comment = ''
default = parser.get_default(k)
if v != default:
comment = '\t[default: %s]' % str(default)
message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment)
message += '----------------- End -------------------'
print(message)
if args.isTrain and not dev:
## Save Options
expr_dir = os.path.join(args.checkpoints_dir, args.name)
os.makedirs(expr_dir)
file_name = os.path.join(expr_dir, 'arguments.txt')
with open(file_name, 'wt') as opt_file:
opt_file.write(message)
opt_file.write('\n')
return args
|
175717
|
from contextlib import closing
import psycopg
class PostgreSQLConnectionChecker:
def __init__(self, **kwargs):
self.host = kwargs.get('host')
self.port = kwargs.get('port')
self.user = kwargs.get('user')
self.password = kwargs.get('password')
self.database = kwargs.get('database', 'postgres')
def _get_connection(self):
conn = psycopg.connect(f'host={self.host} port={self.port} dbname={self.database} user={self.user} password={self.password}', connect_timeout=10)
return conn
def check_connection(self):
try:
con = self._get_connection()
with closing(con) as con:
cur = con.cursor()
cur.execute('select 1;')
connected = True
except Exception as e:
print('EXCEPTION!')
print(e)
connected = False
return connected
|
175745
|
import tensorflow as tf
import datetime
import os
import numpy as np
import zutils.tf_math_funcs as tmf
from zutils.py_utils import *
from scipy.io import savemat
class OneEpochRunner:
def __init__(
self, data_module, output_list=None,
net_func=None, batch_axis=0, num_samples=None, disp_time_interval=2,
output_fn=None, is_large=False):
self.data_module = data_module
self.num_samples = self.data_module.num_samples()
self.batch_axis = batch_axis
self.disp_time_interval = disp_time_interval
self.output_fn = output_fn
self.is_large = is_large
if num_samples is not None:
if self.num_samples < num_samples:
print("specified number_samples is larger than one epoch")
else:
self.num_samples = num_samples
self.use_net_func = output_list is None # otherwise use net_func
if self.use_net_func:
assert net_func is not None, \
"output_list and net_func should not be both specified"
self.net_func = net_func
# remark: net_func(sess)
else:
assert net_func is None, \
"one of output_list and net_func must be specified"
self.output_list = output_list
[self.flatten_output_list, self.output_wrap_func] = \
recursive_flatten_with_wrap_func(
lambda x: tmf.is_tf_data(x), self.output_list)
self.data_module.reset()
self.cur_sample_end = 0
def run_single_batch(self, sess):
if self.cur_sample_end >= self.num_samples:
return None
if self.use_net_func:
output_val = self.net_func(sess)
else:
output_val = sess.run(self.flatten_output_list, {})
output_val = self.output_wrap_func(output_val)
batch_size = first_element_apply(
lambda x: isinstance(x, np.ndarray),
lambda x: x.shape[self.batch_axis], output_val)
self.batch_size = batch_size
new_end = self.cur_sample_end + batch_size
if new_end > self.num_samples:
effective_batch_size = \
batch_size - (new_end-self.num_samples)
slice_indexes = (slice(None),)*self.batch_axis + (slice(effective_batch_size),)
output_val = recursive_apply(
lambda x: isinstance(x, np.ndarray),
lambda x: x[slice_indexes], output_val)
self.cur_sample_end = new_end
return output_val
def run(self, sess):
disp_countdown = IfTimeout(self.disp_time_interval)
num_samples_total = self.num_samples
output_val_single = self.run_single_batch(sess)
output_val = []
while output_val_single is not None:
output_val += [output_val_single]
iter = self.data_module.iter()
if self.data_module.epoch() == 0:
num_samples_finished = self.data_module.num_samples_finished()
else:
num_samples_finished = self.num_samples
if disp_countdown.is_timeout():
epoch_percentage = num_samples_finished / num_samples_total * 100
print("%s] Iter %d (%4.1f%% = %d / %d)" %
(datetime.datetime.now().strftime('%Y-%m/%d-%H:%M:%S.%f'),
iter, epoch_percentage, num_samples_finished, num_samples_total))
disp_countdown = IfTimeout(self.disp_time_interval)
if self.is_large and (num_samples_finished % (100*self.batch_size) == 0 or num_samples_finished == self.num_samples):
output_val = recursive_apply(
lambda *args: isinstance(args[0], np.ndarray),
lambda *args: np.concatenate(args, axis=self.batch_axis),
*output_val)
self.dir_path = os.path.dirname(self.output_fn+'_'+'%06d'%num_samples_finished)
if not os.path.exists(self.dir_path):
os.makedirs(self.dir_path)
savemat(self.output_fn+'_'+'%06d'%num_samples_finished+'.mat',output_val)
print('Saving part of output to '+ self.output_fn+'_'+'%06d'%num_samples_finished+'.mat')
output_val = []
output_val_single = self.run_single_batch(sess)
if not self.is_large:
output_val = recursive_apply(
lambda *args: isinstance(args[0], np.ndarray),
lambda *args: np.concatenate(args, axis=self.batch_axis),
*output_val)
savemat(self.output_fn + ".mat", output_val)
print('Saving output to ' + self.output_fn + ".mat")
|
175766
|
from simple_zpl2 import ZPLDocument
def add_to_zdoc(upc):
zdoc = ZPLDocument()
zdoc.add_barcode(upc)
return zdoc
|
175773
|
import unittest, sys, os, io, copy
import numpy as np
import cctk
if __name__ == '__main__':
unittest.main()
class TestOrca(unittest.TestCase):
def test_write(self):
read_path = "test/static/test_peptide.xyz"
path = "test/static/test_peptide.inp"
new_path = "test/static/test_peptide_copy.inp"
file = cctk.XYZFile.read_file(read_path)
self.assertTrue(isinstance(file.get_molecule(), cctk.Molecule))
header = "! aug-cc-pVTZ aug-cc-pVTZ/C DLPNO-CCSD(T) TightSCF TightPNO MiniPrint"
variables = {"maxcore": 4000}
blocks = {"pal": ["nproc 4"], "mdci": ["density none"]}
cctk.OrcaFile.write_molecule_to_file(new_path, file.get_molecule(), header, variables, blocks)
with open(path) as old:
with open(new_path) as new:
self.assertListEqual(
list(new),
list(old)
)
os.remove(new_path)
ensemble = cctk.ConformationalEnsemble()
ensemble.add_molecule(file.get_molecule())
orca_file = cctk.OrcaFile(job_types=[cctk.OrcaJobType.SP], ensemble=ensemble, header=header, blocks=blocks, variables=variables)
orca_file.write_file(new_path)
with open(path) as old:
with open(new_path) as new:
self.assertListEqual(
list(new),
list(old)
)
os.remove(new_path)
def test_read(self):
path = "test/static/MsOH_ccsdt.out"
file = cctk.OrcaFile.read_file(path)
self.assertEqual(file.successful_terminations, 1)
self.assertEqual(file.elapsed_time, 8575)
self.assertEqual(file.header, "! aug-cc-pVQZ aug-cc-pVQZ/C DLPNO-CCSD(T) TightSCF TightPNO MiniPrint")
self.assertEqual(file.variables["maxcore"], "50000")
self.assertListEqual(file.blocks["mdci"], ["density none"])
mol = file.get_molecule()
self.assertTrue(isinstance(mol, cctk.Molecule))
self.assertEqual(mol.num_atoms(), 9)
self.assertEqual(file.ensemble[mol,"energy"], -663.663569902734)
path = "test/static/AcOH_orca.out"
file = cctk.OrcaFile.read_file(path)
mol = file.get_molecule()
self.assertTrue(isinstance(mol, cctk.Molecule))
self.assertEqual(mol.num_atoms(), 8)
self.assertEqual(file.ensemble[mol,"energy"], -229.12132242363)
self.assertEqual(file.ensemble[mol, "dipole_moment"], 1.76241)
self.assertEqual(file.ensemble[mol, "mulliken_charges"][1], 0.333851)
self.assertEqual(file.ensemble[mol, "lowdin_charges"][1], -0.515118)
self.assertEqual(file.ensemble[mol, "temperature"], 298.15)
self.assertEqual(file.ensemble[mol, "enthalpy"], -229.05330337)
self.assertEqual(file.ensemble[mol, "gibbs_free_energy"], -229.08534132)
def test_nmr(self):
path = "test/static/ibuprofen_nmr_orca.out"
file = cctk.OrcaFile.read_file(path)
molecule = file.get_molecule()
properties_dict = file.ensemble.get_properties_dict(molecule)
energy = properties_dict["energy"]
self.assertTrue(abs(energy + 656.306067336866) < 1e-8)
self.assertTrue(abs(file.ensemble[-1, "energy"] + 656.306067336866) < 1e-8)
shieldings = properties_dict["isotropic_shielding"]
self.assertListEqual(list(shieldings[:5]), [55.307, 68.003, 63.738, 51.446, 65.325])
|
175803
|
import numpy as np
def sigmoid(x):
indp = np.where(x>=0)
indn = np.where(x<0)
tx = np.zeros(x.shape)
tx[indp] = 1./(1.+np.exp(-x[indp]))
tx[indn] = np.exp(x[indn])/(1.+np.exp(x[indn]))
return tx
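# Splitting on the sign of x above keeps the argument of np.exp non-positive in
# both branches, which avoids overflow for large |x|.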
def sigmoid_prime(x):
return sigmoid(x) * (1 - sigmoid(x))
def KL_divergence(x, y):
return x * (np.log(x+1E-20)-np.log(y+1E-20)) + (1 - x) * (np.log(1 - x+1E-20) - np.log(1 - y+1E-20))
def initialize(hidden_size, visible_size):
r = np.sqrt(6) / np.sqrt(hidden_size + visible_size + 1)
W1 = np.random.random((hidden_size, visible_size)) * 2 * r - r
W2 = np.random.random((visible_size, hidden_size)) * 2 * r - r
b1 = np.zeros(hidden_size, dtype=np.float64)
b2 = np.zeros(visible_size, dtype=np.float64)
theta = np.concatenate((W1.reshape(hidden_size * visible_size),
W2.reshape(hidden_size * visible_size),
b1.reshape(hidden_size),
b2.reshape(visible_size)))
return theta
def sparse_autoencoder_cost(theta, visible_size, hidden_size,
lambda_, sparsity_param, beta, data):
W1 = theta[0:hidden_size * visible_size].reshape(hidden_size, visible_size)
W2 = theta[hidden_size * visible_size:2 * hidden_size * visible_size].reshape(visible_size, hidden_size)
b1 = theta[2 * hidden_size * visible_size:2 * hidden_size * visible_size + hidden_size]
b2 = theta[2 * hidden_size * visible_size + hidden_size:]
m = data.shape[1]
z2 = W1.dot(data) + np.tile(b1, (m, 1)).transpose()
a2 = sigmoid(z2)
z3 = W2.dot(a2) + np.tile(b2, (m, 1)).transpose()
h = sigmoid(z3)
rho_hat = np.sum(a2, axis=1) / m
cost = np.sum((h - data) ** 2) / (2 * m) + \
(lambda_ / 2) * (np.sum(W1 ** 2) + np.sum(W2 ** 2)) + \
beta * np.sum(KL_divergence(sparsity_param, rho_hat))
sparsity_delta = np.tile(-sparsity_param / rho_hat + (1 - sparsity_param) / (1 - rho_hat), (m, 1)).transpose()
delta3 = -(data - h) * sigmoid_prime(z3)
delta2 = (W2.transpose().dot(delta3) + beta * sparsity_delta) * sigmoid_prime(z2)
W1grad = delta2.dot(data.transpose()) / m + lambda_ * W1
W2grad = delta3.dot(a2.transpose()) / m + lambda_ * W2
b1grad = np.sum(delta2, axis=1) / m
b2grad = np.sum(delta3, axis=1) / m
grad = np.concatenate((W1grad.reshape(hidden_size * visible_size),
W2grad.reshape(hidden_size * visible_size),
b1grad.reshape(hidden_size),
b2grad.reshape(visible_size)))
return cost, grad
def sparse_autoencoder(theta, hidden_size, visible_size, data):
W1 = theta[0:hidden_size * visible_size].reshape(hidden_size, visible_size)
b1 = theta[2 * hidden_size * visible_size:2 * hidden_size * visible_size + hidden_size]
m = data.shape[1]
z2 = W1.dot(data) + np.tile(b1, (m, 1)).transpose()
a2 = sigmoid(z2)
return a2
def sparse_autoencoder_linear_cost(theta, visible_size, hidden_size,
lambda_, sparsity_param, beta, data):
W1 = theta[0:hidden_size * visible_size].reshape(hidden_size, visible_size)
W2 = theta[hidden_size * visible_size:2 * hidden_size * visible_size].reshape(visible_size, hidden_size)
b1 = theta[2 * hidden_size * visible_size:2 * hidden_size * visible_size + hidden_size]
b2 = theta[2 * hidden_size * visible_size + hidden_size:]
m = data.shape[1]
z2 = W1.dot(data) + np.tile(b1, (m, 1)).transpose()
a2 = sigmoid(z2)
z3 = W2.dot(a2) + np.tile(b2, (m, 1)).transpose()
h = z3
rho_hat = np.sum(a2, axis=1) / m
cost = np.sum((h - data) ** 2) / (2 * m) + \
(lambda_ / 2) * (np.sum(W1 ** 2) + np.sum(W2 ** 2)) + \
beta * np.sum(KL_divergence(sparsity_param, rho_hat))
sparsity_delta = np.tile(-sparsity_param / rho_hat + (1 - sparsity_param) / (1 - rho_hat), (m, 1)).transpose()
delta3 = -(data - h)
delta2 = (W2.transpose().dot(delta3) + beta * sparsity_delta) * sigmoid_prime(z2)
W1grad = delta2.dot(data.transpose()) / m + lambda_ * W1
W2grad = delta3.dot(a2.transpose()) / m + lambda_ * W2
b1grad = np.sum(delta2, axis=1) / m
b2grad = np.sum(delta3, axis=1) / m
grad = np.concatenate((W1grad.reshape(hidden_size * visible_size),
W2grad.reshape(hidden_size * visible_size),
b1grad.reshape(hidden_size),
b2grad.reshape(visible_size)))
return cost, grad
|
175816
|
import json
import traceback
import uuid
import datetime
from isodate import duration_isoformat
from motorway.utils import DateTimeAwareJsonEncoder
import logging
class Message(object):
"""
:param ramp_unique_id: the unique message ID delivered back upon completion to the ramp
:param content: any json serializable content
:param grouping_value: String that can be used for routing messages consistently to the same receiver
:return:
"""
FAIL = -1
SUCCESS = 0
def __init__(self, ramp_unique_id, content=None, ack_value=None, controller_queue=None, grouping_value=None,
error_message=None, process_name=None, producer_uuid=None, destination_endpoint=None,
destination_uuid=None):
self.ramp_unique_id = ramp_unique_id
self.content = content
if not ack_value:
ack_value = uuid.uuid4().int
self.ack_value = ack_value
self.controller_queue = controller_queue
self.grouping_value = grouping_value
self.error_message = error_message
self.process_name = process_name
self.producer_uuid = producer_uuid
self.destination_endpoint = destination_endpoint
self.destination_uuid = destination_uuid
self.init_time = datetime.datetime.now()
@classmethod
def new(cls, message, content, grouping_value=None, error_message=None):
"""
Creates a new message, based on an existing message. This has the consequence that it will be tracked together
and the tap will not be notified until every message in the chain is properly ack'ed.
:param message: Message instance, as received by the intersection
:param content: Any value that can be serialized into json
:param grouping_value: String that can be used for routing messages consistently to the same receiver
"""
return cls(ramp_unique_id=message.ramp_unique_id, content=content, grouping_value=grouping_value,
error_message=error_message, producer_uuid=message.producer_uuid)
@classmethod
def from_message(cls, message, controller_queue, process_name=None):
"""
:param message: Message dict (converted from JSON)
:param controller_queue:
:param process_name: UUID of the process processing this message (as string)
:return:
"""
# assert type(message) is dict, "message (%s) should be dict" % message
if type(message) is not dict:
logging.error("Expected type dict, got type %s - message: %s", type(message), message)
message['process_name'] = process_name
# assert 'producer_uuid' in message, "missing uuid %s" % message
return cls(controller_queue=controller_queue, **message)
def _message(self):
return {
'content': self.content,
'ramp_unique_id': self.ramp_unique_id,
'ack_value': self.ack_value,
'grouping_value': self.grouping_value,
'producer_uuid': self.producer_uuid
}
def as_json(self):
return json.dumps(self._message(), cls=DateTimeAwareJsonEncoder)
def send(self, queue, producer_uuid=None):
if producer_uuid and not self.producer_uuid: # Check if provided and we didn't get one already
self.producer_uuid = producer_uuid
elif not self.producer_uuid:
assert self.producer_uuid
queue.send_string(
self.as_json()
)
def send_control_message(self, controller_queue, time_consumed=None, process_name=None, destination_endpoint=None,
destination_uuid=None, sender=None):
"""
Control messages are notifications that a new message has been created, so the controller can keep track of
this particular message and let the ramp know once the entire tree of messages has been completed.
This is called implicitly on yield Message(_id, 'message')
:param process_name: UUID of the process processing this message (as string)
"""
content = {
'process_name': process_name,
'msg_type': 'new_msg',
}
if not self.producer_uuid:
raise Exception("Cannot send control message without producer UUID")
if time_consumed:
# Ramps provide time consumed, since we don't know the "start time" like in an intersection
# where it's clear when the message is received and later 'acked' as the last action
content['duration'] = duration_isoformat(time_consumed)
content['sender'] = sender
controller_queue.send_json({
'ramp_unique_id': self.ramp_unique_id,
'ack_value': self.ack_value,
'content': content,
'producer_uuid': self.producer_uuid,
'destination_endpoint': destination_endpoint,
'destination_uuid': destination_uuid
})
def ack(self, time_consumed=None):
"""
Send a message to the controller that this message was properly processed
"""
self.controller_queue.send_json({
'ramp_unique_id': self.ramp_unique_id,
'ack_value': self.ack_value,
'content': {
'process_name': self.process_name,
'msg_type': 'ack',
'duration': duration_isoformat(time_consumed or (datetime.datetime.now() - self.init_time))
},
'producer_uuid': self.producer_uuid,
'destination_uuid': self.producer_uuid
})
def fail(self, error_message="", capture_exception=True):
"""
Send a message to the controller that this message failed to process
"""
self.controller_queue.send_json({
'ramp_unique_id': self.ramp_unique_id,
'ack_value': -1,
'content': {
'process_name': self.process_name,
'msg_type': 'fail',
'duration': duration_isoformat(datetime.datetime.now() - self.init_time),
'message_content': self.content
},
'producer_uuid': self.producer_uuid,
'destination_uuid': self.producer_uuid,
'error_message': error_message if not capture_exception else traceback.format_exc(),
})
def __repr__(self):
return "<Message: %s> %s" % (self.ramp_unique_id, self.content)
|
175828
|
from __future__ import absolute_import
import sys
from collections import Iterable, Mapping
from kombu.five import string_t
__all__ = ['lazy', 'maybe_evaluate', 'is_list', 'maybe_list']
class lazy(object):
"""Holds lazy evaluation.
Evaluated when called or if the :meth:`evaluate` method is called.
The function is re-evaluated on every call.
Overloaded operations that will evaluate the promise:
:meth:`__str__`, :meth:`__repr__`, :meth:`__cmp__`.
"""
def __init__(self, fun, *args, **kwargs):
self._fun = fun
self._args = args
self._kwargs = kwargs
def __call__(self):
return self.evaluate()
def evaluate(self):
return self._fun(*self._args, **self._kwargs)
def __str__(self):
return str(self())
def __repr__(self):
return repr(self())
def __eq__(self, rhs):
return self() == rhs
def __ne__(self, rhs):
return self() != rhs
def __deepcopy__(self, memo):
memo[id(self)] = self
return self
def __reduce__(self):
return (self.__class__, (self._fun, ), {'_args': self._args,
'_kwargs': self._kwargs})
if sys.version_info[0] < 3:
def __cmp__(self, rhs):
if isinstance(rhs, self.__class__):
return -cmp(rhs, self())
return cmp(self(), rhs)
def maybe_evaluate(value):
"""Evaluates if the value is a :class:`lazy` instance."""
if isinstance(value, lazy):
return value.evaluate()
return value
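# For example (illustrative):
#   p = lazy(lambda: 1 + 1)
#   maybe_evaluate(p)    # -> 2, the promise is evaluated
#   maybe_evaluate(42)   # -> 42, non-lazy values pass through unchanged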
def is_list(l, scalars=(Mapping, string_t), iters=(Iterable, )):
"""Return true if the object is iterable (but not
if object is a mapping or string)."""
return isinstance(l, iters) and not isinstance(l, scalars or ())
def maybe_list(l, scalars=(Mapping, string_t)):
"""Return list of one element if ``l`` is a scalar."""
return l if l is None or is_list(l, scalars) else [l]
# Compat names (before kombu 3.0)
promise = lazy
maybe_promise = maybe_evaluate
|
175857
|
from django.db import transaction
class AtomicMixin(object):
"""
Ensures we rollback db transactions on exceptions.
Idea from https://github.com/tomchristie/django-rest-framework/pull/1204
"""
@transaction.atomic()
def dispatch(self, *args, **kwargs):
return super(AtomicMixin, self).dispatch(*args, **kwargs)
def handle_exception(self, *args, **kwargs):
response = super(AtomicMixin, self).handle_exception(*args, **kwargs)
if getattr(response, 'exception'):
# We've suppressed the exception but still need to rollback any transaction.
transaction.set_rollback(True)
return response
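# Typical (hypothetical) usage with a DRF view; the mixin must appear before the
# view base class so its dispatch()/handle_exception() overrides take effect:
#
#   class WidgetCreateView(AtomicMixin, generics.CreateAPIView):
#       ...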
|
175871
|
from .windows import add_dock, window, run, persist_layout
from mpldock.common import named
from . import tweaks, backend
__all__ = ['window', 'add_dock', 'tweaks', 'backend', 'run', 'persist_layout']
|
175936
|
import pytest
@pytest.mark.parametrize(('f', 't'), [(sum, list), (len, int)])
def test_foo(f, t):
assert isinstance(f([[1], [2]]), t)
def test_bar(): # unparametrized
pass
|
175962
|
from chainerui.models.argument import Argument # NOQA
from chainerui.models.asset import Asset # NOQA
from chainerui.models.bindata import Bindata # NOQA
from chainerui.models.command import Command # NOQA
from chainerui.models.log import Log # NOQA
from chainerui.models.project import Project # NOQA
from chainerui.models.result import Result # NOQA
from chainerui.models.snapshot import Snapshot # NOQA
|
175981
|
BZX = Contract.from_abi("BZX", "0xc47812857a74425e2039b57891a3dfcf51602d5d", interface.IBZx.abi)
TOKEN_REGISTRY = Contract.from_abi("TOKEN_REGISTRY", "0x2fA30fB75E08f5533f0CF8EBcbb1445277684E85", TokenRegistry.abi)
list = TOKEN_REGISTRY.getTokens(0, 100)
for l in list:
iTokenTemp = Contract.from_abi("iTokenTemp", l[0], LoanTokenLogicStandard.abi)
globals()[iTokenTemp.symbol()] = iTokenTemp
underlyingTemp = Contract.from_abi("underlyingTemp", l[1], TestToken.abi)
globals()[underlyingTemp.symbol()] = underlyingTemp
CHI = Contract.from_abi("CHI", "0x0000000000004946c0e9F43F4Dee607b0eF1fA1c", TestToken.abi)
CHEF = Contract.from_abi("CHEF", "0x1FDCA2422668B961E162A8849dc0C2feaDb58915", MasterChef_BSC.abi)
HELPER = Contract.from_abi("HELPER", "0xE05999ACcb887D32c9bd186e8C9dfE0e1E7814dE", HelperImpl.abi)
BGOV = Contract.from_abi("PGOV", "0xf8E026dC4C0860771f691EcFFBbdfe2fa51c77CF", GovToken.abi)
CHEF = Contract.from_abi("CHEF", CHEF.address, interface.IMasterChef.abi)
SWEEP_FEES = Contract.from_abi("STAKING", "0x5c9b515f05a0E2a9B14C171E2675dDc1655D9A1c", FeeExtractAndDistribute_BSC.abi)
|
176000
|
from sre_parse import Pattern, SubPattern, parse
from sre_compile import compile as sre_compile
from sre_constants import BRANCH, SUBPATTERN
class _ScanMatch(object):
def __init__(self, match, rule, start, end):
self._match = match
self._start = start
self._end = end
self._rule = rule
def __getattr__(self, name):
return getattr(self._match, name)
def __group_proc(self, method, group):
if group == 0:
return method()
if isinstance(group, basestring):
return method(self._rule + '\x00' + group)
real_group = self._start + group
if real_group > self._end:
raise IndexError('no such group')
return method(real_group)
def group(self, *groups):
if len(groups) in (0, 1):
return self.__group_proc(self._match.group,
groups and groups[0] or 0)
return tuple(self.__group_proc(self._match.group, group)
for group in groups)
def groupdict(self, default=None):
prefix = self._rule + '\x00'
rv = {}
for key, value in self._match.groupdict(default).iteritems():
if key.startswith(prefix):
rv[key[len(prefix):]] = value
return rv
def span(self, group=0):
return self.__group_proc(self._match.span, group)
def groups(self):
return self._match.groups()[self._start:self._end]
def start(self, group=0):
return self.__group_proc(self._match.start, group)
def end(self, group=0):
return self.__group_proc(self._match.end, group)
def expand(self, template):
raise RuntimeError('Unsupported on scan matches')
class ScanEnd(Exception):
def __init__(self, pos):
Exception.__init__(self, pos)
self.pos = pos
class Scanner(object):
def __init__(self, rules, flags=0):
pattern = Pattern()
pattern.flags = flags
pattern.groups = len(rules) + 1
_og = pattern.opengroup
pattern.opengroup = lambda n: _og(n and '%s\x00%s' % (name, n) or n)
self.rules = []
subpatterns = []
for group, (name, regex) in enumerate(rules, 1):
last_group = pattern.groups - 1
subpatterns.append(SubPattern(pattern, [
(SUBPATTERN, (group, parse(regex, flags, pattern))),
]))
self.rules.append((name, last_group, pattern.groups - 1))
self._scanner = sre_compile(SubPattern(
pattern, [(BRANCH, (None, subpatterns))])).scanner
def scan(self, string, skip=False):
sc = self._scanner(string)
match = None
for match in iter(sc.search if skip else sc.match, None):
rule, start, end = self.rules[match.lastindex - 1]
yield rule, _ScanMatch(match, rule, start, end)
if not skip:
end = match and match.end() or 0
if end < len(string):
raise ScanEnd(end)
def scan_with_holes(self, string):
pos = 0
for rule, match in self.scan(string, skip=True):
hole = string[pos:match.start()]
if hole:
yield None, hole
yield rule, match
pos = match.end()
hole = string[pos:]
if hole:
yield None, hole
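# Illustrative usage (rule names and input invented for the example):
#
#   scanner = Scanner([('number', r'\d+'), ('word', r'[a-zA-Z]+')])
#   for rule, value in scanner.scan_with_holes('abc 123'):
#       # matched pieces arrive as (rule_name, _ScanMatch), unmatched holes as
#       # (None, text), so this yields roughly:
#       #   ('word', <'abc'>), (None, ' '), ('number', <'123'>)
#       ...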
|
176005
|
import pandas as pd
import numpy as np
from os.path import join, exists, split
from os import mkdir, makedirs, listdir
import gc
import matplotlib.pyplot as plt
import seaborn
from copy import deepcopy
from time import time
import pickle
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('split_name')
parser.add_argument('f')
parser.add_argument('--t_reset',type=int, default=25)
args = parser.parse_args()
split_name = args.split_name
f = args.f
t_reset = args.t_reset
# split_name = 'temporal_4'
# f = 'batch_38.h5'
# t_reset = 25
fixed_rec = 0.9
model_name = 'shap_top500_features'
delta_t = 0
window_size = 480
data_version = 'v6b'
result_version = '181108'
t_postevent = np.timedelta64(2, 'h')
wsize_upper_h = (window_size+delta_t) * np.timedelta64(1, 'm')
wsize_lower_h = delta_t * np.timedelta64(1, 'm')
bern_path = '/cluster/work/grlab/clinical/Inselspital/DataReleases/01-19-2017/InselSpital/'
alarm_path = join(bern_path, 'circews_analysis', 'alarm_score_for_calibration_new',
data_version, 'merged_0_reset_%d'%t_reset, model_name, split_name)
ep_path = join(bern_path, '3a_endpoints', data_version,'reduced')
res_dir = lambda s: 'WorseStateFromZero_0.0_8.0_%s_lightgbm_full'%s
pred_path = join(bern_path, '8_predictions', result_version, 'reduced',
split_name, res_dir(model_name))
with pd.HDFStore(join(pred_path, f), mode='r') as tmp:
pids = [int(key[2:]) for key in tmp.keys()]
gc.collect()
df_store = pd.HDFStore(join(pred_path, f), mode='r')
pids = [int(key[2:]) for key in df_store.keys()]
df_store.close()
gc.collect()
stats = dict()
lst_period_type = ['critical_window', 'maintenance_window', 'uncritical_window', 'patients_wo_events']
for period_type in lst_period_type:
stats.update({period_type: dict(valid_los=[], cnt_alarm=[], los=[])})
stats.update(cnt_catched_event=0, cnt_missed_event=0)
is_critical_win = lambda t, ts: np.logical_and(ts< t-wsize_lower_h,
ts>=t-wsize_upper_h)
is_maintenance_win = lambda t, ts: np.logical_and(ts>t,
ts<=t+t_postevent)
is_uncritical_win = lambda t, ts, mode: ts<t-wsize_upper_h if mode=='before' else ts>t+t_postevent
t_start = time()
for n, pid in enumerate(pids):
df = pd.read_hdf(join(alarm_path, f), 'p%d'%pid).reset_index()
# df = pd.read_hdf(join(alarm_path, 'rec_%g'%fixed_rec+f), 'p%d'%pid).reset_index()
df.set_index('AbsDatetime', inplace=True)
for col in ['InEvent', 'Stable']:
df.loc[:,col] = df[col].astype(int)
if df.InEvent.sum()==0:
# assert('Yes' not in df.IsAlarmTrue.unique())
stats['patients_wo_events']['valid_los'].append( df.Stable.sum() / 12)
stats['patients_wo_events']['cnt_alarm'].append( df.Alarm.sum() )
stats['patients_wo_events']['los'].append( len(df)/12 )
else:
stable_sum = 0
beg_onset = df.index[np.where(np.array([0]+np.diff(df.InEvent.values).tolist())==1)[0]]
end_onset = df.index[np.where(np.diff(df.InEvent.values)==-1)[0]]
if df.iloc[0].InEvent==1:
beg_onset = np.concatenate([[df.index[0]], beg_onset])
if df.iloc[-1].InEvent==1:
end_onset = np.concatenate([end_onset, [df.index[-1]]])
assert(len(beg_onset)==len(end_onset))
### Critical window
for i, dt in enumerate(beg_onset):
dt = np.datetime64(dt)
win_pre_event = df[is_critical_win(dt, df.index.values)]
if len(win_pre_event)==0:
continue
if len(win_pre_event)==0 or win_pre_event.Stable.sum()==0:
continue
if ~ df.loc[dt,'Onset']:
pass
elif win_pre_event.Alarm.sum()>0 and df.loc[dt,'Onset'] and df.loc[dt,'CatchedOnset']:
stats['cnt_catched_event'] += 1
elif win_pre_event.Alarm.sum()==0 and df.loc[dt,'Onset'] and ~df.loc[dt,'CatchedOnset']:
stats['cnt_missed_event'] += 1
else:
print('Alarm number', win_pre_event.Alarm.sum(),'; Onset status', df.loc[dt, 'CatchedOnset'])
print(dt)
raise Exception('Problem!!!!')
if i > 0:
win_pre_event = win_pre_event[win_pre_event.index>end_onset[i-1]]
stable_sum += win_pre_event.Stable.sum() / 12
stats['critical_window']['valid_los'].append( win_pre_event.Stable.sum() / 12 )
stats['critical_window']['los'].append( len(df)/12 )
stats['critical_window']['cnt_alarm'].append( win_pre_event.Alarm.sum() )
### Uncritical window
for i, dt in enumerate(beg_onset):
dt = np.datetime64(dt)
win_pre_event = df[is_uncritical_win(dt, df.index.values, 'before')]
if len(win_pre_event)==0:
continue
if i > 0:
win_pre_event = win_pre_event[win_pre_event.index>end_onset[i-1]+t_postevent]
if len(win_pre_event)==0 or win_pre_event.Stable.sum()==0:
continue
stable_sum += win_pre_event.Stable.sum() / 12
stats['uncritical_window']['valid_los'].append(win_pre_event.Stable.sum() / 12)
stats['uncritical_window']['los'].append( len(df)/12 )
stats['uncritical_window']['cnt_alarm'].append(win_pre_event.Alarm.sum())
win_post_last_event = df[is_uncritical_win(np.datetime64(end_onset[-1]),df.index.values,'after')]
stable_sum += win_post_last_event.Stable.sum() / 12
stats['uncritical_window']['valid_los'].append(win_post_last_event.Stable.sum() / 12)
stats['uncritical_window']['los'].append( len(df)/12 )
stats['uncritical_window']['cnt_alarm'].append(win_post_last_event.Alarm.sum())
### Maintenance window
for i, dt in enumerate(end_onset):
dt = np.datetime64(dt)
win_post_event = df[is_maintenance_win(dt, df.index.values)]
if len(win_post_event)==0:
continue
if i < len(beg_onset) - 1:
win_post_event = win_post_event[win_post_event.index<beg_onset[i+1]-wsize_upper_h]
if len(win_post_event)==0 or win_post_event.Stable.sum()==0:
continue
stable_sum += win_post_event.Stable.sum() / 12
stats['maintenance_window']['valid_los'].append(win_post_event.Stable.sum() / 12)
stats['maintenance_window']['los'].append( len(df)/12 )
stats['maintenance_window']['cnt_alarm'].append(win_post_event.Alarm.sum())
assert(np.abs(df.Stable.sum()/12-stable_sum)<1e-10)
if (n+1)%10==0:
print('Process %d patients, time: %4.4g sec'%(n+1, time()-t_start))
gc.collect()
# with open(join(alarm_path, 'rec_%g'%fixed_rec+f.replace('.h5', '.pkl')), 'wb') as tmp:
# pickle.dump(stats, tmp)
with open(join(alarm_path, f.replace('.h5', '.pkl')), 'wb') as tmp:
pickle.dump(stats, tmp)
|
176035
|
import numpy as np
import pandas as pd
from vimms.old_unused_experimental.PythonMzmine import get_base_scoring_df
from vimms.Roi import make_roi
QCB_MZML2CHEMS_DICT = {'min_ms1_intensity': 1.75E5,
'mz_tol': 2,
'mz_units': 'ppm',
'min_length': 1,
'min_intensity': 0,
'start_rt': 0,
'stop_rt': 1560}
def get_rois(mzml, min_roi_length, mzml2chems_dict=QCB_MZML2CHEMS_DICT):
good_roi, junk_roi = make_roi(mzml, mz_tol=mzml2chems_dict['mz_tol'], mz_units=mzml2chems_dict['mz_units'],
min_length=min_roi_length, min_intensity=mzml2chems_dict['min_intensity'],
start_rt=mzml2chems_dict['start_rt'], stop_rt=mzml2chems_dict['stop_rt'])
return good_roi, junk_roi
def mzml2classificationdata(mzmls, mzml_picked_peaks_files, min_roi_length=5, mzml2chems_dict=QCB_MZML2CHEMS_DICT,
mz_slack=0.01, drift_window_lengths=[5], rt_peak_tol=2, include_status=True):
rois = []
for i in range(len(mzmls)):
good_roi, junk_roi = get_rois(mzmls[i], min_roi_length, mzml2chems_dict)
rois.extend(good_roi)
picked_peaks = get_base_scoring_df(mzml_picked_peaks_files[i])
df_new = rois2classificationdata2(good_roi, picked_peaks, mz_slack=mz_slack,
drift_window_lengths=drift_window_lengths, rt_peak_tol=rt_peak_tol,
include_status=include_status)
if i == 0:
df = df_new
else:
df = pd.concat([df, df_new])
return df, rois
class get_prob_classifier(object):
def __init__(self, mzmls, mzml_picked_peaks_files, min_roi_length=5, mzml2chems_dict=QCB_MZML2CHEMS_DICT,
mz_slack=0.01, roi_change_n=5, rt_peak_tol=2):
self.roi_change_n = roi_change_n
df, rois = mzml2classificationdata(mzmls, mzml_picked_peaks_files, min_roi_length, mzml2chems_dict,
mz_slack, [roi_change_n], rt_peak_tol, include_status=True)
df = df.dropna(thresh=2)
base_classes = ['Decrease', 'Increase', 'Noise', 'Top']
self.probabilities = []
for i in range(int(max(df.iloc[:, 0]) + 1)):
i_classes = df['rt_status'].iloc[np.where(df.iloc[:, 0] == i)[0]]
probs = np.array([sum(i_classes == base) for base in base_classes]) / len(i_classes)
self.probabilities.append(probs)
def predict(self, value):
return self.probabilities[value]
def calculate_window_change(intensities, drift_window_len):
return sum((np.array(intensities)[-(drift_window_len - 1):] - np.array(intensities)[-drift_window_len:-1]) > 0)
def find_possible_peaks(roi, picked_peaks, mz_slack):
rt_check1 = (picked_peaks['rt min'] >= roi.rt_list[0]) & (roi.rt_list[-1] >= picked_peaks['rt min'])
rt_check2 = (picked_peaks['rt max'] >= roi.rt_list[0]) & (roi.rt_list[-1] >= picked_peaks['rt max'])
rt_check3 = (picked_peaks['rt min'] <= roi.rt_list[0]) & (picked_peaks['rt max'] >= roi.rt_list[-1])
rt_check = rt_check1 | rt_check2 | rt_check3
# logger.debug('rt len ' + len(rt_check))
# logger.debug('rt check ' + rt_check)
# plus and minus one is just slack for the initial check
initial_mz_check = (picked_peaks['m/z max'] + 1 >= roi.get_mean_mz()) & (
roi.get_mean_mz() >= picked_peaks['m/z min'] - 1)
# logger.debug('mz len ' + len(initial_mz_check))
# logger.debug('mz check ' + initial_mz_check)
possible_peaks = np.where(np.logical_and(rt_check, initial_mz_check))[0]
updated_possible_peaks = []
for j in possible_peaks:
peak = picked_peaks.iloc[j]
check_peak = np.nonzero((peak['rt min'] < roi.rt_list) & (roi.rt_list < peak['rt max']))[0]
mean_mz = np.mean(np.array(roi.mz_list)[check_peak])
if peak['m/z min'] - mz_slack < mean_mz < peak['m/z max'] + mz_slack:
updated_possible_peaks.append(j)
return updated_possible_peaks
def rois2classificationdata2(rois, picked_peaks, mz_slack=0.01, drift_window_lengths=[5], rt_peak_tol=2,
include_status=True):
roi_change_list = [[] for i in range(len(drift_window_lengths))]
rt_status_list = []
for roi in rois:
# get drift data
for window in range(len(drift_window_lengths)):
roi_change_list[window].extend([None for i in range(drift_window_lengths[window] - 1)])
roi_change = [calculate_window_change(roi.intensity_list[:i], drift_window_lengths[window])
for i in range(drift_window_lengths[window], roi.n + 1)]
roi_change_list[window].extend(roi_change)
# get possible peaks
if include_status:
possible_peaks = find_possible_peaks(roi, picked_peaks, mz_slack)
possible_peaks_list = picked_peaks.iloc[possible_peaks]
# get data
if not possible_peaks:
rt_status_list.extend([0 for rt in roi.rt_list])
else:
for rt in roi.rt_list:
rt_status = 0
for j in range(len(possible_peaks_list.index)):
if possible_peaks_list['rt centre'].iloc[j] - rt_peak_tol <= rt <= \
possible_peaks_list['rt centre'].iloc[j] + rt_peak_tol:
rt_status = max(3, rt_status)
elif possible_peaks_list['rt min'].iloc[j] <= rt <= possible_peaks_list['rt centre'].iloc[j]:
rt_status = max(2, rt_status)
elif possible_peaks_list['rt centre'].iloc[j] <= rt <= possible_peaks_list['rt max'].iloc[j]:
rt_status = max(1, rt_status)
else:
rt_status = max(0, rt_status)
rt_status_list.append(rt_status)
# convert rt status to classes
if include_status:
rt_status_list = np.array(rt_status_list)
rt_status_list_str = np.array(['Unknown' for i in range(len(rt_status_list))], dtype="<U10")
rt_status_list_str[np.where(rt_status_list == 0)[0]] = 'Noise'
rt_status_list_str[np.where(rt_status_list == 1)[0]] = 'Decrease'
rt_status_list_str[np.where(rt_status_list == 2)[0]] = 'Increase'
rt_status_list_str[np.where(rt_status_list == 3)[0]] = 'Top'
# save as data frame
df = pd.DataFrame()
for window in range(len(drift_window_lengths)):
df['roi_change_' + str(drift_window_lengths[window])] = roi_change_list[window]
if include_status:
df['rt_status'] = rt_status_list_str
return df
# def get_intensity_difference(roi_intensities, n, positive=True):
# # add exception for short roi
# difference = []
# for i in range(len(roi_intensities) - n):
# difference.append(np.log(roi_intensities[i + n]) - np.log(roi_intensities[i]))
# if positive:
# return max(difference)
# else:
# return min(difference)
#
#
# def get_max_increasing(roi_intensities, n_skip=0, increasing_TF=True):
# # add exception for short roi
# max_increasing = 0
# for i in range(len(roi_intensities)):
# current_increasing = 0
# current_skip = 0
# if len(roi_intensities[i:]) <= max_increasing:
# break
# for j in range(1, len(roi_intensities[i:])):
# if (roi_intensities[i:][j] > roi_intensities[i:][j - 1 - current_skip]) == increasing_TF:
# current_increasing += 1 + current_skip
# current_skip = 0
# else:
# current_skip += 1
# if current_skip > n_skip:
# max_increasing = max(max_increasing, current_increasing)
# break
# return max_increasing
#
#
# def get_intensity_list(roi, max_length):
# if max_length is None:
# return roi.intensity_list
# else:
# return roi.intensity_list[0:max_length]
# def rois2classificationdata(rois, picked_peaks, mz_slack=0.01):
# base_roi = []
# base_status = []
# split_roi = []
# split_status = []
# for roi in rois:
# rt_check1 = (picked_peaks['rt min'] >= roi.rt_list[0]) & (roi.rt_list[-1] >= picked_peaks['rt min'])
# rt_check2 = (picked_peaks['rt max'] >= roi.rt_list[0]) & (roi.rt_list[-1] >= picked_peaks['rt max'])
# rt_check3 = (picked_peaks['rt min'] <= roi.rt_list[0]) & (picked_peaks['rt max'] >= roi.rt_list[-1])
# rt_check = rt_check1 | rt_check2 | rt_check3
# # plus and minus one is just slack for the initial check
# initial_mz_check = (picked_peaks['m/z max'] + 1 >= roi.get_mean_mz()) & (
# roi.get_mean_mz() >= picked_peaks['m/z min'] - 1)
# possible_peaks = np.nonzero(rt_check & initial_mz_check)[0]
# if len(possible_peaks) == 0:
# base_roi.append(roi)
# split_roi.append(roi)
# base_status.append(0)
# split_status.append(0)
# else:
# updated_possible_peaks = []
# for j in possible_peaks:
# peak = picked_peaks.iloc[j]
# check_peak = np.nonzero((peak['rt min'] < roi.rt_list) & (roi.rt_list < peak['rt max']))[0]
# mean_mz = np.mean(np.array(roi.mz_list)[check_peak])
# if peak['m/z min'] - mz_slack < mean_mz < peak['m/z max'] + mz_slack:
# updated_possible_peaks.append(j)
# if len(updated_possible_peaks) == 0:
# base_roi.append(roi)
# split_roi.append(roi)
# base_status.append(0)
# split_status.append(0)
# else:
# if len(updated_possible_peaks) == 1:
# base_roi.append(roi)
# split_roi.append(roi)
# base_status.append(1)
# split_status.append(1)
# if len(updated_possible_peaks) > 1:
# base_roi.append(roi)
# base_status.append(1)
# df = picked_peaks.iloc[updated_possible_peaks]
# df = df.sort_values(by=['rt min'])
# splits = (np.array(df['rt min'][1:]) + np.array(df['rt max'][0:-1])) / 2
# splits = np.insert(np.insert(splits, 0, 0), len(splits) + 1, 2000)
# for j in range(len(splits) - 1):
# check_range1 = roi.rt_list > splits[j]
# check_range2 = roi.rt_list < splits[j + 1]
# mz = np.array(roi.mz_list)[np.nonzero(check_range1 & check_range2)[0]].tolist()
# rt = np.array(roi.rt_list)[np.nonzero(check_range1 & check_range2)[0]].tolist()
# intensity = np.array(roi.intensity_list)[np.nonzero(check_range1 & check_range2)].tolist()
# split_roi.append(Roi(mz, rt, intensity))
# split_status.append(1)
# return base_roi, base_status, split_roi, split_status
#
#
# def get_roi_classification_params(rois, roi_param_dict):
# df = pd.DataFrame()
# if roi_param_dict['include_log_max_intensity']:
# df['log_max_intensity'] = np.log([roi.get_max_intensity() for roi in rois])
# if roi_param_dict['include_log_intensity_difference']:
# df['log_intensity_difference'] = df['log_max_intensity'] - np.log([roi.get_min_intensity() for roi in rois])
# if roi_param_dict['consecutively_change_max'] > 0:
# for i in range(roi_param_dict['consecutively_change_max']):
# df['n_increase_' + str(i)] = [get_max_increasing(roi.intensity_list, i, True) for roi in rois]
# df['n_decrease_' + str(i)] = [get_max_increasing(roi.intensity_list, i, False) for roi in rois]
# df['n_interaction_' + str(i)] = df['n_increase_' + str(i)] * df['n_decrease_' + str(i)]
# if roi_param_dict['intensity_change_max'] > 0:
# for i in range(roi_param_dict['intensity_change_max']):
# df['intensity_increase_' + str(i)] = [get_intensity_difference(roi.intensity_list, i+1, True) for roi in rois]
# df['intensity_decrease_' + str(i)] = [get_intensity_difference(roi.intensity_list, i+1, False) for roi in rois]
# df['intensity_interaction_' + str(i)] = df['intensity_increase_' + str(i)] * df['intensity_decrease_' + str(i)]
# if roi_param_dict['lag_max'] > 0:
# for i in range(roi_param_dict['lag_max']):
# df['autocorrelation_' + str(i+1)] = [roi.get_autocorrelation(i+1) for roi in rois]
# return df
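# --- Hypothetical sketch (illustrative only): a guarded variant of the commented-out
# get_intensity_difference above, addressing its "add exception for short roi" TODO by
# returning 0.0 when the ROI has too few points to form a lag-n difference. The name
# get_intensity_difference_safe and the 0.0 fallback are assumptions, not original code. ---
def get_intensity_difference_safe(roi_intensities, n, positive=True):
    # not enough points to compute any log-ratio at lag n
    if len(roi_intensities) <= n:
        return 0.0
    difference = [np.log(roi_intensities[i + n]) - np.log(roi_intensities[i])
                  for i in range(len(roi_intensities) - n)]
    return max(difference) if positive else min(difference)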
|
176057
|
from django.test import TestCase
from django.test import Client
class Exercise4TestCase(TestCase):
def test_template_content(self):
"""Test that the index view returns the set names from the paramaters, or defaults to 'world'"""
c = Client()
response = c.get('/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Hello, world!')
response = c.get('/?name=Ben')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Hello, Ben!')
response = c.get('/?name=Ben&name=John')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Hello, John!')
response = c.get('/?name=')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Hello, world!')
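# --- Hypothetical reference implementation (illustrative only, not part of the original app) ---
# A minimal sketch of an index view that would satisfy the assertions above, assuming the
# project's urlconf routes '' to it; the function name and wiring here are assumptions.
from django.http import HttpResponse
def index(request):
    # QueryDict.get() returns the last value of a repeated key, so ?name=Ben&name=John -> 'John';
    # a missing or empty name falls back to 'world'.
    name = request.GET.get('name') or 'world'
    return HttpResponse('Hello, {}!'.format(name))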
|
176062
|
import connexion
import six
from tapi_server.models.inline_object1 import InlineObject1 # noqa: E501
from tapi_server.models.inline_object12 import InlineObject12 # noqa: E501
from tapi_server.models.inline_object13 import InlineObject13 # noqa: E501
from tapi_server.models.inline_object14 import InlineObject14 # noqa: E501
from tapi_server.models.inline_object27 import InlineObject27 # noqa: E501
from tapi_server.models.inline_object6 import InlineObject6 # noqa: E501
from tapi_server.models.tapi_common_bandwidth_profile import TapiCommonBandwidthProfile # noqa: E501
from tapi_server.models.tapi_common_capacity import TapiCommonCapacity # noqa: E501
from tapi_server.models.tapi_common_capacity_value import TapiCommonCapacityValue # noqa: E501
from tapi_server.models.tapi_common_name_and_value import TapiCommonNameAndValue # noqa: E501
from tapi_server.models.tapi_common_service_interface_point_ref import TapiCommonServiceInterfacePointRef # noqa: E501
from tapi_server.models.tapi_common_time_range import TapiCommonTimeRange # noqa: E501
from tapi_server.models.tapi_connectivity_ceplist_connection_end_point import TapiConnectivityCeplistConnectionEndPoint # noqa: E501
from tapi_server.models.tapi_connectivity_connection import TapiConnectivityConnection # noqa: E501
from tapi_server.models.tapi_connectivity_connection_end_point_ref import TapiConnectivityConnectionEndPointRef # noqa: E501
from tapi_server.models.tapi_connectivity_connection_ref import TapiConnectivityConnectionRef # noqa: E501
from tapi_server.models.tapi_connectivity_connectivity_context import TapiConnectivityConnectivityContext # noqa: E501
from tapi_server.models.tapi_connectivity_connectivity_service_ref import TapiConnectivityConnectivityServiceRef # noqa: E501
from tapi_server.models.tapi_connectivity_connectivitycontext_connectivity_service import TapiConnectivityConnectivitycontextConnectivityService # noqa: E501
from tapi_server.models.tapi_connectivity_connectivityservice_end_point import TapiConnectivityConnectivityserviceEndPoint # noqa: E501
from tapi_server.models.tapi_connectivity_context_topologycontext_topology_node_ownednodeedgepoint_cep_list import TapiConnectivityContextTopologycontextTopologyNodeOwnednodeedgepointCepList # noqa: E501
from tapi_server.models.tapi_connectivity_create_connectivity_service import TapiConnectivityCreateConnectivityService # noqa: E501
from tapi_server.models.tapi_connectivity_get_connection_details import TapiConnectivityGetConnectionDetails # noqa: E501
from tapi_server.models.tapi_connectivity_get_connection_end_point_details import TapiConnectivityGetConnectionEndPointDetails # noqa: E501
from tapi_server.models.tapi_connectivity_get_connectivity_service_details import TapiConnectivityGetConnectivityServiceDetails # noqa: E501
from tapi_server.models.tapi_connectivity_get_connectivity_service_list import TapiConnectivityGetConnectivityServiceList # noqa: E501
from tapi_server.models.tapi_connectivity_route import TapiConnectivityRoute # noqa: E501
from tapi_server.models.tapi_connectivity_route_ref import TapiConnectivityRouteRef # noqa: E501
from tapi_server.models.tapi_connectivity_switch import TapiConnectivitySwitch # noqa: E501
from tapi_server.models.tapi_connectivity_switch_control import TapiConnectivitySwitchControl # noqa: E501
from tapi_server.models.tapi_connectivity_switch_control_ref import TapiConnectivitySwitchControlRef # noqa: E501
from tapi_server.models.tapi_connectivity_update_connectivity_service import TapiConnectivityUpdateConnectivityService # noqa: E501
from tapi_server.models.tapi_path_computation_path_ref import TapiPathComputationPathRef # noqa: E501
from tapi_server.models.tapi_topology_cost_characteristic import TapiTopologyCostCharacteristic # noqa: E501
from tapi_server.models.tapi_topology_latency_characteristic import TapiTopologyLatencyCharacteristic # noqa: E501
from tapi_server.models.tapi_topology_link_ref import TapiTopologyLinkRef # noqa: E501
from tapi_server.models.tapi_topology_node_edge_point_ref import TapiTopologyNodeEdgePointRef # noqa: E501
from tapi_server.models.tapi_topology_node_ref import TapiTopologyNodeRef # noqa: E501
from tapi_server.models.tapi_topology_resilience_type import TapiTopologyResilienceType # noqa: E501
from tapi_server.models.tapi_topology_risk_characteristic import TapiTopologyRiskCharacteristic # noqa: E501
from tapi_server.models.tapi_topology_topology_ref import TapiTopologyTopologyRef # noqa: E501
from tapi_server.models.tapi_connectivity_getconnectivityservicelist_output import TapiConnectivityGetconnectivityservicelistOutput # noqa: F401,E501
from tapi_server.models.tapi_connectivity_getconnectivityservicedetails_output import TapiConnectivityGetconnectivityservicedetailsOutput # noqa: F401,E501
from tapi_server.models.tapi_connectivity_getconnectiondetails_output import TapiConnectivityGetconnectiondetailsOutput # noqa: F401,E501
from tapi_server.models.tapi_connectivity_getconnectionendpointdetails_output import TapiConnectivityGetconnectionendpointdetailsOutput # noqa: F401,E501
from tapi_server import util
from tapi_server import database
def data_context_connectivity_context_connectionuuid_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param topology_uuid: Id of connection-end-point
:type topology_uuid: str
:param node_uuid: Id of connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_get
returns tapi.connectivity.Connection # noqa: E501
:param uuid: Id of connection
:type uuid: str
:rtype: TapiConnectivityConnection
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_lower_connectionconnection_uuid_get(uuid, connection_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_lower_connectionconnection_uuid_get
returns tapi.connectivity.ConnectionRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param connection_uuid: Id of lower-connection
:type connection_uuid: str
:rtype: TapiConnectivityConnectionRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_namevalue_name_get(uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_routelocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, local_id, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_routelocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param local_id: Id of route
:type local_id: str
:param topology_uuid: Id of connection-end-point
:type topology_uuid: str
:param node_uuid: Id of connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_routelocal_id_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectionuuid_routelocal_id_get
returns tapi.connectivity.Route # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param local_id: Id of route
:type local_id: str
:rtype: TapiConnectivityRoute
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_routelocal_id_namevalue_name_get(uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_routelocal_id_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param local_id: Id of route
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_supported_client_linktopology_uuidlink_uuid_get(uuid, topology_uuid, link_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_supported_client_linktopology_uuidlink_uuid_get
returns tapi.topology.LinkRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param topology_uuid: Id of supported-client-link
:type topology_uuid: str
:param link_uuid: Id of supported-client-link
:type link_uuid: str
:rtype: TapiTopologyLinkRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_get(uuid, switch_control_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_get
returns tapi.connectivity.SwitchControl # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:rtype: TapiConnectivitySwitchControl
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_namevalue_name_get(uuid, switch_control_uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_resilience_type_get(uuid, switch_control_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_resilience_type_get
returns tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:rtype: TapiTopologyResilienceType
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_sub_switch_controlconnection_uuidsub_switch_control_switch_control_uuid_get(uuid, switch_control_uuid, connection_uuid, sub_switch_control_switch_control_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_sub_switch_controlconnection_uuidsub_switch_control_switch_control_uuid_get
returns tapi.connectivity.SwitchControlRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param connection_uuid: Id of sub-switch-control
:type connection_uuid: str
:param sub_switch_control_switch_control_uuid: Id of sub-switch-control
:type sub_switch_control_switch_control_uuid: str
:rtype: TapiConnectivitySwitchControlRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_get(uuid, switch_control_uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_get
returns tapi.connectivity.Switch # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:rtype: TapiConnectivitySwitch
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_namevalue_name_get(uuid, switch_control_uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, switch_control_uuid, local_id, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:param topology_uuid: Id of selected-connection-end-point
:type topology_uuid: str
:param node_uuid: Id of selected-connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of selected-connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of selected-connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_routeconnection_uuidroute_local_id_get(uuid, switch_control_uuid, local_id, connection_uuid, route_local_id): # noqa: E501
"""data_context_connectivity_context_connectionuuid_switch_controlswitch_control_uuid_switchlocal_id_selected_routeconnection_uuidroute_local_id_get
returns tapi.connectivity.RouteRef # noqa: E501
:param uuid: Id of connection
:type uuid: str
:param switch_control_uuid: Id of switch-control
:type switch_control_uuid: str
:param local_id: Id of switch
:type local_id: str
:param connection_uuid: Id of selected-route
:type connection_uuid: str
:param route_local_id: Id of selected-route
:type route_local_id: str
:rtype: TapiConnectivityRouteRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_service_post(tapi_connectivity_connectivitycontext_connectivity_service=None): # noqa: E501
"""data_context_connectivity_context_connectivity_service_post
creates tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param tapi_connectivity_connectivitycontext_connectivity_service: tapi.connectivity.connectivitycontext.ConnectivityService to be added to list
:type tapi_connectivity_connectivitycontext_connectivity_service: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivitycontext_connectivity_service = TapiConnectivityConnectivitycontextConnectivityService.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_avoid_topologytopology_uuid_get(uuid, topology_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_avoid_topologytopology_uuid_get
returns tapi.topology.TopologyRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of avoid-topology
:type topology_uuid: str
:rtype: TapiTopologyTopologyRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_connectionconnection_uuid_get(uuid, connection_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_connectionconnection_uuid_get
returns tapi.connectivity.ConnectionRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connection_uuid: Id of connection
:type connection_uuid: str
:rtype: TapiConnectivityConnectionRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_delete
removes tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_get
returns tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiConnectivityConnectivityServiceRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_post(uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_post
creates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added to list
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_put(uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_coroute_inclusion_put
creates or updates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added or updated
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristic_post(uuid, tapi_topology_cost_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristic_post
creates tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_cost_characteristic: tapi.topology.CostCharacteristic to be added to list
:type tapi_topology_cost_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_delete(uuid, cost_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_delete
removes tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_get(uuid, cost_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_get
returns tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:rtype: TapiTopologyCostCharacteristic
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_post(uuid, cost_name, tapi_topology_cost_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_post
creates tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:param tapi_topology_cost_characteristic: tapi.topology.CostCharacteristic to be added to list
:type tapi_topology_cost_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_put(uuid, cost_name, tapi_topology_cost_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_cost_characteristiccost_name_put
creates or updates tapi.topology.CostCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param cost_name: Id of cost-characteristic
:type cost_name: str
:param tapi_topology_cost_characteristic: tapi.topology.CostCharacteristic to be added or updated
:type tapi_topology_cost_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_cost_characteristic = TapiTopologyCostCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_delete
removes tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusion_post(uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusion_post
creates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added to list
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_delete(uuid, connectivity_service_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_delete
removes tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_get(uuid, connectivity_service_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_get
returns tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:rtype: TapiConnectivityConnectivityServiceRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_post(uuid, connectivity_service_uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_post
creates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added to list
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_put(uuid, connectivity_service_uuid, tapi_connectivity_connectivity_service_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_diversity_exclusionconnectivity_service_uuid_put
creates or updates tapi.connectivity.ConnectivityServiceRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param connectivity_service_uuid: Id of diversity-exclusion
:type connectivity_service_uuid: str
:param tapi_connectivity_connectivity_service_ref: tapi.connectivity.ConnectivityServiceRef to be added or updated
:type tapi_connectivity_connectivity_service_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_service_ref = TapiConnectivityConnectivityServiceRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_point_post(uuid, tapi_connectivity_connectivityservice_end_point=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_point_post
creates tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivityservice_end_point: tapi.connectivity.connectivityservice.EndPoint to be added to list
:type tapi_connectivity_connectivityservice_end_point: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivityservice_end_point = TapiConnectivityConnectivityserviceEndPoint.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_committed_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_delete
removes tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_get
returns tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonBandwidthProfile
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_peak_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_post(uuid, local_id, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_post
creates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added to list
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_put(uuid, local_id, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_bandwidth_profile_put
creates or updates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added or updated
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_delete
removes tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_get
returns tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacity
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_post(uuid, local_id, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_post
creates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity: tapi.common.Capacity to be added to list
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_put(uuid, local_id, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_put
creates or updates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity: tapi.common.Capacity to be added or updated
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_post(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_put(uuid, local_id, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_capacity_total_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get(uuid, local_id, topology_uuid, node_uuid, node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_connection_end_pointtopology_uuidnode_uuidnode_edge_point_uuidconnection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param topology_uuid: Id of connection-end-point
:type topology_uuid: str
:param node_uuid: Id of connection-end-point
:type node_uuid: str
:param node_edge_point_uuid: Id of connection-end-point
:type node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_delete
removes tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_get
returns tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiConnectivityConnectivityserviceEndPoint
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_name_post(uuid, local_id, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_delete(uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_delete
removes tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_get(uuid, local_id, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_post(uuid, local_id, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_put(uuid, local_id, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_namevalue_name_put
creates or updates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added or updated
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_post(uuid, local_id, tapi_connectivity_connectivityservice_end_point=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_post
creates tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_connectivity_connectivityservice_end_point: tapi.connectivity.connectivityservice.EndPoint to be added to list
:type tapi_connectivity_connectivityservice_end_point: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivityservice_end_point = TapiConnectivityConnectivityserviceEndPoint.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_put(uuid, local_id, tapi_connectivity_connectivityservice_end_point=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_put
creates or updates tapi.connectivity.connectivityservice.EndPoint # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_connectivity_connectivityservice_end_point: tapi.connectivity.connectivityservice.EndPoint to be added or updated
:type tapi_connectivity_connectivityservice_end_point: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivityservice_end_point = TapiConnectivityConnectivityserviceEndPoint.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_delete(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_delete
removes tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_get(uuid, local_id): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_get
returns tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:rtype: TapiCommonServiceInterfacePointRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_post(uuid, local_id, tapi_common_service_interface_point_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_post
creates tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_service_interface_point_ref: tapi.common.ServiceInterfacePointRef to be added to list
:type tapi_common_service_interface_point_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_service_interface_point_ref = TapiCommonServiceInterfacePointRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_put(uuid, local_id, tapi_common_service_interface_point_ref=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_end_pointlocal_id_service_interface_point_put
creates or updates tapi.common.ServiceInterfacePointRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param local_id: Id of end-point
:type local_id: str
:param tapi_common_service_interface_point_ref: tapi.common.ServiceInterfacePointRef to be added or updated
:type tapi_common_service_interface_point_ref: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_service_interface_point_ref = TapiCommonServiceInterfacePointRef.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_exclude_linktopology_uuidlink_uuid_get(uuid, topology_uuid, link_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_exclude_linktopology_uuidlink_uuid_get
returns tapi.topology.LinkRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of exclude-link
:type topology_uuid: str
:param link_uuid: Id of exclude-link
:type link_uuid: str
:rtype: TapiTopologyLinkRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_exclude_nodetopology_uuidnode_uuid_get(uuid, topology_uuid, node_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_exclude_nodetopology_uuidnode_uuid_get
returns tapi.topology.NodeRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of exclude-node
:type topology_uuid: str
:param node_uuid: Id of exclude-node
:type node_uuid: str
:rtype: TapiTopologyNodeRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_exclude_pathpath_uuid_get(uuid, path_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_exclude_pathpath_uuid_get
returns tapi.path.computation.PathRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param path_uuid: Id of exclude-path
:type path_uuid: str
:rtype: TapiPathComputationPathRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_get
returns tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiConnectivityConnectivitycontextConnectivityService
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_linktopology_uuidlink_uuid_get(uuid, topology_uuid, link_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_linktopology_uuidlink_uuid_get
returns tapi.topology.LinkRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of include-link
:type topology_uuid: str
:param link_uuid: Id of include-link
:type link_uuid: str
:rtype: TapiTopologyLinkRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_nodetopology_uuidnode_uuid_get(uuid, topology_uuid, node_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_nodetopology_uuidnode_uuid_get
returns tapi.topology.NodeRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of include-node
:type topology_uuid: str
:param node_uuid: Id of include-node
:type node_uuid: str
:rtype: TapiTopologyNodeRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_pathpath_uuid_get(uuid, path_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_pathpath_uuid_get
returns tapi.path.computation.PathRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param path_uuid: Id of include-path
:type path_uuid: str
:rtype: TapiPathComputationPathRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_include_topologytopology_uuid_get(uuid, topology_uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_include_topologytopology_uuid_get
returns tapi.topology.TopologyRef # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param topology_uuid: Id of include-topology
:type topology_uuid: str
:rtype: TapiTopologyTopologyRef
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristic_post(uuid, tapi_topology_latency_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristic_post
creates tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_latency_characteristic: tapi.topology.LatencyCharacteristic to be added to list
:type tapi_topology_latency_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_delete(uuid, traffic_property_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_delete
removes tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_get(uuid, traffic_property_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_get
returns tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:rtype: TapiTopologyLatencyCharacteristic
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_post(uuid, traffic_property_name, tapi_topology_latency_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_post
creates tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:param tapi_topology_latency_characteristic: tapi.topology.LatencyCharacteristic to be added to list
:type tapi_topology_latency_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_put(uuid, traffic_property_name, tapi_topology_latency_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_latency_characteristictraffic_property_name_put
creates or updates tapi.topology.LatencyCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param traffic_property_name: Id of latency-characteristic
:type traffic_property_name: str
:param tapi_topology_latency_characteristic: tapi.topology.LatencyCharacteristic to be added or updated
:type tapi_topology_latency_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_latency_characteristic = TapiTopologyLatencyCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_name_post(uuid, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_delete(uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_delete
removes tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_get(uuid, value_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_post(uuid, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_post
creates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added to list
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_put(uuid, value_name, tapi_common_name_and_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_namevalue_name_put
creates or updates tapi.common.NameAndValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param value_name: Id of name
:type value_name: str
:param tapi_common_name_and_value: tapi.common.NameAndValue to be added or updated
:type tapi_common_name_and_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_name_and_value = TapiCommonNameAndValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_post(uuid, tapi_connectivity_connectivitycontext_connectivity_service=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_post
creates tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivitycontext_connectivity_service: tapi.connectivity.connectivitycontext.ConnectivityService to be added to list
:type tapi_connectivity_connectivitycontext_connectivity_service: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivitycontext_connectivity_service = TapiConnectivityConnectivitycontextConnectivityService.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_put(uuid, tapi_connectivity_connectivitycontext_connectivity_service=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_put
creates or updates tapi.connectivity.connectivitycontext.ConnectivityService # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_connectivity_connectivitycontext_connectivity_service: tapi.connectivity.connectivitycontext.ConnectivityService to be added or updated
:type tapi_connectivity_connectivitycontext_connectivity_service: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivitycontext_connectivity_service = TapiConnectivityConnectivitycontextConnectivityService.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_committed_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_delete
removes tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_get
returns tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonBandwidthProfile
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_burst_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_peak_information_rate_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_post(uuid, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_post
creates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added to list
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_put(uuid, tapi_common_bandwidth_profile=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_bandwidth_profile_put
creates or updates tapi.common.BandwidthProfile # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_bandwidth_profile: tapi.common.BandwidthProfile to be added or updated
:type tapi_common_bandwidth_profile: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_bandwidth_profile = TapiCommonBandwidthProfile.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_delete
removes tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_get
returns tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacity
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_post(uuid, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_post
creates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity: tapi.common.Capacity to be added to list
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_put(uuid, tapi_common_capacity=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_put
creates or updates tapi.common.Capacity # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity: tapi.common.Capacity to be added or updated
:type tapi_common_capacity: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity = TapiCommonCapacity.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_delete
removes tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_get
returns tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonCapacityValue
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_post(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_post
creates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added to list
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_put(uuid, tapi_common_capacity_value=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_requested_capacity_total_size_put
creates or updates tapi.common.CapacityValue # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_capacity_value: tapi.common.CapacityValue to be added or updated
:type tapi_common_capacity_value: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_capacity_value = TapiCommonCapacityValue.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_delete
removes tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_get
returns tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiTopologyResilienceType
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_post(uuid, tapi_topology_resilience_type=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_post
creates tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_resilience_type: tapi.topology.ResilienceType to be added to list
:type tapi_topology_resilience_type: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_resilience_type = TapiTopologyResilienceType.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_resilience_type_put(uuid, tapi_topology_resilience_type=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_resilience_type_put
creates or updates tapi.topology.ResilienceType # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_resilience_type: tapi.topology.ResilienceType to be added or updated
:type tapi_topology_resilience_type: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_resilience_type = TapiTopologyResilienceType.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristic_post(uuid, tapi_topology_risk_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristic_post
creates tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_topology_risk_characteristic: tapi.topology.RiskCharacteristic to be added to list
:type tapi_topology_risk_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_delete(uuid, risk_characteristic_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_delete
removes tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_get(uuid, risk_characteristic_name): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_get
returns tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:rtype: TapiTopologyRiskCharacteristic
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_post(uuid, risk_characteristic_name, tapi_topology_risk_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_post
creates tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:param tapi_topology_risk_characteristic: tapi.topology.RiskCharacteristic to be added to list
:type tapi_topology_risk_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_put(uuid, risk_characteristic_name, tapi_topology_risk_characteristic=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_risk_diversity_characteristicrisk_characteristic_name_put
creates or updates tapi.topology.RiskCharacteristic # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param risk_characteristic_name: Id of risk-diversity-characteristic
:type risk_characteristic_name: str
:param tapi_topology_risk_characteristic: tapi.topology.RiskCharacteristic to be added or updated
:type tapi_topology_risk_characteristic: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_topology_risk_characteristic = TapiTopologyRiskCharacteristic.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_delete(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_delete
removes tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_get(uuid): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_get
returns tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:rtype: TapiCommonTimeRange
"""
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_post(uuid, tapi_common_time_range=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_post
creates tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_time_range: tapi.common.TimeRange to be added to list
:type tapi_common_time_range: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_time_range = TapiCommonTimeRange.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_connectivity_serviceuuid_schedule_put(uuid, tapi_common_time_range=None): # noqa: E501
"""data_context_connectivity_context_connectivity_serviceuuid_schedule_put
creates or updates tapi.common.TimeRange # noqa: E501
:param uuid: Id of connectivity-service
:type uuid: str
:param tapi_common_time_range: tapi.common.TimeRange to be added or updated
:type tapi_common_time_range: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_common_time_range = TapiCommonTimeRange.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_delete(): # noqa: E501
"""data_context_connectivity_context_delete
removes tapi.connectivity.ConnectivityContext # noqa: E501
:rtype: None
"""
return 'do some magic!'
def data_context_connectivity_context_get(): # noqa: E501
"""data_context_connectivity_context_get
returns tapi.connectivity.ConnectivityContext # noqa: E501
:rtype: TapiConnectivityConnectivityContext
"""
return 'do some magic!'
def data_context_connectivity_context_post(tapi_connectivity_connectivity_context=None): # noqa: E501
"""data_context_connectivity_context_post
creates tapi.connectivity.ConnectivityContext # noqa: E501
:param tapi_connectivity_connectivity_context: tapi.connectivity.ConnectivityContext to be added to list
:type tapi_connectivity_connectivity_context: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_context = TapiConnectivityConnectivityContext.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_connectivity_context_put(tapi_connectivity_connectivity_context=None): # noqa: E501
"""data_context_connectivity_context_put
creates or updates tapi.connectivity.ConnectivityContext # noqa: E501
:param tapi_connectivity_connectivity_context: tapi.connectivity.ConnectivityContext to be added or updated
:type tapi_connectivity_connectivity_context: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
tapi_connectivity_connectivity_context = TapiConnectivityConnectivityContext.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_aggregated_connection_end_pointtopology_uuidaggregated_connection_end_point_node_uuidnode_edge_point_uuidaggregated_connection_end_point_connection_end_point_uuid_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid, topology_uuid, aggregated_connection_end_point_node_uuid, node_edge_point_uuid, aggregated_connection_end_point_connection_end_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_aggregated_connection_end_pointtopology_uuidaggregated_connection_end_point_node_uuidnode_edge_point_uuidaggregated_connection_end_point_connection_end_point_uuid_get
returns tapi.connectivity.ConnectionEndPointRef # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:param topology_uuid: Id of aggregated-connection-end-point
:type topology_uuid: str
:param aggregated_connection_end_point_node_uuid: Id of aggregated-connection-end-point
:type aggregated_connection_end_point_node_uuid: str
:param node_edge_point_uuid: Id of aggregated-connection-end-point
:type node_edge_point_uuid: str
:param aggregated_connection_end_point_connection_end_point_uuid: Id of aggregated-connection-end-point
:type aggregated_connection_end_point_connection_end_point_uuid: str
:rtype: TapiConnectivityConnectionEndPointRef
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_client_node_edge_pointtopology_uuidclient_node_edge_point_node_uuidnode_edge_point_uuid_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid, topology_uuid, client_node_edge_point_node_uuid, node_edge_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_client_node_edge_pointtopology_uuidclient_node_edge_point_node_uuidnode_edge_point_uuid_get
returns tapi.topology.NodeEdgePointRef # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:param topology_uuid: Id of client-node-edge-point
:type topology_uuid: str
:param client_node_edge_point_node_uuid: Id of client-node-edge-point
:type client_node_edge_point_node_uuid: str
:param node_edge_point_uuid: Id of client-node-edge-point
:type node_edge_point_uuid: str
:rtype: TapiTopologyNodeEdgePointRef
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_get
returns tapi.connectivity.ceplist.ConnectionEndPoint # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiConnectivityCeplistConnectionEndPoint
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_namevalue_name_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid, value_name): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_namevalue_name_get
returns tapi.common.NameAndValue # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:param value_name: Id of name
:type value_name: str
:rtype: TapiCommonNameAndValue
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_parent_node_edge_point_get(uuid, node_uuid, owned_node_edge_point_uuid, connection_end_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_connection_end_pointconnection_end_point_uuid_parent_node_edge_point_get
returns tapi.topology.NodeEdgePointRef # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:param connection_end_point_uuid: Id of connection-end-point
:type connection_end_point_uuid: str
:rtype: TapiTopologyNodeEdgePointRef
"""
return 'do some magic!'
def data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_get(uuid, node_uuid, owned_node_edge_point_uuid): # noqa: E501
"""data_context_topology_context_topologyuuid_nodenode_uuid_owned_node_edge_pointowned_node_edge_point_uuid_cep_list_get
returns tapi.connectivity.context.topologycontext.topology.node.ownednodeedgepoint.CepList # noqa: E501
:param uuid: Id of topology
:type uuid: str
:param node_uuid: Id of node
:type node_uuid: str
:param owned_node_edge_point_uuid: Id of owned-node-edge-point
:type owned_node_edge_point_uuid: str
:rtype: TapiConnectivityContextTopologycontextTopologyNodeOwnednodeedgepointCepList
"""
return 'do some magic!'
def operations_create_connectivity_service_post(inline_object1=None): # noqa: E501
"""operations_create_connectivity_service_post
# noqa: E501
:param inline_object1:
:type inline_object1: dict | bytes
:rtype: TapiConnectivityCreateConnectivityService
"""
if connexion.request.is_json:
inline_object1 = InlineObject1.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def operations_delete_connectivity_service_post(inline_object6=None): # noqa: E501
"""operations_delete_connectivity_service_post
# noqa: E501
:param inline_object6:
:type inline_object6: dict | bytes
:rtype: None
"""
if connexion.request.is_json:
inline_object6 = InlineObject6.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
def operations_get_connection_details_post(inline_object12=None): # noqa: E501
"""operations_get_connection_details_post
# noqa: E501
:param inline_object12:
:type inline_object12: dict | bytes
:rtype: TapiConnectivityGetConnectionDetails
"""
if connexion.request.is_json:
inline_object12 = InlineObject12.from_dict(connexion.request.get_json()) # noqa: E501
return TapiConnectivityGetConnectionDetails(TapiConnectivityGetconnectiondetailsOutput(
database.connection(inline_object12.input.connection_id_or_name)))
def operations_get_connection_end_point_details_post(inline_object13=None): # noqa: E501
"""operations_get_connection_end_point_details_post
# noqa: E501
:param inline_object13:
:type inline_object13: dict | bytes
:rtype: TapiConnectivityGetConnectionEndPointDetails
"""
if connexion.request.is_json:
inline_object13 = InlineObject13.from_dict(connexion.request.get_json()) # noqa: E501
return TapiConnectivityGetConnectionEndPointDetails(TapiConnectivityGetconnectionendpointdetailsOutput(
database.connection_end_point(inline_object13.input.topology_id_or_name,
inline_object13.input.node_id_or_name,
inline_object13.input.nep_id_or_name,
inline_object13.input.cep_id_or_name)))
def operations_get_connectivity_service_details_post(inline_object14=None): # noqa: E501
"""operations_get_connectivity_service_details_post
# noqa: E501
:param inline_object14:
:type inline_object14: dict | bytes
:rtype: TapiConnectivityGetConnectivityServiceDetails
"""
if connexion.request.is_json:
inline_object14 = InlineObject14.from_dict(connexion.request.get_json()) # noqa: E501
return TapiConnectivityGetConnectivityServiceDetails(TapiConnectivityGetconnectivityservicedetailsOutput(
database.connectivity_service(inline_object14.input.service_id_or_name)))
def operations_get_connectivity_service_list_post(): # noqa: E501
"""operations_get_connectivity_service_list_post
# noqa: E501
:rtype: TapiConnectivityGetConnectivityServiceList
"""
return TapiConnectivityGetConnectivityServiceList(TapiConnectivityGetconnectivityservicelistOutput(
database.connectivity_service_list()))
def operations_update_connectivity_service_post(inline_object27=None): # noqa: E501
"""operations_update_connectivity_service_post
# noqa: E501
:param inline_object27:
:type inline_object27: dict | bytes
:rtype: TapiConnectivityUpdateConnectivityService
"""
if connexion.request.is_json:
inline_object27 = InlineObject27.from_dict(connexion.request.get_json()) # noqa: E501
return 'do some magic!'
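# --- Illustrative sketch (not part of the generated stubs) ------------------
# The data_* handlers above are connexion-generated placeholders. A minimal,
# hypothetical way to flesh one out is to reuse the `database` helper that the
# operations_* handlers below already call; whether database.connectivity_service()
# accepts a bare uuid like this is an assumption made purely for illustration.
#
# def data_context_connectivity_context_connectivity_serviceuuid_get(uuid):  # noqa: E501
#     # Look up the connectivity service by uuid; connexion serializes the
#     # returned model object into the RESTCONF JSON response.
#     return database.connectivity_service(uuid)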
|
176082
|
from enum import IntEnum
import struct
from .parser import MsftBandParser
from datetime import datetime
from .filetimes import datetime_to_filetime
class NotificationTypes(IntEnum):
"""Complete list of all Notification types"""
SMS = 1
Email = 2
IncomingCall = 11
AnsweredCall = 12
MissedCall = 13
HangupCall = 14
Voicemail = 15
CalendarEventAdd = 16
CalendarClear = 17
Messaging = 18
GenericDialog = 100
GenericUpdate = 101
GenericClearTile = 102
GenericClearPage = 103
class Notification:
guid = None
notification_type = NotificationTypes.GenericDialog
def serialize(self):
return struct.pack("<H", self.notification_type) + self.guid.bytes_le
class MessagingNotification(Notification):
notification_type = NotificationTypes.Messaging
datetime = None
def __init__(self, guid, title='', text=''):
self.guid = guid
self.title = title[:20]
self.text = text[:20]
self.datetime = datetime.now()
def serialize(self):
packet = super().serialize()
packet += struct.pack("H", len(self.title) * 2)
packet += struct.pack("H", len(self.text) * 2)
packet += struct.pack("<Qxx", datetime_to_filetime(self.datetime))
packet += MsftBandParser.serialize_text(self.title + self.text)
return packet
class GenericClearTileNotification(Notification):
notification_type = NotificationTypes.GenericClearTile
def __init__(self, guid):
self.guid = guid
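# Minimal usage sketch (assumption: any uuid.UUID serves as the tile guid here;
# building the full Bluetooth packet and pushing it to the Band is out of scope).
if __name__ == "__main__":
    import uuid
    note = MessagingNotification(uuid.uuid4(), title="Alice", text="Hello!")
    packet = note.serialize()
    # The payload starts with the little-endian notification type (18 = Messaging)
    # followed by the 16-byte guid, the title/text byte lengths and a FILETIME.
    print(len(packet), packet[:2])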
|
176089
|
class Solution:
    def rob(self, num):
        """House Robber: maximum total of non-adjacent values in num.
        ls[-1] = [best if previous house skipped, best if previous house robbed].
        """
        ls = [[0, 0]]
        for e in num:
            # Skip the current house, or rob it if the previous one was skipped.
            ls.append([max(ls[-1][0], ls[-1][1]), ls[-1][0] + e])
return max(ls[-1])
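# Quick sanity check of the recurrence (rob houses 2, 9 and 1 for a total of 12):
if __name__ == "__main__":
    assert Solution().rob([2, 7, 9, 3, 1]) == 12
    assert Solution().rob([]) == 0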
|
176099
|
import torch.nn as nn
def conv_relu(in_channels, out_channels, kernel_size=3, stride=1,
padding=1, bias=True):
return [
nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
stride=stride, padding=padding, bias=bias),
nn.ReLU(inplace=True),
]
def conv_bn_relu(in_channels, out_channels, kernel_size=3, stride=1,
padding=1, bias=False):
return [
nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
stride=stride, padding=padding, bias=bias),
nn.BatchNorm2d(out_channels),
nn.ReLU(inplace=True),
]
def linear_bn_relu_drop(in_channels, out_channels, dropout=0.5, bias=False):
layers = [
nn.Linear(in_channels, out_channels, bias=bias),
nn.BatchNorm1d(out_channels),
nn.ReLU(inplace=True)
]
if dropout > 0:
layers.append(nn.Dropout(dropout))
return layers
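# Example (a minimal sketch): the helpers return plain lists of layers, so a
# network is assembled by unpacking them into nn.Sequential.
def example_classifier(num_classes=10):
    return nn.Sequential(
        *conv_bn_relu(3, 32),            # 3-channel input -> 32 feature maps
        *conv_bn_relu(32, 64, stride=2),
        nn.AdaptiveAvgPool2d(1),
        nn.Flatten(),
        *linear_bn_relu_drop(64, 128, dropout=0.5),
        nn.Linear(128, num_classes),
    )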
|
176171
|
from pathlib import Path
from typing import Tuple
import re
from itertools import groupby
def find_suggestion_for_return(suggestions):
    for s in suggestions:
        if s.symbol_kind == "class-or-function":
            return s
    return None
def annotate_line(line, suggestions):
para_suggestions = sorted(
(s for s in suggestions if s.symbol_kind == "parameter"), key=lambda x: x.file_location[1]
)
annotated_line = annotate_parameters(line, para_suggestions)
ret_suggestion = find_suggestion_for_return(suggestions)
if ret_suggestion is not None:
annotated_line = annotate_return(annotated_line, ret_suggestion)
return annotated_line
def insert_at(original, inserted, idx):
return original[:idx] + inserted + original[idx:]
def annotate_parameters(line, suggestions):
"""
Annotate the parameters of a function on a particular line
"""
annotated_line = " " + line
length_increase = 0
for s in suggestions:
assert line[s.file_location[1] :].startswith(s.name)
insertion_position = s.file_location[1] + len(s.name) + 1 + length_increase
annotated_line = insert_at(annotated_line, f": {s.suggestion}", insertion_position)
length_increase += len(s.suggestion) + 2
return annotated_line
def annotate_return(line, suggestion):
"""
Annotate the return of a function
"""
assert line.rstrip().endswith(":")
return line.rstrip()[:-1] + f" -> {suggestion.suggestion}" + ":\n"
def find_annotation_line(filepath, location, func_name):
with open(filepath) as f:
lines = f.readlines()
assert func_name in lines[location[0] - 1]
# Assume that the function's return is *not* already annotated.
func_def_end = re.compile(r"\)\s*:$")
annotation_lineno = location[0]
while annotation_lineno <= len(lines):
if func_def_end.search(lines[annotation_lineno - 1].rstrip()) is not None:
break
annotation_lineno += 1
else:
raise Exception("Cannot find the closing brace for the parameter list.")
return annotation_lineno
def group_suggestions(suggestions):
def key(s):
return s.filepath + str(s.annotation_lineno)
sorted_suggestions = sorted(suggestions, key=key)
return [list(it) for k, it in groupby(sorted_suggestions, key)]
ALIASES = {"typing.Text": "str"}
def annotation_rewrite(annotation: str) -> str:
for k, v in ALIASES.items():
annotation = annotation.replace(k, v)
return annotation
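# Worked example with a hypothetical Suggestion container; the real suggestion
# objects only need the attributes used above (symbol_kind, file_location,
# name, suggestion).
if __name__ == "__main__":
    from collections import namedtuple
    Suggestion = namedtuple("Suggestion", "symbol_kind file_location name suggestion")
    line = "def add(x, y):\n"
    suggestions = [
        Suggestion("parameter", (1, 8), "x", "int"),
        Suggestion("parameter", (1, 11), "y", "int"),
        Suggestion("class-or-function", (1, 4), "add", "int"),
    ]
    # annotate_line inserts ": int" after each parameter and " -> int" before
    # the colon (annotate_parameters also prefixes one space to the line).
    print(annotate_line(line, suggestions))  # " def add(x: int, y: int) -> int:"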
|
176274
|
import torch
import torchvision.transforms as T
import numpy as np
import cv2
from PIL import Image
class DictBatch(object):
def __init__(self, data):
"""
:param data: list of Dict of Tensors.
"""
self.keys = list(data[0].keys())
values = list(zip(*[list(d.values()) for d in data]))
for idx, key in enumerate(self.keys):
setattr(self, key, torch.cat(values[idx], dim=0))
def pin_memory(self):
for key in self.keys:
attr = getattr(self, key).pin_memory()
setattr(self, key, attr)
return self
def cuda(self):
for key in self.keys:
attr = getattr(self, key).cuda()
setattr(self, key, attr)
return self
    # NOTE: the list attribute `self.keys` set in __init__ shadows any dict-like
    # keys() method on instances, so read the key list as `batch.keys` (no call).
def __getitem__(self, key):
"""
:param key: str
"""
return getattr(self, key, None)
def collate_dict_wrapper(batch):
return DictBatch(batch)
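# Example (sketch): two per-sample dicts, each with a leading batch dim of 1,
# collate into a single DictBatch whose tensors are concatenated along dim 0:
#   batch = collate_dict_wrapper([
#       {"image": torch.zeros(1, 3, 8, 8), "label": torch.tensor([0])},
#       {"image": torch.ones(1, 3, 8, 8), "label": torch.tensor([1])},
#   ])
#   batch["image"].shape  # -> torch.Size([2, 3, 8, 8])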
class ResizeNormalize(object):
def __init__(self, size, use_cuda=False, normalize_rgb_values=False, toPIL=False, rgb_scaler=1.0):
'''
Used to resize, normalize and convert raw pixel observations.
:param x: Numpy array to be processed
:param size: int or tuple, (height,width) size
:param use_cuda: Boolean to determine whether to create Cuda Tensor
:param normalize_rgb_values: Maps the 0-255 values of rgb colours
to interval (0-1)
'''
if isinstance(size, int): size = (size,size)
ts = []
if toPIL: ts.append(T.ToPILImage())
ts.append(T.Resize(size=size))
ts.append(T.ToTensor())
self.scaling_operation = T.Compose(ts)
self.normalize_rgb_values = normalize_rgb_values
self.rgb_scaler = rgb_scaler
self.use_cuda = use_cuda
def __call__(self, x):
x = self.scaling_operation(x)
# WATCHOUT: it is necessary to cast the tensor into float before doing
# the division, otherwise the result is yielded as a uint8 (full of zeros...)
x = x.type(torch.FloatTensor)
x = x / 255. if self.normalize_rgb_values else x
x *= self.rgb_scaler
if self.use_cuda:
return x.cuda()
return x
def __repr__(self):
return self.__class__.__name__ + '()'
class AddEgocentricInvariance(object):
def __init__(self, marker_demisize=2):
'''
Add a central marker to enable egocentric invariance.
:param marker_demisize: Int, half the size of the marker.
'''
self.marker_demisize = marker_demisize
def __call__(self, x):
x = np.array(x)
dim = x.shape[-2]
xmax = x.max()
marker_colour = 0 if x.mean() > 127 else xmax
start = int(dim//2-self.marker_demisize)
end = int(dim//2+self.marker_demisize)
x[start:end, :, ...] = marker_colour
x[:,start:end, ...] = marker_colour
x = Image.fromarray(x.astype('uint8'))
return x
def __repr__(self):
return self.__class__.__name__ + '()'
class Rescale(object) :
def __init__(self, output_size) :
assert( isinstance(output_size, (int, tuple) ) )
self.output_size = output_size
    def __call__(self, sample):
        image = sample
        if isinstance(self.output_size, int):
            new_h = new_w = self.output_size
        else:
            new_h, new_w = self.output_size
        # cv2.resize expects the target size as (width, height)
        return cv2.resize(image, (new_w, new_h))
class RescaleNormalize(object):
def __init__(self, size, use_cuda=False, normalize_rgb_values=False):
'''
Used to resize, normalize and convert raw pixel observations.
:param x: Numpy array to be processed
:param size: int or tuple, (height,width) size
:param use_cuda: Boolean to determine whether to create Cuda Tensor
:param normalize_rgb_values: Maps the 0-255 values of rgb colours
to interval (0-1)
'''
if isinstance(size, int): size = (size,size)
ts = []
ts.append(Rescale(output_size=size))
ts.append(T.ToTensor())
self.scaling_operation = T.Compose(ts)
self.normalize_rgb_values = normalize_rgb_values
self.use_cuda = use_cuda
def __call__(self, x):
x = self.scaling_operation(x)
# WATCHOUT: it is necessary to cast the tensor into float before doing
# the division, otherwise the result is yielded as a uint8 (full of zeros...)
x = x.type(torch.FloatTensor)
x = x / 255. if self.normalize_rgb_values else x
if self.use_cuda:
return x.cuda()
return x
def __repr__(self):
return self.__class__.__name__ + '()'
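# Usage sketch: turn a raw HWC uint8 frame into a (3, 64, 64) float tensor.
# normalize_rgb_values is left False because T.ToTensor already maps uint8 PIL
# images to the [0, 1] range.
if __name__ == "__main__":
    frame = np.random.randint(0, 256, size=(84, 84, 3), dtype=np.uint8)
    preprocess = ResizeNormalize(size=64, toPIL=True, normalize_rgb_values=False)
    print(preprocess(frame).shape)  # torch.Size([3, 64, 64])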
|
176322
|
from netaddr import IPAddress
def test_reverse_dns_v4():
    assert IPAddress('172.24.0.13').reverse_dns == '13.0.24.172.in-addr.arpa.'
def test_reverse_dns_v6():
assert IPAddress('fe80::feeb:daed').reverse_dns == ('d.e.a.d.b.e.e.f.0.0.0.0.0.0.0.0.'
'0.0.0.0.0.0.0.0.0.0.0.0.0.8.e.f.'
'ip6.arpa.')
|
176326
|
def main():
# equilibrate then isolate cold finger
info('Equilibrate then isolate coldfinger')
close(name="C", description="Bone to Turbo")
sleep(1)
open(name="B", description="Bone to Diode Laser")
sleep(20)
close(name="B", description="Bone to Diode Laser")
|
176370
|
import vcs
import sys
import os
# This test checks that png size is indeed controled by user
import struct
def get_image_info(fnm):
data = open(fnm,"rb").read()
w, h = struct.unpack('>LL', data[16:24])
width = int(w)
height = int(h)
return width, height
x=vcs.init()
x.drawlogooff()
x.setantialiasing(0)
x.plot([1,2,3,4,5,4,3,2,1],bg=1)
fnm = "test_png_set_size.png"
x.png(fnm,width=15)
print(get_image_info(fnm))
assert(get_image_info(fnm) == (15,11))
x.png(fnm,height=16)
print(get_image_info(fnm))
assert(get_image_info(fnm) == (20,16))
x.png(fnm,width=15,height=12)
print(get_image_info(fnm))
assert(get_image_info(fnm) == (15,12))
os.remove(fnm)
|
176374
|
import nose.tools as nt
import numpy as np
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
from treeano.sandbox.nodes import resnet
fX = theano.config.floatX
def test_zero_last_axis_partition_node():
network = tn.SequentialNode(
"s",
[tn.InputNode("i", shape=(None,)),
resnet._ZeroLastAxisPartitionNode("z", zero_ratio=0.5, axis=0)]
).network()
fn = network.function(["i"], ["s"])
x = np.arange(10).astype(fX)
ans = x.copy()
ans[5:] = 0
np.testing.assert_allclose(ans, fn(x)[0])
|
176408
|
import os
from dataloaders.visual_genome import VG, vg_collate
from lib.utils import define_model, load_ckpt, do_test
from config import cfg
from torch.utils.data import DataLoader
test_data = VG(cfg.test_data_name, num_val_im=5000, filter_duplicate_rels=True,
use_proposals=cfg.use_proposals, filter_non_overlap=cfg.mode == 'sgdet',
num_im=cfg.num_im)
test_loader = DataLoader(
dataset=test_data,
batch_size=cfg.num_gpus,
shuffle=False,
num_workers=cfg.num_workers,
    collate_fn=lambda x: vg_collate(x, mode='rel', num_gpus=cfg.num_gpus, is_train=(cfg.test_data_name == 'train')),
drop_last=True,
pin_memory=True,
)
if cfg.cache is not None and os.path.exists(cfg.cache):
# No need to load model
detector = None
else:
detector = define_model(cfg, test_data.ind_to_classes, test_data.ind_to_predicates)
load_ckpt(detector, cfg.ckpt)
detector.cuda()
do_test(detector, test_data, test_loader)
|
176423
|
from abc import ABC
from abc import abstractmethod
class GAN(ABC):
@property
@abstractmethod
def generators(self):
raise NotImplementedError
@property
@abstractmethod
def discriminators(self):
raise NotImplementedError
@abstractmethod
def predict(self, inputs):
raise NotImplementedError
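# Hypothetical concrete subclass, sketching how the abstract interface above is
# meant to be satisfied; the generator/discriminator callables are placeholders.
class IdentityGAN(GAN):
    def __init__(self, generator, discriminator):
        self._generators = [generator]
        self._discriminators = [discriminator]
    @property
    def generators(self):
        return self._generators
    @property
    def discriminators(self):
        return self._discriminators
    def predict(self, inputs):
        # Run the (single) generator on the inputs.
        return self._generators[0](inputs)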
|
176429
|
import numpy as np
import pytest # noqa: F401
from pandas_datareader._utils import RemoteDataError
from epymetheus.datasets import fetch_usstocks
# --------------------------------------------------------------------------------
def test_toomanyasset():
"""
Test if fetch_usstocks raises ValueError
    when n_assets is too large.
"""
with pytest.raises(ValueError):
fetch_usstocks(n_assets=1000)
def test_usstocks():
try:
universe = fetch_usstocks(n_assets=2)
assert not np.isnan(universe.values).any(axis=None)
except RemoteDataError as e:
print("Skip", e)
|
176440
|
from franka_interface_msgs.msg import SensorData, SensorDataGroup
def sensor_proto2ros_msg(sensor_proto_msg, sensor_data_type, info=''):
sensor_ros_msg = SensorData()
sensor_ros_msg.type = sensor_data_type
sensor_ros_msg.info = info
sensor_data_bytes = sensor_proto_msg.SerializeToString()
sensor_ros_msg.size = len(sensor_data_bytes)
sensor_ros_msg.sensorData = sensor_data_bytes
return sensor_ros_msg
def make_sensor_group_msg(trajectory_generator_sensor_msg=None, feedback_controller_sensor_msg=None, termination_handler_sensor_msg=None):
sensor_group_msg = SensorDataGroup()
if trajectory_generator_sensor_msg is not None:
sensor_group_msg.has_trajectory_generator_sensor_data = True
sensor_group_msg.trajectoryGeneratorSensorData = trajectory_generator_sensor_msg
if feedback_controller_sensor_msg is not None:
sensor_group_msg.has_feedback_controller_sensor_data = True
sensor_group_msg.feedbackControllerSensorData = feedback_controller_sensor_msg
if termination_handler_sensor_msg is not None:
sensor_group_msg.has_termination_handler_sensor_data = True
sensor_group_msg.terminationHandlerSensorData = termination_handler_sensor_msg
return sensor_group_msg
|
176457
|
import re
def get_pars(text):
pars = re.findall(r'\[[0-9]+\]\n*(.*)', text)
pars = [p.strip() for p in pars]
return pars
def get_claims(text):
claims = re.findall(r'.*claim.*', text, re.IGNORECASE)
return claims
def get_claim_body(text):
match = re.search(r'^claim[s*]$', text, re.IGNORECASE | re.MULTILINE)
try:
claim = match.string[match.span()[0]:]
claim = claim.replace('*', '')
return claim
    except AttributeError:
return 'none'
if __name__ == '__main__':
import sys
text = sys.stdin.read()
for p in get_pars(text):
        print('PARAGRAPH')
        print(p)
    for p in get_claims(text):
        print('CLAIMS')
        print(p)
    print('---------')
    print(get_claim_body(text))
|
176487
|
from petisco.base.domain.message.message import Message
from petisco.base.domain.message.message_bus import MessageBus
class NotImplementedMessageBus(MessageBus):
def publish(self, message: Message):
self._check_is_message(message)
meta = self.get_configured_meta()
_ = message.update_meta(meta)
def retry_publish_only_on_store_queue(self, message: Message):
self._check_is_message(message)
meta = self.get_configured_meta()
_ = message.update_meta(meta)
def close(self):
pass
|
176490
|
import asyncio
import time
from telegram import main
import json
from sql_util import get_phone, change_status
from message_util import get_phone_test, cancel_all_recv
from config import message_token
from check_util import check_main
from myLogger import log_main
def run(phone, category):
loop = asyncio.get_event_loop()
loop.run_until_complete(main(phone, category))
loop.close()
def loop_for_next_try():
n = 0
while True:
n += 1
        print('------ Attempt ' + str(n) + ' ------')
        log_main.logger.info('------ Attempt ' + str(n) + ' ------')
time.sleep(5)
        # Manually enter the phone number
        # phone = input("Please enter the phone number\n")
        # Get the phone number from the database
        category = 1
        # phone, phone_id = get_phone(category)
        # Get the phone number from the SMS-code platform
        print('Getting phone number from the SMS-code platform...')
try:
cancel_all_recv(message_token)
status, phone = get_phone_test(message_token)
except Exception as e:
print(e)
continue
if status != '1':
            print("Failed to get a phone number...")
            log_main.logger.error(str(phone) + " failed to get a phone number...")
continue
try:
            # First check whether this phone number has already been registered
            # if not check_main('8617121240943', phone):
            # print('This phone number is not registered')
# change_status(phone_id, status=404)
# else:
run('86' + phone, category)
# change_status(phone_id, status=1)
except RuntimeError as e:
print(e)
log_main.logger.error(str(phone) + e.__str__())
# error = json.loads(e.__str__())
# error_code = error["error_code"]
# msg = error["msg"]
# print(msg)
            # Update the status in the database
# change_status(phone_id, status=error_code)
except Exception as e:
print(e)
log_main.logger.error(str(phone) + e.__str__())
if __name__ == '__main__':
loop_for_next_try()
|
176514
|
import numpy as np
import torch
import torch.nn as nn
from habitat_baselines.common.utils import Flatten
from habitat_baselines.rl.models.simple_cnn import SimpleCNN
class Contiguous(nn.Module):
r"""Converts a tensor to be stored contiguously if it is not already so.
"""
def __init__(self):
super(Contiguous, self).__init__()
def forward(self, x):
return x.contiguous()
class SimpleAllCNN(SimpleCNN):
r"""A Simple 3-Conv CNN followed by a fully connected layer. Takes in
observations and produces an embedding of the rgb and/or depth components
if they are present in the provided observations.
Args:
observation_space: The observation_space of the agent
output_size: The size of the embedding vector
"""
def __init__(self, observation_space, output_size):
nn.Module.__init__(self)
if "rgb" in observation_space.spaces:
self._n_input_rgb = observation_space.spaces["rgb"].shape[2]
else:
self._n_input_rgb = 0
if "depth" in observation_space.spaces:
self._n_input_depth = observation_space.spaces["depth"].shape[2]
else:
self._n_input_depth = 0
cnn_dims = None
if self._n_input_rgb > 0:
cnn_dims = np.array(
observation_space.spaces["rgb"].shape[:2], dtype=np.float32
)
elif self._n_input_depth > 0:
cnn_dims = np.array(
observation_space.spaces["depth"].shape[:2], dtype=np.float32
)
self._init_model(cnn_dims, output_size)
def _init_model(self, cnn_dims, output_size):
r"""cnn_dims: initial cnn dimensions.
"""
if self.is_blind:
self.cnn = nn.Sequential()
return
# kernel size for different CNN layers
self._cnn_layers_kernel_size = [(8, 8), (4, 4), (3, 3)]
# strides for different CNN layers
self._cnn_layers_stride = [(4, 4), (2, 2), (1, 1)]
for kernel_size, stride in zip(
self._cnn_layers_kernel_size, self._cnn_layers_stride
):
cnn_dims = self._conv_output_dim(
dimension=cnn_dims,
padding=np.array([0, 0], dtype=np.float32),
dilation=np.array([1, 1], dtype=np.float32),
kernel_size=np.array(kernel_size, dtype=np.float32),
stride=np.array(stride, dtype=np.float32),
)
self.cnn = nn.Sequential(
nn.Conv2d(
in_channels=self._n_input_rgb + self._n_input_depth,
out_channels=32,
kernel_size=self._cnn_layers_kernel_size[0],
stride=self._cnn_layers_stride[0],
),
nn.ReLU(True),
nn.Conv2d(
in_channels=32,
out_channels=64,
kernel_size=self._cnn_layers_kernel_size[1],
stride=self._cnn_layers_stride[1],
),
nn.ReLU(True),
nn.Conv2d(
in_channels=64,
out_channels=32,
kernel_size=self._cnn_layers_kernel_size[2],
stride=self._cnn_layers_stride[2],
),
Contiguous(),
Flatten(),
nn.Linear(32 * cnn_dims[0] * cnn_dims[1], output_size),
nn.ReLU(True),
)
self.layer_init()
class SimpleDepthCNN(SimpleAllCNN):
r""" SimpleAllCNN where the only allowed input is a depth observation
regardless of what other observation modalities are provided.
"""
def __init__(self, observation_space, output_size):
nn.Module.__init__(self)
assert (
"depth" in observation_space.spaces
), "Depth input required to use SimpleDepthCNN"
self._n_input_depth = observation_space.spaces["depth"].shape[2]
self._n_input_rgb = 0
cnn_dims = np.array(
observation_space.spaces["depth"].shape[:2], dtype=np.float32
)
self._init_model(cnn_dims, output_size)
def forward(self, observations):
# permute tensor to dimension [BATCH x CHANNEL x HEIGHT X WIDTH]
depth_observations = observations["depth"].permute(0, 3, 1, 2)
return self.cnn(depth_observations)
class SimpleRGBCNN(SimpleAllCNN):
r""" SimpleAllCNN where the only allowed input is an RGB observation
regardless of what other observation modalities are provided.
"""
def __init__(self, observation_space, output_size):
nn.Module.__init__(self)
assert (
"rgb" in observation_space.spaces
), "RGB input required to use SimpleRGBCNN"
self._n_input_depth = 0
self._n_input_rgb = observation_space.spaces["rgb"].shape[2]
cnn_dims = np.array(observation_space.spaces["rgb"].shape[:2], dtype=np.float32)
self._init_model(cnn_dims, output_size)
def forward(self, observations):
# permute tensor to dimension [BATCH x CHANNEL x HEIGHT X WIDTH]
rgb_observations = observations["rgb"].permute(0, 3, 1, 2)
rgb_observations = rgb_observations / 255.0 # normalize RGB
return self.cnn(rgb_observations)
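# A minimal smoke-test sketch (hypothetical; assumes gym is available and that the
# habitat_baselines SimpleCNN base class provides layer_init/_conv_output_dim as usual).
if __name__ == "__main__":
    import gym
    obs_space = gym.spaces.Dict(
        {"rgb": gym.spaces.Box(low=0, high=255, shape=(128, 128, 3), dtype=np.uint8)}
    )
    cnn = SimpleRGBCNN(obs_space, output_size=512)
    dummy_obs = {"rgb": torch.zeros(1, 128, 128, 3)}
    print(cnn(dummy_obs).shape)  # expected: torch.Size([1, 512])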
|
176527
|
import time
import cv2
import pyscreenshot as ImageGrab
import numpy as np
class Screenshot(object):
def get_frame(self):
img = np.array(ImageGrab.grab().convert('RGB'), dtype=np.uint8)
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
ret2, jpeg = cv2.imencode('.jpg', img)
        return jpeg.tobytes()
    def __del__(self):
        # Nothing opens self.cam in this class; guard the release accordingly.
        if getattr(self, 'cam', None) is not None:
            self.cam.release()
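# A minimal usage sketch (hypothetical): grab a single frame and write the encoded
# JPEG bytes to disk. Requires a running display for pyscreenshot to capture.
if __name__ == '__main__':
    with open('frame.jpg', 'wb') as f:
        f.write(Screenshot().get_frame())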
|
176576
|
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import GraphConv
class DominantModel(nn.Module):
def __init__(self, A_norm, dim_in, dim_out=16):
super(DominantModel, self).__init__()
self.gcn1 = GraphConv.GCN(A_norm, dim_in, 64)
self.gcn2 = GraphConv.GCN(A_norm, 64, 32)
self.gcn3 = GraphConv.GCN(A_norm, 32, dim_out)
self.deconv = GraphConv.GCN(A_norm, dim_out, dim_in)
def forward(self, X):
X = self.gcn1(X)
X = self.gcn2(X)
Z = self.gcn3(X)
return torch.sigmoid(Z.mm(Z.transpose(0, 1))), self.deconv(Z)
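# A minimal smoke-test sketch (hypothetical; assumes GraphConv.GCN accepts a dense
# normalized adjacency matrix and maps dim_in -> dim_out node features).
if __name__ == '__main__':
    n_nodes, dim_in = 5, 8
    A_norm = torch.eye(n_nodes)  # identity adjacency, purely for illustration
    model = DominantModel(A_norm, dim_in)
    A_hat, X_hat = model(torch.randn(n_nodes, dim_in))
    print(A_hat.shape, X_hat.shape)  # expected: (5, 5) adjacency reconstruction and (5, 8) features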
|
176588
|
from qgis.core import *
from osgeo import gdal
import math
import numpy as np
import os
import sys
MARGIN = 0.01
def weightedFunction(x, y, x0, y0, weight):
    # the current weighted function is simply sqrt((x - x0)^2 + (y - y0)^2) / weight
return math.sqrt((x - x0) ** 2 + (y - y0) ** 2) / weight
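# A small hedged sanity check of the weighting above: doubling a site's weight
# halves its effective distance, which is what makes this a multiplicatively
# weighted Voronoi partition.
assert weightedFunction(3, 4, 0, 0, 1) == 5.0
assert weightedFunction(3, 4, 0, 0, 2) == 2.5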
#Get the points vector layer
pointsVector = QgsVectorLayer(sys.argv[1], 'points', 'ogr')
#Add the vector layer to the map layer registry
QgsProject.instance().addMapLayer(pointsVector)
#get layer extents with a small margin to avoid ignoring points on bounding box's limit
bounding_box = pointsVector.extent()
extent_args = "-te " + str(bounding_box.xMinimum() - MARGIN) \
+ " " + str(bounding_box.yMinimum() - MARGIN) \
+ " " + str(bounding_box.xMaximum() + MARGIN) \
+ " " + str(bounding_box.yMaximum() + MARGIN)
os.system('gdal_rasterize -a z -ts 1000 1000 ' + extent_args + ' -l points "' + sys.argv[1] + '" "./rasterPoints"')
rasterPoints=QgsRasterLayer('./rasterPoints', 'rasterPoints')
QgsProject.instance().addMapLayer(rasterPoints)
dataset = gdal.Open('./rasterPoints')
numpy_array = dataset.ReadAsArray()
height, width = numpy_array.shape
points = []
#get all the weighted points from the raster
print("get the points with their weights from raster")
for row in range(height):
for col in range(width):
if(numpy_array[row, col] != 0):
print(str(numpy_array[row, col]) + " at point : " + str(row) + " , " + str(col))
points.append([row, col, numpy_array[row,col]])
print("compute the weighted distance grid for each point")
distanceGrid = np.zeros(shape = (height, width))
for row in range(height):
for col in range(width):
index = 0
min_distance = weightedFunction(row, col, points[0][0], points[0][1], points[0][2])
for i in range(1, (len(points))):
weightedDistance = weightedFunction(row, col, points[i][0], points[i][1], points[i][2])
if(weightedDistance < min_distance):
min_distance = weightedDistance
index = i
distanceGrid[row, col] = index
#save the distance grid as an output raster
#output file name ( path to where to save the raster file )
print("save distance grid as raster GTiff")
outFileName = './rasterVoronoi.tiff'
#call the driver for the chosen format from GDAL
driver = gdal.GetDriverByName('GTiff')
#Create the file with dimensions of the input raster ( rasterized points )
output = driver.Create(outFileName, width, height, 1, gdal.GDT_Byte)
#set the Raster transformation of the resulting raster
output.SetGeoTransform(dataset.GetGeoTransform())
#set the projection of the resulting raster
output.SetProjection(dataset.GetProjection())
#insert data to the resulting raster in band 1 from the weighted distance grid
output.GetRasterBand(1).WriteArray(distanceGrid)
#Call the raster output file
rasterVoronoi = QgsRasterLayer('./rasterVoronoi.tiff', 'weighted Raster')
#Add it to the map layer registry ( display it on the map)
QgsProject.instance().addMapLayer(rasterVoronoi)
#polygonize the result raster
print("convert raster to shapefile")
os.system('gdal_polygonize.bat ./rasterVoronoi.tiff ./WeightedVoronoi.shp -b 1 -f "ESRI Shapefile" weighted')
weightedVoronoiVector = QgsVectorLayer('./WeightedVoronoi.shp', 'weighted voronoi', 'ogr')
#load the vector weighted voronoi diagram
QgsProject.instance().addMapLayer(weightedVoronoiVector)
print("End of script")
|
176606
|
import warnings
from math import ceil
import numpy as np
import openmdao.api as om
from wisdem.landbosse.model.Manager import Manager
from wisdem.landbosse.model.DefaultMasterInputDict import DefaultMasterInputDict
from wisdem.landbosse.landbosse_omdao.OpenMDAODataframeCache import OpenMDAODataframeCache
from wisdem.landbosse.landbosse_omdao.WeatherWindowCSVReader import read_weather_window
with warnings.catch_warnings():
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
import pandas as pd
use_default_component_data = -1.0
class LandBOSSE(om.Group):
def setup(self):
# Add a tower section height variable. The default value of 30 m is for transportable tower sections.
self.set_input_defaults("tower_section_length_m", 30.0, units="m")
self.set_input_defaults("blade_drag_coefficient", use_default_component_data) # Unitless
self.set_input_defaults("blade_lever_arm", use_default_component_data, units="m")
self.set_input_defaults("blade_install_cycle_time", use_default_component_data, units="h")
self.set_input_defaults("blade_offload_hook_height", use_default_component_data, units="m")
self.set_input_defaults("blade_offload_cycle_time", use_default_component_data, units="h")
self.set_input_defaults("blade_drag_multiplier", use_default_component_data) # Unitless
self.set_input_defaults("turbine_spacing_rotor_diameters", 4)
self.set_input_defaults("row_spacing_rotor_diameters", 10)
self.set_input_defaults("commissioning_pct", 0.01)
self.set_input_defaults("decommissioning_pct", 0.15)
self.set_input_defaults("trench_len_to_substation_km", 50.0, units="km")
self.set_input_defaults("interconnect_voltage_kV", 130.0, units="kV")
self.set_input_defaults("foundation_height", 0.0, units="m")
self.set_input_defaults("blade_mass", 8000.0, units="kg")
self.set_input_defaults("hub_mass", 15.4e3, units="kg")
self.set_input_defaults("nacelle_mass", 50e3, units="kg")
self.set_input_defaults("tower_mass", 240e3, units="kg")
self.set_input_defaults("turbine_rating_MW", 1500.0, units="kW")
self.add_subsystem("landbosse", LandBOSSE_API(), promotes=["*"])
class LandBOSSE_API(om.ExplicitComponent):
def setup(self):
# Clear the cache
OpenMDAODataframeCache._cache = {}
self.setup_inputs()
self.setup_outputs()
self.setup_discrete_outputs()
self.setup_discrete_inputs_that_are_not_dataframes()
self.setup_discrete_inputs_that_are_dataframes()
def setup_inputs(self):
"""
This method sets up the inputs.
"""
self.add_input("blade_drag_coefficient", use_default_component_data) # Unitless
self.add_input("blade_lever_arm", use_default_component_data, units="m")
self.add_input("blade_install_cycle_time", use_default_component_data, units="h")
self.add_input("blade_offload_hook_height", use_default_component_data, units="m")
self.add_input("blade_offload_cycle_time", use_default_component_data, units="h")
self.add_input("blade_drag_multiplier", use_default_component_data) # Unitless
# Even though LandBOSSE doesn't use foundation height, TowerSE does,
# and foundation height can be used with hub height to calculate
# tower height.
self.add_input("foundation_height", 0.0, units="m")
self.add_input("tower_section_length_m", 30.0, units="m")
self.add_input("nacelle_mass", 0.0, units="kg")
self.add_input("tower_mass", 0.0, units="kg")
# A discrete input below, number_of_blades, gives the number of blades
# on the rotor.
#
# The total mass of the rotor nacelle assembly (RNA) is the following
# sum:
#
# (blade_mass * number_of_blades) + nac_mass + hub_mass
self.add_input("blade_mass", use_default_component_data, units="kg", desc="The mass of one rotor blade.")
self.add_input("hub_mass", use_default_component_data, units="kg", desc="Mass of the rotor hub")
self.add_input(
"crane_breakdown_fraction",
val=0.0,
desc="0 means the crane is never broken down. 1 means it is broken down every turbine.",
)
self.add_input("construct_duration", val=9, desc="Total project construction time (months)")
self.add_input("hub_height_meters", val=80, units="m", desc="Hub height m")
self.add_input("rotor_diameter_m", val=77, units="m", desc="Rotor diameter m")
self.add_input("wind_shear_exponent", val=0.2, desc="Wind shear exponent")
self.add_input("turbine_rating_MW", val=1.5, units="MW", desc="Turbine rating MW")
self.add_input("fuel_cost_usd_per_gal", val=1.5, desc="Fuel cost USD/gal")
self.add_input(
"breakpoint_between_base_and_topping_percent", val=0.8, desc="Breakpoint between base and topping (percent)"
)
# Could not place units in turbine_spacing_rotor_diameters
self.add_input("turbine_spacing_rotor_diameters", desc="Turbine spacing (times rotor diameter)", val=4)
self.add_input("depth", units="m", desc="Foundation depth m", val=2.36)
self.add_input("rated_thrust_N", units="N", desc="Rated Thrust (N)", val=5.89e5)
# Can't set units
self.add_input("bearing_pressure_n_m2", desc="Bearing Pressure (n/m2)", val=191521)
self.add_input("gust_velocity_m_per_s", units="m/s", desc="50-year Gust Velocity (m/s)", val=59.5)
self.add_input("road_length_adder_m", units="m", desc="Road length adder (m)", val=5000)
# Can't set units
self.add_input("fraction_new_roads", desc="Percent of roads that will be constructed (0.0 - 1.0)", val=0.33)
self.add_input("road_quality", desc="Road Quality (0-1)", val=0.6)
self.add_input("line_frequency_hz", units="Hz", desc="Line Frequency (Hz)", val=60)
# Can't set units
self.add_input("row_spacing_rotor_diameters", desc="Row spacing (times rotor diameter)", val=10)
self.add_input(
"trench_len_to_substation_km", units="km", desc="Combined Homerun Trench Length to Substation (km)", val=50
)
self.add_input("distance_to_interconnect_mi", units="mi", desc="Distance to interconnect (miles)", val=5)
self.add_input("interconnect_voltage_kV", units="kV", desc="Interconnect Voltage (kV)", val=130)
self.add_input(
"critical_speed_non_erection_wind_delays_m_per_s",
units="m/s",
desc="Non-Erection Wind Delay Critical Speed (m/s)",
val=15,
)
self.add_input(
"critical_height_non_erection_wind_delays_m",
units="m",
desc="Non-Erection Wind Delay Critical Height (m)",
val=10,
)
        self.add_discrete_input("road_distributed_wind", val=False)
self.add_input("road_width_ft", units="ft", desc="Road width (ft)", val=20)
self.add_input("road_thickness", desc="Road thickness (in)", val=8)
self.add_input("crane_width", units="m", desc="Crane width (m)", val=12.2)
self.add_input("overtime_multiplier", desc="Overtime multiplier", val=1.4)
self.add_input("markup_contingency", desc="Markup contingency", val=0.03)
self.add_input("markup_warranty_management", desc="Markup warranty management", val=0.0002)
self.add_input("markup_sales_and_use_tax", desc="Markup sales and use tax", val=0)
self.add_input("markup_overhead", desc="Markup overhead", val=0.05)
self.add_input("markup_profit_margin", desc="Markup profit margin", val=0.05)
self.add_input("Mass tonne", val=(1.0,), desc="", units="t")
self.add_input(
"development_labor_cost_usd", val=1e6, desc="The cost of labor in the development phase", units="USD"
)
# Disabled due to Pandas conflict right now.
self.add_input("labor_cost_multiplier", val=1.0, desc="Labor cost multiplier")
self.add_input("commissioning_pct", 0.01)
self.add_input("decommissioning_pct", 0.15)
def setup_discrete_inputs_that_are_not_dataframes(self):
"""
This method sets up the discrete inputs that aren't dataframes.
"""
self.add_discrete_input("num_turbines", val=100, desc="Number of turbines in project")
# Since 3 blades are so common on rotors, that is a reasonable default
# value that will not need to be checked during component list
# assembly.
self.add_discrete_input("number_of_blades", val=3, desc="Number of blades on the rotor")
self.add_discrete_input(
"user_defined_home_run_trench", val=0, desc="Flag for user-defined home run trench length (0 = no; 1 = yes)"
)
self.add_discrete_input(
"allow_same_flag",
val=False,
desc="Allow same crane for base and topping (True or False)",
)
self.add_discrete_input(
"hour_day",
desc="Dictionary of normal and long hours for construction in a day in the form of {'long': 24, 'normal': 10}",
val={"long": 24, "normal": 10},
)
self.add_discrete_input(
"time_construct",
desc="One of the keys in the hour_day dictionary to specify how many hours per day construction happens.",
val="normal",
)
self.add_discrete_input(
"user_defined_distance_to_grid_connection",
desc="Flag for user-defined home run trench length (True or False)",
val=False,
)
# Could not place units in rate_of_deliveries
self.add_discrete_input("rate_of_deliveries", val=10, desc="Rate of deliveries (turbines per week)")
self.add_discrete_input("new_switchyard", desc="New Switchyard (True or False)", val=True)
self.add_discrete_input("num_hwy_permits", desc="Number of highway permits", val=10)
self.add_discrete_input("num_access_roads", desc="Number of access roads", val=2)
def setup_discrete_inputs_that_are_dataframes(self):
"""
This sets up the default inputs that are dataframes. They are separate
because they hold the project data and the way we need to hold their
data is different. They have defaults loaded at the top of the file
which can be overridden outside by setting the properties listed
below.
"""
# Read in default sheets for project data
default_project_data = OpenMDAODataframeCache.read_all_sheets_from_xlsx("ge15_public")
self.add_discrete_input(
"site_facility_building_area_df",
val=default_project_data["site_facility_building_area"],
desc="site_facility_building_area DataFrame",
)
self.add_discrete_input(
"components",
val=default_project_data["components"],
desc="Dataframe of components for tower, blade, nacelle",
)
self.add_discrete_input(
"crane_specs", val=default_project_data["crane_specs"], desc="Dataframe of specifications of cranes"
)
self.add_discrete_input(
"weather_window",
val=read_weather_window(default_project_data["weather_window"]),
desc="Dataframe of wind toolkit data",
)
self.add_discrete_input("crew", val=default_project_data["crew"], desc="Dataframe of crew configurations")
self.add_discrete_input(
"crew_price",
val=default_project_data["crew_price"],
desc="Dataframe of costs per hour for each type of worker.",
)
self.add_discrete_input(
"equip", val=default_project_data["equip"], desc="Collections of equipment to perform erection operations."
)
self.add_discrete_input(
"equip_price", val=default_project_data["equip_price"], desc="Prices for various type of equipment."
)
self.add_discrete_input("rsmeans", val=default_project_data["rsmeans"], desc="RSMeans price data")
self.add_discrete_input(
"cable_specs", val=default_project_data["cable_specs"], desc="cable specs for collection system"
)
self.add_discrete_input(
"material_price",
val=default_project_data["material_price"],
desc="Prices of materials for foundations and roads",
)
self.add_discrete_input("project_data", val=default_project_data, desc="Dictionary of all dataframes of data")
def setup_outputs(self):
"""
This method sets up the continuous outputs. This is where total costs
and installation times go.
        To see how cost totals are calculated, see the compute_total_bos_costs
        method below.
"""
self.add_output(
"bos_capex", 0.0, units="USD", desc="Total BOS CAPEX not including commissioning or decommissioning."
)
self.add_output(
"bos_capex_kW",
0.0,
units="USD/kW",
desc="Total BOS CAPEX per kW not including commissioning or decommissioning.",
)
self.add_output(
"total_capex", 0.0, units="USD", desc="Total BOS CAPEX including commissioning and decommissioning."
)
self.add_output(
"total_capex_kW",
0.0,
units="USD/kW",
desc="Total BOS CAPEX per kW including commissioning and decommissioning.",
)
self.add_output("installation_capex", 0.0, units="USD", desc="Total foundation and erection installation cost.")
        self.add_output(
            "installation_capex_kW", 0.0, units="USD/kW", desc="Total foundation and erection installation cost per kW."
        )
self.add_output("installation_time_months", 0.0, desc="Total balance of system installation time (months).")
def setup_discrete_outputs(self):
"""
This method sets up discrete outputs.
"""
self.add_discrete_output(
"landbosse_costs_by_module_type_operation", desc="The costs by module, type and operation", val=None
)
self.add_discrete_output(
"landbosse_details_by_module",
desc="The details from the run of LandBOSSE. This includes some costs, but mostly other things",
val=None,
)
self.add_discrete_output("erection_crane_choice", desc="The crane choices for erection.", val=None)
self.add_discrete_output(
"erection_component_name_topvbase",
desc="List of components and whether they are a topping or base operation",
val=None,
)
self.add_discrete_output(
"erection_components", desc="List of components with their values modified from the defaults.", val=None
)
def compute(self, inputs, outputs, discrete_inputs=None, discrete_outputs=None):
"""
This runs the ErectionCost module using the inputs and outputs into and
out of this module.
Note: inputs, discrete_inputs are not dictionaries. They do support
[] notation. inputs is of class 'openmdao.vectors.default_vector.DefaultVector'
discrete_inputs is of class openmdao.core.component._DictValues. Other than
[] brackets, they do not behave like dictionaries. See the following
documentation for details.
http://openmdao.org/twodocs/versions/latest/_srcdocs/packages/vectors/default_vector.html
https://mdolab.github.io/OpenAeroStruct/_modules/openmdao/core/component.html
Parameters
----------
inputs : openmdao.vectors.default_vector.DefaultVector
A dictionary-like object with NumPy arrays that hold float
inputs. Note that since these are NumPy arrays, they
need indexing to pull out simple float64 values.
outputs : openmdao.vectors.default_vector.DefaultVector
A dictionary-like object to store outputs.
discrete_inputs : openmdao.core.component._DictValues
A dictionary-like with the non-numeric inputs (like
pandas.DataFrame)
discrete_outputs : openmdao.core.component._DictValues
A dictionary-like for non-numeric outputs (like
pandas.DataFrame)
"""
# Put the inputs together and run all the modules
master_output_dict = dict()
master_input_dict = self.prepare_master_input_dictionary(inputs, discrete_inputs)
manager = Manager(master_input_dict, master_output_dict)
result = manager.execute_landbosse("WISDEM")
# Check if everything executed correctly
if result != 0:
raise Exception("LandBOSSE didn't execute correctly")
# Gather the cost and detail outputs
costs_by_module_type_operation = self.gather_costs_from_master_output_dict(master_output_dict)
discrete_outputs["landbosse_costs_by_module_type_operation"] = costs_by_module_type_operation
details = self.gather_details_from_master_output_dict(master_output_dict)
discrete_outputs["landbosse_details_by_module"] = details
# This is where we have access to the modified components, so put those
# in the outputs of the component
discrete_outputs["erection_components"] = master_input_dict["components"]
# Now get specific outputs. These have been refactored to methods that work
# with each module so as to keep this method as compact as possible.
self.gather_specific_erection_outputs(master_output_dict, outputs, discrete_outputs)
# Compute the total BOS costs
self.compute_total_bos_costs(costs_by_module_type_operation, master_output_dict, inputs, outputs)
def prepare_master_input_dictionary(self, inputs, discrete_inputs):
"""
This prepares a master input dictionary by applying all the necessary
modifications to the inputs.
Parameters
----------
inputs : openmdao.vectors.default_vector.DefaultVector
A dictionary-like object with NumPy arrays that hold float
inputs. Note that since these are NumPy arrays, they
need indexing to pull out simple float64 values.
discrete_inputs : openmdao.core.component._DictValues
A dictionary-like with the non-numeric inputs (like
pandas.DataFrame)
Returns
-------
dict
The prepared master input to go to the Manager.
"""
inputs_dict = {key: inputs[key][0] for key in inputs.keys()}
discrete_inputs_dict = {key: value for key, value in discrete_inputs.items()}
incomplete_input_dict = {**inputs_dict, **discrete_inputs_dict}
# Modify the default component data if needed and copy it into the
# appropriate values of the input dictionary.
modified_components = self.modify_component_lists(inputs, discrete_inputs)
incomplete_input_dict["project_data"]["components"] = modified_components
incomplete_input_dict["components"] = modified_components
# FoundationCost needs to have all the component data split into separate
# NumPy arrays.
incomplete_input_dict["component_data"] = modified_components
for component in incomplete_input_dict["component_data"].keys():
incomplete_input_dict[component] = np.array(incomplete_input_dict["component_data"][component])
# These are aliases because parts of the code call the same thing by
        # different names.
incomplete_input_dict["crew_cost"] = discrete_inputs["crew_price"]
incomplete_input_dict["cable_specs_pd"] = discrete_inputs["cable_specs"]
# read in RSMeans per diem:
crew_cost = discrete_inputs["crew_price"]
crew_cost = crew_cost.set_index("Labor type ID", drop=False)
incomplete_input_dict["rsmeans_per_diem"] = crew_cost.loc["RSMeans", "Per diem USD per day"]
# Calculate project size in megawatts
incomplete_input_dict["project_size_megawatts"] = float(
discrete_inputs["num_turbines"] * inputs["turbine_rating_MW"]
)
# Needed to avoid distributed wind keys
incomplete_input_dict["road_distributed_wind"] = False
defaults = DefaultMasterInputDict()
master_input_dict = defaults.populate_input_dict(incomplete_input_dict)
return master_input_dict
def gather_costs_from_master_output_dict(self, master_output_dict):
"""
        This method extracts all the cost_by_module_type_operation lists for
output in an Excel file.
It finds values for the keys ending in '_module_type_operation'. It
then concatenates them together so they can be easily written to
a .csv or .xlsx
On every row, it includes the:
Rotor diameter m
Turbine rating MW
Number of turbines
This enables easy mapping of new columns if need be. The columns have
spaces in the names so that they can be easily written to a user-friendly
output.
Parameters
----------
        master_output_dict : dict
            The master output dictionary from the run, containing the per-module
            cost line-item lists under keys ending in '_module_type_operation'.
Returns
-------
list
List of dicts to write to the .csv.
"""
line_items = []
# Gather the lists of costs
cost_lists = [value for key, value in master_output_dict.items() if key.endswith("_module_type_operation")]
# Flatten the list of lists that is the result of the gathering
for cost_list in cost_lists:
line_items.extend(cost_list)
# Filter out the keys needed and rename them to meaningful values
final_costs = []
for line_item in line_items:
item = {
"Module": line_item["module"],
"Type of cost": line_item["type_of_cost"],
"Cost / kW": line_item["usd_per_kw_per_project"],
"Cost / project": line_item["cost_per_project"],
"Cost / turbine": line_item["cost_per_turbine"],
"Number of turbines": line_item["num_turbines"],
"Rotor diameter (m)": line_item["rotor_diameter_m"],
"Turbine rating (MW)": line_item["turbine_rating_MW"],
"Project ID with serial": line_item["project_id_with_serial"],
}
final_costs.append(item)
return final_costs
def gather_details_from_master_output_dict(self, master_output_dict):
"""
This extracts the detail lists from all the modules to output
the detailed non-cost data from the model run.
Parameters
----------
master_output_dict : dict
The master output dict with the finished module output in it.
Returns
-------
list
List of dicts with detailed data.
"""
line_items = []
# Gather the lists of costs
details_lists = [value for key, value in master_output_dict.items() if key.endswith("_csv")]
# Flatten the list of lists
for details_list in details_lists:
line_items.extend(details_list)
return line_items
def gather_specific_erection_outputs(self, master_output_dict, outputs, discrete_outputs):
"""
This method gathers specific outputs from the ErectionCost module and places
them on the outputs.
The method does not return anything. Rather, it places the outputs directly
        on the continuous or discrete outputs.
Parameters
----------
master_output_dict: dict
The master output dictionary out of LandBOSSE
outputs : openmdao.vectors.default_vector.DefaultVector
A dictionary-like object to store outputs.
discrete_outputs : openmdao.core.component._DictValues
A dictionary-like for non-numeric outputs (like
pandas.DataFrame)
"""
discrete_outputs["erection_crane_choice"] = master_output_dict["crane_choice"]
discrete_outputs["erection_component_name_topvbase"] = master_output_dict["component_name_topvbase"]
def compute_total_bos_costs(self, costs_by_module_type_operation, master_output_dict, inputs, outputs):
"""
This computes the total BOS costs from the master output dictionary
and places them on the necessary outputs.
Parameters
----------
costs_by_module_type_operation: List[Dict[str, Any]]
The lists of costs by module, type and operation.
master_output_dict: Dict[str, Any]
The master output dictionary from the run. Used to obtain the
            construction time.
outputs : openmdao.vectors.default_vector.DefaultVector
The outputs in which to place the results of the computations
"""
bos_per_kw = 0.0
bos_per_project = 0.0
installation_per_project = 0.0
installation_per_kW = 0.0
for row in costs_by_module_type_operation:
bos_per_kw += row["Cost / kW"]
bos_per_project += row["Cost / project"]
if row["Module"] in ["ErectionCost", "FoundationCost"]:
installation_per_project += row["Cost / project"]
installation_per_kW += row["Cost / kW"]
commissioning_pct = inputs["commissioning_pct"]
decommissioning_pct = inputs["decommissioning_pct"]
commissioning_per_project = bos_per_project * commissioning_pct
        decommissioning_per_project = bos_per_project * decommissioning_pct
        commissioning_per_kW = bos_per_kw * commissioning_pct
        decommissioning_per_kW = bos_per_kw * decommissioning_pct
        outputs["total_capex_kW"] = np.round(bos_per_kw + commissioning_per_kW + decommissioning_per_kW, 0)
        outputs["total_capex"] = np.round(bos_per_project + commissioning_per_project + decommissioning_per_project, 0)
outputs["bos_capex"] = round(bos_per_project, 0)
outputs["bos_capex_kW"] = round(bos_per_kw, 0)
outputs["installation_capex"] = round(installation_per_project, 0)
outputs["installation_capex_kW"] = round(installation_per_kW, 0)
actual_construction_months = master_output_dict["actual_construction_months"]
outputs["installation_time_months"] = round(actual_construction_months, 0)
def modify_component_lists(self, inputs, discrete_inputs):
"""
This method modifies the previously loaded default component lists with
data about blades, tower sections, if they have been provided as input
to the component.
It only modifies the project component data if default data for the proper
inputs have been overridden.
The default blade data is assumed to be the first component that begins
        with the word "Blade".
This should take mass from the tower in WISDEM. Ideally, this should have
an input for transportable tower 4.3, large diameter steel tower LDST 6.2m, or
unconstrained key stone tower. Or give warnings about the boundaries
that we assume.
Parameters
----------
inputs : openmdao.vectors.default_vector.DefaultVector
A dictionary-like object with NumPy arrays that hold float
inputs. Note that since these are NumPy arrays, they
need indexing to pull out simple float64 values.
discrete_inputs : openmdao.core.component._DictValues
A dictionary-like with the non-numeric inputs (like
pandas.DataFrame)
Returns
-------
pd.DataFrame
The dataframe with the modified components.
"""
input_components = discrete_inputs["components"]
# This list is a sequence of pd.Series instances that have the
# specifications of each component.
output_components_list = []
# Need to convert kg to tonnes
kg_per_tonne = 1000
# Get the hub height
hub_height_meters = inputs["hub_height_meters"][0]
# Make the nacelle. This does not include the hub or blades.
nacelle_mass_kg = inputs["nacelle_mass"][0]
nacelle = input_components[input_components["Component"].str.startswith("Nacelle")].iloc[0].copy()
if inputs["nacelle_mass"] != use_default_component_data:
nacelle["Mass tonne"] = nacelle_mass_kg / kg_per_tonne
nacelle["Component"] = "Nacelle"
nacelle["Lift height m"] = hub_height_meters
output_components_list.append(nacelle)
# Make the hub
hub_mass_kg = inputs["hub_mass"][0]
hub = input_components[input_components["Component"].str.startswith("Hub")].iloc[0].copy()
hub["Lift height m"] = hub_height_meters
if hub_mass_kg != use_default_component_data:
hub["Mass tonne"] = hub_mass_kg / kg_per_tonne
output_components_list.append(hub)
# Make blades
blade = input_components[input_components["Component"].str.startswith("Blade")].iloc[0].copy()
# There is always a hub height, so use that as the lift height
blade["Lift height m"] = hub_height_meters
if inputs["blade_drag_coefficient"][0] != use_default_component_data:
blade["Coeff drag"] = inputs["blade_drag_coefficient"][0]
if inputs["blade_lever_arm"][0] != use_default_component_data:
blade["Lever arm m"] = inputs["blade_lever_arm"][0]
if inputs["blade_install_cycle_time"][0] != use_default_component_data:
blade["Cycle time installation hrs"] = inputs["blade_install_cycle_time"][0]
if inputs["blade_offload_hook_height"][0] != use_default_component_data:
blade["Offload hook height m"] = hub_height_meters
if inputs["blade_offload_cycle_time"][0] != use_default_component_data:
            blade["Offload cycle time hrs"] = inputs["blade_offload_cycle_time"][0]
if inputs["blade_drag_multiplier"][0] != use_default_component_data:
            blade["Multiplier drag rotor"] = inputs["blade_drag_multiplier"][0]
if inputs["blade_mass"][0] != use_default_component_data:
blade["Mass tonne"] = inputs["blade_mass"][0] / kg_per_tonne
        # Assume that number_of_blades always has a reasonable value. Its default
        # of 3, set when the discrete input is declared, is reasonable unless
        # overridden by another input.
number_of_blades = discrete_inputs["number_of_blades"]
for i in range(number_of_blades):
component = f"Blade {i + 1}"
blade_i = blade.copy()
blade_i["Component"] = component
output_components_list.append(blade_i)
# Make tower sections
tower_mass_tonnes = inputs["tower_mass"][0] / kg_per_tonne
tower_height_m = hub_height_meters - inputs["foundation_height"][0]
default_tower_section = input_components[input_components["Component"].str.startswith("Tower")].iloc[0]
tower_sections = self.make_tower_sections(tower_mass_tonnes, tower_height_m, default_tower_section)
output_components_list.extend(tower_sections)
# Make the output component dataframe and return it.
output_components = pd.DataFrame(output_components_list)
return output_components
@staticmethod
def make_tower_sections(tower_mass_tonnes, tower_height_m, default_tower_section):
"""
This makes tower sections for a transportable tower.
Approximations:
- Weight is distributed uniformly among the sections
- The number of sections is either the maximum allowed by mass or
the maximum allowed by height, to maintain transportability.
For each tower section, calculate:
- lift height
- lever arm
- surface area
The rest of values should remain at their defaults.
Note: Tower sections are constrained in maximum diameter to 4.5 m.
However, their surface area is calculated with a 1.3 m radius
to agree more closely with empirical data. Also, tower sections
are approximated as cylinders.
Parameters
----------
tower_mass_tonnes: float
The total tower mass in tonnes
tower_height_m: float
The total height of the tower in meters.
default_tower_section: pd.Series
There are a number of values that are kept constant in creating
the tower sections. This series holds the values.
Returns
-------
List[pd.Series]
A list of series to be appended onto an output component list.
It is not a dataframe, because it is faster to append to a list
and make a dataframe once.
"""
tower_radius = 1.3
number_of_sections = max(ceil(tower_height_m / 30), ceil(tower_mass_tonnes / 80))
tower_section_height_m = tower_height_m / number_of_sections
tower_section_mass = tower_mass_tonnes / number_of_sections
tower_section_surface_area_m2 = np.pi * tower_section_height_m * (tower_radius ** 2)
sections = []
for i in range(number_of_sections):
lift_height_m = (i * tower_section_height_m) + tower_section_height_m
lever_arm = (i * tower_section_height_m) + (0.5 * tower_section_height_m)
name = f"Tower {i + 1}"
section = default_tower_section.copy()
section["Component"] = name
section["Mass tonne"] = tower_section_mass
section["Lift height m"] = lift_height_m
section["Surface area sq m"] = tower_section_surface_area_m2
section["Section height m"] = tower_section_height_m
section["Lever arm m"] = lever_arm
sections.append(section)
return sections
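# A hedged worked example of the sectioning rule above (hypothetical numbers): a
# 240 t, 90 m tower needs max(ceil(90 / 30), ceil(240 / 80)) = 3 sections of 30 m
# and 80 t each, with lift heights of 30, 60 and 90 m.
if __name__ == "__main__":
    template = pd.Series({"Component": "Tower", "Mass tonne": 0.0})
    sections = LandBOSSE_API.make_tower_sections(240.0, 90.0, template)
    print([(s["Component"], s["Lift height m"], s["Mass tonne"]) for s in sections])
    # expected: [('Tower 1', 30.0, 80.0), ('Tower 2', 60.0, 80.0), ('Tower 3', 90.0, 80.0)]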
|
176616
|
from collections import deque
class PushSwapStacks:
"""
    Stacks and operations for the push-swap sorting algorithm.
"""
def __init__(self, initstate):
""" initstate: Iterable[_T]=..."""
self.stack_a = deque()
self.stack_b = deque()
self.new_data(initstate)
self.cmd = {
'pa': self.pa,
'pb': self.pb,
'sa': self.sa,
'sb': self.sb,
'ss': self.ss,
'ra': self.ra,
'rb': self.rb,
'rr': self.rr,
'rra': self.rra,
'rrb': self.rrb,
'rrr': self.rrr
}
def new_data(self, initstate):
self.stack_a.clear()
self.stack_b.clear()
tmp = sorted(initstate)
self.stack_a.extend([tmp.index(x) + 1 for x in initstate])
def do_cmd(self, op):
self.cmd[op]()
return op
def pa(self):
if len(self.stack_b):
self.stack_a.appendleft(self.stack_b.popleft())
def pb(self):
if len(self.stack_a):
self.stack_b.appendleft(self.stack_a.popleft())
def ra(self):
if len(self.stack_a):
self.stack_a.rotate(-1)
def rb(self):
if len(self.stack_b):
self.stack_b.rotate(-1)
def rr(self):
self.ra()
self.rb()
def rra(self):
if len(self.stack_a):
self.stack_a.rotate(1)
def rrb(self):
if len(self.stack_b):
self.stack_b.rotate(1)
def rrr(self):
self.rra()
self.rrb()
def sa(self):
self.stack_a[0], self.stack_a[1] = self.stack_a[1], self.stack_a[0]
def sb(self):
self.stack_b[0], self.stack_b[1] = self.stack_b[1], self.stack_b[0]
def ss(self):
self.sa()
self.sb()
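# A minimal usage sketch (hypothetical): the constructor normalizes the input to
# ranks, and do_cmd dispatches operations by name.
if __name__ == '__main__':
    stacks = PushSwapStacks([30, 10, 20])
    print(list(stacks.stack_a))  # ranks of the input: [3, 1, 2]
    stacks.do_cmd('sa')          # swap the two top elements of stack A
    stacks.do_cmd('pb')          # push the top of stack A onto stack B
    print(list(stacks.stack_a), list(stacks.stack_b))  # [3, 2] [1]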
|
176619
|
import logging
from datetime import datetime
import zmq
from .handler import Handler
LOGGER = logging.getLogger(__name__)
class ZmqHandler(Handler):
"""Zmq handler.
"""
def __init__(self, connection, **kwargs):
"""Constructor.
"""
super().__init__(**kwargs)
self._connection = connection
self._context = zmq.Context()
self._socket = None
def load(self, queue):
"""Load.
"""
super().load(queue=queue)
LOGGER.info('Binding connection %s as a publisher',
self._connection)
def create_table(self, table_name, fields, **kwargs):
"""Create table.
"""
assert self._socket, "Socket is not initialized"
def insert(self, table_name, fields):
"""Insert.
"""
assert self._socket, "Socket is not initialized"
native_fields = {
k: self.serialize(v) for k, v in fields.items()
if not v.is_auto_increment}
data = {
"table_name": table_name,
"data": native_fields
}
self._socket.send_json(data)
@staticmethod
def serialize(value):
"""Serialize value.
"""
if isinstance(value.value, datetime):
return str(value)
return value.value
def run(self):
"""Run.
"""
# The socket has to be initialized here due to pyzmq #1232
# https://github.com/zeromq/pyzmq/issues/1232
self._socket = self._context.socket(zmq.PUB)
self._socket.bind(self._connection)
super().run()
|
176624
|
import gpu
import bgl
from gpu_extras.batch import batch_for_shader
class BL_UI_Widget:
def __init__(self, x, y, width, height):
self.x = x
self.y = y
self.x_screen = x
self.y_screen = y
self.width = width
self.height = height
self._bg_color = (0.8, 0.8, 0.8, 1.0)
self._tag = None
self.context = None
self.__inrect = False
self._mouse_down = False
self.name = 'None'
def set_location(self, x, y):
self.x = x
self.y = y
self.x_screen = x
self.y_screen = y
self.update(x,y)
@property
def bg_color(self):
return self._bg_color
@bg_color.setter
def bg_color(self, value):
self._bg_color = value
@property
def tag(self):
return self._tag
@tag.setter
def tag(self, value):
self._tag = value
def draw(self):
self.shader.bind()
self.shader.uniform_float("color", self._bg_color)
bgl.glEnable(bgl.GL_BLEND)
self.batch_panel.draw(self.shader)
bgl.glDisable(bgl.GL_BLEND)
def init(self, context):
self.context = context
self.update(self.x, self.y)
def update(self, x, y):
area_height = self.get_area_height()
self.x_screen = x
self.y_screen = y
indices = ((0, 1, 2), (0, 2, 3))
y_screen_flip = area_height - self.y_screen
# bottom left, top left, top right, bottom right
vertices = (
(self.x_screen, y_screen_flip),
(self.x_screen, y_screen_flip - self.height),
(self.x_screen + self.width, y_screen_flip - self.height),
(self.x_screen + self.width, y_screen_flip))
self.shader = gpu.shader.from_builtin('2D_UNIFORM_COLOR')
self.batch_panel = batch_for_shader(self.shader, 'TRIS', {"pos" : vertices}, indices=indices)
def handle_event(self, event):
x = event.mouse_x
y = event.mouse_y
if(event.type == 'LEFTMOUSE'):
if(event.value == 'PRESS'):
self._mouse_down = True
return self.mouse_down(x, y)
else:
self._mouse_down = False
self.mouse_up(x, y)
elif(event.type == 'MOUSEMOVE'):
self.mouse_move(x, y)
inrect = self.is_in_rect(x, y)
# we enter the rect
if not self.__inrect and inrect:
self.__inrect = True
self.mouse_enter(event, x, y)
# we are leaving the rect
elif self.__inrect and not inrect:
self.__inrect = False
self.mouse_exit(event, x, y)
return False
return False
def get_area_height(self):
return self.context.area.height
def is_in_rect(self, x, y):
area_height = self.get_area_height()
widget_y = area_height - self.y_screen
if (
(self.x_screen <= x <= (self.x_screen + self.width)) and
(widget_y >= y >= (widget_y - self.height))
):
return True
return False
def mouse_down(self, x, y):
return self.is_in_rect(x,y)
def mouse_up(self, x, y):
pass
def mouse_enter(self, event, x, y):
pass
def mouse_exit(self, event, x, y):
pass
def mouse_move(self, x, y):
pass
|
176642
|
import math
import torch
from torch import nn as nn
class JSD(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x, eps=1e-8):
logN = math.log(float(x.shape[0]))
y = torch.mean(x, 0)
y = y * (y + eps).log() / logN
y = y.sum()
x = x * (x + eps).log() / logN
x = x.sum(1).mean()
return 1.0 - x + y
class InfoLoss(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x, eps=1e-8):
x = torch.mean(x, 0)
logN = math.log(float(x.shape[0]))
x = x * (x + eps).log() / logN
neg_entropy = x.sum()
return 1.0 + neg_entropy
class EntropyLoss(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x, eps=1e-8):
logN = math.log(float(x.shape[0]))
x = x * (x + eps).log() / logN
neg_entropy = x.sum(1)
return -neg_entropy.mean()
infoLoss = InfoLoss()
entropyLoss = EntropyLoss()
jsd = JSD()
def MSE(p, target):
return (p - target) ** 2, 2 * (p - target)
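# A hedged usage sketch: these losses expect a batch of probability vectors (rows
# summing to 1). For a uniform 4x4 batch the normalizations line up so that
# entropyLoss is ~1.0 (maximal row entropy here), infoLoss is ~0.0 (uniform mean
# distribution), and jsd is ~1.0 (all rows identical, so no diversity).
if __name__ == '__main__':
    p = torch.full((4, 4), 0.25)
    print(float(entropyLoss(p)), float(infoLoss(p)), float(jsd(p)))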
|
176646
|
import os
import sys
sys.path.append(os.path.normpath(os.path.join(os.path.abspath(__file__), '..', '..', '..', "common")))
from env_indigo import *
indigo = Indigo()
# indigo::SmilesLoader::Error
try:
m = indigo.loadMolecule('CX')
except IndigoException as e:
print(getIndigoExceptionText(e))
# IndigoError
try:
r = indigo.loadReaction('C1=CC=CC=C1>>C1=CC=CC=C1')
for atom in r.iterateAtoms():
print(atom)
except IndigoException as e:
print(getIndigoExceptionText(e))
|
176660
|
from numpy import genfromtxt
import matplotlib
# matplotlib.use('Agg')
import matplotlib.pyplot as plt
''' ResNet-56 '''
train_error_52 = './epoch_error_train_52.csv'
train_error_52 = genfromtxt(train_error_52, delimiter=',')
valid_error_52 = './epoch_error_valid_52.csv'
valid_error_52 = genfromtxt(valid_error_52, delimiter=',')
train_error_53 = './epoch_error_train_53.csv'
train_error_53 = genfromtxt(train_error_53, delimiter=',')
valid_error_53 = './epoch_error_valid_53.csv'
valid_error_53 = genfromtxt(valid_error_53, delimiter=',')
# resnet56 (model 52)
# Training time: 127m 41s, 281 epochs
#
# Best [Valid] | epoch: 221 - loss: 0.3129 - acc: 0.9396
# [Test] loss 0.3042 - acc: 0.9356 - acc_topk: 0.9795
# resnet56 alpha (model 53)
# Training time: 134m 44s, 303 epochs
#
# Best [Valid] | epoch: 243 - loss: 0.2934 - acc: 0.9369
# [Test] loss 0.3062 - acc: 0.9345 - acc_topk: 0.9788
fig = plt.figure()
plt.xlabel('epoch', fontsize=14)
plt.ylabel('error (%)', fontsize=14)
plt.title('ResNet-56', fontsize=16)
plt.plot(train_error_53[1:,1], 100*train_error_53[1:,2], label='train (ReZero)', zorder=1)#, linewidth=1.5)
plt.plot(valid_error_53[1:,1], 100*valid_error_53[1:,2], label='valid (ReZero)', zorder=2)#, linewidth=1.5)
plt.plot(train_error_52[1:,1], 100*train_error_52[1:,2], 'c', label='train', zorder=3)#, linewidth=1.5)
plt.plot(valid_error_52[1:,1], 100*valid_error_52[1:,2], 'm', label='valid', zorder=4)#, linewidth=1.5)
plt.ticklabel_format(axis='y', style='sci')
plt.grid(True)
plt.legend(loc='upper right', fontsize='x-large') # upper right, lower right, lower left
# plt.ylim(5, 80)
# plt.xlim(1, 30)
# plt.savefig('resnet56_error_0_30.png', box_inches='tight')
plt.ylim(-2, 35)
plt.xlim(0, 250)
plt.savefig('resnet56_error.png', bbox_inches='tight')
plt.show()
train_loss_52 = './epoch_loss_train_52.csv'
train_loss_52 = genfromtxt(train_loss_52, delimiter=',')
valid_loss_52 = './epoch_loss_valid_52.csv'
valid_loss_52 = genfromtxt(valid_loss_52, delimiter=',')
train_loss_53 = './epoch_loss_train_53.csv'
train_loss_53 = genfromtxt(train_loss_53, delimiter=',')
valid_loss_53 = './epoch_loss_valid_53.csv'
valid_loss_53 = genfromtxt(valid_loss_53, delimiter=',')
fig = plt.figure()
plt.xlabel('epoch', fontsize=14)
plt.ylabel('loss', fontsize=14)
plt.title('ResNet-56', fontsize=16)
plt.plot(train_loss_53[1:,1], train_loss_53[1:,2], label='train (ReZero)', zorder=1)#, linewidth=1.5)
plt.plot(valid_loss_53[1:,1], valid_loss_53[1:,2], label='valid (ReZero)', zorder=2)#, linewidth=1.5)
plt.plot(train_loss_52[1:,1], train_loss_52[1:,2], 'c', label='train', zorder=3)#, linewidth=1.5)
plt.plot(valid_loss_52[1:,1], valid_loss_52[1:,2], 'm', label='valid', zorder=4)#, linewidth=1.5)
plt.ticklabel_format(axis='y', style='sci')
plt.grid(True)
plt.legend(loc='upper right', fontsize='x-large') # upper right, lower right, lower left
plt.ylim(.2, 2.5)
plt.xlim(1, 30)
plt.savefig('resnet56_loss_0_30.png', bbox_inches='tight')
# plt.ylim(-.1, 1.2)
# plt.xlim(0, 250)
# plt.savefig('resnet56_loss.png', box_inches='tight')
plt.show()
''' ResNet-20 '''
train_error_54 = './epoch_error_train_54.csv'
train_error_54 = genfromtxt(train_error_54, delimiter=',')
valid_error_54 = './epoch_error_valid_54.csv'
valid_error_54 = genfromtxt(valid_error_54, delimiter=',')
train_error_55 = './epoch_error_train_55.csv'
train_error_55 = genfromtxt(train_error_55, delimiter=',')
valid_error_55 = './epoch_error_valid_55.csv'
valid_error_55 = genfromtxt(valid_error_55, delimiter=',')
# resnet-20 alpha (model 54)
# Training time: 63m 9s, 327
#
# Best [Valid] | epoch: 267 - loss: 0.3237 - acc: 0.9256
# [Test] loss 0.3491 - acc: 0.9206 - acc_topk: 0.9723
# resnet-20 (model 55)
# Training time: 70m 3s, 398
#
# Best [Valid] | epoch: 338 - loss: 0.3026 - acc: 0.9237
# [Test] loss 0.3055 - acc: 0.9202 - acc_topk: 0.9742
fig = plt.figure()
plt.xlabel('epoch', fontsize=14)
plt.ylabel('error (%)', fontsize=14)
plt.title('ResNet-20', fontsize=16)
plt.plot(train_error_54[1:,1], 100*train_error_54[1:,2], label='train (ReZero)', zorder=1)#, linewidth=1.5)
plt.plot(valid_error_54[1:,1], 100*valid_error_54[1:,2], label='valid (ReZero)', zorder=2)#, linewidth=1.5)
plt.plot(train_error_55[1:,1], 100*train_error_55[1:,2], 'c', label='train', zorder=3)#, linewidth=1.5)
plt.plot(valid_error_55[1:,1], 100*valid_error_55[1:,2], 'm', label='valid', zorder=4)#, linewidth=1.5)
plt.ticklabel_format(axis='y', style='sci')
plt.grid(True)
plt.legend(loc='upper right', fontsize='x-large') # upper right, lower right, lower left
plt.ylim(5, 60)
plt.xlim(1, 30)
plt.savefig('resnet20_error_0_30.png', bbox_inches='tight')
# plt.ylim(-2, 35)
# plt.xlim(0, 300)
# plt.savefig('resnet20_error.png', box_inches='tight')
plt.show()
train_loss_54 = './epoch_loss_train_54.csv'
train_loss_54 = genfromtxt(train_loss_54, delimiter=',')
valid_loss_54 = './epoch_loss_valid_54.csv'
valid_loss_54 = genfromtxt(valid_loss_54, delimiter=',')
train_loss_55 = './epoch_loss_train_55.csv'
train_loss_55 = genfromtxt(train_loss_55, delimiter=',')
valid_loss_55 = './epoch_loss_valid_55.csv'
valid_loss_55 = genfromtxt(valid_loss_55, delimiter=',')
fig = plt.figure()
plt.xlabel('epoch', fontsize=14)
plt.ylabel('loss', fontsize=14)
plt.title('ResNet-20', fontsize=16)
plt.plot(train_loss_54[1:,1], train_loss_54[1:,2], label='train (ReZero)', zorder=1)#, linewidth=1.5)
plt.plot(valid_loss_54[1:,1], valid_loss_54[1:,2], label='valid (ReZero)', zorder=2)#, linewidth=1.5)
plt.plot(train_loss_55[1:,1], train_loss_55[1:,2], 'c', label='train', zorder=3)#, linewidth=1.5)
plt.plot(valid_loss_55[1:,1], valid_loss_55[1:,2], 'm', label='valid', zorder=4)#, linewidth=1.5)
plt.ticklabel_format(axis='y', style='sci')
plt.grid(True)
plt.legend(loc='upper right', fontsize='x-large') # upper right, lower right, lower left
plt.ylim(.2, 1.6)
plt.xlim(1, 30)
plt.savefig('resnet20_loss_0_30.png', bbox_inches='tight')
# plt.ylim(-.1, 1.2)
# plt.xlim(0, 300)
# plt.savefig('resnet20_loss.png', box_inches='tight')
plt.show()
''' ResNet-110 '''
train_error_58 = './epoch_error_train_58.csv'
train_error_58 = genfromtxt(train_error_58, delimiter=',')
valid_error_58 = './epoch_error_valid_58.csv'
valid_error_58 = genfromtxt(valid_error_58, delimiter=',')
train_error_59 = './epoch_error_train_59.csv'
train_error_59 = genfromtxt(train_error_59, delimiter=',')
valid_error_59 = './epoch_error_valid_59.csv'
valid_error_59 = genfromtxt(valid_error_59, delimiter=',')
# resnet-110 alpha (model 58)
# Training time: 301m 19s, 410 epochs
#
# Best [Valid] | epoch: 350 - loss: 0.2925 - acc: 0.9416
# [Test] loss 0.2796 - acc: 0.9412 - acc_topk: 0.9816
# resnet-110 (model 59)
# Training time: 240m 53s, 313 epochs
#
# Best [Valid] | epoch: 253 - loss: 0.3480 - acc: 0.9400
# [Test] loss 0.3392 - acc: 0.9361 - acc_topk: 0.9809
fig = plt.figure()
plt.xlabel('epoch', fontsize=14)
plt.ylabel('error (%)', fontsize=14)
plt.title('ResNet-110', fontsize=16)
plt.plot(train_error_58[1:,1], 100*train_error_58[1:,2], label='train (ReZero)', zorder=1)#, linewidth=1.5)
plt.plot(valid_error_58[1:,1], 100*valid_error_58[1:,2], label='valid (ReZero)', zorder=2)#, linewidth=1.5)
plt.plot(train_error_59[1:,1], 100*train_error_59[1:,2], 'c', label='train', zorder=3)#, linewidth=1.5)
plt.plot(valid_error_59[1:,1], 100*valid_error_59[1:,2], 'm', label='valid', zorder=4)#, linewidth=1.5)
plt.ticklabel_format(axis='y', style='sci')
plt.grid(True)
plt.legend(loc='upper right', fontsize='x-large') # upper right, lower right, lower left
# plt.ylim(5, 70)
# plt.xlim(1, 30)
# plt.savefig('resnet110_error_0_30.png', box_inches='tight')
plt.ylim(-2, 40)
plt.xlim(0, 300)
plt.savefig('resnet110_error.png', bbox_inches='tight')
plt.show()
train_loss_58 = './epoch_loss_train_58.csv'
train_loss_58 = genfromtxt(train_loss_58, delimiter=',')
valid_loss_58 = './epoch_loss_valid_58.csv'
valid_loss_58 = genfromtxt(valid_loss_58, delimiter=',')
train_loss_59 = './epoch_loss_train_59.csv'
train_loss_59 = genfromtxt(train_loss_59, delimiter=',')
valid_loss_59 = './epoch_loss_valid_59.csv'
valid_loss_59 = genfromtxt(valid_loss_59, delimiter=',')
fig = plt.figure()
plt.xlabel('epoch', fontsize=14)
plt.ylabel('loss', fontsize=14)
plt.title('ResNet-110', fontsize=16)
plt.plot(train_loss_58[1:,1], train_loss_58[1:,2], label='train (ReZero)', zorder=1)#, linewidth=1.5)
plt.plot(valid_loss_58[1:,1], valid_loss_58[1:,2], label='valid (ReZero)', zorder=2)#, linewidth=1.5)
plt.plot(train_loss_59[1:,1], train_loss_59[1:,2], 'c', label='train', zorder=3)#, linewidth=1.5)
plt.plot(valid_loss_59[1:,1], valid_loss_59[1:,2], 'm', label='valid', zorder=4)#, linewidth=1.5)
plt.ticklabel_format(axis='y', style='sci')
plt.grid(True)
plt.legend(loc='upper right', fontsize='x-large') # upper right, lower right, lower left
# plt.ylim(.2, 1.8)
# plt.xlim(1, 30)
# plt.savefig('resnet110_loss_0_30.png', box_inches='tight')
plt.ylim(-.1, 1.4)
plt.xlim(0, 300)
plt.savefig('resnet110_loss.png', bbox_inches='tight')
plt.show()
|
176663
|
import subprocess
from ctypes import *
# I know, I know, this is probably the worst solution to a trivial challenge. Still, if it ain't broke...
class Ambient:
def __init__(self):
self.user32 = WinDLL("user32.dll")
def change_color(self, r,g,b):
rgb = r + (g<<8) + (b<<16)
self.user32.SetSysColors(1, byref(c_int(1)), byref(c_int(rgb)))
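# Minimal usage sketch (Windows only; left commented out because it changes a global
# system colour). The element index 1 passed to SetSysColors is COLOR_BACKGROUND,
# i.e. the desktop background.
# Ambient().change_color(255, 0, 0)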
|
176738
|
import sys
from progress.bar import FillingSquaresBar
from time import sleep
from colored import fg, bg, attr, fore, style
from colored import stylize
from functools import wraps
def prefix(item):
    '''
    Decorator factory: shows a green filling-squares progress bar (roughly one second)
    before running the wrapped function.
    '''
def decorator(fun):
@wraps(fun)
def wrapper(*args):
print('\n')
d = (stylize(item, fg("green_1")))
with FillingSquaresBar(d) as bar:
for _ in range(100):
sleep(0.01)
bar.next()
print('\n')
return fun(*args)
return wrapper
return decorator
def sub_prefix(item):
    '''
    Decorator factory: shows a blue filling-squares progress bar (roughly half a second)
    before running the wrapped function.
    '''
def decorator(fun):
@wraps(fun)
def wrapper(*args):
d = (stylize(item, fg("dodger_blue_1")))
with FillingSquaresBar(d) as bar:
for _ in range(100):
sleep(0.005)
bar.next()
return fun(*args)
return wrapper
return decorator
def service_open(item,file_obj=None):
    '''
    Print that the given service endpoint was identified, optionally also to a report file.
    '''
print (fore.ORANGE_1 + ' [+] ' + f' {item}' + ' Endpoint Identified' + style.RESET)
if file_obj:
print(' [+] '+item + ' Endpoint Identified',file=file_obj)
def resource_available(item,file_obj=None):
    '''
    Print that the given resource was identified in the cluster, optionally also to a report file.
    '''
print (fore.ORANGE_1 + ' [+] ' + f' {item}' + ' Identified in the cluster' + style.RESET)
if file_obj:
print(' [+] '+item + ' Identified in the cluster',file=file_obj)
def scan_status(item):
    '''
    Show a progress bar for the item currently being scanned.
    '''
d = (stylize(item, fg("dodger_blue_1")))
with FillingSquaresBar(d) as bar:
for _ in range(100):
sleep(0.005)
bar.next()
borderpadding = 2
def getLines(text):
'''
cowsay stuff
'''
lines = []
lines.append(text.strip())
return lines
def getMaxLineLength(lines):
'''
cowsay stuff
'''
maxLength = 0
for line in lines:
length = len(line)
if length > maxLength:
maxLength = length
return maxLength
def padLine(line, maxlinelength):
'''
cowsay stuff
'''
paddingLength = maxlinelength - len(line) - borderpadding
padding = ""
if paddingLength > 0:
padding = " " * paddingLength
return line + padding
def drawTextBox(lines):
'''
cowsay stuff
'''
maxlinelength = getMaxLineLength(lines) + borderpadding
horizontal_border = " " + ('-' * maxlinelength)
print(horizontal_border)
if len(lines) == 1:
print("< " + padLine(lines[0], maxlinelength) + " >")
else:
print("/ " + (" " * (maxlinelength - borderpadding)) + " \\")
for line in lines:
print("| " + padLine(line, maxlinelength) + " |")
print("\\ " + (" " * (maxlinelength - borderpadding)) + " /")
print(horizontal_border)
cow = [ fore.MAGENTA_1 +
" \\ ^__^",
" \\ (oo)\\________",
" (__)\\ )\\/\\",
" ||----W |",
" || ||"
+ style.RESET]
def drawAnimal():
"""Draws a cow in the terminal with ascii art"""
for line in cow:
print(line)
def cowsay(text):
'''
cowsay stuff
'''
lines = getLines(text)
drawTextBox(lines)
drawAnimal()
def print_msg_box(msg, indent=1, width=None, title=None, file_obj=None):
"""Print message-box with optional title."""
lines = msg.split('\n')
space = " " * indent
if not width:
width = max(map(len, lines))
    box = f'╔{"═" * (width + indent * 2)}╗\n'  # upper_border
    if title:
        box += f'║{space}{title:<{width}}{space}║\n'  # title
        box += f'║{space}{"-" * len(title):<{width}}{space}║\n'  # underscore
    box += ''.join([f'║{space}{line:<{width}}{space}║\n' for line in lines])
    box += f'╚{"═" * (width + indent * 2)}╝'  # lower_border
if file_obj:
print(box, file=file_obj)
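if __name__ == '__main__':
    # Minimal usage sketch of the helpers above; the banner texts and endpoint names are
    # illustrative only and not part of the original tool.
    @prefix('Starting demo scan')
    def _demo():
        service_open('kube-apiserver')
        resource_available('pods')
    _demo()
    cowsay('Scan complete')
    print_msg_box('2 findings written to report', title='Summary', file_obj=sys.stdout)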
|
176783
|
import os
import importlib
path = __file__.replace("__init__.py", "")
for file in os.listdir(path):
if "__" not in file:
globals()[file[:-3]] = getattr(importlib.import_module(__name__+"."+file[:-3]), file[:-3])
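# Example layout this loader assumes (illustrative, not part of the original package):
# a sibling module `foo.py` defining a top-level name `foo`; after import the object is
# re-exported here as `<package>.foo`.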
|
176791
|
import base64
import datetime
import hashlib
from io import BytesIO
from logging import getLogger
import OpenSSL.crypto
import asn1crypto.ocsp
import pytz
from OpenSSL import crypto
from OpenSSL.crypto import X509StoreContextError
from asn1crypto import pem
from bankid.experimental.helper import CompletionDataContainer, make_cert, NonceParse
_LOG = getLogger(__name__)
def verify_bankid_response(bank_id_response, ensure_certificates_still_valid=True, BANK_ID_ROOT_CERT=None):
if not isinstance(bank_id_response, dict):
raise TypeError("Response not a dictionary")
if 'completionData' not in bank_id_response:
raise AttributeError('Completion data missing in dictionary')
try:
cdc = CompletionDataContainer(bank_id_response['completionData'])
# First step is to hash the data and verify the digest matches
_LOG.info("1. Message Digest Verification\n")
bid_signed_data_raw_bytes = cdc.signature_container.bid_signed_data_raw.encode()
        # TODO - Parse out of the XML which hashing algorithm should be used
bid_signed_data_hash = hashlib.sha256(bid_signed_data_raw_bytes).digest().hex()
key_info_hash = hashlib.sha256(cdc.signature_container.key_info_raw.encode()).digest().hex()
signed_data_hash_from_signature = base64.b64decode(cdc.signature_container.signed_data_digest.text).hex()
key_info_hash_from_signature = base64.b64decode(cdc.signature_container.key_data_digest.text).hex()
if bid_signed_data_hash != signed_data_hash_from_signature:
raise AssertionError('Signed Data hash does not match!')
if key_info_hash != key_info_hash_from_signature:
raise AssertionError('Key Info hash does not match!')
_LOG.info("\n2. Signature verification\n")
# Helper function for the certificates
user_certificate_string = make_cert(cdc.signature_container.certificates[0].text)
# Making a certificate object out of it
user_certificate = crypto.load_certificate(crypto.FILETYPE_PEM,
BytesIO(user_certificate_string.encode()).read())
signature_bytes = base64.b64decode(cdc.signature_container.signature_value.text)
signed_info = cdc.signature_container.signed_info.encode()
try:
_LOG.debug("Certificate:", user_certificate.get_subject())
_LOG.debug("Signature Bytes:", signature_bytes)
_LOG.debug("Signature Data Raw:", signed_info)
OpenSSL.crypto.verify(user_certificate, signature_bytes, signed_info, 'sha256')
except OpenSSL.crypto.Error as e:
raise AssertionError('The BankID signature is not valid!')
_LOG.info("\n3. OCSP Response Verification\n")
ocsp = base64.b64decode(bank_id_response['completionData']['ocspResponse'])
ocsp_response = asn1crypto.ocsp.OCSPResponse.load(ocsp)
basic_ocsp_response = ocsp_response['response_bytes']['response'].parsed
# Some help by listing all the different parts of the OCSP response
_LOG.debug("TBS Response Data", basic_ocsp_response['tbs_response_data'])
_LOG.debug("SignatureAlgorithm", basic_ocsp_response['signature_algorithm'].signature_algo)
_LOG.debug("SignatureAlgorithm Hash Function", basic_ocsp_response['signature_algorithm'].hash_algo)
_LOG.debug("Signature", basic_ocsp_response['signature'].__bytes__())
_LOG.debug("Cert", basic_ocsp_response['certs'])
# Response content
_LOG.debug("version", basic_ocsp_response['tbs_response_data']['version'])
_LOG.debug("responderID", basic_ocsp_response['tbs_response_data']['responder_id']) # has native
_LOG.info("producedAt", basic_ocsp_response['tbs_response_data']['produced_at'])
cest = pytz.timezone('Europe/Stockholm')
ocsp_produced_at = basic_ocsp_response['tbs_response_data']['produced_at'].native
if not isinstance(ocsp_produced_at, datetime.datetime):
raise AssertionError('OCSP produced at is not a datetime!')
ocsp_produced_at = ocsp_produced_at.astimezone(cest).strftime('%Y-%m-%d %H:%M:%S')
_LOG.debug("responses", basic_ocsp_response['tbs_response_data']['responses'])
_LOG.debug("response Extentions", basic_ocsp_response['tbs_response_data']['response_extensions'])
_LOG.debug("Extentions")
extention = basic_ocsp_response['tbs_response_data']['response_extensions'][0]
_LOG.debug('extn_id', extention['extn_id'])
_LOG.debug('critical', extention['critical'])
# Cannot _LOG.debug the value without an exception being raised - need to parse that ourself later
# print ('extn_value', extention['extn_value'])
single_response = basic_ocsp_response['tbs_response_data']['responses'][0]
_LOG.debug("CertID", single_response['cert_id'])
_LOG.debug("certStatus", single_response['cert_status'])
_LOG.debug("thisUpdate", single_response['this_update'])
_LOG.debug("nextUpdate", single_response['next_update'])
_LOG.debug("singleExtensions", single_response['single_extensions'])
_LOG.info("3.1. OCSP Response - Verify success ")
if ocsp_response['response_status'].native != 'successful':
raise AssertionError('OCSP response status was not successful')
_LOG.info("3.2. OCSP Response - Verify signature ")
# Transform the asn1 certificate to an openssl certificate
der_bytes = basic_ocsp_response['certs'][0].dump()
pem_bytes = pem.armor('CERTIFICATE', der_bytes)
ocsp_certificate = crypto.load_certificate(crypto.FILETYPE_PEM, pem_bytes)
# Get the signature bytes
signature = basic_ocsp_response['signature'].__bytes__()
# Dump the TBS response data as DER bytes
signature_data = basic_ocsp_response['tbs_response_data'].dump()
# Define the hashing algorithm to be used
digest_method = basic_ocsp_response['signature_algorithm'].hash_algo
_LOG.debug("Certificate", ocsp_certificate.get_subject())
_LOG.debug("Signature", signature)
_LOG.debug("Signature data", signature_data)
_LOG.debug("Digest Method", digest_method)
try:
OpenSSL.crypto.verify(ocsp_certificate, signature, signature_data, digest_method)
except OpenSSL.crypto.Error as e:
raise AssertionError('The OCSP signature is not valid!')
_LOG.info("3.2. OCSP Response - Compare nonce")
nonce_computed = hashlib.sha1(bank_id_response['completionData']['signature'].encode('utf-8')).digest().hex()
# A helper because the asn1 library seems to have a problem with the nonce parsing in some form or the other
nonce_parser = NonceParse(extention.contents)
        # Verify that the computed nonce is part of the nonce value given in the OCSP response
        # Note that it only partially matches because we use SHA-1 to compute the hash
_LOG.debug("Nonce value computed ", nonce_computed)
_LOG.debug("Nonce value presented", nonce_parser.value.hex())
if not nonce_parser.value.hex().startswith(nonce_computed):
raise AssertionError('Computed nonce not matching the OCSP nonce')
_LOG.info("\n4. Verify all the certificates by relying on the BankID root certificate as a trusted one \n")
user_cert = crypto.load_certificate(
crypto.FILETYPE_PEM, make_cert(cdc.signature_container.certificates[0].text).encode())
bank_user_cert = crypto.load_certificate(
crypto.FILETYPE_PEM, make_cert(cdc.signature_container.certificates[1].text).encode())
bank_bank_id_cert = crypto.load_certificate(
crypto.FILETYPE_PEM, make_cert(cdc.signature_container.certificates[2].text).encode())
bank_id_root_cert = crypto.load_certificate(crypto.FILETYPE_PEM, BANK_ID_ROOT_CERT.encode())
        # Verify the certificate chains of the user and OCSP certificates
# Make sure we respect or do not respect certificate expiration times
if not ensure_certificates_still_valid:
tomorrow = datetime.datetime.now() + datetime.timedelta(days=1)
bank_user_cert.set_notAfter(tomorrow.strftime('%Y%m%d%H%M%SZ').encode())
bank_bank_id_cert.set_notAfter(tomorrow.strftime('%Y%m%d%H%M%SZ').encode())
bank_id_root_cert.set_notAfter(tomorrow.strftime('%Y%m%d%H%M%SZ').encode())
ocsp_certificate.set_notAfter(tomorrow.strftime('%Y%m%d%H%M%SZ').encode())
user_cert.set_notAfter(tomorrow.strftime('%Y%m%d%H%M%SZ').encode())
store = crypto.X509Store()
store.add_cert(bank_user_cert)
store.add_cert(bank_bank_id_cert)
store.add_cert(bank_id_root_cert)
try:
# Verify the user certificate up to the root certificate
store_ctx = crypto.X509StoreContext(store, user_cert)
store_ctx.verify_certificate()
_LOG.debug('User Certificate issued by the respective bank... OK')
except X509StoreContextError:
raise AssertionError('BankID user certificate chain could not be verified.')
try:
# Verify the ocsp certificate up to the root certificate
store_ctx = crypto.X509StoreContext(store, ocsp_certificate)
store_ctx.verify_certificate()
_LOG.debug('OCSP Certificate issued by the respective bank... OK')
except X509StoreContextError:
raise AssertionError('OCSP certificate chain could not be verified.')
except Exception as e:
raise e
return ocsp_produced_at
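# Minimal usage sketch (names are illustrative and not part of this module): pass the
# completed BankID collect response as a dict together with the BankID root certificate
# in PEM format; the OCSP "produced at" timestamp is returned on success.
# produced_at = verify_bankid_response(collect_response, BANK_ID_ROOT_CERT=root_cert_pem)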
|
176801
|
import rqalpha
config = {
"extra": {
"log_level": "verbose",
},
"mod": {
"live_trade": {
"lib": "./mod",
"enabled": True,
"priority": 100,
}
}
}
def run(baseConf):
config["base"] = baseConf
return rqalpha.run(config)
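# Minimal usage sketch; the base-config keys below are assumptions about a typical
# rqalpha setup and may need adjusting for a live_trade deployment:
# run({
#     "start_date": "2018-01-01",
#     "end_date": "2018-06-30",
#     "frequency": "1d",
#     "strategy_file": "./strategy.py",
# })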
|
176818
|
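# Reads the same Windows-1252 encoded file with each of Python's built-in codec error
# handlers to show how decoding failures are surfaced; the file name comes from the
# original snippet and is assumed to exist next to this script.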
print(open('teste_win1252.txt', errors='strict').read())
print(open('teste_win1252.txt', errors='replace').read())
print(open('teste_win1252.txt', errors='ignore').read())
print(open('teste_win1252.txt', errors='surrogateescape').read())
print(open('teste_win1252.txt', errors='backslashreplace').read())
|
176827
|
import pytest
import grpc
import uuid
from threading import Event
import yandex.cloud.compute.v1.zone_service_pb2_grpc as zone_service_pb2_grpc
import yandex.cloud.compute.v1.zone_service_pb2 as zone_service_pb2
from yandexcloud import RetryInterceptor
from yandexcloud import default_backoff, backoff_linear_with_jitter
from tests.grpc_server_mock import DEFAULT_ZONE, grpc_server, default_channel
class _FailFirstAttempts:
def __init__(self, fail_attempts, code=grpc.StatusCode.UNAVAILABLE):
self.__fail_attempts = fail_attempts
self.code = code
def handler(self, context):
if self.__fail_attempts > 0:
self.__fail_attempts -= 1
context.set_code(self.code)
return DEFAULT_ZONE
def reset(self, fail_attempts):
self.__fail_attempts = fail_attempts
def test_five_retries():
service = _FailFirstAttempts(5)
server = grpc_server(service.handler)
request = zone_service_pb2.GetZoneRequest(zone_id="id")
with default_channel() as channel:
for max_retry_count in range(4):
interceptor = RetryInterceptor(max_retry_count=max_retry_count)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
with pytest.raises(grpc.RpcError) as e:
client.Get(request)
assert e.value.code() == grpc.StatusCode.UNAVAILABLE
service.reset(5)
interceptor = RetryInterceptor(max_retry_count=5)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
res = client.Get(request)
assert res == DEFAULT_ZONE
server.stop(0)
def test_five_retries_internal():
service = _FailFirstAttempts(5, code=grpc.StatusCode.INTERNAL)
server = grpc_server(service.handler)
request = zone_service_pb2.GetZoneRequest(zone_id="id")
retriable_codes = (
grpc.StatusCode.UNAVAILABLE,
grpc.StatusCode.RESOURCE_EXHAUSTED,
grpc.StatusCode.INTERNAL,
)
with default_channel() as channel:
for max_retry_count in range(4):
interceptor = RetryInterceptor(max_retry_count=max_retry_count, retriable_codes=retriable_codes)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
with pytest.raises(grpc.RpcError) as e:
client.Get(request)
assert e.value.code() == grpc.StatusCode.INTERNAL
service.reset(5)
interceptor = RetryInterceptor(max_retry_count=5, retriable_codes=retriable_codes)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
res = client.Get(request)
assert res == DEFAULT_ZONE
server.stop(0)
class _RetriableCodes:
def __init__(self, retriable_codes):
self.__retriable_codes = retriable_codes
self.__get_count = 0
def handler(self, context):
if self.__get_count < len(self.__retriable_codes):
context.set_code(self.__retriable_codes[self.__get_count])
self.__get_count += 1
return DEFAULT_ZONE
def reset_state(self):
self.__get_count = 0
def test_retriable_codes():
retriable_codes = [grpc.StatusCode.RESOURCE_EXHAUSTED,
grpc.StatusCode.UNAVAILABLE,
grpc.StatusCode.DATA_LOSS]
service = _RetriableCodes(retriable_codes)
server = grpc_server(service.handler)
with default_channel() as channel:
for retry_qty in range(len(retriable_codes)):
interceptor = RetryInterceptor(max_retry_count=retry_qty, retriable_codes=retriable_codes)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
with pytest.raises(grpc.RpcError) as e:
client.Get(zone_service_pb2.GetZoneRequest(zone_id="id"))
assert e.value.code() == retriable_codes[retry_qty]
service.reset_state()
interceptor = RetryInterceptor(max_retry_count=len(retriable_codes), retriable_codes=retriable_codes)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
assert client.Get(zone_service_pb2.GetZoneRequest(zone_id="id")) == DEFAULT_ZONE
server.stop(0)
class _AlwaysUnavailable:
def __init__(self):
self.__get_count = 0
self.__t_checker = None
self.__error = False
@property
def error(self):
return self.__error
def handler(self, context):
if self.__t_checker and not self.__t_checker():
self.__error = True
self.__get_count += 1
if self.__get_count == 100:
pass
context.set_code(grpc.StatusCode.UNAVAILABLE)
return DEFAULT_ZONE
@pytest.mark.parametrize("backoff", [None, default_backoff(), backoff_linear_with_jitter(0.05, 0.1)])
def test_infinite_retries_deadline_and_backoff(backoff):
service = _AlwaysUnavailable()
server = grpc_server(service.handler)
with default_channel() as channel:
interceptor = RetryInterceptor(max_retry_count=-1, retriable_codes=[grpc.StatusCode.UNAVAILABLE],
add_retry_count_to_header=True, back_off_func=backoff)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
with pytest.raises(grpc.RpcError) as e:
client.Get(zone_service_pb2.GetZoneRequest(zone_id="id"), timeout=5)
assert e.value.code() == grpc.StatusCode.DEADLINE_EXCEEDED
server.stop(0)
class _NeverReturnsInTime:
def __init__(self, shutdown):
self.__shutdown = shutdown
def handler(self, context):
time_remaining = context.time_remaining()
# using hack here, since deadline is never None. 31557600 ~= one year in seconds
if time_remaining < 31557600.:
self.__shutdown.wait()
context.set_code(grpc.StatusCode.UNAVAILABLE)
return DEFAULT_ZONE
def test_per_call_timeout():
shutdown = Event()
service = _NeverReturnsInTime(shutdown)
server = grpc_server(service.handler)
with default_channel() as channel:
interceptor = RetryInterceptor(max_retry_count=10, retriable_codes=[grpc.StatusCode.UNAVAILABLE],
per_call_timeout=1, add_retry_count_to_header=True)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
with pytest.raises(grpc.RpcError) as e:
client.Get(zone_service_pb2.GetZoneRequest(zone_id="id"))
assert e.value.code() == grpc.StatusCode.DEADLINE_EXCEEDED
shutdown.set()
server.stop(1)
class _HeaderTokenAndRetryCount:
def __init__(self):
self.__query_count = 0
self.__token = None
self.__token_error = False
self.__header_error = False
@property
def error(self):
return self.__header_error or self.__token_error
def handler(self, context):
metadata = context.invocation_metadata()
if metadata is not None:
token = [v[1] for v in metadata if v[0] == "idempotency-key"]
if len(token) != 1:
self.__token_error = True
else:
# store token on first call, on consequent calls, check that token didn't change
if self.__query_count == 0:
self.__token = token[0]
else:
if self.__token != token[0]:
self.__token_error = True
if self.__query_count > 0:
retry_meta = [v[1] for v in metadata if v[0] == "x-retry-attempt"]
if len(retry_meta) != 1 or retry_meta[0] != str(self.__query_count):
self.__header_error = True
else:
self.__token_error = True
self.__header_error = True
self.__query_count += 1
context.set_code(grpc.StatusCode.UNAVAILABLE)
return DEFAULT_ZONE
def test_header_token_and_retry_count():
service = _HeaderTokenAndRetryCount()
server = grpc_server(service.handler)
with default_channel() as channel:
interceptor = RetryInterceptor(max_retry_count=100, retriable_codes=[grpc.StatusCode.UNAVAILABLE],
add_retry_count_to_header=True)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
with pytest.raises(grpc.RpcError) as e:
client.Get(zone_service_pb2.GetZoneRequest(zone_id="id"))
assert e.value.code() == grpc.StatusCode.UNAVAILABLE
assert not service.error
server.stop(0)
class _TokenUnchanged:
def __init__(self, token):
self.__token = token
self.__token_changed = False
@property
def token_changed(self):
return self.__token_changed
def handler(self, context):
metadata = context.invocation_metadata()
if metadata is not None:
token = [v[1] for v in metadata if v[0] == "idempotency-key"]
if len(token) != 1 or token[0] != self.__token:
self.__token_changed = True
else:
self.__token_changed = True
context.set_code(grpc.StatusCode.UNAVAILABLE)
return DEFAULT_ZONE
def test_idempotency_token_not_changed():
token = str(uuid.uuid4())
service = _TokenUnchanged(token)
server = grpc_server(service.handler)
with default_channel() as channel:
interceptor = RetryInterceptor(max_retry_count=100, retriable_codes=[grpc.StatusCode.UNAVAILABLE],
add_retry_count_to_header=True)
ch = grpc.intercept_channel(channel, interceptor)
client = zone_service_pb2_grpc.ZoneServiceStub(ch)
with pytest.raises(grpc.RpcError) as e:
client.Get(zone_service_pb2.GetZoneRequest(zone_id="id"), metadata=[("idempotency-key", token)])
assert e.value.code() == grpc.StatusCode.UNAVAILABLE
assert not service.token_changed
server.stop(0)
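# Minimal non-test usage sketch, mirroring how the interceptor is wired up in the tests
# above (the endpoint is illustrative):
# interceptor = RetryInterceptor(max_retry_count=5, per_call_timeout=10,
#                                add_retry_count_to_header=True)
# channel = grpc.intercept_channel(grpc.insecure_channel("localhost:50051"), interceptor)
# client = zone_service_pb2_grpc.ZoneServiceStub(channel)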
|
176834
|
class FakeConfig(object):
def __init__(
self,
src_dir=None,
spec_dir=None,
stylesheet_urls=None,
script_urls=None,
stop_spec_on_expectation_failure=False,
stop_on_spec_failure=False,
random=True
):
self._src_dir = src_dir
self._spec_dir = spec_dir
self._stylesheet_urls = stylesheet_urls
self._script_urls = script_urls
self._stop_spec_on_expectation_failure = stop_spec_on_expectation_failure
self._stop_on_spec_failure = stop_on_spec_failure
self._random = random
self.reload_call_count = 0
def src_dir(self):
return self._src_dir
def spec_dir(self):
return self._spec_dir
def stylesheet_urls(self):
return self._stylesheet_urls
def script_urls(self):
return self._script_urls
def stop_spec_on_expectation_failure(self):
return self._stop_spec_on_expectation_failure
def stop_on_spec_failure(self):
return self._stop_on_spec_failure
def random(self):
return self._random
def reload(self):
self.reload_call_count += 1
|
176837
|
import os
import shutil
import time
import glob
import subprocess
import web
from libs import utils, form_utils
from libs.logger import logger
import settings
subscription_versions = ['normal', 'nomail', 'digest']
def __get_ml_dir(mail):
"""Get absolute path of the root directory of mailing list account."""
if not utils.is_email(mail):
return None
mail = str(mail).lower()
(_username, _domain) = mail.split('@', 1)
return os.path.join(settings.MLMMJ_SPOOL_DIR, _domain, _username)
def __get_ml_subscribers_dir(mail, subscription):
"""
Get absolute path of the directory used to store subscribers which
subscribed to given subscription version.
@mail -- mail address of mailing list account
@subscription -- subscription version: normal, nomail, digest.
"""
if subscription == 'digest':
return os.path.join(__get_ml_dir(mail=mail), 'digesters.d')
elif subscription == 'nomail':
return os.path.join(__get_ml_dir(mail=mail), 'nomailsubs.d')
else:
# subscription == 'normal'
return os.path.join(__get_ml_dir(mail=mail), 'subscribers.d')
def __remove_ml_sub_dir(mail, dirname):
if not dirname:
return (True, )
_ml_dir = __get_ml_dir(mail=mail)
_sub_dir = os.path.join(_ml_dir, dirname)
if os.path.exists(_sub_dir):
try:
shutil.rmtree(_sub_dir)
logger.debug("[{0}] {1}, removed sub-directory: {2}".format(web.ctx.ip, mail, _sub_dir))
except Exception as e:
logger.error("[{0}] {1}, error while removing sub-directory: {2}".format(web.ctx.ip, mail, _sub_dir))
return (False, repr(e))
return (True, )
def __set_file_permission(path):
_uid = os.getuid()
_gid = os.getgid()
try:
os.chown(path, _uid, _gid)
return (True, )
except Exception as e:
return (False, repr(e))
def __copy_dir_files(src, dest, create_dest=True):
"""Copy all regular files under source directory to dest directory."""
if create_dest:
if not os.path.exists(dest):
try:
os.makedirs(dest, mode=settings.MLMMJ_FILE_PERMISSION)
except Exception as e:
return (False, repr(e))
for fn in os.listdir(src):
_src_file = os.path.join(src, fn)
if os.path.isfile(_src_file):
shutil.copy(_src_file, dest)
return (True, )
def __has_ml_dir(mail, path=None):
if path:
_ml_dir = path
else:
_ml_dir = __get_ml_dir(mail=mail)
if os.path.exists(_ml_dir):
return True
else:
return False
def __has_param_file(f):
if os.path.exists(f):
return True
else:
return False
def __get_param_file(mail, param):
"""Get path to the file used to control parameter setting.
Sample value: /var/spool/mlmmj/<domain>/<username>/control/<param>
"""
if not utils.is_email(mail):
return None
(_username, _domain) = mail.split('@', 1)
return os.path.join(settings.MLMMJ_SPOOL_DIR,
_domain,
_username,
'control',
param)
def __remove_file(path):
if os.path.exists(path):
try:
os.remove(path)
except Exception as e:
logger.error("[{0}] error while removing parameter file: {1}, {2}".format(web.ctx.ip, path, e))
return (False, repr(e))
return (True, )
def __remove_param_file(mail, param):
_path = __get_param_file(mail=mail, param=param)
return __remove_file(_path)
def __get_param_type(param):
"""Get parameter type.
Possible param type must be one of: boolean, list, normal, text, or None (no such
param).
"""
for (_type, _param_dict) in list(settings.MLMMJ_PARAM_TYPES.items()):
if param in list(_param_dict.values()):
return _type
return None
def __get_boolean_param_value(mail, param):
_param_file = __get_param_file(mail=mail, param=param)
if __has_param_file(_param_file):
return 'yes'
else:
return 'no'
def __get_list_param_value(mail, param, is_email=False, param_file=None):
if not param_file:
param_file = __get_param_file(mail=mail, param=param)
_values = []
if __has_param_file(param_file):
try:
with open(param_file, "r", encoding="utf-8") as f:
_lines = f.readlines()
_lines = [_line.strip() for _line in _lines] # remove line breaks
_values = [_line for _line in _lines if _line] # remove empty values
if is_email:
_values = [str(i).lower() for i in _values]
except IOError:
# No such file.
pass
except Exception as e:
logger.error('Error while getting (list) parameter value: {0} -> {1}'.format(param, e))
_values.sort()
return _values
def __get_normal_param_value(mail, param, param_file=None):
# Only first line is used by mlmmj.
if not param_file:
param_file = __get_param_file(mail=mail, param=param)
try:
with open(param_file, 'r', encoding='utf-8') as f:
# Remove newline but keep spaces.
value = f.readline().rstrip('\n')
return value
except IOError:
# No such file.
return ''
except Exception as e:
logger.error("[{0}] {1}, error while getting parameter value: {2}, {3}".format(web.ctx.ip, mail, param, e))
return ''
def __get_text_param_value(mail, param, param_file=None):
# Full content is used by mlmmj.
if not param_file:
param_file = __get_param_file(mail=mail, param=param)
try:
with open(param_file, 'r', encoding='utf-8') as f:
value = f.read().rstrip('\n')
return value
except IOError:
# No such file.
return ''
except Exception as e:
logger.error("[{0}] {1}, error while getting parameter value: {2}, {3}".format(web.ctx.ip, mail, param, e))
return ''
def __get_other_param_value(mail, param):
if param in settings.MLMMJ_OTHER_PARAM_MAP:
_v = settings.MLMMJ_OTHER_PARAM_MAP[param]
_param_type = _v['type']
_mlmmj_param = _v['mlmmj_param']
_is_email = _v.get('is_email', False)
if _param_type == 'boolean':
return __get_boolean_param_value(mail=mail, param=_mlmmj_param)
elif _param_type == 'list':
return __get_list_param_value(mail, param=_mlmmj_param, is_email=_is_email)
elif _param_type == 'normal':
return __get_normal_param_value(mail, param=_mlmmj_param)
elif _param_type == 'text':
return __get_text_param_value(mail, param=_mlmmj_param)
return 'INVALID_PARAM'
def __get_param_value(mail, param):
"""Get value of given mailing list parameter.
Possible returned values:
- (False, <error_reason>)
- (True, {'type': 'boolean', 'value': 'yes|no'})
- (True, {'type': 'list', 'value': [...]})
- (True, {'type': 'normal', 'value': '...'})
- (True, {'type': 'text', 'value': '...'})
"""
if param in settings.MLMMJ_OTHER_WEB_PARAMS:
_v = settings.MLMMJ_OTHER_PARAM_MAP[param]
_param_type = _v['type']
_value = __get_other_param_value(mail=mail, param=param)
return (True, {'type': _param_type, 'value': _value})
if param not in settings.MLMMJ_PARAM_NAMES:
logger.error("[{0}] {1}, unknown parameter: {2}".format(web.ctx.ip, mail, param))
return (False, 'INVALID_PARAM')
_param_file = __get_param_file(mail=mail, param=param)
_param_type = __get_param_type(param=param)
_ret = {'type': _param_type, 'value': None}
# control file doesn't exist
if not __has_param_file(_param_file):
if _param_type == 'list':
_ret['value'] = []
elif _param_type == 'boolean':
_ret['value'] = 'no'
else:
_ret['value'] = ''
return (True, _ret)
if _param_type == 'boolean':
_ret['value'] = 'yes'
else:
if _param_type == 'text':
_func = __get_text_param_value
elif _param_type == 'list':
_func = __get_list_param_value
else:
# _param_type == 'normal':
_func = __get_normal_param_value
_ret['value'] = _func(mail=mail, param=param, param_file=_param_file)
return (True, _ret)
def __update_boolean_param(mail,
param,
value,
param_file=None,
touch_instead_of_create=False):
"""Create or remove parameter file for boolean type parameter.
@touch_instead_of_create - touch parameter file instead of re-create it.
"""
if not param_file:
param_file = __get_param_file(mail=mail, param=param)
if value == 'yes':
try:
if touch_instead_of_create:
open(param_file, 'a', encoding='utf-8').close()
else:
open(param_file, 'w', encoding='utf-8').close()
# Avoid some conflicts
if param == 'subonlypost':
__remove_param_file(mail=mail, param='modonlypost')
if param == 'modonlypost':
__remove_param_file(mail=mail, param='subonlypost')
# Create 'control/moderated' also
_f = __get_param_file(mail=mail, param='moderated')
open(_f, 'a', encoding='utf-8').close()
except Exception as e:
logger.error("[{0}] {1}, error while updating (boolean) parameter: {2} -> {3}, {4}".format(
web.ctx.ip, mail, param, value, e))
return (False, repr(e))
else:
qr = __remove_file(path=param_file)
if not qr[0]:
return qr
logger.info("[{0}] {1}, updated (boolean) parameter: {2} -> {3}".format(web.ctx.ip, mail, param, value))
return (True, )
def __update_normal_param(mail, param, value, param_file=None, is_email=False):
    # Although we write the full given value, only the first line is used by mlmmj.
if not param_file:
param_file = __get_param_file(mail=mail, param=param)
if param == 'maxmailsize':
try:
value = int(value)
except:
value = 0
if not value:
# Remove param file.
qr = __remove_file(path=param_file)
return qr
if value:
if is_email:
value = str(value).lower()
if not utils.is_email(value):
return (False, 'INVALID_EMAIL')
try:
if isinstance(value, int):
value = str(value)
with open(param_file, 'w', encoding='utf-8') as f:
f.write(value + '\n')
except Exception as e:
logger.error("[{0}] {1}, error while updating (normal) parameter: {2} -> {3}, {4}".format(
web.ctx.ip, mail, param, value, e))
return (False, repr(e))
else:
qr = __remove_file(path=param_file)
if not qr[0]:
return qr
logger.info("[{0}] {1}, updated (normal) parameter: {2} -> {3}".format(web.ctx.ip, mail, param, value))
return (True, )
def __update_list_param(mail, param, value, param_file=None, is_email=False):
if not param_file:
param_file = __get_param_file(mail=mail, param=param)
if isinstance(value, str):
_values = __convert_web_param_value_to_list(value=value, is_email=is_email)
else:
_values = value
if _values:
try:
param_file = __get_param_file(mail=mail, param=param)
if param == 'listaddress':
# Remove primary address(es)
_values = [v for v in _values if v != mail]
# Prepend primary address (must be first one)
_values = [mail] + _values
with open(param_file, 'w', encoding='utf-8') as f:
f.write('\n'.join(_values) + '\n')
logger.info("[{0}] {1}, updated: {2} -> {3}".format(web.ctx.ip, mail, param, ', '.join(_values)))
except Exception as e:
logger.error("[{0}] {1}, error while updating (list) parameter: {2} -> {3}, {4}".format(
web.ctx.ip, mail, param, value, e))
return (False, repr(e))
else:
qr = __remove_file(path=param_file)
if not qr[0]:
return qr
logger.info("[{0}] {1}, updated (list) parameter: {2} -> {3}".format(web.ctx.ip, mail, param, value))
return (True, )
def __update_text_param(mail,
param,
value,
param_file=None,
create_if_empty=False):
if not param_file:
param_file = __get_param_file(mail=mail, param=param)
if value:
try:
if isinstance(value, int):
value = str(value)
else:
value = value.strip()
            # Footer text/html must end with an empty line, otherwise
            # the footer characters will be garbled.
with open(param_file, 'w', encoding='utf-8') as f:
f.write(value + '\n')
except Exception as e:
logger.error("[{0}] {1}, error while updating (normal) parameter: {2} -> {3}, {4}".format(
web.ctx.ip, mail, param, value, e))
return (False, repr(e))
else:
if create_if_empty:
            # Footer text/html must end with an empty line, otherwise
            # the footer characters will be garbled.
with open(param_file, 'w', encoding='utf-8') as f:
f.write('\n')
else:
qr = __remove_file(path=param_file)
if not qr[0]:
return qr
logger.info("[{0}] {1}, updated (text) parameter: {2} -> {3}".format(web.ctx.ip, mail, param, value))
return (True, )
def __update_other_param(mail, param, value):
"""Update parameters which cannot be simply mapped to a mlmmj parameter."""
if param in settings.MLMMJ_OTHER_PARAM_MAP:
_v = settings.MLMMJ_OTHER_PARAM_MAP[param]
_param_type = _v['type']
_mlmmj_param = _v['mlmmj_param']
_is_email = _v.get('is_email', False)
_param_file = __get_param_file(mail=mail, param=_mlmmj_param)
if _param_type == 'boolean':
return __update_boolean_param(mail=mail,
param=_mlmmj_param,
param_file=_param_file,
value=value,
touch_instead_of_create=True)
elif _param_type == 'list':
return __update_list_param(mail=mail,
param=_mlmmj_param,
param_file=_param_file,
value=value,
is_email=_is_email)
elif _param_type == 'normal':
return __update_normal_param(mail=mail,
param=_mlmmj_param,
param_file=_param_file,
value=value,
is_email=_is_email)
elif _param_type == 'text':
return __update_text_param(mail=mail,
param=_mlmmj_param,
param_file=_param_file,
value=value)
return (True, )
def __update_mlmmj_param(mail, param, value):
"""Update individual parameter of mailing list account."""
_param_type = __get_param_type(param)
if _param_type == 'boolean':
_update_func = __update_boolean_param
elif _param_type == 'normal':
_update_func = __update_normal_param
elif _param_type == 'list':
_update_func = __update_list_param
elif _param_type == 'text':
_update_func = __update_text_param
elif _param_type == 'other':
_update_func = __update_other_param
else:
logger.error("[{0}] {1}, unknown parameter: {2}".format(web.ctx.ip, mail, param))
return (False, 'INVALID_PARAM_TYPE')
qr = _update_func(mail=mail, param=param, value=value)
return qr
def __update_mlmmj_params(mail, **kwargs):
"""Update multiple parameters of mailing list account. Abort if failed to
update any parameter.
Parameters must be used by mlmmj directly, not the ones used by web form.
"""
if kwargs:
for (k, v) in list(kwargs.items()):
qr = __update_mlmmj_param(mail=mail, param=k, value=v)
if not qr[0]:
return qr
# If we have `footer_html`, make sure `footer_text` always exists,
# otherwise AlterMIME may not work and email will be discarded.
if kwargs.get('footer_html'):
(_status, _d) = __get_param_value(mail, 'footer_text')
if _status and (not _d.get('value')):
__update_text_param(mail=mail,
param='footer_text',
value='',
create_if_empty=True)
return (True, )
def __convert_web_param_value_to_list(value, is_email=False):
try:
# Split by ',' and remove empty values
v = [i for i in value.replace(' ', '').split(',') if i]
except:
v = []
if v and is_email:
v = [str(i).lower() for i in v if utils.is_email(i)]
return v
def __convert_form_to_mlmmj_params(mail, form):
"""Convert variables in web form to (a dict of) mlmmj parameters."""
# Both 'moderate_subscription' and 'subscription_moderators' use same
# mlmmj parameter name 'submod'
if 'moderate_subscription' in form and 'subscription_moderators' in form:
_mod = form.get('moderate_subscription')
_moderators = form.get('subscription_moderators')
if _mod == 'yes':
if _moderators:
# If there's some moderators, it will create 'submod' file
# with emails of moderators. If file 'submod' presents, it
# means moderation subscription is enabled. So we should remove
# 'moderate_subscription' parameter here to avoid improper
# file removal or re-creation (with empty content)
form.pop('moderate_subscription')
else:
# If no subscription moderators, use an empty 'submod' file
# and use mailing list owners as subscription moderators.
form.pop('subscription_moderators')
else:
# remove 'subscription_moderators' and let mlmmjadmin remove
# 'submod' directly.
form.pop('subscription_moderators')
# solve conflict of 'only_moderator_can_post' and 'only_subscriber_can_post'
    # 'only_moderator_can_post' should have higher priority
if 'only_moderator_can_post' in form and 'only_subscriber_can_post' in form:
if form.get('only_moderator_can_post') == 'yes':
form['only_subscriber_can_post'] = 'no'
# Store key:value of mlmmj parameters
kvs = {}
# Convert form variable names to mlmmj parameter names
for param in form:
kv = form_utils.get_dict_for_form_param(mail=mail, form=form, param=param)
kvs.update(kv)
return kvs
def __archive_ml(mail):
_dir = __get_ml_dir(mail=mail)
if __has_ml_dir(mail=mail, path=_dir):
_timestamp = time.strftime('-%Y%m%d%H%M%S', time.gmtime())
_new_dir = _dir + _timestamp
if settings.MLMMJ_ARCHIVE_DIR:
# Move to archive directory.
__base_dir = _new_dir.replace(settings.MLMMJ_SPOOL_DIR, settings.MLMMJ_ARCHIVE_DIR)
_new_dir = os.path.join(settings.MLMMJ_ARCHIVE_DIR, __base_dir)
# Create parent directory
if _new_dir.endswith('/'):
_new_dir = os.path.dirname(_new_dir)
# If new directory exists, append one more timestamp
if os.path.exists(_new_dir):
_new_dir = _new_dir + _timestamp
# Create archive directory
try:
os.makedirs(_new_dir, mode=settings.MLMMJ_FILE_PERMISSION)
except Exception as e:
_msg = "error while creating directory under archive directory ({0}), {1}".format(_new_dir, repr(e))
logger.error("[{0}] {1}, {2}".format(web.ctx.ip, mail, _msg))
return (False, _msg)
try:
            # Don't use `os.rename()` to handle this move; it raises an error
            # if the src and dest directories are not on the same disk partition.
shutil.move(_dir, _new_dir)
logger.info("[{0}] {1}, archived: {2} -> {3}".format(web.ctx.ip, mail, _dir, _new_dir))
# Return new directory path
return (True, _new_dir)
except Exception as e:
logger.error("[{0}] {1}, error while archiving: {2} ({3} -> {4})".format(web.ctx.ip, mail, repr(e), _dir, _new_dir))
return (False, repr(e))
return (True, )
def __remove_lines_in_file(path, lines):
"""
Remove line from given file.
:param path: path to file
:param lines: a list/dict/tuple of lines you want to remove
"""
if not lines:
return (True, )
if not os.path.exists(path):
return (True, )
try:
with open(path, 'r', encoding='utf-8') as _f:
_file_lines = _f.readlines()
stripped_file_lines = [line.strip() for line in _file_lines]
given_lines = [line.strip() for line in lines]
filtered_lines = set(stripped_file_lines) - set(given_lines)
if filtered_lines:
with open(path, 'w', encoding='utf-8') as f:
f.write('\n'.join(filtered_lines) + '\n')
else:
# Remove file
qr = __remove_file(path=path)
if not qr[0]:
return qr
return (True, )
except Exception as e:
return (False, repr(e))
def __add_lines_in_file(f, lines):
"""
Add lines to given file.
@f -- path to file
    @lines -- a list/dict/tuple of lines you want to add
"""
if not lines:
return (True, )
file_lines = []
try:
if os.path.exists(f):
with open(f, 'r', encoding='utf-8') as _f:
file_lines = _f.readlines()
lines = [i + '\n' for i in lines]
file_lines += lines
# Remove duplicate lines.
file_lines = list(set(file_lines))
with open(f, 'w', encoding='utf-8') as nf:
nf.write(''.join(file_lines))
return (True, )
except Exception as e:
return (False, repr(e))
def __add_subscribers_with_confirm(mail,
subscribers,
subscription='normal'):
"""
Add subscribers with confirm.
@mail -- mail address of mailing list
@subscribers -- a list/tuple/set of subscribers' mail addresses
@subscription -- subscription version (normal, digest, nomail)
"""
_dir = __get_ml_dir(mail)
# Get absolute path of command `mlmmj-sub`
_cmd_mlmmj_sub = settings.CMD_MLMMJ_SUB
if not _cmd_mlmmj_sub:
if os.path.exists('/usr/bin/mlmmj-sub'):
_cmd_mlmmj_sub = '/usr/bin/mlmmj-sub'
elif os.path.exists('/usr/local/bin/mlmmj-sub'):
_cmd_mlmmj_sub = '/usr/local/bin/mlmmj-sub'
else:
return (False, 'SUB_COMMAND_NOT_FOUND')
# mlmmj-sub arguments
#
# -L: Full path to list directory
# -a: Email address to subscribe
# -C: Request mail confirmation
# -d: Subscribe to `digest` version of the list
# -n: Subscribe to nomail version of the list
_cmd = [_cmd_mlmmj_sub, '-L', _dir, '-C']
if subscription == 'digest':
_cmd.append('-d')
elif subscription == 'nomail':
_cmd.append('-n')
# Directory used to store subscription confirm notifications
_subconf_dir = os.path.join(_dir, 'subconf')
_error = {}
for addr in subscribers:
try:
# Remove confirm file generated before this request
_old_conf_files = glob.glob(os.path.join(_subconf_dir, '????????????????-' + addr.replace('@', '=')))
for _f in _old_conf_files:
qr = __remove_file(path=_f)
if not qr[0]:
return qr
# Send new confirm
_new_cmd = _cmd[:] + ['-a', addr]
subprocess.Popen(_new_cmd, stdout=subprocess.PIPE)
logger.debug("[{0}] {1}, queued confirm mail for {2}.".format(web.ctx.ip, mail, addr))
except Exception as e:
logger.error("[{0}] {1}, error while subscribing {2}: {3}".format(web.ctx.ip, mail, addr, e))
_error[addr] = repr(e)
if not _error:
return (True, )
else:
return (False, repr(_error))
def has_subscriber(mail, subscriber, subscription=None):
"""
Check whether mailing list `<mail>` has subscriber `<subscriber>`.
Return `(True, <subscription>)` or `False`.
"""
mail = str(mail).lower()
subscriber = str(subscriber).lower()
if subscription:
subscriptions = [subscription]
else:
subscriptions = subscription_versions
for subscription in subscriptions:
_sub_dir = __get_ml_subscribers_dir(mail=mail, subscription=subscription)
_sub_file = os.path.join(_sub_dir, subscriber[0])
if os.path.exists(_sub_file):
with open(_sub_file, 'r', encoding='utf-8') as f:
for line in f:
if line.strip() == subscriber:
return (True, subscription)
return False
def is_maillist_exists(mail):
if __has_ml_dir(mail):
return True
else:
return False
# This is the function we can get both web parameter and mlmmj parameter names.
def get_web_param_value(mail, param):
"""Get mlmmj parameter value of given web parameter name."""
if param in settings.MLMMJ_WEB_PARAMS:
_mlmmj_param = settings.MLMMJ_WEB_PARAMS[param]
v = __get_param_value(mail=mail, param=_mlmmj_param)
if param == 'extra_addresses':
if mail in v[1]['value']:
v[1]['value'].remove(mail)
return v
else:
return (False, 'INVALID_PARAM')
def add_maillist_from_web_form(mail, form):
"""Add a mailing list based on data submited from web form.
@mail - mail address of mailing list account
@form - a dict of web form input
"""
domain = mail.split('@', 1)[-1]
# Store mlmmj parameters
kvs = {}
# Add empty values for 'remove_headers', 'custom_headers'. This will
# trigger form process functions to add pre-defined default values.
if 'remove_headers' not in form:
form['remove_headers'] = ''
if 'custom_headers' not in form:
form['custom_headers'] = ''
# Set 'owner' to 'postmaster@<domain>'
if 'owner' not in form:
form['owner'] = 'postmaster@' + domain
    # Set 'moderators' to 'postmaster@<domain>'
if 'moderators' not in form:
form['moderators'] = 'postmaster@' + domain
# If `footer_html` exists but `footer_text` doesn't
if 'footer_html' in form and 'footer_text' not in form:
form['footer_text'] = ''
kvs.update(__convert_form_to_mlmmj_params(mail=mail, form=form))
# Add (missing) default settings
_form = settings.MLMMJ_DEFAULT_PROFILE_SETTINGS
for param in _form:
# Avoid conflict parameters.
if param == 'only_subscriber_can_post' and \
form.get('only_moderator_can_post') == 'yes':
continue
if param == 'only_moderator_can_post' and \
form.get('only_subscriber_can_post') == 'yes':
continue
if param not in form:
kv = form_utils.get_dict_for_form_param(mail=mail, form=_form, param=param)
kvs.update(kv)
# Always set values
_form = settings.MLMMJ_FORCED_PROFILE_SETTINGS
for param in _form:
kv = form_utils.get_dict_for_form_param(mail=mail, form=_form, param=param)
kvs.update(kv)
qr = create_ml(mail=mail, **kvs)
return qr
def create_ml(mail, **kwargs):
"""Create required directories/files for a new mailing list on file system.
WARNING: it doesn't check whether account already exists in backend.
@mail - full email address of new mailing list you're going to create
@kwargs - dict of parameter/value pairs used to set account profile
"""
if not utils.is_email(mail):
return (False, 'INVALID_EMAIL')
mail = str(mail).lower()
_ml_dir = __get_ml_dir(mail=mail)
if not os.path.exists(_ml_dir):
try:
os.makedirs(_ml_dir, mode=settings.MLMMJ_FILE_PERMISSION)
except Exception as e:
_msg = "error while creating base directory ({0}), {1}".format(_ml_dir, repr(e))
logger.error("[{0}] {1}, {2}".format(web.ctx.ip, mail, _msg))
return (False, _msg)
# Create required sub-directories
for _dir in settings.MLMMJ_DEFAULT_SUB_DIRS:
_sub_dir = os.path.join(_ml_dir, _dir)
if not os.path.exists(_sub_dir):
try:
os.makedirs(_sub_dir, mode=settings.MLMMJ_FILE_PERMISSION)
except Exception as e:
_msg = "error while creating sub-directory ({0}), {1}".format(_sub_dir, repr(e))
logger.error("[{0}] {1}, {2}".format(web.ctx.ip, mail, _msg))
return (False, _msg)
else:
qr = __set_file_permission(_sub_dir)
if not qr[0]:
return qr
# Create file `control/listaddress` with primary address
_f = os.path.join(_ml_dir, 'control/listaddress')
with open(_f, 'w', encoding='utf-8') as f:
f.write('{0}\n'.format(mail))
# Create extra control file
index_path = os.path.join(_ml_dir, 'index')
open(index_path, 'w', encoding='utf-8').close()
# Copy skel/language template files
_sub_dir_text = os.path.join(_ml_dir, 'text')
_language = kwargs.get('language', 'en')
_src_dir = os.path.join(settings.MLMMJ_SKEL_DIR, _language)
if not os.path.exists(_src_dir):
logger.error("Skel directory does not exist: {0}".format(_src_dir))
return (False, 'SKEL_DIR_NOT_EXIST')
qr = __copy_dir_files(_src_dir, _sub_dir_text)
if not qr[0]:
return qr
qr = __update_mlmmj_params(mail=mail, **kwargs)
if not qr[0]:
return qr
return (True, )
def delete_ml(mail, archive=True):
"""Delete a mailing list account. If archive is True or 'yes', account is
'removed' by renaming its data directory.
"""
_ml_dir = __get_ml_dir(mail=mail)
if os.path.exists(_ml_dir):
if archive in [True, 'yes']:
qr = __archive_ml(mail=mail)
return qr
else:
try:
shutil.rmtree(_ml_dir)
logger.info("[{0}] {1}, removed without archiving.".format(web.ctx.ip, mail))
except Exception as e:
logger.error("[{0}] {1}, error while removing list from file system: {2}".format(web.ctx.ip, mail, repr(e)))
return (False, repr(e))
else:
logger.info("[{0}] {1}, removed (no data on file system).".format(web.ctx.ip, mail))
return (True, )
def update_web_form_params(mail, form):
"""Update mailing list profile with web form."""
kvs = {}
kvs.update(__convert_form_to_mlmmj_params(mail=mail, form=form))
return __update_mlmmj_params(mail=mail, **kvs)
def get_subscribers(mail, email_only=False):
"""Get subscribers of given subscription version.
:param mail: mail address of mailing list account
:param email_only: if True, return a list of subscribers' mail addresses.
"""
subscribers = []
for subscription in subscription_versions:
_dir = __get_ml_subscribers_dir(mail=mail, subscription=subscription)
try:
fns = os.listdir(_dir)
except:
continue
for fn in fns:
            with open(os.path.join(_dir, fn), encoding='utf-8') as f:
                _addresses = [str(i).lower().strip() for i in f.readlines()]
            if email_only:
                subscribers += _addresses
            else:
                subscribers += [{'mail': i, 'subscription': subscription} for i in _addresses]
if email_only:
subscribers.sort()
return (True, subscribers)
def remove_subscribers(mail, subscribers):
"""Remove multiple subscribers from given mailing list.
:param mail: mail address of mailing list account
:param subscribers: a list/tuple/set of subscribers' mail addresses.
"""
mail = mail.lower()
subscribers = [str(i).lower() for i in subscribers if utils.is_email(i)]
if not subscribers:
return (True, )
grouped_subscribers = {}
for i in subscribers:
letter = i[0]
if letter in grouped_subscribers:
grouped_subscribers[letter].append(i)
else:
grouped_subscribers[letter] = [i]
for subscription in ['normal', 'digest', 'nomail']:
_dir = __get_ml_subscribers_dir(mail=mail, subscription=subscription)
for letter in grouped_subscribers:
# Get file stores the subscriber.
path = os.path.join(_dir, letter)
qr = __remove_lines_in_file(path=path, lines=grouped_subscribers[letter])
if not qr[0]:
return qr
return (True, )
def remove_all_subscribers(mail):
"""
Remove all subscribers.
:param mail: mail address of mailing list account
"""
mail = mail.lower()
_dirs = [__get_ml_subscribers_dir(mail=mail, subscription=i) for i in subscription_versions]
try:
for _dir in _dirs:
for fn in os.listdir(_dir):
_path = os.path.join(_dir, fn)
qr = __remove_file(path=_path)
if not qr[0]:
return qr
except Exception as e:
return (False, repr(e))
return (True, )
def add_subscribers(mail,
subscribers,
subscription='normal',
require_confirm=True):
"""Add subscribers to given subscription version of mailing list.
:param mail: mail address of mailing list account
:param subscribers: a list/tuple/set of subscribers' email addresses
:param subscription: subscription version: normal, nomail, digest.
    :param require_confirm: if True, send a confirmation mail and require each subscriber to confirm.
"""
mail = mail.lower()
subscribers = [str(i).lower() for i in subscribers if utils.is_email(i)]
if not subscribers:
return (True, )
if require_confirm:
qr = __add_subscribers_with_confirm(mail=mail,
subscribers=subscribers,
subscription=subscription)
if not qr[0]:
logger.error("[{0}] {1} Failed to add subscribers (require "
"confirm): error={2}".format(web.ctx.ip, mail, qr[1]))
return qr
else:
grouped_subscribers = {}
for i in subscribers:
letter = i[0]
if letter in grouped_subscribers:
grouped_subscribers[letter].append(i)
else:
grouped_subscribers[letter] = [i]
_dir = __get_ml_subscribers_dir(mail=mail, subscription=subscription)
for letter in grouped_subscribers:
# Get file stores the subscriber.
path = os.path.join(_dir, letter)
qr = __add_lines_in_file(f=path, lines=grouped_subscribers[letter])
if not qr[0]:
logger.error('[{0}] {1} Failed to add subscribers to file: '
'error={2}'.format(web.ctx.ip, mail, qr[1]))
return qr
logger.info('[{0}] {1}, added subscribers without confirming: {2}.'.format(web.ctx.ip, mail, ', '.join(subscribers)))
return (True, )
def subscribe_to_lists(subscriber,
lists,
subscription='normal',
require_confirm=True):
"""Add one subscriber to multiple mailing lists.
@subscriber -- mail address of subscriber
@lists -- a list/tuple/set of mailing lists
@subscription -- subscription version: normal, nomail, digest.
    @require_confirm -- if True, require mail confirmation from the subscriber.
"""
subscriber = subscriber.lower()
lists = [str(i).lower() for i in lists if utils.is_email(i)]
if not lists:
return (True, )
for ml in lists:
qr = add_subscribers(mail=ml,
subscribers=[subscriber],
subscription=subscription,
require_confirm=require_confirm)
if not qr[0]:
return qr
return (True, )
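# Minimal usage sketch (addresses are illustrative; requires a configured MLMMJ_SPOOL_DIR
# plus skel templates, and parameter names must exist in settings.MLMMJ_PARAM_TYPES):
# create_ml('list@example.com', owner=['postmaster@example.com'])
# add_subscribers('list@example.com', ['user1@example.com'],
#                 subscription='normal', require_confirm=False)
# print(get_subscribers('list@example.com', email_only=True))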
|
176856
|
import numpy
from amuse.test import amusetest
from amuse.units import units, nbody_system
from amuse.ic.brokenimf import *
# Instead of random, use evenly distributed numbers, just for testing
default_options = dict(random=False)
class TestMultiplePartIMF(amusetest.TestCase):
def test1(self):
print("Test MultiplePartIMF with default mass_boundaries and alphas, i.e. Salpeter")
instance = MultiplePartIMF(mass_max=100.0 | units.MSun)
self.assertEqual(instance.mass_boundaries, [0.1, 100.0] | units.MSun)
self.assertEqual(instance.alphas, [-2.35])
self.assertEqual(instance.number_of_bins, 1)
self.assertEqual(instance.fraction_per_bin, [1.0])
self.assertEqual(instance.cumulative_fractions, [0.0, 1.0])
self.assertAlmostEqual(instance.mass([0.0]), 0.1 | units.MSun)
self.assertAlmostEqual(instance.mass([1.0]), 100.0 | units.MSun)
self.assertAlmostEqual(instance.mass_mean(), 0.351 | units.MSun, 3)
def test2(self):
print("Test MultiplePartIMF with mass_boundaries and alphas")
instance = MultiplePartIMF(mass_boundaries = [1.0, 10.0, 100.0] | units.MSun,
alphas = [1.3, -3.3], **default_options)
self.assertEqual(instance.mass_boundaries, [1.0, 10.0, 100.0] | units.MSun)
self.assertEqual(instance.alphas, [1.3, -3.3])
self.assertEqual(instance.number_of_bins, 2)
self.assertAlmostEqual(instance.fraction_per_bin, numpy.array([0.5, 0.5]))
self.assertEqual(instance.cumulative_fractions, [0.0, 0.5, 1.0])
self.assertAlmostEqual(instance.mass([0.0]), 1.0 | units.MSun)
self.assertAlmostEqual(instance.mass([0.5]), 10.0 | units.MSun)
self.assertAlmostEqual(instance.mass([1.0]), 100.0 | units.MSun)
self.assertAlmostEqual(instance.mass_mean(), 11.9457684987 | units.MSun)
self.assertAlmostEqual(instance.mass_mean(), instance.next_mass(10000).mean(), 2)
def test3(self):
print("Test new_broken_power_law_mass_distribution with default mass_boundaries and alphas, i.e. Salpeter")
masses = new_broken_power_law_mass_distribution(10000, mass_max=100.0 | units.MSun, **default_options)
self.assertTrue((masses >= 0.1 | units.MSun).all())
self.assertTrue((masses <= 100.0 | units.MSun).all())
self.assertAlmostEqual(min(masses), 0.1 | units.MSun)
self.assertAlmostEqual(max(masses), 100.0 | units.MSun)
mass_mean = MultiplePartIMF(mass_boundaries=[0.1, 100.0]|units.MSun,
alphas=[-2.35]).mass_mean()
self.assertAlmostEqual(mass_mean, 0.35136877959 | units.MSun)
self.assertAlmostRelativeEqual(masses.mean(), 0.351 | units.MSun, 1)
def test4(self):
print("Test new_broken_power_law_mass_distribution with mass_boundaries and alphas")
masses = new_broken_power_law_mass_distribution(10000,
mass_boundaries = [1.0, 10.0, 100.0] | units.MSun,
alphas = [1.3, -3.3], **default_options)
self.assertTrue((masses >= 1.0 | units.MSun).all())
self.assertTrue((masses <= 100.0 | units.MSun).all())
self.assertAlmostEqual(min(masses), 1.0 | units.MSun)
self.assertAlmostEqual(max(masses), 100.0 | units.MSun)
mass_mean = MultiplePartIMF(mass_boundaries=[1.0, 10.0, 100.0]|units.MSun,
alphas=[1.3, -3.3]).mass_mean()
self.assertAlmostEqual(mass_mean, 11.9457684987 | units.MSun)
self.assertAlmostRelativeEqual(masses.mean(), 11.9457684987 | units.MSun, 1)
def test5(self):
print("Test new_scalo_mass_distribution")
masses = new_scalo_mass_distribution(10000, **default_options)
self.assertTrue((masses >= 0.1 | units.MSun).all())
self.assertTrue((masses <= 125.0 | units.MSun).all())
self.assertAlmostEqual(min(masses), 0.1 | units.MSun)
self.assertAlmostEqual(max(masses), 125.0 | units.MSun)
mass_mean = MultiplePartIMF(mass_boundaries=[0.10, 0.18, 0.42, 0.62, 1.18, 3.5, 125.0]|units.MSun,
alphas=[1.6, -1.01, -2.75, -2.08, -3.5, -2.63]).mass_mean()
self.assertAlmostEqual(mass_mean, 0.487756751788 | units.MSun)
self.assertAlmostRelativeEqual(masses.mean(), 0.487756751788 | units.MSun, 1)
def test6(self):
print("Test new_miller_scalo_mass_distribution")
masses = new_miller_scalo_mass_distribution(10000, **default_options)
self.assertTrue((masses >= 0.1 | units.MSun).all())
self.assertTrue((masses <= 125.0 | units.MSun).all())
self.assertAlmostEqual(min(masses), 0.1 | units.MSun)
self.assertAlmostEqual(max(masses), 125.0 | units.MSun)
mass_mean = MultiplePartIMF(mass_boundaries=[0.1, 1.0, 2.0, 10.0, 125.0]|units.MSun,
alphas=[-1.25, -2.0, -2.3, -3.3]).mass_mean()
self.assertAlmostEqual(mass_mean, 0.885783055149 | units.MSun)
self.assertAlmostRelativeEqual(masses.mean(), 0.885783055149 | units.MSun, 1)
def test7(self):
print("Test new_kroupa_mass_distribution")
masses = new_kroupa_mass_distribution(10000, **default_options)
self.assertTrue((masses >= 0.01 | units.MSun).all())
roundoff = 1.0 + 1.0e-12
self.assertTrue((masses <= (100.0 * roundoff) | units.MSun).all())
self.assertAlmostEqual(min(masses), 0.01 | units.MSun)
self.assertAlmostEqual(max(masses), 100.0 | units.MSun)
mass_mean = MultiplePartIMF(mass_boundaries=[0.01, 0.08, 0.5, 100.0]|units.MSun,
alphas=[-0.3, -1.3, -2.3]).mass_mean()
self.assertAlmostEqual(mass_mean, 0.376175542639 | units.MSun)
self.assertAlmostRelativeEqual(masses.mean(), 0.376175542639 | units.MSun, 1)
def test8(self):
print("Test with problematic alphas (new_salpeter_mass_distribution would give zero division errors)")
masses = new_broken_power_law_mass_distribution(10000,
mass_boundaries = [1.0, 10.0, 100.0] | units.MSun,
alphas = [-1, -2], **default_options)
self.assertTrue((masses >= 1.0 | units.MSun).all())
roundoff = 1.0 + 1.0e-12
self.assertTrue((masses <= (100.0 * roundoff) | units.MSun).all())
self.assertAlmostEqual(min(masses), 1.0 | units.MSun)
self.assertAlmostEqual(max(masses), 100.0 | units.MSun)
mass_mean = MultiplePartIMF(mass_boundaries=[1.0, 10.0, 100.0] | units.MSun,
alphas=[-1, -2]).mass_mean()
self.assertAlmostEqual(mass_mean, 10.0 | units.MSun)
self.assertAlmostRelativeEqual(masses.mean(), 10.0 | units.MSun, 1)
masses = new_broken_power_law_mass_distribution(101,
mass_boundaries = [1.0, 100.0] | units.MSun,
alphas = [-1], **default_options)
self.assertAlmostEqual(masses.median(), 10.0 | units.MSun)
|
176863
|
import logging
from sqlalchemy.orm import joinedload
import smartdb
from model import Machine, Schedule, MachineInterface
logger = logging.getLogger(__file__)
class ScheduleRepository:
__name__ = "ScheduleRepository"
def __init__(self, smart: smartdb.SmartDatabaseClient):
self.smart = smart
@staticmethod
def _lint_schedule_data(schedule_data: dict):
# {
# "roles": ["kubernetes-control-plane", "etcd-member"],
# "selector": {
# "mac": mac
# }
# }
try:
_ = schedule_data["selector"]["mac"]
_ = schedule_data["roles"]
except KeyError as e:
err_msg = "missing keys in schedule data: '%s'" % e
logger.error(err_msg)
raise TypeError(err_msg)
return schedule_data
def create_schedule(self, schedule_data: dict):
caller = "%s.%s" % (self.__name__, self.create_schedule.__name__)
schedule_data = self._lint_schedule_data(schedule_data)
@smartdb.cockroach_transaction
def callback(caller=caller):
commit = False
with self.smart.new_session() as session:
machine = session.query(Machine) \
.join(MachineInterface) \
.options(joinedload("schedules")) \
.filter(MachineInterface.mac == schedule_data["selector"]["mac"]) \
.first()
if not machine:
logger.error("machine mac %s not in db", schedule_data["selector"]["mac"])
return commit
else:
machine_already_scheduled = [s.role for s in machine.schedules]
for role in schedule_data["roles"]:
if role in machine_already_scheduled:
logger.info("machine mac %s already scheduled with role %s",
schedule_data["selector"]["mac"], role)
continue
session.add(Schedule(machine_id=machine.id, role=role))
logger.info("scheduling machine mac %s as role %s", schedule_data["selector"]["mac"], role)
commit = True
session.commit() if commit else None
return commit
return callback(caller)
def get_all_schedules(self):
result = dict()
with self.smart.new_session() as session:
for machine in session.query(Machine) \
.options(joinedload("interfaces")) \
.options(joinedload("schedules")) \
.join(Schedule) \
.filter(MachineInterface.as_boot == True):
if machine.schedules:
result[machine.interfaces[0].mac] = [k.role for k in machine.schedules]
return result
def get_roles_by_mac_selector(self, mac: str):
result = []
with self.smart.new_session() as session:
for s in session.query(Schedule) \
.join(Machine) \
.join(MachineInterface) \
.filter(MachineInterface.mac == mac):
result.append(s.role)
return result
def get_available_machines(self):
available_machines = []
with self.smart.new_session() as session:
for m in session.query(Machine) \
.join(MachineInterface) \
.options(joinedload("schedules")) \
.options(joinedload("interfaces")) \
.options(joinedload("disks")) \
.filter(MachineInterface.as_boot == True):
# TODO find a way to support cockroach and SQLite without this if
if not m.schedules:
available_machines.append({
"mac": m.interfaces[0].mac,
"ipv4": m.interfaces[0].ipv4,
"cidrv4": m.interfaces[0].cidrv4,
"as_boot": m.interfaces[0].as_boot,
"name": m.interfaces[0].name,
"fqdn": m.interfaces[0].fqdn,
"netmask": m.interfaces[0].netmask,
"created_date": m.created_date,
"disks": [{"path": k.path, "size-bytes": k.size} for k in m.disks],
})
return available_machines
def _construct_machine_dict(self, machine: Machine, role):
return {
"mac": machine.interfaces[0].mac,
"ipv4": machine.interfaces[0].ipv4,
"cidrv4": machine.interfaces[0].cidrv4,
"gateway": machine.interfaces[0].gateway,
"as_boot": machine.interfaces[0].as_boot,
"name": machine.interfaces[0].name,
"netmask": machine.interfaces[0].netmask,
"roles": role,
"created_date": machine.created_date,
"fqdn": machine.interfaces[0].fqdn,
"disks": [{"path": k.path, "size-bytes": k.size} for k in machine.disks],
}
def get_machines_by_role(self, role: str):
machines = []
with self.smart.new_session() as session:
for machine in session.query(Machine) \
.options(joinedload("interfaces")) \
.options(joinedload("disks")) \
.join(Schedule) \
.filter(MachineInterface.as_boot == True) \
.filter(Schedule.role == role):
machines.append(self._construct_machine_dict(machine, role))
return machines
def get_machines_by_roles(self, *roles):
if len(roles) == 1:
return self.get_machines_by_role(roles[0])
machines = []
roles = list(roles)
with self.smart.new_session() as session:
for machine in session.query(Machine) \
.options(joinedload("interfaces")) \
.options(joinedload("disks")) \
.join(Schedule) \
.filter(MachineInterface.as_boot == True):
# TODO Maybe do this with a sqlalchemy filter func
                if len(machine.schedules) == len(roles) and set(k.role for k in machine.schedules) == set(roles):
machines.append(self._construct_machine_dict(machine, roles))
return machines
def get_role_ip_list(self, role: str):
ips = []
with self.smart.new_session() as session:
for machine in session.query(Machine) \
.options(joinedload("interfaces")) \
.join(MachineInterface) \
.join(Schedule) \
.filter(Schedule.role == role, MachineInterface.as_boot == True):
ips.append(machine.interfaces[0].ipv4)
return ips
|
176896
|
from djangoevents.domain import BaseAggregate
from djangoevents.domain import DomainEvent
from djangoevents.utils_abstract import abstract
from ..utils import camel_case_to_snake_case
from ..utils import list_aggregate_events
from ..utils import list_concrete_aggregates
from ..utils import _list_subclasses
from ..utils import _list_internal_classes
from unittest import mock
import pytest
def test_list_subclasses():
class Parent:
pass
class Child(Parent):
pass
class GrandChild(Child):
pass
assert set(_list_subclasses(Parent)) == {Child, GrandChild}
def test_list_subclasses_when_none():
class Parent:
pass
assert _list_subclasses(Parent) == []
def test_list_internal_classes():
class Parent:
class Child1:
pass
class Child2:
class GrandChild:
pass
# Please note that GrandChild is not on the list!
assert set(_list_internal_classes(Parent)) == {Parent.Child1, Parent.Child2}
def test_list_internal_classes_none():
class Parent:
pass
assert _list_internal_classes(Parent) == []
def test_list_aggregates_skip_abstract():
class Aggregate1(BaseAggregate):
pass
@abstract
class Aggregate2(BaseAggregate):
pass
with mock.patch('djangoevents.utils._list_subclasses') as list_subclasses:
list_subclasses.return_value = [Aggregate1, Aggregate2]
aggregates = list_concrete_aggregates()
assert aggregates == [Aggregate1]
def test_list_aggregates_none_present():
with mock.patch('djangoevents.utils._list_subclasses') as list_subclasses:
list_subclasses.return_value = []
aggregates = list_concrete_aggregates()
assert aggregates == []
def test_list_events_sample_event_appart_from_abstract():
class Aggregate(BaseAggregate):
class Evt1(DomainEvent):
def mutate_event(self, *args, **kwargs):
pass
class Evt2(DomainEvent):
def mutate_event(self, *args, **kwargs):
pass
class Evt3(DomainEvent):
# No mutate_event present
pass
@abstract
class Evt4(DomainEvent):
def mutate_event(self, *args, **kwargs):
pass
events = list_aggregate_events(Aggregate)
assert set(events) == {Aggregate.Evt1, Aggregate.Evt2}
def test_list_events_not_an_aggregate():
events = list_aggregate_events(list)
assert events == []
@pytest.mark.parametrize('name, expected_output', [
('UserRegistered', 'user_registered'),
('UserRegisteredWithEmail', 'user_registered_with_email'),
('HttpResponse', 'http_response'),
('HTTPResponse', 'http_response'),
('already_snake', 'already_snake'),
])
def test_camel_case_to_snake_case(name, expected_output):
assert expected_output == camel_case_to_snake_case(name)
|
176944
|
from pybluemonday import UGCPolicy, StrictPolicy, NewPolicy
from collections import namedtuple
Case = namedtuple("Case", ["input", "output"])
def test_StrictPolicy():
cases = [
Case(input="Hello, <b>World</b>!", output="Hello, World!"),
Case(input="<blockquote>Hello, <b>World</b>!", output="Hello, World!"),
Case(
input="<quietly>email me - addy in profile</quiet>",
output="email me - addy in profile",
),
]
p = StrictPolicy()
for case in cases:
assert p.sanitize(case.input) == case.output
def test_UGCPolicy():
cases = [
Case("Hello, World!", "Hello, World!"),
Case("Hello, <b>World</b>!", "Hello, <b>World</b>!"),
Case(
"<p>Hello, <b onclick=alert(1337)>World</b>!</p>",
"<p>Hello, <b>World</b>!</p>",
),
Case(
"<p onclick=alert(1337)>Hello, <b>World</b>!</p>",
"<p>Hello, <b>World</b>!</p>",
),
Case("""<a href="javascript:alert(1337)">foo</a>""", "foo"),
Case(
"""<img src="http://example.org/foo.gif">""",
"""<img src="http://example.org/foo.gif">""",
),
Case(
"""<img src="http://example.org/x.gif" alt="y" width=96 height=64 border=0>""",
"""<img src="http://example.org/x.gif" alt="y" width="96" height="64">""",
),
Case(
"""<img src="http://example.org/x.png" alt="y" width="widgy" height=64 border=0>""",
"""<img src="http://example.org/x.png" alt="y" height="64">""",
),
Case(
"""<a href="foo.html">Link text</a>""",
"""<a href="foo.html" rel="nofollow">Link text</a>""",
),
Case(
"""<a href="foo.html" onclick="alert(1337)">Link text</a>""",
"""<a href="foo.html" rel="nofollow">Link text</a>""",
),
Case(
"""<a href="http://example.org/x.html" onclick="alert(1337)">Link text</a>""",
"""<a href="http://example.org/x.html" rel="nofollow">Link text</a>""",
),
Case(
"""<a href="https://example.org/x.html" onclick="alert(1337)">Link text</a>""",
"""<a href="https://example.org/x.html" rel="nofollow">Link text</a>""",
),
Case(
"""<a href="//example.org/x.html" onclick="alert(1337)">Link text</a>""",
"""<a href="//example.org/x.html" rel="nofollow">Link text</a>""",
),
Case(
"""<a href="javascript:alert(1337).html" onclick="alert(1337)">Link text</a>""",
"""Link text""",
),
Case(
"""<a name="header" id="header">Header text</a>""",
"""<a id="header">Header text</a>""",
),
Case(
"""<img src="planets.gif" width="145" height="126" alt="" usemap="#demomap"><map name="demomap"><area shape="rect" coords="0,0,82,126" href="demo.htm" alt="1"><area shape="circle" coords="90,58,3" href="demo.htm" alt="2"><area shape="circle" coords="124,58,8" href="demo.htm" alt="3"></map>""",
"""<img src="planets.gif" width="145" height="126" alt="" usemap="#demomap"><map name="demomap"><area shape="rect" coords="0,0,82,126" href="demo.htm" alt="1" rel="nofollow"><area shape="circle" coords="90,58,3" href="demo.htm" alt="2" rel="nofollow"><area shape="circle" coords="124,58,8" href="demo.htm" alt="3" rel="nofollow"></map>""",
),
Case(
"""<table style="color: rgb(0, 0, 0);"><tbody><tr><th>Column One</th><th>Column Two</th></tr><tr><td align="center" style="background-color: rgb(255, 255, 254);"><font size="2">Size 2</font></td><td align="center" style="background-color: rgb(255, 255, 254);"><font size="7">Size 7</font></td></tr></tbody></table>""",
"""<table><tbody><tr><th>Column One</th><th>Column Two</th></tr><tr><td align="center">Size 2</td><td align="center">Size 7</td></tr></tbody></table>""",
),
Case(
"""xss<a href="http://www.google.de" style="color:red;" onmouseover=alert(1) onmousemove="alert(2)" onclick=alert(3)>g<img src="http://example.org"/>oogle</a>""",
"""xss<a href="http://www.google.de" rel="nofollow">g<img src="http://example.org"/>oogle</a>""",
),
Case(
"<table>Hallo\r\n<script>SCRIPT</script>\nEnde\n\r",
"<table>Hallo\n\nEnde\n\n",
),
]
p = UGCPolicy()
for case in cases:
assert p.sanitize(case.input) == case.output
|
176980
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import *
import numpy as np
from .rnn_base import RNNBase
from .utils import SymbolTable
class TextRNN(RNNBase):
"""TextRNN for strings of text."""
def _preprocess_data(self, candidates, extend=False):
"""Convert candidate sentences to lookup sequences
:param candidates: candidates to process
:param extend: extend symbol table for tokens (train), or lookup (test)?
"""
if not hasattr(self, 'word_dict'):
self.word_dict = SymbolTable()
data, ends = [], []
for candidate in candidates:
toks = candidate.get_contexts()[0].text.split()
# Either extend word table or retrieve from it
f = self.word_dict.get if extend else self.word_dict.lookup
data.append(np.array(list(map(f, toks))))
ends.append(len(toks))
return data, ends
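# Hypothetical illustration (added commentary, not part of the original module), assuming a
# fresh SymbolTable and two candidates whose first context texts are "cats purr" and
# "dogs bark loudly": _preprocess_data(candidates, extend=True) would return
#   data == [array of 2 symbol ids, array of 3 symbol ids]   and   ends == [2, 3],
# i.e. one id sequence per candidate plus the token count of each sequence.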
|
176989
|
import numpy as np
import matplotlib
# matplotlib.use("TkAgg")
import matplotlib.pyplot as plt
from typing import *
import pandas as pd
import seaborn as sns
import math
sns.set()
class Accuracy(object):
def at_radii(self, radii: np.ndarray):
raise NotImplementedError()
class ApproximateAccuracy(Accuracy):
def __init__(self, data_file_path: str):
self.data_file_path = data_file_path
def at_radii(self, radii: np.ndarray) -> np.ndarray:
df = pd.read_csv(self.data_file_path, delimiter="\t")
return np.array([self.at_radius(df, radius) for radius in radii])
def at_radius(self, df: pd.DataFrame, radius: float):
return (df["correct"] & (df["radius"] >= radius)).mean()
class HighProbAccuracy(Accuracy):
def __init__(self, data_file_path: str, alpha: float, rho: float):
self.data_file_path = data_file_path
self.alpha = alpha
self.rho = rho
def at_radii(self, radii: np.ndarray) -> np.ndarray:
df = pd.read_csv(self.data_file_path, delimiter="\t")
return np.array([self.at_radius(df, radius) for radius in radii])
def at_radius(self, df: pd.DataFrame, radius: float):
mean = (df["correct"] & (df["radius"] >= radius)).mean()
num_examples = len(df)
return (
mean
- self.alpha
- math.sqrt(
self.alpha * (1 - self.alpha) * math.log(1 / self.rho) / num_examples
)
- math.log(1 / self.rho) / (3 * num_examples)
)
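# Note (added commentary, not in the original source): at_radius() appears to subtract a
# Bernstein-style confidence correction from the empirical fraction of points that are both
# correct and certified at the given radius, so the returned value can be read as a
# high-probability lower bound on certified accuracy controlled by alpha and rho.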
class Line(object):
def __init__(
self, quantity: Accuracy, legend: str, plot_fmt: str = "", scale_x: float = 1
):
self.quantity = quantity
self.legend = legend
self.plot_fmt = plot_fmt
self.scale_x = scale_x
def plot_certified_accuracy(
outfile: str,
title: str,
max_radius: float,
lines: List[Line],
radius_step: float = 0.01,
) -> None:
radii = np.arange(0, max_radius + radius_step, radius_step)
plt.figure()
for line in lines:
plt.plot(radii * line.scale_x, line.quantity.at_radii(radii), line.plot_fmt)
plt.ylim((0, 1))
plt.xlim((0, max_radius))
plt.tick_params(labelsize=14)
plt.xlabel("radius", fontsize=16)
plt.ylabel("certified accuracy", fontsize=16)
plt.legend([method.legend for method in lines], loc="upper right", fontsize=16)
plt.savefig(outfile + ".pdf")
plt.tight_layout()
plt.title(title, fontsize=20)
plt.tight_layout()
plt.savefig(outfile + ".png", dpi=300)
plt.close()
def smallplot_certified_accuracy(
outfile: str,
title: str,
max_radius: float,
methods: List[Line],
radius_step: float = 0.01,
xticks=0.5,
) -> None:
radii = np.arange(0, max_radius + radius_step, radius_step)
plt.figure()
for method in methods:
plt.plot(radii, method.quantity.at_radii(radii), method.plot_fmt)
plt.ylim((0, 1))
plt.xlim((0, max_radius))
plt.xlabel("radius", fontsize=22)
plt.ylabel("certified accuracy", fontsize=22)
plt.tick_params(labelsize=20)
plt.gca().xaxis.set_major_locator(plt.MultipleLocator(xticks))
plt.legend([method.legend for method in methods], loc="upper right", fontsize=20)
plt.tight_layout()
plt.savefig(outfile + ".pdf")
plt.close()
def latex_table_certified_accuracy(
outfile: str,
radius_start: float,
radius_stop: float,
radius_step: float,
methods: List[Line],
):
radii = np.arange(radius_start, radius_stop + radius_step, radius_step)
accuracies = np.zeros((len(methods), len(radii)))
for i, method in enumerate(methods):
accuracies[i, :] = method.quantity.at_radii(radii)
f = open(outfile, "w")
for radius in radii:
f.write("& $r = {:.3}$".format(radius))
f.write("\\\\\n")
f.write("\midrule\n")
for i, method in enumerate(methods):
f.write(method.legend)
for j, radius in enumerate(radii):
if i == accuracies[:, j].argmax():
txt = r" & \textbf{" + "{:.2f}".format(accuracies[i, j]) + "}"
else:
txt = " & {:.2f}".format(accuracies[i, j])
f.write(txt)
f.write("\\\\\n")
f.close()
def markdown_table_certified_accuracy(
outfile: str,
radius_start: float,
radius_stop: float,
radius_step: float,
methods: List[Line],
):
radii = np.arange(radius_start, radius_stop + radius_step, radius_step)
accuracies = np.zeros((len(methods), len(radii)))
for i, method in enumerate(methods):
accuracies[i, :] = method.quantity.at_radii(radii)
f = open(outfile, "w")
f.write("| | ")
for radius in radii:
f.write("r = {:.3} |".format(radius))
f.write("\n")
f.write("| --- | ")
for i in range(len(radii)):
f.write(" --- |")
f.write("\n")
for i, method in enumerate(methods):
f.write("<b> {} </b>| ".format(method.legend))
for j, radius in enumerate(radii):
if i == accuracies[:, j].argmax():
txt = "{:.2f}<b>*</b> |".format(accuracies[i, j])
else:
txt = "{:.2f} |".format(accuracies[i, j])
f.write(txt)
f.write("\n")
f.close()
if __name__ == "__main__":
latex_table_certified_accuracy(
"analysis/latex/vary_noise_cifar10",
0.25,
1.5,
0.25,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.12/test/sigma_0.12"
),
"$\sigma = 0.12$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
markdown_table_certified_accuracy(
"analysis/markdown/vary_noise_cifar10",
0.25,
1.5,
0.25,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.12/test/sigma_0.12"
),
"σ = 0.12",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.25"
),
"σ = 0.25",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"σ = 0.50",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_1.00"
),
"σ = 1.00",
),
],
)
latex_table_certified_accuracy(
"analysis/latex/vary_noise_imagenet",
0.5,
3.0,
0.5,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
markdown_table_certified_accuracy(
"analysis/markdown/vary_noise_imagenet",
0.5,
3.0,
0.5,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.25"
),
"σ = 0.25",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"σ = 0.50",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_1.00"
),
"σ = 1.00",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_noise_cifar10",
"CIFAR-10, vary $\sigma$",
1.5,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.12/test/sigma_0.12"
),
"$\sigma = 0.12$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_train_noise_cifar_050",
"CIFAR-10, vary train noise, $\sigma=0.5$",
1.5,
[
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.25/test/sigma_0.50"
),
"train $\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_0.50/test/sigma_0.50"
),
"train $\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/cifar10/resnet110/noise_1.00/test/sigma_0.50"
),
"train $\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_train_noise_imagenet_050",
"ImageNet, vary train noise, $\sigma=0.5$",
1.5,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.50"
),
"train $\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"train $\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_0.50"
),
"train $\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/vary_noise_imagenet",
"ImageNet, vary $\sigma$",
4,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.25/test/sigma_0.25"
),
"$\sigma = 0.25$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"$\sigma = 0.50$",
),
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_1.00/test/sigma_1.00"
),
"$\sigma = 1.00$",
),
],
)
plot_certified_accuracy(
"analysis/plots/high_prob",
"Approximate vs. High-Probability",
2.0,
[
Line(
ApproximateAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50"
),
"Approximate",
),
Line(
HighProbAccuracy(
"data/certify/imagenet/resnet50/noise_0.50/test/sigma_0.50",
0.001,
0.001,
),
"High-Prob",
),
],
)
|
177050
|
from .trainer import Trainer, STOP_TIME
from .my_checkpoint_management import MyCheckpointManager
import signal
class EvolutionTrainer(Trainer):
def __init__(
self,
root_dir,
evolutions,
keep_checkpoint_steps,
save_interval_minutes=30,
signal_handler_signals=(signal.SIGINT, signal.SIGUSR1, signal.SIGTERM),
):
"""
Creates a new EvolutionTrainer object.
This will create the root_dir for training and a directory for
checkpoints and log files within this directory.
In contrast to the default Trainer, EvolutionTrainer supports evolutions.
        Evolutions make it possible to implement trainings with multiple stages that
        require running the training script multiple times.
        In addition, EvolutionTrainer takes care of checkpoint management.
root_dir : str
path to the root of the training directory.
evolutions : list of objects
The evolution object must implement the attributes 'name'
and 'stop_step'.
'name' is used in the filename of the checkpoint.
'stop_step' is the iteration number at which the evolution
stops.
keep_checkpoint_steps : list or set of integers
A set of steps for which checkpoints are kept.
save_interval_minutes : int
The save interval in minutes. If the latest checkpoint is
older than this interval then a new checkpoint will be
created by save_if_needed()
signal_handler_signals : list of signals
This object will install a signal handler for these signals
that will cause keep_training() to return False and create
a checkpoint.
"""
if not evolutions:
raise ValueError("List of evolutions must not be empty")
if len([e.stop_step for e in evolutions]) != len(
set([e.stop_step for e in evolutions])):
raise ValueError("Duplicate 'stop_step' found in evolutions")
if len([e.name for e in evolutions]) != len(
set([e.name for e in evolutions])):
raise ValueError("Duplicate 'name' found in evolutions")
super().__init__(root_dir, signal_handler_signals)
self._evolutions = sorted(evolutions, key=lambda x: x.stop_step)
self._current_evolution = self._get_current_evolution()
if keep_checkpoint_steps:
self._keep_checkpoint_steps = set(keep_checkpoint_steps)
else:
self._keep_checkpoint_steps = set()
# make sure that we keep checkpoints for the last step of each evolution
for evo in self._evolutions:
self._keep_checkpoint_steps.add(evo.stop_step)
self._save_interval_minutes = save_interval_minutes
self._checkpoint_manager = None
def checkpoint_prefix_for_evo(self, evo):
return 'ckpt_{}'.format(evo.name)
def _get_current_evolution(self):
current_evo = self._evolutions[-1]
for evo in reversed(self._evolutions):
checkpoint_prefix = self.checkpoint_prefix_for_evo(evo)
ckpt_manager = MyCheckpointManager(
None,
self.checkpoint_dir, [],
checkpoint_prefix=checkpoint_prefix)
steps_checkpoints = ckpt_manager.get_steps_and_checkpoints()
if steps_checkpoints:
last_step = steps_checkpoints[-1][0]
if last_step < evo.stop_step:
# if there are checkpoints with a step smaller than the stop
# step then this is the current evo and we can break out of
# the loop
current_evo = evo
break
else:
# in this case evo is finished and the evo stored in
# current_evo from the previous iteration is the current evo
return current_evo
else:
# if there are no checkpoints we have to check if the previous
# evo was finished.
current_evo = evo
return current_evo
@property
def current_evolution(self):
return self._current_evolution
@property
def latest_checkpoint(self):
"""Returns the latest checkpoint across evolutions"""
checkpoint = None
for evo in reversed(self._evolutions):
checkpoint_prefix = self.checkpoint_prefix_for_evo(evo)
ckpt_manager = MyCheckpointManager(
None,
self.checkpoint_dir, [],
checkpoint_prefix=checkpoint_prefix)
if ckpt_manager.latest_checkpoint:
checkpoint = ckpt_manager.latest_checkpoint
break
return checkpoint
def _get_checkpoint_manager(self, checkpoint_fn):
if self._checkpoint_manager is None:
checkpoint_prefix = self.checkpoint_prefix_for_evo(
self.current_evolution)
self._checkpoint_manager = MyCheckpointManager(
checkpoint_fn,
self.checkpoint_dir,
self._keep_checkpoint_steps,
self._save_interval_minutes,
checkpoint_prefix,
)
return self._checkpoint_manager
def keep_training(
self,
step_var,
checkpoint_fn,
stop_time=STOP_TIME,
display_interval=10,
display_str_list=None,
runstats_interval_minutes=10,
step_var_increment=1,
):
"""
This function increments the step_var, displays and logs runtime information and saves checkpoints.
The function is intended to be used as the condition for the training loop, e.g.
trainer = EvolutionTrainer(train_dir)
step_var = torch.tensor(0)
            checkpoint_fn = lambda step: {'step': step_var, 'model': model.state_dict()}
if trainer.latest_checkpoint:
checkpoint = torch.load(trainer.latest_checkpoint)
step_var = checkpoint['step']
model.load_state_dict(checkpoint['model'])
while trainer.keep_training(step_var, checkpoint_fn):
train()
step_var : Scalar torch Tensor.
The step variable that will be incremented each call.
checkpoint_fn : A function returning the dictionary to be saved.
stop_time : float or None
stop_time in seconds since the epoch. The default will be read from the
environment variable STOP_TIME.
Set to None for no stop_time or do not set the env var STOP_TIME.
display_interval : int
The interval in iterations for displaying runtime information on the console.
display_str_list : list
A list of additional objects that will be displayed with print().
runstats_interval_minutes: int
The interval for logging runtime statistics in minutes.
step_var_increment : int
The value to add to the step_var. If 0 the step_var will not be updated.
        For a single stop_step the return behavior is:
        Returns True if step_var != stop_step.
        Returns False if step_var == stop_step, if the current time is larger than stop_time,
        or if a signal has been received for which a signal handler was installed.
"""
status = super().keep_training(
step_var=step_var,
stop_step=self.current_evolution.stop_step,
checkpoint_manager=self._get_checkpoint_manager(checkpoint_fn),
stop_time=stop_time,
display_interval=display_interval,
display_str_list=display_str_list,
runstats_interval_minutes=runstats_interval_minutes,
step_var_increment=step_var_increment,
)
return status
|
177127
|
from typing import List
from deeppavlov.core.common.registry import register
@register("sentseg_restore_sent")
def SentSegRestoreSent(batch_words: List[List[str]], batch_tags: List[List[str]]) -> List[str]:
ret = []
for words, tags in zip(batch_words, batch_tags):
if len(tags) == 0:
ret.append("")
continue
sent = words[0]
punct = "" if tags[0] == "O" else tags[0][-1]
for word, tag in zip(words[1:], tags[1:]):
if tag != "O":
sent += punct
punct = tag[-1]
sent += " " + word
sent += punct
ret.append(sent)
return ret
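# Hypothetical example (added for illustration, not part of the original module), assuming
# each non-"O" tag's last character is the punctuation that closes the segment starting at
# the tagged word:
#   SentSegRestoreSent([["hello", "how", "are", "you"]], [["B-.", "B-?", "O", "O"]])
#   -> ["hello. how are you?"]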
|
177130
|
import os
import numpy as np
import pandas as pd
import h5py
from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
def create_single_pop_h5():
h5_file_old = h5py.File('spike_files/spikes.old.h5', 'r')
node_ids = h5_file_old['/spikes/gids']
timestamps = h5_file_old['/spikes/timestamps']
with h5py.File('spike_files/spikes.one_pop.h5', 'w') as h5:
add_hdf5_magic(h5)
add_hdf5_version(h5)
core_grp = h5.create_group('/spikes/v1')
core_grp.attrs['sorting'] = 'by_time'
ts_ds = core_grp.create_dataset('timestamps', data=timestamps, dtype=np.float64)
ts_ds.attrs['units'] = 'milliseconds'
nids_ds = core_grp.create_dataset('node_ids', data=node_ids, dtype=np.uint64)
def create_multipop_csv(dir_path='/local1/workspace/bmtk/docs/examples/NWB_files'):
lgn_h5 = h5py.File(os.path.join(dir_path, 'lgn_spikes.nwb'), 'r')
tw_h5 = h5py.File(os.path.join(dir_path, 'tw_spikes.nwb'), 'r')
full_df = pd.DataFrame({
'timestamps': pd.Series(dtype=np.float64),
'population': pd.Series(dtype=np.string_),
'node_ids': pd.Series(dtype=np.uint64)
})
for pop_name, pop_h5, n_nodes in [('lgn', lgn_h5, 4000), ('tw', tw_h5, 2000)]:
spike_train_grp = pop_h5['/processing/trial_0/spike_train']
for node_id in range(n_nodes):
tmp_df = pd.DataFrame({
'timestamps': spike_train_grp[str(node_id)]['data'][()],
'population': pop_name,
'node_ids': np.uint64(node_id)
})
full_df = full_df.append(tmp_df)
full_df = full_df[['timestamps', 'population', 'node_ids']]
full_df.to_csv('spike_files/spikes.multipop.csv', sep=' ', index=False)
def create_multipop_h5():
spikes_df = pd.read_csv('spike_files/spikes.multipop.csv', sep=' ')
lgn_spikes_df = spikes_df[spikes_df['population'] == 'lgn']
tw_spikes_df = spikes_df[spikes_df['population'] == 'tw']
with h5py.File('spike_files/spikes.multipop.h5', 'w') as h5:
add_hdf5_magic(h5)
add_hdf5_version(h5)
lgn_grp = h5.create_group('/spikes/lgn')
lgn_grp.attrs['sorting'] = 'by_id'
ts_ds = lgn_grp.create_dataset('timestamps', data=lgn_spikes_df['timestamps'], dtype=np.float64)
ts_ds.attrs['units'] = 'milliseconds'
lgn_grp.create_dataset('node_ids', data=lgn_spikes_df['node_ids'], dtype=np.uint64)
tw_grp = h5.create_group('/spikes/tw')
tw_grp.attrs['sorting'] = 'by_id'
ts_ds = tw_grp.create_dataset('timestamps', data=tw_spikes_df['timestamps'], dtype=np.float64)
ts_ds.attrs['units'] = 'milliseconds'
tw_grp.create_dataset('node_ids', data=tw_spikes_df['node_ids'], dtype=np.uint64)
def create_nwb():
spikes_df = pd.read_csv('spike_files/spikes.one_pop.csv', sep=' ')
with h5py.File('spike_files/spikes.onepop.v1.0.nwb', 'w') as h5:
spikes_grp = h5.create_group('/processing/trial_0/spike_train')
for node_id in range(14):
timestamps = spikes_df[spikes_df['node_ids'] == node_id]['timestamps'].values
data_ds = spikes_grp.create_dataset('{}/data'.format(node_id), data=timestamps, dtype=np.float64)
data_ds.attrs['dimension'] = 'time'
data_ds.attrs['unit'] = 'millisecond'
if __name__ == '__main__':
# create_multipop_csv()
# create_multipop_h5()
create_nwb()
|
177140
|
from .imgurdownloader import ImgurDownloader # NOQA
# defining __version__ variable is pointless
__author__ = '<NAME> <<EMAIL>>'
__all__ = []
|
177156
|
import hashlib
import hmac
import time
class BitfinexAuth():
def __init__(self, api_key: str, secret_key: str):
self.api_key = api_key
self.secret_key = secret_key
self.last_nonce = 0
def _sign_payload(self, payload) -> str:
sig = hmac.new(self.secret_key.encode('utf8'),
payload.encode('utf8'),
hashlib.sha384).hexdigest()
return sig
def get_nonce(self) -> int:
nonce = int(round(time.time() * 1_000_000))
if self.last_nonce == nonce:
nonce = nonce + 1
elif self.last_nonce > nonce:
nonce = self.last_nonce + 1
self.last_nonce = nonce
return nonce
def generate_auth_payload(self, payload, nonce = None):
"""
Sign payload
"""
nonce = nonce if nonce is not None else self.get_nonce()
sig = self._sign_payload(payload)
payload = {
"apiKey": self.api_key,
"authSig": sig,
"authNonce": nonce,
"authPayload": payload,
"event": 'auth',
}
return payload
def generate_api_headers(self, path, body):
"""
Generate headers for a signed payload
"""
nonce = str(self.get_nonce())
signature = "/api/" + path + nonce + body
sig = self._sign_payload(signature)
return {
"bfx-nonce": nonce,
"bfx-apikey": self.api_key,
"bfx-signature": sig,
"content-type": "application/json"
}
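# Hypothetical usage sketch (added commentary, not part of the original module); the path and
# keys below are placeholders:
#   auth = BitfinexAuth(api_key="my_key", secret_key="my_secret")
#   headers = auth.generate_api_headers("v2/auth/r/wallets", body="{}")
#   # headers now carries bfx-nonce, bfx-apikey and an HMAC-SHA384 bfx-signature computed
#   # over "/api/" + path + nonce + body, matching _sign_payload() above.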
|
177178
|
import functools
import glob
import os
import shlex
from typing import Callable, Iterable, List, Optional, Tuple
from bs4 import BeautifulSoup, Comment, Doctype
from mitmproxy import ctx, http
import modules.arguments as A
import modules.constants as C
import modules.csp as csp
import modules.inject as inject
import modules.inline as inline
import modules.metadata as metadata
from modules.misc import sanitize
from modules.requests import CONTENT_TYPE, containsQueryParam, inferEncoding
import modules.text as T
import modules.userscript as userscript
from modules.userscript import Userscript, UserscriptError
from modules.utilities import first, flag, fromOptional, itemList, second
PATTERN_USERSCRIPT: str = "*.user.js"
RELEVANT_CONTENT_TYPES: List[str] = ["text/html", "application/xhtml+xml"]
CHARSET_DEFAULT: str = "utf-8"
TAB: str = " "
LIST_ITEM_PREFIX: str = TAB + "• "
HTML_PARSER: str = "lxml"
# lxml handles non-uppercase DOCTYPE correctly; html.parser does not: It emits
# <!DOCTYPE doctype html> if the original source code contained <!doctype html>.
HTML_INFO_COMMENT_PREFIX: str = f"""
[{T.INFO_MESSAGE}]
"""
def logInfo(s: str) -> None:
try:
ctx.log.info(s)
except Exception:
print(s)
def logWarning(s: str) -> None:
try:
ctx.log.warn(s)
except Exception:
print(s)
def logError(s: str) -> None:
try:
ctx.log.error(s)
except Exception:
print(s)
def indexOfDTD(soup: BeautifulSoup) -> Optional[int]:
index: int = 0
for item in soup.contents:
if isinstance(item, Doctype):
return index
index += 1
return None
bulletList: Callable[[Iterable[str]], str] = functools.partial(itemList, LIST_ITEM_PREFIX)
def unsafeSequencesMessage(script: Userscript) -> str:
sequences = script.unsafeSequences
return f"""{script.name} cannot be injected because it contains {"these unsafe sequences" if len(sequences) > 1 else "this unsafe sequence"}:
{itemList(TAB, sequences)}
<script> tags cannot contain any of these sequences (case-insensitive):
{itemList(TAB, inline.DANGEROUS_SEQUENCES)}
Possible solutions:
""" + bulletList([
f"Make sure the userscript does not contain any of the sequences listed above.",
f"Make the userscript available online and give it a {metadata.tag(userscript.directive_downloadURL)}",
f"Remove the {flag(A.inline)} flag.",
])
# Because ctx.options is not subscriptable and we want to be able to use
# expressions as keys:
def option(key: str):
return ctx.options.__getattr__(sanitize(key))
def loadUserscripts(directory: str) -> List[Userscript]:
loadedUserscripts: List[Tuple[Userscript, str]] = []
workingDirectory = os.getcwd()
logInfo(f"""Looking recursively for userscripts ({PATTERN_USERSCRIPT}) in directory `{directory}` ...""")
os.chdir(directory)
pattern = "**/" + PATTERN_USERSCRIPT
# recursive=True only affects the meaning of "**".
# https://docs.python.org/3/library/glob.html#glob.glob
for unsafe_filename in glob.glob(pattern, recursive=True):
filename = shlex.quote(unsafe_filename)
logInfo("Loading " + filename + " ...")
try:
content = open(filename).read()
except PermissionError:
logError("Could not read file `"+filename+"`: Permission denied.")
continue
except Exception as e:
logError("Could not read file `"+filename+"`: " + str(e))
continue
try:
script = userscript.create(content)
if script.downloadURL is None:
logWarning(f"""{script.name} will be injected inline because it does not have a {metadata.tag(userscript.directive_downloadURL)}.""")
loadedUserscripts.append((script, filename))
if script.downloadURL is None and len(script.unsafeSequences) > 0:
logError(unsafeSequencesMessage(script))
except metadata.MetadataError as err:
logError("Metadata error:")
logError(str(err))
continue
except UserscriptError as err:
logError("Userscript error:")
logError(str(err))
continue
os.chdir(workingDirectory) # so mitmproxy does not unload the script
logInfo("")
logInfo(str(len(loadedUserscripts)) + " userscript(s) loaded:")
logInfo("")
logInfo(bulletList(map(
lambda s: f"{first(s).name} ({second(s)})",
loadedUserscripts
)))
logInfo("")
return list(map(first, loadedUserscripts))
class UserscriptInjector:
def __init__(self):
self.userscripts: List[Userscript] = []
def load(self, loader):
loader.add_option(sanitize(A.inline), bool, False, A.inline_help)
loader.add_option(sanitize(A.no_default_userscripts), bool, False, A.no_default_userscripts_help)
loader.add_option(sanitize(A.list_injected), bool, False, A.list_injected_help)
loader.add_option(sanitize(A.bypass_csp), Optional[str], A.bypass_csp_default, A.bypass_csp_help)
loader.add_option(sanitize(A.userscripts_dir), Optional[str], A.userscripts_dir_default, A.userscripts_dir_help)
loader.add_option(sanitize(A.query_param_to_disable), str, A.query_param_to_disable_default, A.query_param_to_disable_help)
def configure(self, updates):
useDefaultUserscripts = True
if sanitize(A.no_default_userscripts) in updates and option(A.no_default_userscripts):
logInfo(f"""Built-in default userscripts will be skipped due to {flag(A.no_default_userscripts)} flag.""")
useDefaultUserscripts = False
if sanitize(A.inline) in updates and option(A.inline):
logWarning(f"""Only inline injection will be used due to {flag(A.inline)} flag.""")
if sanitize(A.query_param_to_disable) in updates:
logInfo(f"""Userscripts will not be injected when the request URL contains a `{option(A.query_param_to_disable)}` query parameter.""")
if sanitize(A.userscripts_dir) in updates:
userscripts = loadUserscripts(C.DEFAULT_USERSCRIPTS_DIR) if useDefaultUserscripts else []
userscriptsDirectory = option(A.userscripts_dir)
if userscriptsDirectory is None:
logWarning(f"No custom userscripts will be loaded, because {flag(A.userscripts_dir)} was not provided.")
else:
userscripts.extend(loadUserscripts(userscriptsDirectory))
self.userscripts = userscripts
def response(self, flow: http.HTTPFlow):
response = flow.response
if CONTENT_TYPE in response.headers:
if any(map(lambda t: t in response.headers[CONTENT_TYPE], RELEVANT_CONTENT_TYPES)):
# Response is a web page; proceed.
injections: List[csp.Injection] = []
soup = BeautifulSoup(
response.content,
HTML_PARSER,
from_encoding=inferEncoding(response)
)
requestURL = flow.request.pretty_url # should work in transparent mode too, unless the Host header is spoofed
if containsQueryParam(option(A.query_param_to_disable), flow.request):
logInfo(f"""Not injecting any userscripts into {requestURL} because it contains a `{option(A.query_param_to_disable)}` query parameter.""")
return
isApplicable: Callable[[Userscript], bool] = userscript.applicableChecker(requestURL)
for script in self.userscripts:
if isApplicable(script):
useInline = option(A.inline) or script.downloadURL is None
if useInline and len(script.unsafeSequences) > 0:
logError(unsafeSequencesMessage(script))
continue
logInfo(f"""Injecting {script.name}{"" if script.version is None else " " + C.VERSION_PREFIX + script.version} into {requestURL} ({"inline" if useInline else "linked"}) ...""")
shouldUseNonce = useInline and option(A.bypass_csp) == A.bypass_csp_script # If not inline, then URL is used for bypassing; if bypass for nothing or everything, then the nonce would have no effect anyway.
nonce = csp.generateNonce() if shouldUseNonce else None
result = inject.inject(script, soup, inject.Options(
inline = option(A.inline),
nonce = nonce
))
if type(result) is BeautifulSoup:
soup = result
injections.append(csp.Injection(
userscript = script,
nonce = nonce,
))
else:
logError("Injection failed due to the following error:")
logError(str(result))
handleContentSecurityPolicy(response, injections)
index_DTD: Optional[int] = indexOfDTD(soup)
# Insert information comment:
if option(A.list_injected):
namesOfInjectedScripts = [ i.userscript.name + ("" if i.userscript.version is None else " " + T.stringifyVersion(i.userscript.version)) for i in injections ]
soup.insert(0 if index_DTD is None else 1+index_DTD, Comment(
HTML_INFO_COMMENT_PREFIX + (
"No matching userscripts for this URL." if namesOfInjectedScripts == []
else "These scripts were inserted:\n" + bulletList(namesOfInjectedScripts)
) + "\n"
))
# Serialize and encode:
response.content = str(soup).encode(
fromOptional(soup.original_encoding, CHARSET_DEFAULT),
"replace"
)
def handleContentSecurityPolicy(response: http.HTTPFlow.response, injections: List[csp.Injection]):
# If there is a CSP header, we may need to modify it for the userscript(s) to work.
ContentSecurityPolicy = "Content-Security-Policy"
if ContentSecurityPolicy in response.headers:
bypassCspValue = option(A.bypass_csp)
if bypassCspValue == A.bypass_csp_script:
logInfo(f"Bypassing host site's Content Security Policy for userscripts only (not any resources injected _by_ userscripts, such as stylesheets and images). Try `{flag(A.bypass_csp)} {A.bypass_csp_everything}` if something does not work properly.")
response.headers[ContentSecurityPolicy] = csp.headerWithScriptsAllowed(response.headers[ContentSecurityPolicy], injections)
elif bypassCspValue == A.bypass_csp_everything:
logInfo(f"Bypassing host site's Content Security Policy altogether due to `{flag(A.bypass_csp)} {A.bypass_csp_everything}`.")
del response.headers[ContentSecurityPolicy]
else:
logWarning(f"Host site has a Content Security Policy. Try the {flag(A.bypass_csp)} flag if userscripts don't work properly.")
addons = [ UserscriptInjector() ]
|
177220
|
from marshmallow import INCLUDE, Schema, fields, post_load, pre_load
class Urls:
def __init__(
self,
digital_purchase_date=None,
foc_date=None,
onsale_date=None,
unlimited_date=None,
wiki=None,
detail=None,
**kwargs,
):
self.digital_purchase_date = digital_purchase_date
self.foc_date = foc_date
self.onsale_date = onsale_date
self.unlimited_date = unlimited_date
self.wiki = wiki
self.detail = detail
self.unknown = kwargs
class UrlsSchema(Schema):
digitalPurchaseDate = fields.Url(attribute="digital_purchase_date")
focDate = fields.Url(attribute="foc_date")
onsaleDate = fields.Url(attribute="onsale_date")
unlimitedDate = fields.Url(attribute="unlimited_date")
# Should these go into a separate class like CharacterUrls?
# For now let's put them here, but it may be something to consider to split them.
wiki = fields.Url()
detail = fields.Url()
class Meta:
unknown = INCLUDE
@pre_load
def process_input(self, data, **kwargs):
return {d["type"]: d["url"] for d in data}
@post_load
def make(self, data, **kwargs):
return Urls(**data)
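# Hypothetical usage sketch (added commentary, not part of the original module); the input
# mirrors the list of {"type": ..., "url": ...} dicts that process_input() flattens:
#   data = [
#       {"type": "detail", "url": "http://example.org/comics/123"},
#       {"type": "wiki", "url": "http://example.org/wiki/123"},
#   ]
#   urls = UrlsSchema().load(data)  # -> Urls(detail=..., wiki=...); unknown types end up in .unknown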
|
177235
|
import os
import shutil
import readdy
import tempfile
import unittest
import numpy as np
class TestTopologyReactionCount(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.dir = tempfile.mkdtemp("test-topology-reaction-count")
@classmethod
def tearDownClass(cls) -> None:
shutil.rmtree(cls.dir, ignore_errors=True)
def _test_kernel(self, kernel):
system = readdy.ReactionDiffusionSystem(box_size=[20, 20, 20])
system.topologies.add_type("T1")
system.topologies.add_type("T2")
system.add_species("A")
system.add_topology_species("B")
system.topologies.configure_harmonic_bond("B", "B", 1., .1)
system.add_topology_species("C")
system.topologies.configure_harmonic_bond("C", "C", 1., .1)
system.topologies.add_spatial_reaction("attach: T1(B) + (A) -> T1(B--B)", rate=1e-1, radius=.5)
def flip1(topology):
recipe = readdy.StructuralReactionRecipe(topology)
for v in topology.graph.vertices:
recipe.change_particle_type(v, "C")
recipe.change_topology_type("T2")
return recipe
def flip2(topology):
recipe = readdy.StructuralReactionRecipe(topology)
for v in topology.graph.vertices:
recipe.change_particle_type(v, "B")
recipe.change_topology_type("T1")
return recipe
system.topologies.add_structural_reaction("flip_types_1", "T1", flip1, lambda x: 5e-2)
system.topologies.add_structural_reaction("flip_types_2", "T2", flip2, lambda x: 5e-2)
sim = system.simulation(kernel=kernel)
sim.output_file = os.path.join(self.dir, "out_{}.h5".format(kernel))
collected_counts = []
def callback(results):
nonlocal collected_counts
collected_counts.append(results)
sim.observe.reaction_counts(1, callback=callback)
sim.observe.number_of_particles(1, types=["A", "B", "C"])
sim.add_particles("A", np.random.normal(scale=1, size=(1000, 3)))
for _ in range(10):
sim.add_topology("T1", "B", np.random.normal(size=(1, 3)))
sim.run(1000, timestep=1, show_summary=False)
traj = readdy.Trajectory(sim.output_file)
times, n_particles = traj.read_observable_number_of_particles()
times2, counts = traj.read_observable_reaction_counts()
np.testing.assert_array_equal(times, times2)
assert not counts["reactions"]
spatials = counts["spatial_topology_reactions"]
n_spatial = 0
cA_prev = None
for t, (cA, cB, cC), cc in zip(times, n_particles, collected_counts):
assert cA_prev is None or cA <= cA_prev
np.testing.assert_equal(cA + cB + cC, 1010)
cc_normal = cc[0]
assert not cc_normal
cc_spatial = cc[1]
cc_structural = cc[2]
n_spatial += spatials["attach"][t]
assert cA == 1000 - n_spatial, f"Got {cA} A particles, expected {1000 - n_spatial}, at time t {t}"
for sp in cc_spatial.keys():
recorded = spatials[sp][t]
assert cc_spatial[sp] == recorded, f"Got {cc_spatial[sp]} != {recorded} (t={t})"
for st in cc_structural.keys():
recorded = counts["structural_topology_reactions"][st][t]
assert cc_structural[st] == recorded, f"Got {cc_structural[st]} != {recorded} (t={t})"
cA_prev = cA
def test_scpu(self):
self._test_kernel("SingleCPU")
def test_cpu(self):
self._test_kernel("CPU")
if __name__ == '__main__':
unittest.main()
|
177272
|
Import("env")
import os
import tempfile
def cprint(*args, **kwargs):
print(f'pre_script_patch_debug.py:', *args, **kwargs)
def get_winterrupts_path():
winterrupts_path = None
for pio_package in env.PioPlatform().dump_used_packages():
pio_dir = env.PioPlatform().get_package_dir(pio_package['name'])
# TODO: This should change for non-mega cores!
possible_path = os.path.join(pio_dir, 'cores', 'MegaCore', 'WInterrupts.c')
if os.path.exists(possible_path):
cprint(f'Found WInterrupts.c: {possible_path}')
winterrupts_path = possible_path
return winterrupts_path
def patch_function_factory(src_path, output_suffix, replacement_list):
"""
Creates a function that will return a filepath to a patched source file
:param src_path: The actual source path on disk, this is different than node.get_abspath()
:param output_suffix: The suffix for the output temporary file
:param replacement_list: List of 'in'/'out' pairs that should be replaced
:return: Build Middleware function
"""
def out_func(node):
# patch_path_key needs to be kept in sync with post_script_remove_patched_files.py
# so that after a successful build the patched file can be removed
patch_path_key = '_patched_'
with tempfile.NamedTemporaryFile(mode='w', suffix=f'{patch_path_key}{output_suffix}', delete=False) as tf:
patched_filepath = tf.name
cprint(f'Patching {src_path}')
cprint(f'Replacement path: {patched_filepath}')
cprint(f'Build path: {node.get_abspath()}')
with open(src_path, 'r') as wint_f:
for wint_line in wint_f.readlines():
                    # Default is to just pass the line through un-replaced
out_line = wint_line
# Now we check if line is in the replacements list
for replacement in replacement_list:
if replacement['in'] in wint_line:
out_line = replacement['out']
break
# Write the (possibly replaced) line to the output temporary file
tf.write(out_line)
return env.File(patched_filepath)
return out_func
source_patch_dict = {
'*WInterrupts.c': {
'actual_src_path': get_winterrupts_path(),
'patches': [
{
'in': 'IMPLEMENT_ISR(INT7_vect, EXTERNAL_INT_7)',
'out': '''\
#if defined(OAT_DEBUG_BUILD)
#pragma message "OAT_DEBUG_BUILD is defined, ISR 7 disabled in WInterrupts.c"
#else
IMPLEMENT_ISR(INT7_vect, EXTERNAL_INT_7)
#endif
'''
},
]
}
}
for filepath_glob, file_patch_info in source_patch_dict.items():
file_src_path = file_patch_info['actual_src_path']
if not file_src_path:
cprint(f'Could not find {filepath_glob} to patch! Skipping...')
continue
env.AddBuildMiddleware(
patch_function_factory(src_path=file_src_path,
replacement_list=file_patch_info['patches'],
output_suffix='WInterrupts.c'),
filepath_glob
)
|
177332
|
import pytest
from ymmsl import Reference, Settings
from libmuscle.settings_manager import SettingsManager
@pytest.fixture
def settings_manager() -> SettingsManager:
return SettingsManager()
def test_create(settings_manager):
assert len(settings_manager.base) == 0
assert len(settings_manager.overlay) == 0
def test_get_setting(settings_manager):
ref = Reference
settings_manager.base[ref('test')] = 13
assert settings_manager.get_setting(ref('instance'), ref('test')) == 13
settings_manager.overlay[ref('test2')] = 14
assert settings_manager.get_setting(ref('instance'), ref('test2')) == 14
settings_manager.base[ref('test2')] = 'test'
assert settings_manager.get_setting(ref('instance'), ref('test2')) == 14
settings_manager.overlay = Settings()
assert settings_manager.get_setting(ref('instance'), ref('test2')) == \
'test'
settings_manager.base[ref('test3')] = 'base_test3'
settings_manager.base[ref('instance.test3')] = 'base_instance_test3'
assert settings_manager.get_setting(ref('instance'), ref('test3')) == \
'base_instance_test3'
assert settings_manager.get_setting(ref('instance2'), ref('test3')) == \
'base_test3'
settings_manager.overlay[ref('test3')] = 'overlay_test3'
settings_manager.overlay[ref('instance.test3')] = 'overlay_instance_test3'
assert settings_manager.get_setting(ref('instance'), ref('test3')) == \
'overlay_instance_test3'
assert settings_manager.get_setting(ref('instance2'), ref('test3')) == \
'overlay_test3'
settings_manager.base[ref('instance.test4')] = 'base_test4'
settings_manager.overlay[ref('test4')] = 'overlay_test4'
assert settings_manager.get_setting(ref('instance'), ref('test4')) == \
'base_test4'
assert settings_manager.get_setting(ref('instance[10]'), ref('test4')
) == 'base_test4'
settings_manager.base[ref('instance[10].test5')] = 'base_test5'
settings_manager.overlay[ref('test5')] = 'overlay_test5'
assert settings_manager.get_setting(ref('instance'), ref('test5')) == \
'overlay_test5'
assert settings_manager.get_setting(ref('instance[10]'), ref('test5')
) == 'base_test5'
assert settings_manager.get_setting(ref('instance[11]'), ref('test5')
) == 'overlay_test5'
|
177350
|
import logging
import codecs
from scribeui_pyramid.modules.maps.models import Map
from scribeui_pyramid.modules.workspaces.models import Workspace
from BeautifulSoup import BeautifulStoneSoup
from pyramid.view import view_config
from pyramid.response import FileResponse
from sqlalchemy.orm.exc import NoResultFound
log = logging.getLogger(__name__)
import pprint, os, fnmatch
class APIMapcacheViewer(object):
def __init__(self, request):
self.request = request
self.matchdict = request.matchdict
@view_config(
route_name='mapcache.getLayers',
permission='view',
renderer='json',
request_method='GET'
)
def getLayers(self):
response = {
'status': 0,
'errors': [],
'layers': [],
'layernames': []
}
try:
mapID = self.request.GET.get('map')
except KeyError as e:
response['errors'].append('A map ID is required.')
return response
if mapID is None:
response['errors'].append('A map ID is required.')
return response
try:
map = Map.by_id(mapID)
        except NoResultFound as e:
response['errors'].append('This map is unavailable or does not exist.')
return response
workspace = Workspace.by_id(map.workspace_id)
if(workspace.name == self.request.userid):
##First, let's browse the folders to find some finished jobs.
## TODO: Add optional additional paths.
paths = []
## Add the default folder
workspacesDirectory = self.request.registry.settings.get('workspaces.directory', '') + '/'
mapDirectory = workspacesDirectory + self.request.userid + '/' + map.name + '/'
mapcacheDirectory = self.request.registry.settings.get('mapcache.output.directory', '')
if not mapcacheDirectory or mapcacheDirectory == '':
mapcacheDirectory = mapDirectory+'mapcache/'
else:
mapcacheDirectory = mapcacheDirectory.rstrip('/') + '/'
paths.append(mapcacheDirectory)
level = 3 #maximum depth for recursive search
for path in paths:
some_dir = path.rstrip(os.path.sep)
if os.path.isdir(some_dir):
num_sep = some_dir.count(os.path.sep)
for root, dirs, filenames in os.walk(some_dir):
num_sep_this = root.count(os.path.sep)
if num_sep + level <= num_sep_this:
del dirs[:]
for filename in fnmatch.filter(filenames, 'mapcacheConfig.xml'):
response['layernames'].append(os.path.split(root)[1])
response['layers'].append(os.path.join(root, filename))
pprint.pprint(os.path.join(root, filename))
break
else:
response['errors'].append('Access denied.')
return response
@view_config(
route_name='mapcache.tiles',
permission='view',
renderer='json',
request_method='GET'
)
def tiles(self):
response = {
'status': 0,
'errors': [],
}
# Validation
try:
mapID = self.request.GET.get('map')
except KeyError as e:
response['errors'].append('A map ID is required.')
if mapID is None:
response['errors'].append('A map ID is required.')
try:
jobPath = self.request.GET.get('job')
except KeyError as e:
response['errors'].append('A job path is required.')
if jobPath is None:
response['errors'].append('A job path is required.')
try:
tilerequest = self.request.GET.get('request')
except KeyError as e:
response['errors'].append('A tile request is required.')
if tilerequest is None:
response['errors'].append('A tile request is required.')
try:
map = Map.by_id(mapID)
        except NoResultFound as e:
response['errors'].append('This map is unavailable or does not exist.')
return response
workspace = Workspace.by_id(map.workspace_id)
        if len(response['errors']) == 0:
if workspace.name == self.request.userid:
if(os.path.isfile(jobPath)):
# We now get the tileset and the grid from the config file
# NOTE: We only support 1 tileset per config file for now, but several grids are supported.
try:
with codecs.open(jobPath, encoding='utf8') as f:
content = f.read()
f.close()
except IOError:
                        response['errors'].append("An error occurred while opening '" + jobPath + "' file.")
return response
content = BeautifulStoneSoup(content)
tileset = content.mapcache.find('tileset')
#Finding out the name of the tileset
tilesetname = tileset['name']
#Getting the tileset's grid
grid = tileset.find("grid").getText()
#getting the cache's path
cacheName = tileset.find("cache").getText()
cache = content.find("cache", {"name":cacheName})
cachePath = cache.find("base").getText()
tilesPath = os.path.join(cachePath,tilesetname)
tilesPath = os.path.join(tilesPath,grid)
if(os.path.isdir(tilesPath)):
requestArgs = tilerequest.split("/")
z = str(int(requestArgs[2])+1).zfill(2)
x = requestArgs[3].zfill(9)
y = requestArgs[4]
y = y.replace(".png","")
y = y.zfill(9)
imagepath = os.path.join(tilesPath, z, x[:3], x[3:6], x[6:9], y[:3], y[3:6],y[6:9]+".png")
if os.path.isfile(imagepath):
return FileResponse(
imagepath,
content_type="image/png"
)
else:
response['errors'].append('Access denied.')
return response
|
177378
|
from django.test import TestCase
from rest_framework import serializers
class EmptySerializerTestCase(TestCase):
def test_empty_serializer(self):
class FooBarSerializer(serializers.Serializer):
foo = serializers.IntegerField()
bar = serializers.SerializerMethodField('get_bar')
def get_bar(self, obj):
return 'bar'
serializer = FooBarSerializer()
        self.assertEqual(serializer.data, {'foo': 0})
|
177414
|
from datetime import datetime
import os
from collections import defaultdict
import os
import yaml
from .postgresql_manager import PostgreSQL_Manager
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
class OpenDataWriter(object):
def __init__(self, config):
self._config = config
allowed_fields_file_path = (config.field_data_file if config.field_data_file.startswith('/') else
os.path.join(ROOT_DIR, '..', 'cfg_lists', config.field_data_file))
schema = self._get_schema(allowed_fields_file_path)
db_data = {'host_address': config.postgres['host_address'],
'port_number': config.postgres['port'],
'database_name': config.postgres['database_name'],
'table_name': config.postgres['table_name'],
'user': config.postgres['user'],
'password': config.postgres['password'],
'table_schema': schema,
'readonly_users': config.postgres['readonly_users']}
self._db_manager = PostgreSQL_Manager(**db_data)
def write_records(self, records):
self._db_manager.add_data(records)
def _ensure_directory(self, path):
if not os.path.exists(path):
os.makedirs(path)
def _get_schema(self, field_data_file_path):
with open(field_data_file_path) as field_data_file:
schema = []
for field_name, field_data in yaml.safe_load(field_data_file)['fields'].items():
if field_name not in ['id']:
schema.append((field_name, field_data['type']))
schema.sort()
return schema
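# Hedged usage sketch (added for illustration, not part of the original module):
# the shape of the field-data YAML that _get_schema expects and the schema it
# yields. The field names and the temporary path below are made up for the
# example; _get_schema does not use `self`, so it is called unbound here.
def _demo_get_schema(tmp_path='/tmp/example_fields.yaml'):
    example = {'fields': {'id': {'type': 'bigserial'},
                          'requestInTs': {'type': 'bigint'},
                          'securityServerType': {'type': 'varchar(32)'}}}
    with open(tmp_path, 'w') as f:
        yaml.safe_dump(example, f)
    # 'id' is skipped and the rest comes back sorted by field name:
    # [('requestInTs', 'bigint'), ('securityServerType', 'varchar(32)')]
    return OpenDataWriter._get_schema(None, tmp_path)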
|
177415
|
import json
import logging
import os
import re
import sys
import urllib.parse
from urllib.error import HTTPError, URLError
import urllib.request
import praw
import requests
import yaml
from shared.exceptions import AlreadyProcessed
def load_configuration():
conf_file = os.path.join(os.path.dirname(__file__), os.environ['CONFIG'])
with open(conf_file, encoding='utf8') as f:
config = yaml.safe_load(f)
# load dependent configuration
config['FOOTER'] = "\n\n *** \n" + config['INFO_LINK'] + " | " + config[
'CONTACT_LINK']
return config
CONFIG = load_configuration()
def authenticate():
"""Authenticate via praw.ini file, look at praw documentation for more info"""
authentication = praw.Reddit(site_name=CONFIG['BOT_NAME'])
logging.info(f'Authenticated as {authentication.user.me()}')
return authentication
def log(service, stdout=False):
if stdout:
logging.basicConfig(
stream=sys.stdout,
level=logging.INFO,
format=f'{service:<6}: %(asctime)s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
else:
logging.basicConfig(
filename=f"shared/logs/bot.log",
level=logging.INFO,
format=f'{service:<6}: %(asctime)s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'
)
def get_reddit_item(reddit, request):
if request['type'] == "message":
return reddit.inbox.message(request['id'])
else:
return reddit.comment(request['id'])
def contains_link(string):
"""Returns link or empty string"""
match_link = re.search(
r"https?://(www\.)?[-a-zA-Z0-9@:%._+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_+.~#?&/=]*)", string)
return match_link[0] if match_link else ""
def contains_username(name, string):
"""Returns regex search"""
return re.search(r"(?i)u/" + name, string)
def get_lock(request_id):
return f"{CONFIG['REDIS_REQUESTS_LOCKED']}:{request_id}"
def open_lock(redis, request_id):
# Remove redundant lock to free up space
lock = get_lock(request_id)
redis.delete(lock)
def handle_failed_request(redis, request, current_set, exception):
if request['retries'] > 10:
open_lock(redis, request['id'])
request.update(
error=str(exception)
)
next_set = CONFIG['REDIS_REQUESTS_FAILED']
logging.error(f"Reached retry limit. Pushing request {request['id']} : {request['link']} to failed requests.")
else:
request['retries'] += 1
next_set = current_set
request_json = json.dumps(request)
redis.sadd(next_set, request_json)
def is_link_valid(link):
# Check if download is valid without downloading
if "reddit.tube" in link:
if requests.head(link, timeout=10).ok:
return True
return False
try:
status_code = urllib.request.urlopen(link, timeout=2).getcode()
return status_code == 200
except (HTTPError, URLError, ValueError):
return False
def already_processed_check(redis, request):
if redis.sismember(CONFIG['REDIS_REQUESTS_SUCCESS'], request['id']):
raise AlreadyProcessed(request['link'])
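# Hedged usage sketch (added for illustration, not part of the original module):
# the two small text helpers in action. The sample strings and the bot name
# are made up.
def _demo_text_helpers():
    link = contains_link("see https://example.com/clip for the video")
    mentioned = bool(contains_username("MyBot", "thanks u/MyBot!"))
    return link, mentioned  # ('https://example.com/clip', True)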
|
177493
|
from __future__ import division, print_function
import tensorflow as tf
import wave, os, sys
import soundfile as sf
import numpy as np
import librosa
from datetime import datetime
def create_adam_optimizer(learning_rate, momentum):
return tf.train.AdamOptimizer(learning_rate=learning_rate, epsilon=1e-6)
def create_sgd_optimizer(learning_rate, momentum):
return tf.train.MomentumOptimizer(learning_rate=learning_rate,
momentum=momentum)
def create_rmsprop_optimizer(learning_rate, momentum):
return tf.train.RMSPropOptimizer(learning_rate=learning_rate,
momentum=momentum,
epsilon=1e-5)
optimizer_factory = {
'adam': create_adam_optimizer,
'sgd': create_sgd_optimizer,
'rmsprop': create_rmsprop_optimizer
}
def save(saver, sess, logdir, step):
model_name = 'model.ckpt'
checkpoint_path = os.path.join(logdir, model_name)
print('Storing checkpoint to {} ...'.format(logdir), end='')
sys.stdout.flush()
if not os.path.exists(logdir):
os.makedirs(logdir)
saver.save(sess, checkpoint_path, global_step=step)
print('Done.')
def load(saver, sess, logdir):
print("Trying to restore saved checkpoints from {} ...".format(logdir),
end='')
ckpt = tf.train.get_checkpoint_state(logdir)
if ckpt:
print(" Checkpoint found: {}".format(ckpt.model_checkpoint_path))
global_step = int(ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1])
print(" Global step was: {}".format(global_step))
print(" Restoring...", end="")
saver.restore(sess, ckpt.model_checkpoint_path)
print(" Done.")
return global_step
else:
print(" No checkpoint found.")
return None
def get_default_logdir(logdir_root, started_time):
logdir = os.path.join(logdir_root, 'train', started_time)
return logdir
def validate_directories(args):
# Arrangement
logdir_root = args.logdir_root
if logdir_root is None:
logdir_root = './logdir'
logdir = args.logdir
if logdir is None:
logdir = get_default_logdir(logdir_root,
"{0:%Y-%m-%dT%H-%M-%S}".format(datetime.now()))
print('Using default logdir: {}'.format(logdir))
restore_from = args.restore_from
if restore_from is None:
restore_from = logdir
return {
'logdir': logdir,
        'logdir_root': logdir_root,
'restore_from': restore_from
}
def learning_rate_decay(global_step, args, hparams):
warm_up_step = int(hparams.warm_up_step)
decay_step = int(hparams.decay_step)
learning_rate = tf.cond(
global_step < warm_up_step,
lambda: tf.convert_to_tensor(args.learning_rate),
lambda: tf.train.exponential_decay(args.learning_rate, global_step -
warm_up_step + 1, decay_step, 0.5))
return tf.maximum(hparams.mini_lr, learning_rate)
def create_optimizer(global_step, args, hparams):
with tf.variable_scope('optimizer'):
learning_rate_decayed = learning_rate_decay(global_step, args, hparams)
optimizer = optimizer_factory[args.optimizer](
learning_rate=learning_rate_decayed, momentum=args.momentum)
return learning_rate_decayed, optimizer
def one_hot(samples, channels):
with tf.name_scope('one_hot_encode'):
encoded = tf.one_hot(samples, depth=channels, dtype=tf.float32)
shape = [samples.get_shape().as_list()[0], -1, channels]
encoded = tf.reshape(encoded, shape)
return encoded
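# Hedged usage sketch (added for illustration, not part of the original module):
# how the pieces above are typically wired together, namely a global step, the
# warm-up/decay learning-rate schedule and an optimizer picked from
# optimizer_factory. `args` and `hparams` are stand-ins for the real
# argparse/hparams objects and their values are made up.
def _demo_create_optimizer():
    from types import SimpleNamespace
    args = SimpleNamespace(learning_rate=1e-3, optimizer='adam', momentum=0.9)
    hparams = SimpleNamespace(warm_up_step=4000, decay_step=20000, mini_lr=1e-5)
    global_step = tf.Variable(0, trainable=False, name='global_step')
    return create_optimizer(global_step, args, hparams)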
|
177577
|
from app.extensions import db
class User(db.Document):
"""User model """
username = db.StringField()
password = db.StringField()
def to_json2(self):
"""Returns a json representantion of the user.
:returns: a json object.
"""
return {
'id': str(self.id),
'username': self.username
}
|
177580
|
class Marker(object):
def __init__(self):
self._body = None
def make(self):
raise NotImplementedError
def update(self, marker):
pass
def show(self):
if self._body is None:
self._body = self.make()
self.update(self._body)
def hide(self):
if self._body is not None:
self._body.remove()
self._body = None
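# Hedged sketch of a concrete marker (added for illustration, not part of the
# original module). make() builds the underlying body once, update() refreshes
# it, and hide() removes it; the body here is a stand-in for whatever drawing
# primitive a real marker would wrap.
class _PrintMarker(Marker):
    class _Body(object):
        def remove(self):
            print("body removed")
    def make(self):
        print("body created")
        return self._Body()
    def update(self, marker):
        print("body updated")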
|
177626
|
from bs4 import BeautifulSoup
import urlparse
import datetime
from scraper import *
class General(Scraper):
def log_index_page(self):
"""Logs the index page, used for test purposes"""
url = self.url_provider.get_page_url('overview')
res = self.open_url(url)
self.logger.info(res.read())
def get_game_datetime(self):
url = self.url_provider.get_page_url('overview')
res = self.open_url(url)
soup = BeautifulSoup(res.read(), "lxml")
datetime_data = soup.find("li", {"class": "OGameClock"}).text
game_datetime = datetime.datetime.strptime(datetime_data, "%d.%m.%Y %H:%M:%S")
return game_datetime
def get_resources(self, planet):
self.logger.info('Getting resources data for planet %s' % planet.name)
url = self.url_provider.get_page_url('resources', planet)
res = self.open_url(url)
soup = BeautifulSoup(res.read(), "lxml")
metal = int(soup.find(id='resources_metal').text.replace('.', ''))
crystal = int(soup.find(id='resources_crystal').text.replace('.', ''))
deuterium = int(soup.find(id='resources_deuterium').text.replace('.', ''))
energy = int(soup.find(id='resources_energy').text.replace('.', ''))
return Resources(metal, crystal, deuterium, energy)
def get_planets(self):
self.logger.info('Getting planets')
url = self.url_provider.get_page_url('resources')
res = self.open_url(url)
soup = BeautifulSoup(res.read(), "lxml")
links = soup(attrs={'class': "planetlink"})
planets = [Planet((str(link(attrs={'class': "planet-name"})[0].contents[0])),
urlparse.parse_qs(link['href'])['cp'][0],
parse_coordinates(str(link(attrs={'class': "planet-koords"})[0].contents[0])))
for link in links]
return planets
def parse_coordinates(coords):
return coords.replace('[', '').replace(']', '')
|
177658
|
import torch
import torch.optim as optim
import torch.nn.functional as F
import numpy as np
import thinplate as tps
from numpy.testing import assert_allclose
def test_pytorch_grid():
c_dst = np.array([
[0., 0],
[1., 0],
[1, 1],
[0, 1],
], dtype=np.float32)
c_src = np.array([
[10., 10],
[20., 10],
[20, 20],
[10, 20],
], dtype=np.float32) / 40.
theta = tps.tps_theta_from_points(c_src, c_dst)
theta_r = tps.tps_theta_from_points(c_src, c_dst, reduced=True)
np_grid = tps.tps_grid(theta, c_dst, (20,20))
np_grid_r = tps.tps_grid(theta_r, c_dst, (20,20))
pth_theta = torch.tensor(theta).unsqueeze(0)
pth_grid = tps.torch.tps_grid(pth_theta, torch.tensor(c_dst), (1, 1, 20, 20)).squeeze().numpy()
pth_grid = (pth_grid + 1) / 2 # convert [-1,1] range to [0,1]
pth_theta_r = torch.tensor(theta_r).unsqueeze(0)
pth_grid_r = tps.torch.tps_grid(pth_theta_r, torch.tensor(c_dst), (1, 1, 20, 20)).squeeze().numpy()
pth_grid_r = (pth_grid_r + 1) / 2 # convert [-1,1] range to [0,1]
assert_allclose(np_grid, pth_grid)
assert_allclose(np_grid_r, pth_grid_r)
assert_allclose(np_grid_r, np_grid)
|
177671
|
import io
import asyncio
import threading
import queue
import logging
from starlette import concurrency
logger = logging.getLogger('vaex.file.async')
class WriteStream(io.RawIOBase):
'''File like object that has a sync write API, and a generator as consumer.
This is useful for letting 1 thread write to this object, while another thread
in consuming the byte chunks via an iterator, or async iterator.
To let the writer thread stop, simply close the file.
'''
def __init__(self, queue_size=3):
self.writes = []
self.queue = queue.Queue(queue_size)
        self._closed = False
self.pos = 0
self.exception = None
def __iter__(self):
yield from self.chunks()
async def __aiter__(self):
import starlette.concurrency
async for item in starlette.concurrency.iterate_in_threadpool(self.chunks()):
yield item
def __enter__(self, *args):
return self
def __exit__(self, type, value, traceback):
self.close()
self.exception = value
def read(self, *args):
return b''
def seek(self, *args):
        if self._closed:
            raise ValueError('stream closed')
return 0
def readinto(self, *args):
return b''
def tell(self):
        if self._closed:
            raise ValueError('stream closed')
return self.pos
def flush(self):
        if self._closed:
            raise ValueError('stream closed')
pass
def writable(self):
return True
def readable(self):
return False
def seekable(self):
return False
def write(self, b):
        if self._closed:
            raise ValueError('stream closed')
logger.debug('write: %r', len(b))
buffer = memoryview(b)
# print("--", bytes(buffer))
self.pos += len(buffer)
# we need to copy it seems (otherwise if we use a buffered writer, it will reuse the memory)
self.queue.put(bytes(buffer))
# print(len(buffer))
return len(buffer)
def close(self, force=False):
'''Note that close can block due to the queue, using force=True the queue will be cleared'''
logger.debug('closing stream, putting None element in queue to stop chunk yielding')
# make sure nobody will add new items to the queue, at max 1 (the current write)
        self._closed = True
if force:
self._force_put(None)
else:
self.queue.put(None)
    @property
    def closed(self):
        return self._closed
def chunks(self):
logger.debug('yielding chunks')
while True:
logger.debug('waiting for chunk')
item = self.queue.get()
if item is None:
logger.debug('stop yielding, file closed')
break
# if isinstance(item, BaseException):
# logger.debug('stop yielding, exception occured')
# raise item
yield item
if self.exception:
raise self.exception
def getvalue(self):
return b''.join(bytes(k) for k in self.chunks())
# def stop(self, exception):
# '''Empty the queue, and put the exception on the queue, and close the file.
# If we don't empty the queue, this might block
# '''
# self._force_put(exception)
# self.closed = True
def _force_put(self, item):
# keep trying to put an exception on the queue
# we dont want to do this blocking, since a producer
# thread might fill this up.
done = False
while not done:
logger.debug('clearing queue')
while not self.queue.empty():
self.queue.get(block=False)
try:
logger.debug('put %r on queue', item)
self.queue.put(item, block=False)
done = True
except queue.Full:
logger.debug('retry putting exception on queue, because it was filled')
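# Hedged usage sketch (added for illustration, not part of the original module):
# one thread writes to the stream while another consumes the chunks through
# the iterator; closing the stream ends the iteration. Error handling is
# omitted to keep the example short.
def _demo_write_stream():
    def writer(stream):
        for i in range(3):
            stream.write(b'chunk-%d ' % i)
        stream.close()
    stream = WriteStream(queue_size=2)
    t = threading.Thread(target=writer, args=(stream,))
    t.start()
    data = b''.join(stream.chunks())
    t.join()
    return data  # b'chunk-0 chunk-1 chunk-2 '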
|
177681
|
import unittest
from katas.kyu_7.sum_squares_of_numbers_in_list_that_may_contain_more_lists \
import SumSquares
class SumSquaresTestCase(unittest.TestCase):
def test_equal_1(self):
self.assertEqual(SumSquares([1, 2, 3]), 14)
def test_equal_2(self):
self.assertEqual(SumSquares([[1, 2], 3]), 14)
def test_equal_3(self):
self.assertEqual(SumSquares([[[[[[[[[1]]]]]]]]]), 1)
def test_equal_4(self):
self.assertEqual(SumSquares([10, [[10], 10], [10]]), 400)
def test_equal_5(self):
self.assertEqual(SumSquares(
[1, [[3], 10, 5, [2, [3], [4], [5, [6]]]], [10]]
), 325)
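# Hedged reference sketch (added for illustration; the kata's own SumSquares is
# imported above). A recursive implementation consistent with the tests:
# square the numbers and recurse into nested lists.
def _sum_squares_sketch(items):
    total = 0
    for item in items:
        if isinstance(item, list):
            total += _sum_squares_sketch(item)
        else:
            total += item ** 2
    return total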
|
177684
|
import numpy as np
import pybullet as p
import pybullet_data as pd
import pybullet_utils.bullet_client as bc
from gym import spaces
try:
from .. import Environment
from .robots import get_robot
from .tasks import get_task
except ImportError:
from karolos.environments import Environment
from karolos.environments.robot_task_environments.robots import get_robot
from karolos.environments.robot_task_environments.tasks import get_task
class RobotTaskEnvironment(Environment):
def __init__(self, task_config, robot_config, render=False,
bullet_client=None, **kwargs):
self.render = render
self.task_config = task_config
self.robot_config = robot_config
if bullet_client is None:
connection_mode = p.GUI if render else p.DIRECT
bullet_client = bc.BulletClient(connection_mode)
bullet_client.setAdditionalSearchPath(pd.getDataPath())
time_step = 1. / 300.
bullet_client.setTimeStep(time_step)
bullet_client.setRealTimeSimulation(0)
bullet_client.loadURDF("plane.urdf")
self.bullet_client = bullet_client
self.task = get_task(task_config, self.bullet_client)
self.robot = get_robot(robot_config, self.bullet_client)
self.action_space = self.robot.action_space
self.observation_space = spaces.Dict({
'robot': self.robot.observation_space,
'task': self.task.observation_space,
})
self.reward_function = self.task.reward_function
self.success_criterion = self.task.success_criterion
def reset(self, desired_state=None):
"""
Reset the environment and return new state
"""
try:
if desired_state is not None:
observation_robot = self.robot.reset(desired_state["robot"])
observation_task, goal_info, _ = self.task.reset(self.robot,
observation_robot,
desired_state[
"task"])
else:
observation_robot = self.robot.reset()
observation_task, goal_info, _ = self.task.reset(self.robot,
observation_robot)
except AssertionError as e:
return e
state = {
'robot': observation_robot,
'task': observation_task
}
return state, goal_info
def step(self, action):
observation_robot = self.robot.step(action)
observation_task, goal_info, done = self.task.step(observation_robot)
state = {
'robot': observation_robot,
'task': observation_task
}
return state, goal_info, done
if __name__ == "__main__":
import time
env_kwargs = {
"render": True,
"task_config": {
"name": "pick_place",
# "max_steps": 25
},
"robot_config": {
"name": "panda",
# "scale": .1,
# "sim_time": .1
}
}
env = RobotTaskEnvironment(**env_kwargs)
p.resetDebugVisualizerCamera(cameraDistance=1.5,
cameraYaw=70,
cameraPitch=-27,
cameraTargetPosition=(0, 0, 0)
)
time_step = p.getPhysicsEngineParameters()["fixedTimeStep"]
while True:
obs = env.reset()
for _ in np.arange(1. / time_step):
action = env.action_space.sample()
time.sleep(time_step)
observation, goal, done = env.step(action)
reward = env.reward_function(False, goal)
|
177685
|
from typing import Any
from graphscale.grapple.graphql_printer import print_graphql_defs
from graphscale.grapple.parser import parse_grapple
def assert_graphql_def(snapshot: Any, graphql: str) -> None:
result = print_graphql_defs(parse_grapple(graphql))
snapshot.assert_match(result)
def test_basic_type(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { name: String }""")
def test_non_pythonic_name(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { longName: String }""")
def test_nonnullable_type(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { name: String! }""")
def test_list_type(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { names: [String] }""")
def test_list_of_reqs(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { names: [String!] }""")
def test_req_list(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { names: [String]! }""")
def test_req_list_of_reqs(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { names: [String!]! }""")
def test_double_list(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { matrix: [[String]] }""")
def test_ref_to_self(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { other: Test }""")
def test_args(snapshot: Any) -> None:
assert_graphql_def(snapshot, """type Test { relatives(skip: Int, take: Int) : [Test] }""")
def test_args_defaults(snapshot: Any) -> None:
assert_graphql_def(
snapshot, """type Test {
many_args(
defaultTen: Int = 10,
defaultTwenty: Int = 20,
defaultZero: Int = 0,
strArg: String = "foo",
defaultTrue: Boolean = true,
defaultFalse: Boolean = false,
) : [Test]
}"""
)
def test_enum(snapshot: Any) -> None:
assert_graphql_def(
snapshot, """
type Hospital {
status: HospitalStatus
reqStatus: HospitalStatus!
}
enum HospitalStatus {
AS_SUBMITTED
}
"""
)
|
177773
|
def test_logout(ui):
driver = ui.driver
driver.find_element_by_css_selector(".user-dropdown").click()
driver.find_element_by_css_selector(".logout-button").click()
assert driver.find_element_by_css_selector('.login-form')
def test_recorded_sessions_visible(ui_session):
assert ui_session is not None
|
177807
|
user_input = 999
while user_input != "0":
    user_input = input("Please enter a decimal number: ")
    try:
        decimal_number = int(user_input)
        print(bin(decimal_number))
    except ValueError as e:
        print(e)
        print("Error - please enter a decimal (base-10) number only.")
|
177847
|
import json
import re
import requests
from rich.console import Console
LOGIN_URL = "https://www.headspace.com/login"
AUTH_URL = "https://auth.headspace.com/co/authenticate"
BEARER_TOKEN_URL = "https://auth.headspace.com/authorize"
session = requests.Session()
console = Console()
headers = {
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0",
"Accept": "*/*",
"Accept-Language": "en-US,en;q=0.5",
"Content-Type": "application/json",
"Origin": "https://www.headspace.com",
"Connection": "keep-alive",
"TE": "Trailers",
}
session.headers.update(headers)
def get_client_id():
response = session.get(LOGIN_URL)
client_id = re.findall(r'"clientId":"(.+?)",', response.text)[0]
return client_id
def prompt():
email = console.input(f"[bold red]?[/] Email: ")
password = console.input(f"[bold red]?[/] Password: ", password=True)
return email, password
def get_bearer_token(client_id, login_ticket):
params = {
"client_id": client_id,
"response_type": "token",
"response_mode": "web_message",
"redirect_uri": "https://www.headspace.com/auth",
"scope": "openid email",
"audience": "https://api.prod.headspace.com",
"realm": "User-Password-Headspace",
"login_ticket": login_ticket,
"prompt": "none",
}
response = session.get(BEARER_TOKEN_URL, params=params)
html = response.text
bearer_token = re.findall(r'"access_token":"(.+?)"', html)[0]
return bearer_token
def authenticate(email, password):
data = {
"client_id": get_client_id(),
"username": email,
"password": password,
"realm": "User-Password-Headspace",
"credential_type": "http://auth0.com/oauth/grant-type/password-realm",
}
response = session.post(
AUTH_URL,
headers=headers,
data=json.dumps(data),
)
resp_json: dict = response.json()
try:
login_ticket = resp_json["login_ticket"]
except KeyError:
if "error" in resp_json.keys():
console.print(resp_json["error"], style="red")
if "error_description" in resp_json.keys():
console.print(resp_json["error_description"])
else:
console.print(resp_json)
return False
bearer_token = get_bearer_token(data["client_id"], login_ticket)
bearer_token = "bearer " + bearer_token
return bearer_token
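# Hedged usage sketch (added for illustration, not part of the original module):
# the typical flow is to prompt for credentials, exchange them for a bearer
# token and attach that token to later requests. Needs network access and a
# valid account, so it is not executed on import.
def _demo_login():
    email, password = prompt()
    token = authenticate(email, password)
    if token:
        session.headers.update({"authorization": token})
    return token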
|
177873
|
import ctypes
import numpy as np
from devito.tools.utils import prod
__all__ = ['numpy_to_ctypes', 'numpy_to_mpitypes', 'numpy_view_offsets']
def numpy_to_ctypes(dtype):
"""Map numpy types to ctypes types."""
return {np.int32: ctypes.c_int,
np.float32: ctypes.c_float,
np.int64: ctypes.c_int64,
np.float64: ctypes.c_double}[dtype]
def numpy_to_mpitypes(dtype):
"""Map numpy types to MPI datatypes."""
return {np.int32: 'MPI_INT',
np.float32: 'MPI_FLOAT',
np.int64: 'MPI_LONG',
np.float64: 'MPI_DOUBLE'}[dtype]
def numpy_view_offsets(array, base=None):
"""
Retrieve the offset of a view from its base array along each dimension and side.
:param array: A :class:`numpy.ndarray`.
:param base: The base of ``array``. Most of the times the ``base`` is available
through ``array.base``. However, if this function is to be called
within ``__array_finalize__``, where ``base`` hasn't been set yet,
the ``base`` has to be provided explicitly
"""
if not isinstance(array, np.ndarray):
raise TypeError("Expected a `numpy.ndarray`, got `%s`" % type(array))
if array.base is None:
if base is None:
raise ValueError("Cannot access ``array``'s base.")
else:
base = array.base
start_byte_distance = np.byte_bounds(array)[0] - np.byte_bounds(base)[0]
start_elem_distance = start_byte_distance // array.itemsize
assert start_byte_distance % array.itemsize == 0
end_byte_distance = np.byte_bounds(array)[1] - np.byte_bounds(base)[0]
end_elem_distance = (end_byte_distance // array.itemsize) - 1
assert end_byte_distance % array.itemsize == 0
offsets = []
for i, s in enumerate(base.shape):
hyperplane_size = prod(base.shape[i+1:])
# Start
lofs = start_elem_distance // hyperplane_size
start_elem_distance -= lofs*hyperplane_size
# End
rofs = end_elem_distance // hyperplane_size
end_elem_distance -= rofs*hyperplane_size
offsets.append((lofs, s-rofs-1))
return tuple(offsets)
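# Hedged usage sketch (added for illustration, not part of the original module):
# for a 2D base array, the offsets are the number of elements cut away on each
# side of every dimension, which is easy to verify by hand for a small slice.
def _demo_numpy_view_offsets():
    base = np.zeros((6, 8))
    view = base[1:4, 2:5]
    # dimension 0: row 0 before the view, rows 4..5 after it   -> (1, 2)
    # dimension 1: columns 0..1 before it, columns 5..7 after  -> (2, 3)
    return numpy_view_offsets(view, base=base)  # ((1, 2), (2, 3))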
|
177895
|
import argparse
import fnmatch
import os
import pathlib
import sys
import textwrap
import yaml
def write_header(output):
header='''
###
# This file is automatically generated by {}. Do not edit!
###
'''
output.write(textwrap.dedent(header.format(os.path.basename(__file__))).lstrip())
output.write('\n')
def to_ipv4(proto, field, data):
prop = {
'type': 'string',
'description': 'IPv4 {} address'.format(field),
        'pattern': r'^((25[0-5]|2[0-4][0-9]|[01]?[1-9]?[0-9])\.){3}(25[0-5]|2[0-4][0-9]|[01]?[1-9]?[0-9])$'
}
if 'default' in data:
prop['default'] = data['default']
return prop
def to_ipv6(proto, field, data):
prop = {
'type': 'string',
'description': 'IPv6 {} address'.format(field),
'pattern': '^((::[0-9a-fA-F]{1,4})|([0-9a-fA-F]{1,4}::)|(([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F])|(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}))$'
}
if 'default' in data:
prop['default'] = data['default']
return prop
def to_mac(proto, field, data):
prop = {
'type': 'string',
'description': 'Ethernet MAC {} address'.format(field),
'pattern': '^([0-9a-fA-F]{1,2}(.|-|:)){5}[0-9a-fA-F]{1,2}$'
}
if 'default' in data:
prop['default'] = data['default']
return prop
def to_description(proto, field, data):
if 'description' in data:
return data['description']
else:
return '{} {}'.format(proto, field).translate(str.maketrans('_', ' '))
def to_bool(proto, field, data):
prop = {
'type': 'boolean',
'description': to_description(proto, field, data)
}
return prop
def to_number(proto, field, data):
length = int(data['length'])
if length == 1:
return to_bool(proto, field, data)
max_value = int(pow(2, length)) - 1
if 'multipleOf' in data:
max_value *= int(data['multipleOf'])
prop = {
'type': 'integer',
'description': to_description(proto, field, data),
        'format': 'int32' if length < 32 else 'int64',
'minimum': 0,
'maximum': max_value
}
if 'default' in data:
prop['default'] = int(data['default'])
if 'multipleOf' in data:
prop['multipleOf'] = int(data['multipleOf'])
return prop
def to_enumeration_items(proto, field, data):
return [next(iter(t)) for t in data['items']]
def to_enumeration(proto, field, data):
if 'uniqueItems' in data:
prop = {
'type': 'string',
'description': to_description(proto, field, data),
'enum': to_enumeration_items(proto, field, data)
}
else:
prop = {
'type': 'array',
'description': to_description(proto, field, data),
'items': {
'enum': to_enumeration_items(proto, field, data),
'type': 'string'
}
}
if 'default' in data:
prop['default'] = data['default']
return prop
def dummy(field):
return dict()
def to_property(proto, field, data):
format_dispatch = {
'ipv4': to_ipv4,
'ipv6': to_ipv6,
'mac': to_mac,
'number': to_number,
'enumeration': to_enumeration
}
prop = dict()
if 'format' in data and data['format'] in format_dispatch:
prop.update(format_dispatch[data['format']](proto, field, data))
else:
prop.update(to_number(proto, field, data))
return prop
def translate_fields(proto, blob):
properties = dict()
for field, props in blob.items():
properties[field] = to_property(proto, field, props)
return properties
def to_swagger_name(name):
return 'PacketProtocol{}'.format(name.lower().capitalize())
"""
Begin script proper
"""
def main():
parser = argparse.ArgumentParser(
description="Generate swagger protocol specification from YAML definitions")
parser.add_argument('--indir',
nargs='?',
type=pathlib.Path,
default=os.getcwd(),
help="input directory containing YAML definitions")
parser.add_argument('--outfile',
nargs='?',
type=argparse.FileType('w'),
default=sys.stdout,
help="output swagger definition")
args = parser.parse_args()
if not os.path.isdir(args.indir):
sys.stderr.write('Input directory, {}, does not exist\n'.format(args.indir))
sys.exit(1)
if not os.access(args.indir, os.R_OK):
sys.stderr.write('Input directory, {}, is not readable\n'.format(args.indir))
sys.exit(1)
definitions = dict()
for yaml_file in filter(lambda f: fnmatch.fnmatch(f, '*.yaml'), os.listdir(args.indir)):
with open(os.path.join(args.indir, yaml_file), 'r') as f:
for name, data in yaml.load(f, Loader=yaml.FullLoader).items():
definitions[name] = data
swagger = dict()
for name, data in definitions.items():
swagger_name = to_swagger_name(name)
swagger[swagger_name] = {
'type': 'object',
'description': 'Describes {} {} header'.format(
'an' if name.lower()[0] in 'aeio' else 'a',
name),
}
swagger[swagger_name]['properties'] = translate_fields(name, data['fields'])
write_header(args.outfile)
args.outfile.write(yaml.dump({'definitions': swagger}))
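# Hedged usage sketch (added for illustration, not part of the original script):
# how a single YAML field definition is translated. An 8-bit numeric field with
# a default becomes a bounded int32 swagger property; the field data below is
# made up.
def _demo_to_property():
    data = {'length': 8, 'default': 64, 'description': 'time to live'}
    return to_property('ipv4', 'ttl', data)
    # -> {'type': 'integer', 'description': 'time to live', 'format': 'int32',
    #     'minimum': 0, 'maximum': 255, 'default': 64}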
if __name__ == "__main__":
main()
|
177909
|
class CloudWatchEvent:
def __init__(self,
schedule_expression: str,
is_active: bool,
name: str =None):
self.name = name
self.schedule_expression = schedule_expression
self.is_active = is_active
|
177964
|
from app import __metadata__ as meta
from configparser import ConfigParser, ExtendedInterpolation
from pathlib import Path
from shutil import copy
import os
import io
import logging
log = logging.getLogger(__name__)
APP = meta.APP_NAME
'''
This component is responsible for configuration management.
If this is the first time the app is run then it will create the
config file, and set the system environment variables for the app
The logic for checking the existence of the conf file
and creating it if it's missing should be in init.py
TODO:
Add 2 more config sources
1) service - Get config from a service endpoint (JSON)
2) db - Get config from a database
'''
class MetaConf(object):
def __init__(self):
conf = ConfigParser()
conf.read_string(meta.__default_config__)
self.config = conf
self.resolveMacros()
self.log_file, self.config_file = self.getConfPaths()
def resolveMacros(self):
for section in self.config.sections():
for k in self.config.options(section):
v = self.config.get(section, k)
if '{{app_root}}' in v:
if 'SNAP_USER_COMMON' in Config.envGet2('SNAP_USER_COMMON'):
v = v.replace('{{app_root}}', self.getSnapPath())
else:
v = v.replace('{{app_root}}', self.getConfRoot())
self.config.set(section, k, v)
def getConfRoot(self):
module_dir = os.path.dirname(__file__)
app_dir = os.path.dirname(module_dir)
return os.path.dirname(app_dir)
def getSnapPath(self):
if not os.path.exists(str(Path.home()) + '/conf'):
os.makedirs(str(Path.home()) + '/conf')
os.makedirs(str(Path.home()) + '/logs')
Path(str(Path.home()) + '/logs/lxdui.log').touch()
copy(self.getConfRoot() + '/conf/auth.conf', str(Path.home()) + '/conf')
copy(self.getConfRoot() + '/conf/log.conf', str(Path.home()) + '/conf')
return str(Path.home())
def getConfPaths(self):
f = io.StringIO()
self.config.write(f)
c = ConfigParser(interpolation=ExtendedInterpolation())
f.seek(0)
c.read_file(f)
log_file = c.get(APP, '{}.log.file'.format(APP.lower()))
config_file = c.get(APP, '{}.conf.file'.format(APP.lower()))
return log_file, config_file
class Config(object):
def __init__(self, **kwargs):
"""
Initialises the Config object and loads the configuration into memory.
Order of operations:
1) if a config file has been provided then use that one
2) check to see if we have a local config file, and if so use that
3) no config file found so we'll create one with defaults
:param kwargs: conf=</path/to/config/file> #External source
"""
m = MetaConf()
self.config = None
self.log_file = m.log_file
self.config_file = m.config_file
# conf file specified by the caller
if kwargs:
file = kwargs.get('conf')
log.info('Loading external config file: {}'.format(file))
if file:
self.config = self.load('external', file)
self.envSet(log=self.log_file, conf=file)
else:
raise Exception('Unsupported parameter {}'.format(kwargs))
# no conf parameters specified so check local conf file
elif Path(self.config_file).exists():
log.info('Using config file path = {}'.format(self.config_file))
self.config = self.load('ini', self.config_file)
self.envSet()
# load the default config from meta
elif meta.AUTO_LOAD_CONFIG:
log.info('Load default config (meta)')
self.config = m.config
self.envSet()
self.save()
else:
raise Exception('Unable to load the configuration.')
def load(self, conf_type, *file_path):
"""
Load the configuration into memory.
The configuration is stored in the Config object.
:param conf_type:
:param file_path:
:return:
"""
if conf_type == 'external':
external_conf_file = Path(*file_path)
config = self.getConfig(external_conf_file)
return config
elif conf_type == 'ini':
conf = self.getConfig(self.config_file)
return conf
elif conf_type == 'service':
raise Exception('Not implemented.')
elif conf_type == 'db':
raise Exception('Not implemented.')
else:
raise Exception('Unable to determine configuration type.')
def get(self, section, key):
"""
Retrieve a configuration parameter.
:param section: The section of the ini file to search
:param key: The key to look up
:return: Returns the value associated with the key
"""
return self.config.get(section, key)
def set(self, section, key, value):
"""
Update a configuration parameter.
:param section: The section of the ini config file to update
:param key: The key that needs to be updated
:param value: The new value associated with the key
:return:
"""
self.config.set(section, key, value)
def show(self):
"""
Prints out a listing of the config file to the console.
:return:
"""
for section in self.config.sections():
for k in self.config.options(section):
v = self.config.get(section, k)
print('{} = {}'.format(k, v))
def save(self):
"""
Save the contents of the config object to the conf file.
:return:
"""
with open(self.config_file, 'w') as f:
self.config.write(f)
@staticmethod
def envGet():
"""
Retrieve the environment variables containing the log and conf paths.
:return: Returns a dictionary containing the file paths
"""
env = {}
for k, v in os.environ.items():
if k in ['LXDUI_LOG', 'LXDUI_CONF']:
env.update({k: os.environ.get(k)})
return env
@staticmethod
def envGet2(key):
"""
Retrieve the environment variables containing the log and conf paths.
:return: Returns a dictionary containing the file paths
"""
env = {}
for k, v in os.environ.items():
if key == k:
env.update({k: os.environ.get(k)})
return env
def envSet(self, **kwargs):
"""
Set the environment variables for the log and the conf file
        :param kwargs: Specify log=<log_path> and conf=<conf_path>
:return:
"""
log_path = None
conf_path = None
if kwargs.get('log') and kwargs.get('conf'):
log_path = kwargs.get('log')
conf_path = kwargs.get('conf')
log.debug('Setting environment variables')
else:
log_path = self.log_file
conf_path = self.config_file
os.environ['{}_LOG'.format(APP)] = log_path
os.environ['{}_CONF'.format(APP)] = conf_path
def envShow(self):
if not self.envGet():
print('Environment variables for {} have not been set'.format(APP))
else:
for k, v in self.envGet().items():
print('{} = {}'.format(k, v))
def getConfig(self, file):
"""
        Checks that the file exists and retrieves the contents of the config file.
:param file: A string representing the path to the conf file.
:return: Returns a config object.
"""
# if the file exists then read the contents
if Path(file).exists():
try:
config = self.parseConfig(file)
return config
except IOError as e:
                log.info('Unable to open file: %s', e)
else:
raise FileNotFoundError
@staticmethod
def parseConfig(file):
"""
Parses the config file. The file must be of ini format.
        If the file exists but is empty, an exception will be raised.
:param file: The path of the file to parse
:return: Return a config object
"""
# make sure the file is not empty
size = Path(file).stat().st_size
if size != 0:
config = ConfigParser(interpolation=ExtendedInterpolation())
config.read(file.__str__())
return config
else:
raise Exception('File is empty.')
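# Hedged usage sketch (added for illustration, not part of the original module):
# loading the configuration and reading, updating and persisting a value. The
# 'example.key' name is hypothetical; real keys come from the app's default
# config in meta.
def _demo_config():
    cfg = Config()  # falls back to the local or default (meta) config
    conf_file = cfg.get(APP, '{}.conf.file'.format(APP.lower()))
    cfg.set(APP, 'example.key', 'example-value')  # hypothetical key
    cfg.save()
    return conf_file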
|