text string | size int64 | token_count int64 |
|---|---|---|
from util import *
from arclib import bz2
from bz2 import compress, decompress
def test_incremental_compress():
    """Data produced by arclib's incremental Compressor must round-trip through stdlib bz2.decompress."""
    compressor = bz2.Compressor()
    basic_test_c(compressor, decompress)
def test_incremental_decompress():
    """arclib's incremental Decompressor must reconstruct data compressed with stdlib bz2.compress."""
    decompressor = bz2.Decompressor()
    basic_test_d(decompressor, compress)
| 243 | 85 |
from train.train_agent import train_agent
from train.train_agents import train_agents
| 86 | 25 |
import argparse
import json
import logging
import multiprocessing as mp
import os
import time
from typing import List
from detectron2.structures import BoxMode
from detectron2 import model_zoo
from detectron2.data import MetadataCatalog, DatasetCatalog
from detectron2.structures import BoxMode
from detectron2.utils.visualizer import Visualizer
from detectron2.config import get_cfg
from detectron2.engine import DefaultPredictor, DefaultTrainer
from detectron2.evaluation import COCOEvaluator, inference_on_dataset
from detectron2.data import build_detection_test_loader
from detectron2.utils.visualizer import ColorMode
from detectron2.modeling import build_model
import detectron2.data.transforms as T
from detectron2.checkpoint import DetectionCheckpointer
import numpy as np
import pandas as pd
import torch
import torchvision
from utils.frame_reader import FrameReaderMgrBase
from utils.file_downloader import FileDownloader
import tator
# Log file is truncated ("w") on every run, so it only holds the latest session.
log_filename = "detectron2_inference.log"
# Emit every record both to the log file and to the console.
logging.basicConfig(
    handlers=[logging.FileHandler(log_filename, mode="w"), logging.StreamHandler()],
    format="%(asctime)s %(levelname)s:%(message)s",
    datefmt="%m/%d/%Y %I:%M:%S %p",
    level=logging.INFO,
)
logger = logging.getLogger(__name__)
class FrameReaderMgr(FrameReaderMgrBase):
    """Frame reader that applies a detectron2 augmentation to each decoded frame
    and packages it as a detectron2 model-input dict."""

    def __init__(
        self,
        *,
        augmentation: T.Augmentation,
        **kwargs,
    ):
        """Store the augmentation; all other kwargs go to the base reader."""
        super().__init__(**kwargs)
        self._augmentation = augmentation

    def _format_img(self, img, frame_num):
        """Transform ``img`` and return the model-input dict.

        The original frame height/width are preserved in the dict so the
        model can map detections back to source coordinates.
        """
        orig_height, orig_width = img.shape[:2]
        transform = self._augmentation.get_transform(img)
        transformed = transform.apply_image(img)
        # HWC uint8 -> CHW float32 tensor, as detectron2 models expect.
        chw = transformed.astype("float32").transpose(2, 0, 1)
        return {
            "image": torch.as_tensor(chw),
            "height": orig_height,
            "width": orig_width,
            "frame_num": frame_num,
        }
class LocalizationGenerator:
    """Converts raw model detections into Tator localization specs, running NMS first."""

    def __init__(self, model_nms, nms_threshold, localization_type):
        """Store the NMS callable, its IoU threshold, and the Tator localization type id."""
        self._model_nms = model_nms
        self._nms_threshold = nms_threshold
        self._localization_type = localization_type

    def __call__(self, element, frame, media_id):
        """
        Yields `LocalizationSpec`s from the model detections in a video frame.
        """
        instances = element["instances"]
        keep = self._model_nms(
            instances.pred_boxes.tensor,
            instances.scores,
            self._nms_threshold,
        )
        # Keep only the NMS survivors; also write them back into the element.
        instances = instances[keep.to("cpu").tolist()]
        element["instances"] = instances
        fields = instances.get_fields()
        boxes = fields["pred_boxes"]
        confidences = fields["scores"]
        classes = fields["pred_classes"]
        # TODO check attribute names and determine if they should be dynamic
        for box, score, cls in zip(boxes, confidences, classes):
            x1, y1, x2, y2 = box.tolist()
            yield {
                "type": self._localization_type,
                "media_id": media_id,
                "frame": frame,
                "x": x1,
                "y": y1,
                "width": x2 - x1,
                "height": y2 - y1,
                "Species": cls,
                "Score": score,
            }
def parse_args():
    """Define and evaluate the command-line interface for this script."""
    arg_parser = argparse.ArgumentParser(description="Testing script for testing video data.")
    # Required positional input.
    arg_parser.add_argument("video_path", help="Path to video file")
    # Model configuration sources.
    arg_parser.add_argument(
        "--inference-config",
        help="Path to inference config file.",
        # TODO remove default here
        default="/mnt/md0/Projects/Fathomnet/Training_Files/2021-06-29-Detectron/detectron_files/fathomnet_config.yaml",
    )
    arg_parser.add_argument(
        "--builtin-model-config",
        help="Path to built-in model config file.",
        # TODO remove default here
        default="COCO-Detection/retinanet_R_50_FPN_3x.yaml",
    )
    arg_parser.add_argument(
        "--model-weights",
        help="Path to the trained model weights",
        # TODO remove default here
        default="/home/hugh/mycode/detectron/out/model_0076543.pth",
    )
    # Inference tuning knobs.
    arg_parser.add_argument(
        "--gpu", help="Id of the GPU to use (as reported by nvidia-smi).", default=0, type=int
    )
    arg_parser.add_argument(
        "--score-threshold", help="Threshold to filter detections", default=0.7, type=float
    )
    arg_parser.add_argument(
        "--batch-size", help="batch size for frames to process at a time", default=4, type=int
    )
    arg_parser.add_argument(
        "--nms-threshold", help="threshold for NMS routine to suppress", default=0.55, type=float
    )
    # Tator connection and target options.
    arg_parser.add_argument("--media-ids", help="The ids of the media to process", nargs="+", type=int)
    arg_parser.add_argument(
        "--localization-type", help="The id of the localization type to generate", type=int
    )
    arg_parser.add_argument("--host", type=str, help="Tator host to use")
    arg_parser.add_argument("--token", type=str, help="Token to use for tator.")
    arg_parser.add_argument(
        "--work-dir", type=str, help="The name of the directory to use for local storage"
    )
    return arg_parser.parse_args()
def main(
    *,
    inference_config: str,
    builtin_model_config: str,
    model_weights: str,
    video_path: str,
    batch_size: int,
    nms_threshold: float,
    score_threshold: float,
    gpu: int,
    media_ids: List[int],
    localization_type: int,
    host: str,
    token: str,
    work_dir: str,
):
    """Run detectron2 inference over Tator media and upload detections as localizations.

    Args:
        inference_config: Path to the project-specific detectron2 config overrides.
        builtin_model_config: Model-zoo base config name.
        model_weights: Path to the trained checkpoint.
        video_path: Currently unused here; kept for CLI compatibility.
        batch_size: Number of frames sent to the model per forward pass.
        nms_threshold: IoU threshold for the separate NMS pass.
        score_threshold: Currently unused here; kept for CLI compatibility.
        gpu: Currently unused here; kept for CLI compatibility.
        media_ids: Tator media ids to process.
        localization_type: Tator localization type id for created detections.
        host: Tator host URL.
        token: Tator API token.
        work_dir: Local directory for downloaded media.
    """
    # Download associated media
    api = tator.get_api(host=host, token=token)
    download = FileDownloader(work_dir, api)
    media_paths = download(media_ids)

    # Instantiate the model
    cfg = get_cfg()
    cfg.merge_from_file(model_zoo.get_config_file(builtin_model_config))
    cfg.merge_from_file(inference_config)
    cfg.MODEL.RETINANET.SCORE_THRESH_TEST = 0.3  # TODO magic number
    cfg.MODEL.WEIGHTS = model_weights
    cfg.MODEL.DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
    model = build_model(cfg)  # returns a torch.nn.Module
    checkpointer = DetectionCheckpointer(model)
    checkpointer.load(cfg.MODEL.WEIGHTS)
    model.eval()

    # Separate NMS layer
    model_nms = torchvision.ops.nms
    aug = T.ResizeShortestEdge(
        short_edge_length=[cfg.INPUT.MIN_SIZE_TEST],
        max_size=cfg.INPUT.MAX_SIZE_TEST,
        sample_style="choice",
    )
    localization_generator = LocalizationGenerator(model_nms, nms_threshold, localization_type)
    frame_reader = FrameReaderMgr(augmentation=aug)

    for media_id, media_path in zip(media_ids, media_paths):
        # BUG FIX: results is reset per media; previously it was shared across
        # the whole run, so detections could be attributed/uploaded incorrectly.
        results = []
        with frame_reader(media_path):
            logger.info(f"Generating detections for {media_id}")
            st = time.time()  # start time (currently not reported)
            while True:
                try:
                    batch = frame_reader.get_frames(batch_size)
                except Exception:
                    # Narrowed from a bare `except:`; get_frames signals
                    # end-of-video by raising.
                    break
                frames = [ele["frame_num"] for ele in batch]
                with torch.no_grad():
                    model_outputs = model(batch)
                results.extend(
                    loc
                    for frame_detections, frame in zip(model_outputs, frames)
                    for loc in localization_generator(frame_detections, frame, media_id)
                )
        if results:
            # BUG FIX: the upload previously referenced undefined names
            # `tator_api` and `project` (NameError). Use the api handle built
            # above and derive the project from the media itself.
            # NOTE(review): confirm `.project` is the correct attribute on the
            # Tator media object.
            project = api.get_media(media_id).project
            created_ids = []
            for response in tator.util.chunked_create(
                api.create_localization_list, project, localization_spec=results
            ):
                created_ids += response.id
            n_requested = len(results)
            n_created = len(created_ids)
            if n_created == n_requested:
                logger.info(f"Created {n_created} localizations for {media_id}!")
            else:
                logger.warning(
                    f"Requested the creation of {n_requested} localizations, but only {n_created} were created for {media_id}"
                )
        else:
            logger.info(f"No detections for media {media_id}")
if __name__ == "__main__":
    # parse arguments
    args = parse_args()
    # Forward every parsed CLI option to main() as a keyword argument; the
    # argparse dest names match main()'s keyword-only parameters.
    main(**vars(args))
    logger.info("Finished")
| 8,104 | 2,495 |
from typing import Any, Callable, Dict, List, Tuple
try:
from typing_extensions import Protocol
except ImportError:
from typing import Protocol # type: ignore
# HTTP header lists as (name, value) pairs: decoded strings vs. raw bytes.
StrHeaderListType = List[Tuple[str, str]]
RawHeaderListType = List[Tuple[bytes, bytes]]
# Loosely-typed header/parameter mappings.
HeaderDictType = Dict[str, Any]
ParamDictType = Dict[str, str]
# Callback used to attach a note/annotation; signature is intentionally open.
AddNoteMethodType = Callable[..., None]
class HttpResponseExchange(Protocol):
    """Structural interface for objects that receive an HTTP response stream.

    Implementations get the status line and headers first, then zero or more
    body chunks, and finally a done signal carrying any trailers.
    """

    def response_start(
        self, status_code: bytes, status_phrase: bytes, res_hdrs: RawHeaderListType
    ) -> None:
        """Called once with the raw status line parts and response headers."""
        ...

    def response_body(self, chunk: bytes) -> None:
        """Called for each chunk of the response body, in order."""
        ...

    def response_done(self, trailers: RawHeaderListType) -> None:
        """Called once after the body is complete, with any trailer headers."""
        ...
| 679 | 211 |
from dataclasses import dataclass
from typing import Union
NoneType = type(None)
@dataclass
class UserDiffStats:
    """Per-difficulty map counts for a BeatSaver user.

    The hand-written ``__init__`` takes the raw API dict; the dataclass
    decorator still supplies ``__repr__`` and ``__eq__`` over these fields.
    """
    easy: int
    expert: int
    expertPlus: int
    hard: int
    normal: int
    total: int

    def __init__(self, data):
        """Populate every field from the raw API payload dict."""
        self.easy = data["easy"]
        self.expert = data["expert"]
        self.expertPlus = data["expertPlus"]
        self.hard = data["hard"]
        self.normal = data["normal"]
        self.total = data["total"]
@dataclass
class UserStats:
    """Aggregate mapping statistics for a BeatSaver user."""
    totalUpvotes: int
    totalDownvotes: int
    totalMaps: int
    rankedMaps: int
    avgBpm: float
    avgDuration: float
    avgScore: float
    firstUpload: str
    lastUpload: str
    diffStats: UserDiffStats

    def __init__(self, data):
        """Populate every field from the raw API payload dict."""
        self.totalUpvotes = data["totalUpvotes"]
        self.totalDownvotes = data["totalDownvotes"]
        self.totalMaps = data["totalMaps"]
        self.rankedMaps = data["rankedMaps"]
        self.avgBpm = data["avgBpm"]
        self.avgDuration = data["avgDuration"]
        self.avgScore = data["avgScore"]
        self.firstUpload = data["firstUpload"]
        self.lastUpload = data["lastUpload"]
        # Nested structure parsed into its own dataclass.
        self.diffStats = UserDiffStats(data["diffStats"])
@dataclass
class UserDetail:
    """Public profile details for a BeatSaver user."""
    id: str
    name: str
    hash: Union[str, NoneType]
    avatar: str
    stats: UserStats

    def __init__(self, data):
        """Populate from the raw API payload; optional keys become None."""
        self.id = data["id"]
        self.name = data["name"]
        # Hashes are a legacy field for old beatsaver accounts
        self.hash = data.get("hash")
        self.avatar = data["avatar"]
        self.stats = UserStats(data["stats"]) if "stats" in data else None
import math
from typing import Sequence, Tuple
import torch
from torch import nn
from torch.nn import functional as F
from tensorfn.config import config_model
from pydantic import StrictInt, StrictFloat
from .layer import DropPath, tuple2, PositionwiseFeedForward
def LayerNorm(dim):
    """Return an ``nn.LayerNorm`` with the epsilon (1e-6) used throughout this model.

    (Was a lambda assigned to a name — PEP 8 E731; a def keeps the same call
    signature and gives the factory a proper name in tracebacks.)
    """
    return nn.LayerNorm(dim, eps=1e-6)
def patchify(input, size):
    """Group non-overlapping ``size`` x ``size`` patches into the channel dim.

    (batch, height, width, dim) -> (batch, height//size, width//size, size*size*dim);
    height and width must be divisible by ``size``.
    """
    batch, height, width, dim = input.shape
    patched = input.view(batch, height // size, size, width // size, size, dim)
    patched = patched.permute(0, 1, 3, 2, 4, 5)
    return patched.reshape(batch, height // size, width // size, -1)
class MultiHeadedLocalAttention(nn.Module):
    """Windowed multi-head self-attention with learned relative-position bias.

    Attention is computed independently within non-overlapping
    ``window_size`` x ``window_size`` windows; when ``shift`` is set the
    feature map is cyclically rolled by half a window first and a mask blocks
    attention across the wrap-around seam (Swin-style shifted windows).
    """

    def __init__(
        self, dim, n_head, dim_head, input_size, window_size, shift, dropout=0
    ):
        """
        Args:
            dim: channel dimension of the (batch, H, W, dim) input.
            n_head: number of attention heads.
            dim_head: per-head dimension.
            input_size: (H, W) of the feature map; must divide by window_size.
            window_size: side length of each attention window.
            shift: whether to cyclically shift by half a window first.
            dropout: attention dropout probability.
        """
        super().__init__()
        self.dim_head = dim_head
        self.n_head = n_head
        # One projection producing Q, K and V together (hence the factor 3).
        self.weight = nn.Linear(dim, n_head * dim_head * 3, bias=True)
        self.linear = nn.Linear(n_head * dim_head, dim)
        self.input_size = input_size
        self.window_size = window_size
        self.dropout = dropout
        self.shift = shift
        y_pos, x_pos, local_mask = self.make_mask_pos(input_size, window_size, shift)
        pos_size = y_pos.shape[0]
        # Flatten (dy, dx) offsets into one index over the
        # (2*window_size-1)^2 relative-position table.
        pos = y_pos * (2 * window_size - 1) + x_pos
        self.register_buffer("pos", pos[0].reshape(window_size ** 2, window_size ** 2))
        self.rel_pos = nn.Embedding((2 * window_size - 1) ** 2, n_head)
        # Relative-position bias starts at zero and is learned.
        self.rel_pos.weight.detach().zero_()
        if shift:
            self.register_buffer(
                "local_mask",
                ~local_mask.reshape(pos_size, window_size ** 2, window_size ** 2),
            )

    def make_mask_pos(self, input_size, window_size, shift):
        """Precompute relative-position indices and, for shifted windows, the
        mask of position pairs allowed to attend to each other.

        Returns (y_pos, x_pos, local_mask); local_mask is None when not
        shifting.
        """
        h, w = input_size
        h //= window_size
        w //= window_size
        yy, xx = torch.meshgrid(
            torch.arange(window_size * h), torch.arange(window_size * w)
        )
        if shift:
            # Roll the coordinate grids so positions track the shifted map.
            roll = -math.floor(window_size / 2)
            yy = torch.roll(yy, (roll, roll), (0, 1))
            xx = torch.roll(xx, (roll, roll), (0, 1))
        # Regroup coordinates per window: (n_windows, window, window).
        y_c = (
            yy.view(h, window_size, w, window_size)
            .permute(0, 2, 1, 3)
            .reshape(-1, window_size, window_size)
        )
        x_c = (
            xx.view(h, window_size, w, window_size)
            .permute(0, 2, 1, 3)
            .reshape(-1, window_size, window_size)
        )
        x_diff = (
            x_c.transpose(1, 2).unsqueeze(1) - x_c.transpose(1, 2).unsqueeze(2)
        ).transpose(2, 3)
        x_flag = x_diff.abs() < window_size
        y_diff = y_c.unsqueeze(1) - y_c.unsqueeze(2)
        y_flag = y_diff.abs() < window_size
        x_diff = x_diff.unsqueeze(1)
        y_diff = y_diff.unsqueeze(2)
        if shift:
            # True where two positions came from the same pre-shift region and
            # may therefore attend to each other.
            local_mask = x_flag.unsqueeze(1) & y_flag.unsqueeze(2)
            x_diff = x_diff * local_mask
            y_diff = y_diff * local_mask
        else:
            local_mask = None
        x_diff = x_diff.expand(-1, window_size, -1, -1, -1)
        y_diff = y_diff.expand(-1, -1, window_size, -1, -1)
        # Shift offsets from [-(w-1), w-1] into [0, 2w-2] for embedding lookup.
        x_pos = x_diff + (window_size - 1)
        y_pos = y_diff + (window_size - 1)
        return y_pos, x_pos, local_mask

    def forward(self, input):
        """Windowed attention over ``input`` of shape (batch, H, W, dim);
        output has the same shape."""
        batch, height, width, dim = input.shape
        h_stride = height // self.window_size
        w_stride = width // self.window_size
        window = self.window_size
        if self.shift:
            roll = -math.floor(window / 2)
            input = torch.roll(input, (roll, roll), (1, 2))
        def reshape(input):
            # (B, H, W, n_head*dim_head) -> (B, n_windows, n_head, window^2, dim_head)
            return (
                input.reshape(
                    batch,
                    h_stride,
                    window,
                    w_stride,
                    window,
                    self.n_head,
                    self.dim_head,
                )
                .permute(0, 1, 3, 5, 2, 4, 6)
                .reshape(batch, -1, self.n_head, window * window, self.dim_head)
            )
        query, key, value = self.weight(input).chunk(3, dim=-1)  # B, S, H, W^2, D
        query = reshape(query)
        key = reshape(key).transpose(-2, -1)
        value = reshape(value)
        # Scaled dot-product score per window.
        score = query @ key / math.sqrt(self.dim_head)  # B, S, H, W^2, W^2
        rel_pos = self.rel_pos(self.pos)  # W^2, W^2, H
        score = score + rel_pos.permute(2, 0, 1).unsqueeze(0).unsqueeze(1)
        if self.shift:
            # Block attention across the wrap-around seam introduced by roll.
            score = score.masked_fill(
                self.local_mask.unsqueeze(0).unsqueeze(2), float("-inf")
            )
        attn = F.softmax(score, -1)
        attn = F.dropout(attn, self.dropout, training=self.training)
        out = attn @ value  # B, S, H, W^2, D
        # Scatter the windows back to the (B, H, W, C) layout.
        out = (
            out.view(
                batch, h_stride, w_stride, self.n_head, window, window, self.dim_head
            )
            .permute(0, 1, 4, 2, 5, 3, 6)
            .reshape(batch, height, width, self.n_head * self.dim_head)
        )
        out = self.linear(out)
        if self.shift:
            # Undo the cyclic shift.
            out = torch.roll(out, (-roll, -roll), (1, 2))
        return out
class TransformerLayer(nn.Module):
    """Pre-norm transformer block: windowed local attention then feed-forward,
    each wrapped in a DropPath (stochastic depth) residual connection."""

    def __init__(
        self,
        dim,
        n_head,
        dim_head,
        dim_ff,
        input_size,
        window_size,
        shift,
        activation=nn.SiLU,
        drop_ff=0,
        drop_attn=0,
        drop_path=0,
    ):
        """
        Args:
            dim: channel dimension of the (batch, H, W, dim) input.
            n_head / dim_head: attention head count and per-head size.
            dim_ff: hidden size of the feed-forward sublayer.
            input_size: (H, W) feature-map size for the attention masks.
            window_size: attention window side length.
            shift: whether this layer uses shifted windows.
            activation: feed-forward activation class.
            drop_ff / drop_attn / drop_path: dropout probabilities.
        """
        super().__init__()
        self.norm_attn = LayerNorm(dim)
        self.attn = MultiHeadedLocalAttention(
            dim, n_head, dim_head, input_size, window_size, shift, drop_attn
        )
        self.drop_path = DropPath(drop_path)
        self.norm_ff = LayerNorm(dim)
        self.ff = PositionwiseFeedForward(
            dim, dim_ff, activation=activation, dropout=drop_ff
        )

    def set_drop_path(self, p):
        # Allows the stochastic-depth rate to be (re)assigned after
        # construction; see SwinTransformer.set_dropout.
        self.drop_path.p = p

    def forward(self, input):
        """Residual attention then residual feed-forward; shape preserved."""
        out = input + self.drop_path(self.attn(self.norm_attn(input)))
        out = out + self.drop_path(self.ff(self.norm_ff(out)))
        return out
class PatchEmbedding(nn.Module):
    """Initial patch embedding: fold window_size x window_size pixel patches
    into vectors, project them to ``out_dim``, then LayerNorm."""

    def __init__(self, in_dim, out_dim, window_size):
        super().__init__()
        self.window_size = window_size
        patch_dim = in_dim * window_size * window_size
        self.linear = nn.Linear(patch_dim, out_dim)
        self.norm = nn.LayerNorm(out_dim)

    def forward(self, input):
        """(B, H, W, in_dim) -> (B, H/ws, W/ws, out_dim); projection before norm."""
        patches = patchify(input, self.window_size)
        return self.norm(self.linear(patches))
class PatchMerge(nn.Module):
    """Downsampling between stages: fold window_size x window_size patches,
    LayerNorm them, then project (bias-free) to ``out_dim``.

    Note the order differs from PatchEmbedding: here norm comes BEFORE the
    projection.
    """

    def __init__(self, in_dim, out_dim, window_size):
        super().__init__()
        self.window_size = window_size
        patch_dim = in_dim * window_size * window_size
        self.norm = nn.LayerNorm(patch_dim)
        self.linear = nn.Linear(patch_dim, out_dim, bias=False)

    def forward(self, input):
        """(B, H, W, in_dim) -> (B, H/ws, W/ws, out_dim); norm before projection."""
        patches = patchify(input, self.window_size)
        return self.linear(self.norm(patches))
def reduce_size(size, reduction):
    """Divide both spatial dimensions of ``size`` by ``reduction`` (floor division)."""
    height, width = size
    return (height // reduction, width // reduction)
@config_model(name="swin_transformer", namespace="model", use_type=True)
class SwinTransformer(nn.Module):
    """Swin-style hierarchical vision transformer for image classification.

    A 4x4 patch embedding followed by four stages of windowed-attention
    transformer layers; stages 2-4 are preceded by a PatchMerge that halves
    the spatial resolution.
    """

    def __init__(
        self,
        image_size: Tuple[StrictInt, StrictInt],
        n_class: StrictInt,
        depths: Tuple[StrictInt, StrictInt, StrictInt, StrictInt],
        dims: Tuple[StrictInt, StrictInt, StrictInt, StrictInt],
        dim_head: StrictInt,
        n_heads: Tuple[StrictInt, StrictInt, StrictInt, StrictInt],
        dim_ffs: Tuple[StrictInt, StrictInt, StrictInt, StrictInt],
        window_size: StrictInt,
        drop_ff: StrictFloat = 0.0,
        drop_attn: StrictFloat = 0.0,
        drop_path: StrictFloat = 0.0,
    ):
        """
        Args:
            image_size: (H, W) of the input image.
            n_class: number of output classes.
            depths: transformer layer count per stage.
            dims: channel dimension per stage.
            dim_head: per-head attention dimension (shared by all stages).
            n_heads: attention head count per stage.
            dim_ffs: feed-forward hidden size per stage.
            window_size: attention window side length.
            drop_ff / drop_attn / drop_path: dropout probabilities.
        """
        super().__init__()
        self.depths = depths

        def stage(i, in_dim, input_size, reduction):
            # Thin wrapper binding the per-stage hyperparameters to make_block.
            return self.make_block(
                depths[i],
                in_dim,
                dims[i],
                n_heads[i],
                dim_head,
                dim_ffs[i],
                input_size,
                window_size,
                reduction,
                drop_ff,
                drop_attn,
            )

        # 4x4 patch embedding, then four stages; stages 2-4 downsample by 2.
        # (in_dim=3 for block1 is unused since reduction=1 adds no PatchMerge.)
        self.patch_embedding = PatchEmbedding(3, dims[0], 4)
        self.block1 = stage(0, 3, reduce_size(image_size, 4), 1)
        self.block2 = stage(1, dims[0], reduce_size(image_size, 4), 2)
        self.block3 = stage(2, dims[1], reduce_size(image_size, 4 * 2), 2)
        self.block4 = stage(3, dims[2], reduce_size(image_size, 4 * 2 * 2), 2)
        self.final_linear = nn.Sequential(nn.LayerNorm(dims[-1]))
        linear = nn.Linear(dims[-1], n_class)
        nn.init.normal_(linear.weight, std=0.02)
        nn.init.zeros_(linear.bias)
        self.classifier = nn.Sequential(nn.AdaptiveAvgPool2d(1), nn.Flatten(1), linear)
        self.apply(self.init_weights)
        self.set_dropout(None, drop_path)

    def set_dropout(self, dropout, drop_path):
        """Assign linearly increasing stochastic-depth (DropPath) rates across
        all transformer layers.

        ``dropout`` is currently unused; kept for interface compatibility.
        """
        n_blocks = sum(self.depths)
        dp_rate = [drop_path * float(i) / n_blocks for i in range(n_blocks)]
        i = 0
        # BUG FIX: replaces four copy-pasted loops that used bare `except:` to
        # skip PatchMerge modules; an explicit capability check cannot hide
        # unrelated errors raised inside set_drop_path.
        for blocks in (self.block1, self.block2, self.block3, self.block4):
            for block in blocks:
                if hasattr(block, "set_drop_path"):
                    block.set_drop_path(dp_rate[i])
                    i += 1

    def init_weights(self, module):
        """Normal(0, 0.02) init for Linear layers, ones/zeros for LayerNorm."""
        if isinstance(module, nn.Linear):
            nn.init.normal_(module.weight, std=0.02)
            if module.bias is not None:
                nn.init.zeros_(module.bias)
        elif isinstance(module, nn.LayerNorm):
            nn.init.ones_(module.weight)
            nn.init.zeros_(module.bias)

    def make_block(
        self,
        depth,
        in_dim,
        dim,
        n_head,
        dim_head,
        dim_ff,
        input_size,
        window_size,
        reduction,
        drop_ff,
        drop_attn,
    ):
        """Build one stage: an optional PatchMerge downsample followed by
        ``depth`` TransformerLayers, alternating shifted windows."""
        block = []
        if reduction > 1:
            block.append(PatchMerge(in_dim, dim, reduction))
        for i in range(depth):
            block.append(
                TransformerLayer(
                    dim,
                    n_head,
                    dim_head,
                    dim_ff,
                    reduce_size(input_size, reduction),
                    window_size,
                    # NOTE(review): shift is applied on even-indexed layers;
                    # the Swin paper shifts on odd-indexed layers — confirm
                    # this ordering is intentional.
                    shift=i % 2 == 0,
                    drop_ff=drop_ff,
                    drop_attn=drop_attn,
                )
            )
        return nn.Sequential(*block)

    def forward(self, input):
        """Classify: (B, 3, H, W) image -> (B, n_class) logits."""
        # Model operates channels-last internally.
        out = self.patch_embedding(input.permute(0, 2, 3, 1))
        out = self.block1(out)
        out = self.block2(out)
        out = self.block3(out)
        out = self.block4(out)
        # Back to channels-first for the pooling classifier head.
        out = self.final_linear(out).permute(0, 3, 1, 2)
        out = self.classifier(out)
        return out
| 11,599 | 4,133 |
import tensorflow as tf
# TensorFlow 1.x defaults to graph mode; opt in to eager execution so
# downstream code can assume eager semantics (TF 2.x is already eager and
# has no enable_eager_execution).
if tf.__version__.startswith('1.'):
    tf.enable_eager_execution()
| 93 | 30 |
import json
def search_records():
    """Scan every cleaned subscriber record against the taxpayer, redbook and
    terrorist databases and persist whatever matches are found.

    For each record: enrich it with basic number info, query the three
    databases by CNIC/number, merge every hit into one dict, and hand the
    result to main_dbt_handler.
    """
    # BUG FIX: file handle is now closed via a with-block.
    with open('lifetech_cleandata.json') as cleaned_data:
        data = json.load(cleaned_data)
    for record in data:
        number = record.get("number")
        cnic = record.get("cnic")
        my_dic = basic_info_merger(record)
        print(f'[+] searching for number > {number}')
        # BUG FIX: the original if/elif ladder mixed `result02 and result03 is
        # not None` (precedence confusion), overwrote the result01 merge when
        # result02 matched, and skipped merging my_dic for a result02-only
        # hit. Collect all hits and merge them uniformly instead.
        hits = [
            found
            for found in (
                search_taxpayers_record(cnic),
                search_redbook_record(cnic, number),
                search_terrorists_record(cnic, number),
            )
            if found is not None
        ]
        result = merge_found_records(my_dic, *hits) if hits else my_dic
        main_dbt_handler(number, result)
# def basic_info_merger(dict):
# with open ('basic_number_info.json', 'r') as basic_num_info:
# num_info = json.load(basic_num_info)
# for data in num_info:
# # print(data)
# number = str(data.get('number'))[2:-2]
# # print(number)
# number2 = '+92'+ dict['number']
# # print ("with" + number2)
# if ('+92'+dict['number'])==number:
# print("number matcheddd")
# new_dict = merge_found_records(dict, data)
# return new_dict
# return dict
def basic_info_merger(record):
    """Merge carrier/base info for ``record``'s phone number from
    basic_number_info.json.

    Returns the merged dict for the first matching number, or ``record``
    unchanged when nothing matches.

    (Fixes: the parameter shadowed the builtin ``dict``; the unused local
    ``number2`` is removed; the '+92' prefixing is hoisted out of the loop.)
    """
    with open('basic_number_info.json', 'r') as basic_num_info:
        num_info = json.load(basic_num_info)
    # The info file stores numbers with the +92 country code prefix.
    full_number = '+92' + record['number']
    for data in num_info:
        if full_number == data.get('number'):
            print("number matcheddd")
            return merge_found_records(record, data)
    return record
def merge_found_records(*dicts):
    """Combine dicts so every key maps to the list of values it takes across
    the inputs, preserving input order within each list."""
    all_keys = {key for source in dicts for key in source}
    return {
        key: [source[key] for source in dicts if key in source]
        for key in all_keys
    }
def search_taxpayers_record(cnic):
    """Look up ``cnic`` against the taxpayer sheet (matched on the NTN column).

    Returns a dict of business details for the first match, or None
    (implicitly) when nothing matches.
    """
    with open('snooper/sheet7.json', 'r') as sheet:
        tax_payers = json.load(sheet)
    for records in tax_payers['Sheet1']:
        if cnic == records['NTN']:
            return {
                # 'CNIC' deliberately omitted — the caller already has it.
                'BUSINESS_NAME': records['BUSINESS_NAME'],
                'NAME REGISTERED TO': records['NAME'],
            }
def search_redbook_record(cnic, number):
    """Look up ``cnic`` in the redbook database.

    ``number`` is currently unused; kept for interface consistency with the
    other search functions. Returns detail dict for the first match, or None
    (implicitly) when nothing matches.
    """
    with open('snooper/redbook.json', 'r') as book:
        redbook = json.load(book)
    for data2 in redbook:
        if cnic == data2['CNIC']:
            return {
                # 'CNIC' deliberately omitted — the caller already has it.
                'F/NAME': data2['PARENTAGE'],
                'ADDRESS': data2['ADDRESS'],
                'PHONE NUM': data2['PHONE NUM'],
                'FIR': data2['FIR no.'],
            }
def search_terrorists_record(cnic, number):
    """Look up ``cnic`` in the wanted-persons database.

    ``number`` is currently unused; kept for interface consistency with the
    other search functions. Returns detail dict for the first match, or None
    (implicitly) when nothing matches.
    """
    with open('snooper/data.json', 'r') as db:
        terrorists = json.load(db)
    for data2 in terrorists:
        if cnic == data2['CNIC']:
            return {
                # 'CNIC' deliberately omitted — the caller already has it.
                'F/NAME': data2['FNAME'],
                'ADDRESS': data2['ADDRESS'],
                'REWARD': data2['REWARD'],
                'FIR': data2['FIR'],
                'RELIGIOUS/POLITICAL AFFILIATION': data2['RELIGIOUS/POLITICAL AFFILIATION'],
            }
def main_dbt_handler(number, record):
    """Append ``record`` to main_dbt.json and echo it, or print a notice when
    there is nothing to save.

    Args:
        number: phone number being processed (currently unused; kept for
            interface compatibility).
        record: merged findings dict; falsy means no match anywhere.
    """
    if record:
        with open('main_dbt.json', 'a+') as main_dbt:
            json.dump(record, main_dbt, indent=4)
            main_dbt.write('\n')
            # BUG FIX: removed the redundant explicit close(); the with-block
            # already closes the file.
        print(str(record) + '\n')
    else:
        # BUG FIX: corrected the "fount" typo in the user-facing message.
        print('[-] No criminal record found....\n[-] No business or tax payers record found....\n')
# Script entry point: scan every cleaned record against all databases.
search_records()
# import json
# def search_records():
# cleaned_data = open('lifetech_cleandata.json')
# data = json.load(cleaned_data)
# my_dic={}
# for record in data:
# number = record.get("number")
# cnic = record.get("cnic")
# my_dic=record
# lifetech_dic = {}
# lifetech_dic['NAME'] = record['name']
# lifetech_dic['CNIC'] = record['cnic']
# lifetech_dic['PHONE NUM'] = record['number']
# if 'city' in my_dic:
# lifetech_dic['CITY'] = record['city']
# if 'address'in my_dic:
# lifetech_dic['ADDRESS'] = record['address']
# result01 = search_taxpayers_record(cnic)
# result02 = search_redbook_record(cnic, number)
# result03 = search_terrorists_record(cnic, number)
# print(f'[+] searching for number > {number}')
# result = {}
# if result01 is not None:
# result = merge_found_records(lifetech_dic, result01)
# if result02 and result03 is not None:
# result = merge_found_records(lifetech_dic, result01, result02, result03)
# elif result02 is not None:
# result = merge_found_records(lifetech_dic, result01, result02)
# elif result03 is not None:
# result = merge_found_records(lifetech_dic, result01, result03)
# elif result02 is not None:
# result = result02
# if result03 is not None:
# result = merge_found_records(lifetech_dic ,result02, result03)
# print(result)
# elif result03 is not None:
# result = merge_found_records(lifetech_dic, result03)
# else:
# result= lifetech_dic
# main_dbt_handler(number, result)
# def merge_found_records(*dicts):
# return {
# k: [d[k] for d in dicts if k in d]
# for k in set(k for d in dicts for k in d)
# }
# def search_taxpayers_record(cnic):
# with open('snooper/sheet7.json', 'r') as tax_payers:
# tax_payers = json.load(tax_payers)
# for records in tax_payers['Sheet1']:
# tax_payers_dictionary = {}
# if cnic == records['NTN']:
# # tax_payers_dictionary['CNIC'] = cnic
# tax_payers_dictionary['BUSINESS_NAME'] = records['BUSINESS_NAME']
# tax_payers_dictionary['NAME REGISTERED TO'] = records['NAME']
# return tax_payers_dictionary
# def search_redbook_record(cnic, number):
# with open('snooper/redbook.json', 'r') as redbook:
# redbook = json.load(redbook)
# for data2 in redbook:
# redbook_dictionary = {}
# if cnic == (data2['CNIC']):
# # redbook_dictionary['CNIC'] = cnic
# redbook_dictionary['F/NAME'] = data2['PARENTAGE']
# redbook_dictionary['ADDRESS'] = data2['ADDRESS']
# redbook_dictionary['PHONE NUM'] = data2['PHONE NUM']
# redbook_dictionary['FIR'] = data2['FIR no.']
# return redbook_dictionary
# def search_terrorists_record(cnic, number):
# with open('snooper/data.json', 'r') as terrorists:
# terrorists = json.load(terrorists)
# for data2 in terrorists:
# terrorists_dictionary = {}
# if cnic == (data2['CNIC']):
# # terrorists_dictionary['CNIC'] = cnic
# terrorists_dictionary['F/NAME'] = data2['FNAME']
# terrorists_dictionary['ADDRESS'] = data2['ADDRESS']
# terrorists_dictionary['REWARD'] = data2['REWARD']
# terrorists_dictionary['FIR'] = data2['FIR']
# terrorists_dictionary['RELIGIOUS/POLITICAL AFFILIATION'] = data2['RELIGIOUS/POLITICAL AFFILIATION']
# return terrorists_dictionary
# def main_dbt_handler(number, record):
# if record:
# with open('main_dbt.json', 'a+') as main_dbt:
# json.dump(record, main_dbt, indent=4)
# main_dbt.write('\n')
# main_dbt.close()
# print(str(record)+'\n')
# else:
# print('[-] No criminal record found....\n[-] No business or tax payers record fount....\n')
# search_records()
# import json
# def search_records():
# cleaned_data = open('lifetech_cleandata.json')
# data = json.load(cleaned_data)
# for record in data:
# number = record.get("number")
# cnic = record.get("cnic")
# result01 = search_taxpayers_record(cnic)
# result02 = search_redbook_record(cnic, number)
# result03 = search_terrorists_record(cnic, number)
# print(f'[+] searching for number > {number}')
# result = {}
# if result01 is not None:
# result = result01
# if result02 and result03 is not None:
# result = merge_found_records(result01, result02, result03)
# elif result02 is not None:
# result = merge_found_records(result01, result02)
# elif result03 is not None:
# result = merge_found_records(result01, result03)
# elif result02 is not None:
# result = result02
# if result03 is not None:
# result = merge_found_records(result02, result03)
# elif result03 is not None:
# result = merge_found_records(result03)
# main_dbt_handler(number, result)
# def merge_found_records(*dicts):
# return {
# k: [d[k] for d in dicts if k in d]
# for k in set(k for d in dicts for k in d)
# }
# def search_taxpayers_record(cnic):
# with open('sheet7.json', 'r') as tax_payers:
# tax_payers = json.load(tax_payers)
# for records in tax_payers['Sheet1']:
# tax_payers_dictionary = {}
# if cnic == records['NTN']:
# # tax_payers_dictionary['CNIC'] = cnic
# tax_payers_dictionary['BUSINESS_NAME'] = records['BUSINESS_NAME']
# tax_payers_dictionary['NAME REGISTERED TO'] = records['NAME']
# return tax_payers_dictionary
# def search_redbook_record(cnic, number):
# with open('redbook.json', 'r') as redbook:
# redbook = json.load(redbook)
# for data2 in redbook:
# redbook_dictionary = {}
# if cnic == (data2['CNIC']):
# # redbook_dictionary['CNIC'] = cnic
# redbook_dictionary['F/NAME'] = data2['PARENTAGE']
# redbook_dictionary['ADDRESS'] = data2['ADDRESS']
# redbook_dictionary['PHONE NUM'] = data2['PHONE NUM']
# redbook_dictionary['FIR'] = data2['FIR no.']
# return redbook_dictionary
# def search_terrorists_record(cnic, number):
# with open('data.json', 'r') as terrorists:
# terrorists = json.load(terrorists)
# for data2 in terrorists:
# terrorists_dictionary = {}
# if cnic == (data2['CNIC']):
# # terrorists_dictionary['CNIC'] = cnic
# terrorists_dictionary['F/NAME'] = data2['FNAME']
# terrorists_dictionary['ADDRESS'] = data2['ADDRESS']
# terrorists_dictionary['REWARD'] = data2['REWARD']
# terrorists_dictionary['FIR'] = data2['FIR']
# return terrorists_dictionary
# def main_dbt_handler(number, record):
# if record:
# with open('main_dbt.json', 'a+') as main_dbt:
# json.dump(record, main_dbt, indent=4)
# main_dbt.write('\n')
# main_dbt.close()
# print(str(record)+'\n')
# else:
# print('[-] No criminal record found....\n[-] No business or tax payers record fount....\n')
# search_records()
| 12,340 | 4,046 |
# from django import forms
# from .models import ExcelUpload
#
#
# class ExcelUploadForm(forms.ModelForm):
# class Meta:
# model = ExcelUpload
# fields = ('document', )
| 189 | 53 |
from .loading import *
from .utils_dict_list import *
from .get_logger import get_neptune_logger, get_tensorboard_logger
from .map_dict import Map,DotDict | 154 | 49 |
import gzip
from pathlib import Path
import numpy as np
data_path = Path(__file__).parent / '..' / 'data'
train_images_file = data_path / 'train-images-idx3-ubyte.gz'
train_labels_file = data_path / 'train-labels-idx1-ubyte.gz'
test_images_file = data_path / 't10k-images-idx3-ubyte.gz'
test_labels_file = data_path / 't10k-labels-idx1-ubyte.gz'
def gz_to_npz(file):
    """Map a ``.gz`` archive path to its pre-extracted ``.npz`` sibling.

    Uses ``Path.with_suffix`` instead of slicing the last three characters,
    which only worked when the path literally ended in ``.gz``.
    """
    return Path(file).with_suffix('.npz')
train_images_file_array = gz_to_npz(train_images_file)
train_labels_file_array = gz_to_npz(train_labels_file)
test_images_file_array = gz_to_npz(test_images_file)
test_labels_file_array = gz_to_npz(test_labels_file)
def read_int(f, size=1):
    """Read ``size`` bytes from ``f`` and decode them as a big-endian unsigned int.

    Uses ``read`` rather than ``read1``: ``read1`` performs at most one
    underlying read and may legally return fewer than ``size`` bytes (notably
    on gzip streams), which would silently decode a truncated value.
    """
    return int.from_bytes(f.read(size), 'big', signed=False)
def read_images(file, magic_number):
    """Parse an IDX3-format gzip image file into a list of (rows, cols) uint8 arrays.

    Args:
        file: path to the ``.gz`` IDX3 file.
        magic_number: expected magic number (2051 for MNIST image files).

    Returns:
        List of 2-D ``np.ubyte`` arrays, one per image.
    """
    print('Read images', str(file))
    with gzip.open(str(file)) as f:
        assert magic_number == read_int(f, 4)
        n_images = read_int(f, 4)
        n_rows = read_int(f, 4)
        n_cols = read_int(f, 4)
        n_pixels = n_rows * n_cols
        images = []
        for n in range(n_images):
            # BUG FIX: use read() instead of read1() — read1() may return
            # fewer than n_pixels bytes on a gzip stream, truncating the image.
            raw = f.read(n_pixels)
            data = np.frombuffer(raw, dtype=np.ubyte, count=n_pixels).reshape(n_rows, n_cols)
            images.append(data)
    return images
def read_labels(file, magic_number, n_images):
    """Parse an IDX1-format gzip label file into a list of ints.

    Asserts both the expected magic number (2049 for MNIST labels) and that
    the file's label count matches ``n_images``.
    """
    print('Read labels', str(file))
    with gzip.open(str(file)) as f:
        assert magic_number == read_int(f, 4)
        assert n_images == read_int(f, 4)
        # One single-byte label per image.
        return [read_int(f) for _ in range(n_images)]
def get_data():
    """Return (train_images, train_labels, test_images, test_labels) as numpy arrays.

    On first call the raw MNIST ``.gz`` files are parsed and cached as
    ``.npz`` archives alongside them; subsequent calls load the cached
    arrays directly.
    """
    # One .npz file's existence stands in for all four caches.
    if not test_images_file_array.exists():
        print('Pre-extracted data does not exist... Creating data....')
        # 2051 / 2049 are the IDX magic numbers for image / label files.
        train_images = read_images(train_images_file, 2051)
        train_labels = read_labels(train_labels_file, 2049, len(train_images))
        test_images = read_images(test_images_file, 2051)
        test_labels = read_labels(test_labels_file, 2049, len(test_images))
        np.savez_compressed(str(train_images_file_array), data=train_images)
        np.savez_compressed(str(train_labels_file_array), data=train_labels)
        np.savez_compressed(str(test_images_file_array), data=test_images)
        np.savez_compressed(str(test_labels_file_array), data=test_labels)
    return np.load(train_images_file_array)['data'], \
        np.load(train_labels_file_array)['data'], \
        np.load(test_images_file_array)['data'], \
        np.load(test_labels_file_array)['data']
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    # Smoke test: display the second training digit with its label.
    train_img, train_lbl, test_img, test_lbl = get_data()
    plt.imshow(train_img[1], cmap='Greys')
    plt.title('Number: ' + str(train_lbl[1]))
    plt.show()
| 2,676 | 1,004 |
from __future__ import absolute_import
from __future__ import print_function
import os.path
import numpy
# from nipype.interfaces.base import (
# TraitedSpec, traits, File, isdefined,
# CommandLineInputSpec, CommandLine)
from nipype.interfaces.base import (
TraitedSpec, traits, BaseInterface, File, isdefined,
Directory, CommandLineInputSpec, CommandLine, InputMultiPath)
class GrepInputSpec(CommandLineInputSpec):
    """Command-line inputs for the ``grep`` wrapper."""

    # Search pattern, first positional argument.
    match_str = traits.Str(argstr='%s', position=0, desc="The string to search for")
    # File to search, second positional argument.
    in_file = File(argstr='%s', position=1, desc="The file to search")
    # Results file via shell redirection; auto-generated when unset.
    out_file = File(genfile=True, argstr='> %s', position=2,
                    desc=("The file to contain the search results"))
class GrepOutputSpec(TraitedSpec):
    """Outputs produced by the ``grep`` wrapper."""

    out_file = File(exists=True, desc="The search results")
class Grep(CommandLine):
    """Runs ``grep`` over a file, redirecting the matches to an output file.

    (Fixes the original docstring, which was copy-pasted from a zip
    interface.)
    """

    _cmd = 'grep'
    input_spec = GrepInputSpec
    output_spec = GrepOutputSpec

    def _list_outputs(self):
        """Report the (possibly auto-generated) output file path."""
        outputs = self._outputs().get()
        outputs['out_file'] = self._gen_filename('out_file')
        return outputs

    def _gen_filename(self, name):
        """Return the path for ``out_file``; defaults to search_results.txt in cwd.

        Raises:
            ValueError: for any output name other than 'out_file'.
        """
        if name != 'out_file':
            # BUG FIX: `assert False` is stripped under `python -O`; raise an
            # explicit error instead.
            raise ValueError('Unexpected output name: {!r}'.format(name))
        if isdefined(self.inputs.out_file):
            return self.inputs.out_file
        return os.path.join(os.getcwd(), 'search_results.txt')
class AwkInputSpec(CommandLineInputSpec):
    """Command-line inputs for the ``awk`` wrapper."""

    # Awk program, single-quoted, first positional argument.
    format_str = traits.Str(argstr="'%s'", position=0, desc="The string to search for")
    # File to parse, second positional argument.
    in_file = File(argstr='%s', position=1, desc="The file to parse")
    # Results file via shell redirection; auto-generated when unset.
    out_file = File(genfile=True, argstr='> %s', position=2,
                    desc=("The file to contain the parsed results"))
class AwkOutputSpec(TraitedSpec):
    """Outputs produced by the ``awk`` wrapper."""

    out_file = File(exists=True, desc="The parsed results")
class Awk(CommandLine):
    """Runs ``awk`` over a file, redirecting the parsed output to a file.

    (Fixes the original docstring, which was copy-pasted from a zip
    interface.)
    """

    _cmd = 'awk'
    input_spec = AwkInputSpec
    output_spec = AwkOutputSpec

    def _list_outputs(self):
        """Report the (possibly auto-generated) output file path."""
        outputs = self._outputs().get()
        outputs['out_file'] = self._gen_filename('out_file')
        return outputs

    def _gen_filename(self, name):
        """Return the path for ``out_file``; defaults to awk_results.txt in cwd.

        Raises:
            ValueError: for any output name other than 'out_file'.
        """
        if name != 'out_file':
            # BUG FIX: `assert False` is stripped under `python -O`; raise an
            # explicit error instead.
            raise ValueError('Unexpected output name: {!r}'.format(name))
        if isdefined(self.inputs.out_file):
            return self.inputs.out_file
        return os.path.join(os.getcwd(), 'awk_results.txt')
class ConcatFloatsInputSpec(TraitedSpec):
    # Paths of text files, each expected to contain a single float value.
    in_files = InputMultiPath(desc='file name')
class ConcatFloatsOutputSpec(TraitedSpec):
    # One float parsed from each input file, in input order.
    out_list = traits.List(traits.Float, desc='input floats')
class ConcatFloats(BaseInterface):
    """Joins values from a list of files into a single list"""
    input_spec = ConcatFloatsInputSpec
    output_spec = ConcatFloatsOutputSpec

    def _list_outputs(self):
        # Parse one float per input file, preserving the input order.
        collected = []
        for file_path in self.inputs.in_files:
            with open(file_path) as handle:
                collected.append(float(handle.read()))
        outputs = self._outputs().get()
        outputs['out_list'] = collected
        return outputs

    def _run_interface(self, runtime):
        # Nothing to execute -- all work happens in _list_outputs.
        return runtime
class ExtractMetricsInputSpec(TraitedSpec):
    # The float values to summarise.
    in_list = traits.List(traits.Float, desc='input floats')
class ExtractMetricsOutputSpec(TraitedSpec):
    # Summary statistics of the input list.
    std = traits.Float(desc="The standard deviation")
    avg = traits.Float(desc="The average")
class ExtractMetrics(BaseInterface):
    """Computes the average and standard deviation of a list of floats."""
    input_spec = ExtractMetricsInputSpec
    output_spec = ExtractMetricsOutputSpec

    def _list_outputs(self):
        values = self.inputs.in_list
        outputs = self._outputs().get()
        # numpy.std defaults to the population standard deviation (ddof=0).
        outputs['std'] = numpy.std(values)
        outputs['avg'] = numpy.average(values)
        return outputs

    def _run_interface(self, runtime):
        # Nothing to execute -- all work happens in _list_outputs.
        return runtime
# Ad-hoc demo pipeline with hard-coded local paths:
# 1) grep the 'height' lines out of a metrics file ...
grep = Grep()
grep.inputs.match_str = 'height'
grep.inputs.in_file = '/Users/tclose/Desktop/arcana_tutorial/subject1/visit1/metrics.txt'
grep.inputs.out_file = '/Users/tclose/Desktop/test-out.txt'
grep.run()
# 2) ... extract the second column with awk ...
awk = Awk()
awk.inputs.format_str = '{print $2}'
awk.inputs.in_file = '/Users/tclose/Desktop/test-out.txt'
awk.inputs.out_file = '/Users/tclose/Desktop/test-awk.txt'
awk.run()
# 3) ... gather the per-subject/visit values into one list ...
concat_floats = ConcatFloats()
concat_floats.inputs.in_files = [
    '/Users/tclose/Desktop/arcana_tutorial/subject1/visit1/awk.txt',
    '/Users/tclose/Desktop/arcana_tutorial/subject1/visit2/awk.txt',
    '/Users/tclose/Desktop/arcana_tutorial/subject2/visit1/awk.txt']
result = concat_floats.run()
print('Output list {}'.format(result.outputs.out_list))
# 4) ... and summarise with mean / standard deviation.
extract_metrics = ExtractMetrics()
extract_metrics.inputs.in_list = result.outputs.out_list
result = extract_metrics.run()
print('Average: {}'.format(result.outputs.avg))
print('Std.: {}'.format(result.outputs.std))
| 5,063 | 1,622 |
from Bio import SeqIO
import sys
# Split the FASTA records in the file given on the command line into two
# per-phase files, keyed by the last character of each record id; ids that
# match neither phase are reported on stdout.
with open("phase0.fasta", 'w') as phase0, open("phase1.fasta", 'w') as phase1:
    handles = {'0': phase0, '1': phase1}
    for record in SeqIO.parse(sys.argv[1], "fasta"):
        target = handles.get(record.id[-1:])
        if target is None:
            print(record.id)
        else:
            target.write(record.format("fasta"))
| 381 | 128 |
import time
from tests.integration.integration_test_case import IntegrationTestCase
from app.settings import RESPONDENT_ACCOUNT_URL
class TestSession(IntegrationTestCase):
    # End-to-end checks for the session-expiry and sign-out pages.

    def test_session_expired(self):
        self.get('/session-expired')
        self.assertInPage('Your session has expired')

    def test_session_signed_out(self):
        # Default account URL should appear on the signed-out page.
        self.get('/signed-out')
        self.assertInPage('Your survey answers have been saved')
        self.assertInPage(RESPONDENT_ACCOUNT_URL)

    def test_session_signed_out_with_overridden_Account_url(self):
        # A launch-time override must replace the default account URL.
        self.launchSurvey(account_service_url='https://ras.ons.gov.uk')
        self.get('/signed-out')
        self.assertInPage('Your survey answers have been saved')
        self.assertNotInPage(RESPONDENT_ACCOUNT_URL)
        self.assertInPage('https://ras.ons.gov.uk')

    def test_session_signed_out_with_none_overridden_Account_url(self):
        # Passing None falls back to the default account URL.
        self.launchSurvey(account_service_url=None)
        self.get('/signed-out')
        self.assertInPage('Your survey answers have been saved')
        self.assertInPage(RESPONDENT_ACCOUNT_URL)

    def test_session_jti_token_expired(self):
        # A token that expired 60 seconds ago must be rejected with 401.
        self.launchSurvey(exp=time.time() - float(60))
        self.assertStatusUnauthorised()
| 1,245 | 392 |
import os
from gym.envs.mujoco import reacher3dof
from rllab.envs.gym_env import GymEnv
# Work around MKL runtime-mismatch aborts and render headlessly through EGL.
os.environ['MKL_SERVICE_FORCE_INTEL'] = '1'
os.environ['MUJOCO_GL'] = 'egl'
# 'oracle' mode is specific to this Reacher3DOF wrapper -- TODO confirm semantics.
env = GymEnv("Reacher3DOF-v1", mode='oracle', force_reset=True)
time_step = env.reset()
print(time_step)
while True:
    env.render()
    time_step = env.step(env.action_space.sample())
    # action = policy(observation)
    # observation, reward, done, info = env.step(action)
    #
    # if done:
    #     observation, info = env.reset(return_info=True)
    print(time_step)
# NOTE(review): the loop above has no break, so this line is unreachable.
env.close()
| 557 | 220 |
import spacy
from spacy.lang.de.examples import sentences
#from collections import OrderedDict
#import numpy as np
# Load the small German pipeline and parse an example sentence containing
# nested adverbial and relative clauses.
nlp = spacy.load('de_core_news_sm')
doc = nlp("Weil die Sonne scheint, ist es warm, nachdem ich ein Eis, das sehr lecker war, gegessen habe.")
print(doc.text)
#for token in doc:
# print(token.text, token.pos_, token.dep_)
#TODO add recursion!
#TODO check for empty main clauses!
def split_relative_clauses(sentence):
    """Print the relative clauses ("rc" dependency in the German TIGER
    scheme -- TODO confirm label semantics) of *sentence* and the remaining
    main-clause fragments.

    NOTE(review): exploratory code; prints intermediate state and does not
    return anything. Indentation reconstructed -- verify against original.
    """
    relc = []
    main = []
    rc_left = []
    rc_right = []
    start = 0
    for token in sentence:
        print(token, token.i, token.dep_)
        if token.dep_ == "rc":
            # Record the span of each relative clause.
            start = token.left_edge.i
            rel_clause = sentence[token.left_edge.i: token.right_edge.i+1]
            rc_right.append(token.i+1)
            rc_left.append(token.left_edge.i)
            relc.append(rel_clause)
    count = 0
    # Second pass: carve the main-clause fragments between the clauses.
    for j in rc_left:
        print(start, rc_left, rc_right)
        end = j
        if start == end:
            # NOTE(review): `end` is recomputed but never used afterwards.
            end = rc_left[count]
        main1 = sentence[start: rc_right[count]]
        start = rc_right[count]
        count += 1
        if len(main1) > 1:
            main.append(main1)
    print("main: ", main)
    print("relcl: ", relc)
def split_adverbial_clauses(sentence):
    """Print the adverbial clauses of *sentence* (tokens whose dependency is
    "cp", i.e. complementizer in the German scheme -- TODO confirm) and the
    main-clause fragments between them, then recurse into each clause to
    split out relative clauses.
    """
    advclauses = []
    main = []
    advcl_left = []
    advcl_right = []
    for token in sentence:
        if token.dep_ == "cp":
            # Clause spans from the complementizer's left edge to its head.
            adverbial_clause = sentence[token.left_edge.i : token.head.i+1]
            advcl_right.append(token.head.i+1)
            advcl_left.append(token.left_edge.i)
            advclauses.append(adverbial_clause)
    start = 0
    count = 0
    # Fragments of the main clause are whatever lies between the clauses.
    for j in advcl_left:
        end = j
        main1 = sentence[start: end]
        start = advcl_right[count]
        count += 1
        if len(main1) > 1:
            main.append(main1)
    print(main)
    print(advclauses)
    for a in advclauses:
        split_relative_clauses(a)
def split_coordinate_clauses1(sentence):
    """Print each "oc" dependency span of *sentence* together with the text
    before and after it (exploratory; output via print only)."""
    for tok in sentence:
        if tok.dep_ != "oc":
            continue
        clause = sentence[tok.left_edge.i : tok.head.i + 1]
        before = sentence[:tok.left_edge.i]
        after = sentence[tok.head.i + 1:]
        print(clause)
        print(before)
        print(after)
def split_coordinate_clauses2(sentence):
    """Print each "cd" dependency span of *sentence* (coordinating
    conjunction in the German scheme -- TODO confirm) together with the
    text before its left edge and from the conjunction onwards."""
    for tok in sentence:
        if tok.dep_ != "cd":
            continue
        clause = sentence[tok.left_edge.i : tok.head.i + 1]
        before = sentence[:tok.left_edge.i]
        after = sentence[tok.i:]
        print(clause)
        print(before)
        print(after)
#def split_into_clauses(sentence):
#split_relative_clauses(doc)
split_adverbial_clauses(doc)
#split_coordinate_clauses1(doc)
#split_coordinate_clauses2(doc) | 2,773 | 962 |
from django.conf import settings
from django.conf.urls import url  # NOTE: deprecated; prefer django.urls.re_path
from django.contrib import admin
from django.urls import include, path, re_path
# URL routing: DRF browsable-API auth, the two app route sets, then admin.
# re_path replaces django.conf.urls.url, which was deprecated in Django 3.1
# and removed in Django 4.0.
urlpatterns = [
    re_path(r'^api-auth/', include('rest_framework.urls')),
    path('', include('lobby.urls')),
    path('', include('connectquatro.urls')),
    path('admin/', admin.site.urls),
]
| 332 | 105 |
# -*- coding: utf-8 -*-
'''
Some common, generic utilities
'''
from __future__ import absolute_import
from base64 import urlsafe_b64encode, urlsafe_b64decode
def urlsafe_nopadding_b64encode(data):
    '''URL safe Base64 encode without padding (=).

    BUG FIX: urlsafe_b64encode returns bytes on Python 3, so the padding
    must be stripped with a bytes argument; rstrip('=') raised TypeError.
    On Python 2, b'=' and '=' are the same object type, so behaviour there
    is unchanged.
    '''
    return urlsafe_b64encode(data).rstrip(b'=')
def urlsafe_nopadding_b64decode(data):
    '''URL safe Base64 decode without padding (=).

    Re-appends the stripped '=' padding before delegating to
    urlsafe_b64decode, which requires correctly padded input.

    BUG FIX: the padding is now appended with the same type as *data*
    (bytes or str); the old code always concatenated a str, which raised
    TypeError for bytes input on Python 3.
    '''
    padding = -len(data) % 4  # 0..3 padding characters needed
    pad_char = b'=' if isinstance(data, bytes) else '='
    return urlsafe_b64decode(data + pad_char * padding)
def const_equal(str_a, str_b):
    '''Constant time string comparison.

    For equal-length inputs every position is compared regardless of
    mismatches, so timing does not reveal where the strings diverge.
    A length mismatch still returns early (length is not hidden).
    '''
    if len(str_a) != len(str_b):
        return False
    equal = True
    for ch_a, ch_b in zip(str_a, str_b):
        equal &= (ch_a == ch_b)
    return equal
| 805 | 284 |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Move the attendee email from Order to OrderItem and tighten uniqueness.

    Auto-generated migration; the literal defaults below were one-off values
    supplied for existing rows (preserve_default=False).
    """

    dependencies = [
        ('payments', '0003_workshop_location'),
    ]

    operations = [
        # The order-level email becomes an explicit contact address.
        migrations.RenameField(
            model_name='order',
            old_name='email',
            new_name='contact_email',
        ),
        # Each order item now carries its own attendee email.
        migrations.AddField(
            model_name='orderitem',
            name='email',
            field=models.EmailField(default='example@example.com', max_length=254),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='workshop',
            name='closing_date',
            field=models.DateField(default=datetime.datetime(2016, 8, 7, 23, 54, 27, 693604, tzinfo=utc)),
            preserve_default=False,
        ),
        # One registration per (order, rate, email).
        migrations.AlterUniqueTogether(
            name='orderitem',
            unique_together=set([('order', 'rate', 'email')]),
        ),
        migrations.AlterUniqueTogether(
            name='workshop',
            unique_together=set([('title', 'slug')]),
        ),
        # Quantity is superseded by one OrderItem row per attendee.
        migrations.RemoveField(
            model_name='orderitem',
            name='quantity',
        ),
    ]
| 1,655 | 473 |
# -*- coding: utf-8 -*-
"""Main module."""
from pyspark.sql import DataFrame
from pyspark.sql.functions import lit, col, to_timestamp
def standardize_parking_bay(parkingbay_sdf: DataFrame, load_id, loaded_on):
    """Standardize raw parking-bay data.

    Casts the id columns to int, parses ``last_edit`` into a timestamp and
    stamps every row with the batch audit columns ``load_id``/``loaded_on``.
    """
    t_parkingbay_sdf = (
        parkingbay_sdf
        # BUG FIX: Spark datetime patterns use lowercase 'y' for year-of-era;
        # uppercase 'Y' is the week-based year, which mis-parses dates near
        # year boundaries (e.g. late December / early January).
        .withColumn("last_edit", to_timestamp("last_edit", "yyyyMMddHHmmss"))
        .select(
            col("bay_id").cast("int").alias("bay_id"),
            "last_edit",
            "marker_id",
            "meter_id",
            "rd_seg_dsc",
            col("rd_seg_id").cast("int").alias("rd_seg_id"),
            "the_geom",
            lit(load_id).alias("load_id"),
            lit(loaded_on.isoformat()).alias("loaded_on")
        )
    )
    return t_parkingbay_sdf
def standardize_sensordata(sensordata_sdf: DataFrame, load_id, loaded_on):
    """Standardize raw sensor data: cast the key/coordinate columns and add
    the batch audit columns ``load_id``/``loaded_on``."""
    projected_columns = [
        col("bay_id").cast("int").alias("bay_id"),
        "st_marker_id",
        col("lat").cast("float").alias("lat"),
        col("lon").cast("float").alias("lon"),
        "location",
        "status",
        lit(load_id).alias("load_id"),
        lit(loaded_on.isoformat()).alias("loaded_on"),
    ]
    return sensordata_sdf.select(*projected_columns)
| 1,304 | 462 |
from avalon import harmony
class CreateTemplate(harmony.Creator):
    """Composite node for publishing to templates."""

    # Identifiers consumed by the avalon creator framework.
    name = "templateDefault"
    label = "Template"
    family = "harmony.template"

    def __init__(self, *args, **kwargs):
        # No extra behaviour; delegates straight to the base Creator.
        super(CreateTemplate, self).__init__(*args, **kwargs)
| 311 | 94 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'functions get-logs' command."""
from googlecloudsdk.api_lib.functions import util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
class GetLogs(base.ListCommand):
  """Show logs produced by functions.

  This command is deprecated. Please use `gcloud preview app logs read` instead.

  This command displays log entries produced by all functions running in a
  region, or by a single function if it is specified through a command argument.
  By default, when no extra flags are specified, the most recent 20 log entries
  are displayed.
  """

  # Severities printed in short (single-letter) form in the output table.
  SEVERITIES = ['DEBUG', 'INFO', 'ERROR']

  @staticmethod
  def Args(parser):
    """Register flags for this command."""
    # Replace the generic --limit flag with the bounded variant added below.
    base.LIMIT_FLAG.RemoveFromParser(parser)
    parser.add_argument(
        'name', nargs='?',
        help=('Name of the function which logs are to be displayed. If no name '
              'is specified, logs from all functions are displayed.'))
    parser.add_argument(
        '--execution-id',
        help=('Execution ID for which logs are to be displayed.'))
    parser.add_argument(
        '--start-time', required=False, type=arg_parsers.Datetime.Parse,
        help=('Return only log entries which timestamps are not earlier than '
              'the specified time. The timestamp must be in RFC3339 UTC "Zulu" '
              'format. If --start-time is specified, the command returns '
              '--limit earliest log entries which appeared after '
              '--start-time.'))
    parser.add_argument(
        '--end-time', required=False, type=arg_parsers.Datetime.Parse,
        help=('Return only log entries which timestamps are not later than '
              'the specified time. The timestamp must be in RFC3339 UTC "Zulu" '
              'format. If --end-time is specified but --start-time is not, the '
              'command returns --limit latest log entries which appeared '
              'before --end-time.'))
    parser.add_argument(
        '--limit', required=False, type=arg_parsers.BoundedInt(1, 1000),
        default=20,
        help=('Number of log entries to be fetched; must not be greater than '
              '1000.'))
    parser.add_argument(
        '--min-log-level', choices=GetLogs.SEVERITIES,
        help=('Minimum level of logs to be fetched; can be one of DEBUG, INFO, '
              'ERROR.'))
    parser.add_argument(
        '--show-log-levels', action='store_true', default=True,
        help=('Print a log level of each log entry.'))
    parser.add_argument(
        '--show-function-names', action='store_true', default=True,
        help=('Print a function name before each log entry.'))
    parser.add_argument(
        '--show-execution-ids', action='store_true', default=True,
        help=('Print an execution ID before each log entry.'))
    parser.add_argument(
        '--show-timestamps', action='store_true', default=True,
        help=('Print a UTC timestamp before each log entry.'))

  @util.CatchHTTPErrorRaiseHTTPException
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Yields:
      Objects representing log entries.
    """
    log.warn('This command is deprecated. '
             'Please use `gcloud preview app logs read` instead.')
    logging_client = self.context['logging_client']
    logging = self.context['logging_messages']
    project = properties.VALUES.core.project.Get(required=True)
    # Build a Cloud Logging filter; clauses are ANDed by whitespace.
    log_filter = (
        'resource.type="cloud_function" '
        'labels."cloudfunctions.googleapis.com/region"="{0}" '
        .format(args.region))
    if args.name:
      log_filter += (
          'labels."cloudfunctions.googleapis.com/function_name"="{0}" '
          .format(args.name))
    if args.execution_id:
      log_filter += 'labels."execution_id"="{0}" '.format(args.execution_id)
    if args.min_log_level:
      log_filter += 'severity>={0} '.format(args.min_log_level)
    if args.start_time:
      # With a start time: fetch the earliest entries after it (ascending).
      order = 'asc'
      start_time = args.start_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
      log_filter += 'timestamp>="{0}" '.format(start_time)
    else:
      # Without a start time: fetch the latest entries (descending).
      order = 'desc'
    if args.end_time:
      end_time = args.end_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
      log_filter += 'timestamp<="{0}" '.format(end_time)
    # TODO(user): Consider using paging for listing more than 1000 log entries.
    # However, reversing the order of received latest N entries before a
    # specified timestamp would be problematic with paging.
    request = logging.ListLogEntriesRequest(
        projectIds=[project], filter=log_filter,
        orderBy='timestamp {0}'.format(order), pageSize=args.limit)
    response = logging_client.entries.List(request=request)
    # Always yield rows in chronological order regardless of fetch order.
    entries = response.entries if order == 'asc' else reversed(response.entries)
    for entry in entries:
      row = dict(
          log=entry.textPayload
      )
      if entry.severity:
        severity = str(entry.severity)
        if severity in GetLogs.SEVERITIES:
          # Use short form (first letter) for expected severities.
          row['level'] = severity[0]
        else:
          # Print full form of unexpected severities.
          row['level'] = severity
      for label in entry.labels.additionalProperties:
        if label.key == 'cloudfunctions.googleapis.com/function_name':
          row['name'] = label.value
        if label.key == 'execution_id':
          row['execution_id'] = label.value
      if entry.timestamp:
        row['time_utc'] = util.FormatTimestamp(entry.timestamp)
      yield row

  def Format(self, args):
    # Column order: level, name, execution_id, time_utc, then log (always last).
    fields = []
    if args.show_log_levels:
      fields.append('level')
    if args.show_function_names:
      fields.append('name')
    if args.show_execution_ids:
      fields.append('execution_id')
    if args.show_timestamps:
      fields.append('time_utc')
    fields.append('log')
    return 'table({0})'.format(','.join(fields))
| 6,702 | 1,957 |
from fixture.utils import equal_list, equal_dict
from ezcode.graph import NegativeCycleExist
from ezcode.graph.directed import DirectedGraph
from ezcode.graph.undirected import UndirectedGraph
def test_undirected_graph():
    """All shortest-path algorithms agree on an unweighted undirected graph.

    A ------ C
    |       /|\\
    |      / | \\
    |     /  |  \\
    |    /   |   E
    |   /    |  /
    |  /     | /
    | /      |/
    B ------ D
    """
    # NOTE(review): str(graph) is whitespace-sensitive; the original column
    # alignment was lost in transit -- verify against the library's output.
    graph_str = """
A B C D E
A * *
B * * *
C * * * *
D * * *
E * *
"""[1:]
    graph = UndirectedGraph(edges=[["A", "B"], ["A", "C"], ["B", "C"], ["B", "D"], ["C", "D"], ["C", "E"], ["D", "E"]])
    # Expected hop count between every pair of nodes.
    benchmark = {
        "A": {"A": 0, "B": 1, "C": 1, "D": 2, "E": 2},
        "B": {"A": 1, "B": 0, "C": 1, "D": 1, "E": 2},
        "C": {"A": 1, "B": 1, "C": 0, "D": 1, "E": 1},
        "D": {"A": 2, "B": 1, "C": 1, "D": 0, "E": 1},
        "E": {"A": 2, "B": 2, "C": 1, "D": 1, "E": 0}
    }
    assert graph_str == str(graph)
    # BFS, Dijkstra, SPFA, DFS and Floyd must all reproduce the benchmark.
    for n1, b in benchmark.items():
        assert equal_dict(graph.bfs_path_value(n1), b)
        assert equal_dict(graph.dijkstra(n1), b)
        assert equal_dict(graph.spfa(n1), b)
        for n2 in benchmark.keys():
            assert equal_list(benchmark[n1][n2], graph.dfs_path_value(n1, n2))
    assert equal_dict(graph.floyd(), benchmark)
def test_undirected_weighted_graph():
    """Shortest paths (additive) and best-probability paths (multiplicative)
    on a weighted undirected graph.

    A --0.2- C
    |       /| \\
    |      / |  0.8
    0.8   /  |   \\
    |    /  0.9   E
    |  0.5   |   /
    |  /     |  0.3
    | /      | /
    B --0.9- D
    """
    # NOTE(review): str(graph) is whitespace-sensitive; the original column
    # alignment was lost in transit -- verify against the library's output.
    graph_str = """
A B C D E
A 0.8 0.2
B 0.8 0.5 0.9
C 0.2 0.5 0.9 0.8
D 0.9 0.9 0.3
E 0.8 0.3
"""[1:]
    graph = UndirectedGraph(edges=[["A", "B"], ["A", "C"], ["B", "C"], ["B", "D"], ["C", "D"], ["C", "E"], ["D", "E"]], weights=[0.8, 0.2, 0.5, 0.9, 0.9, 0.8, 0.3])
    assert graph_str == str(graph)
    resolution = 0.0001
    # Benchmark 1: minimal additive path weight between every pair.
    benchmark_1 = {
        "A": {"A": 0, "B": 0.7, "C": 0.2, "D": 1.1, "E": 1.0},
        "B": {"A": 0.7, "B": 0, "C": 0.5, "D": 0.9, "E": 1.2},
        "C": {"A": 0.2, "B": 0.5, "C": 0, "D": 0.9, "E": 0.8},
        "D": {"A": 1.1, "B": 0.9, "C": 0.9, "D": 0, "E": 0.3},
        "E": {"A": 1.0, "B": 1.2, "C": 0.8, "D": 0.3, "E": 0}
    }
    for n1, benchmark in benchmark_1.items():
        assert equal_dict(graph.dijkstra(n1), benchmark, resolution=resolution)
        assert equal_dict(graph.spfa(n1), benchmark, resolution=resolution)
        for n2 in benchmark_1.keys():
            assert equal_list(benchmark_1[n1][n2], graph.dfs_path_value(n1, n2), resolution=resolution)
    assert equal_dict(graph.floyd(), benchmark_1)
    # Benchmark 2: maximal multiplicative path weight (probability-style),
    # exercised via the path_value_func / min_max_func hooks.
    benchmark_2 = {
        "A": {"A": 1, "B": 0.8, "C": 0.648, "D": 0.72, "E": 0.5184},
        "B": {"A": 0.8, "B": 1, "C": 0.81, "D": 0.9, "E": 0.648},
        "C": {"A": 0.648, "B": 0.81, "C": 1, "D": 0.9, "E": 0.8},
        "D": {"A": 0.72, "B": 0.9, "C": 0.9, "D": 1, "E": 0.72},
        "E": {"A": 0.5184, "B": 0.648, "C": 0.8, "D": 0.72, "E": 1}
    }
    for n1, benchmark in benchmark_2.items():
        assert equal_dict(graph.dijkstra(n1, self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), benchmark, resolution=resolution)
        assert equal_dict(graph.spfa(n1, self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), benchmark, resolution=resolution)
        for n2 in benchmark_2.keys():
            assert equal_list(benchmark_2[n1][n2], graph.dfs_path_value(n1, n2, self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), resolution=resolution)
    assert equal_dict(graph.floyd(self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), benchmark_2, resolution=resolution)
def test_negative_cycle_detection():
    """spfa(check_cycle=True) must raise NegativeCycleExist when the graph
    contains a negative cycle (edge B-C has weight -3)."""
    graph = UndirectedGraph(edges=[["A", "B"], ["A", "C"], ["A", "D"], ["B", "C"], ["B", "D"], ["C", "D"]], weights=[2, 3, 2, -3, 1, 1])
    # FIX: the original used `assert True` / `assert False` in the except/else
    # branches; `assert` statements are stripped under `python -O`, and
    # `assert False` gives no diagnostic. Raise an explicit error instead.
    try:
        graph.spfa("A", check_cycle=True)
    except NegativeCycleExist:
        pass  # expected
    else:
        raise AssertionError("spfa(check_cycle=True) should have raised NegativeCycleExist")
def test_directed_graph():
    """Topological ordering, acyclicity checks and shortest paths on
    unweighted directed graphs.

    a <----- c
    |        |
    |        v
    |   f ---> e
    |   ^
    v   |
    d -----> b
    """
    # NOTE(review): str(graph) is whitespace-sensitive; the original column
    # alignment was lost in transit -- verify against the library's output.
    graph_str = """
a b c d e f
a *
b *
c * *
d *
e
f *
"""[1:]
    # ("e", None) declares an isolated node with no outgoing edges.
    graph = DirectedGraph(edges=[("c", "a"), ("b", "f"), ("e", None), ("a", "d"), ("c", "f"), ("d", "b"), ("f", "e")])
    assert graph_str == str(graph)
    assert equal_list(graph.topological_order(), ["e", "f", "b", "d", "a", "c"])
    assert graph.is_acyclic_graph()
    assert not DirectedGraph(edges=[("a", "b"), ("b", "a")]).is_acyclic_graph()
    # A second, cyclic graph for the shortest-path algorithms.
    graph_str = """
a b c d e f
a *
b *
c * * *
d * *
e
f *
"""[1:]
    graph = DirectedGraph(edges=[("a", "b"), ("c", "b"), ("d", "a"), ("b", "d"), ("c", "a"), ("d", "c"), ("c", "f"), ("f", "d"), ("e", None)])
    assert graph_str == str(graph)
    x = float("inf")  # unreachable pairs (node "e" has no outgoing edges)
    benchmark = {
        "a": {"a": 0, "b": 1, "c": 3, "d": 2, "e": x, "f": 4},
        "b": {"a": 2, "b": 0, "c": 2, "d": 1, "e": x, "f": 3},
        "c": {"a": 1, "b": 1, "c": 0, "d": 2, "e": x, "f": 1},
        "d": {"a": 1, "b": 2, "c": 1, "d": 0, "e": x, "f": 2},
        "e": {"a": x, "b": x, "c": x, "d": x, "e": 0, "f": x},
        "f": {"a": 2, "b": 3, "c": 2, "d": 1, "e": x, "f": 0}
    }
    assert graph_str == str(graph)
    for n1, b in benchmark.items():
        assert equal_dict(graph.bfs_path_value(n1), b)
        assert equal_dict(graph.dijkstra(n1), b)
        assert equal_dict(graph.spfa(n1), b)
        for n2 in benchmark.keys():
            assert equal_list(benchmark[n1][n2], graph.dfs_path_value(n1, n2))
    assert equal_dict(graph.floyd(), benchmark)
def test_directed_weighted_graph():
    """Shortest (additive) and best-probability (multiplicative) paths on a
    weighted directed graph; node "e" is isolated."""
    # NOTE(review): str(graph) is whitespace-sensitive; the original column
    # alignment was lost in transit -- verify against the library's output.
    graph_str = """
a b c d e f
a 0.8
b 0.8
c 0.5 0.7 0.6
d 0.6 0.8
e
f 0.4
"""[1:]
    graph = DirectedGraph(
        edges=[("a", "b"), ("c", "b"), ("d", "a"), ("b", "d"), ("c", "a"), ("d", "c"), ("c", "f"), ("f", "d"), ("e", None)],
        weights=[0.8, 0.7, 0.6, 0.8, 0.5, 0.8, 0.6, 0.4, None]
    )
    assert graph_str == str(graph)
    x, resolution = float("inf"), 0.0001
    # Benchmark 1: minimal additive path weight.
    benchmark_1 = {
        "a": {"a": 0, "b": 0.8, "c": 2.4, "d": 1.6, "e": x, "f": 3.0, },
        "b": {"a": 1.4, "b": 0, "c": 1.6, "d": 0.8, "e": x, "f": 2.2, },
        "c": {"a": 0.5, "b": 0.7, "c": 0, "d": 1.0, "e": x, "f": 0.6, },
        "d": {"a": 0.6, "b": 1.4, "c": 0.8, "d": 0, "e": x, "f": 1.4, },
        "e": {"a": x, "b": x, "c": x, "d": x, "e": 0, "f": x, },
        "f": {"a": 1.0, "b": 1.8, "c": 1.2, "d": 0.4, "e": x, "f": 0, }
    }
    for n1, benchmark in benchmark_1.items():
        assert equal_dict(graph.dijkstra(n1), benchmark, resolution=resolution)
        assert equal_dict(graph.spfa(n1), benchmark, resolution=resolution)
        for n2 in benchmark_1.keys():
            assert equal_list(benchmark_1[n1][n2], graph.dfs_path_value(n1, n2), resolution=resolution)
    assert equal_dict(graph.floyd(), benchmark_1, resolution=resolution)
    # Benchmark 2: maximal multiplicative path weight via the hook arguments.
    benchmark_2 = {
        "a": {"a": 1, "b": 0.8, "c": 0.512, "d": 0.64, "e": 0, "f": 0.3072},
        "b": {"a": 0.48, "b": 1, "c": 0.64, "d": 0.8, "e": 0, "f": 0.384},
        "c": {"a": 0.5, "b": 0.7, "c": 1, "d": 0.56, "e": 0, "f": 0.6},
        "d": {"a": 0.6, "b": 0.56, "c": 0.8, "d": 1, "e": 0, "f": 0.48},
        "e": {"a": 0, "b": 0, "c": 0, "d": 0, "e": 1, "f": 0},
        "f": {"a": 0.24, "b": 0.224, "c": 0.32, "d": 0.4, "e": 0, "f": 1}
    }
    for n1, benchmark in benchmark_2.items():
        assert equal_dict(graph.dijkstra(n1, self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), benchmark, resolution=resolution)
        assert equal_dict(graph.spfa(n1, self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), benchmark, resolution=resolution)
        for n2 in benchmark_2.keys():
            assert equal_list(benchmark_2[n1][n2], graph.dfs_path_value(n1, n2, self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), resolution=resolution)
    assert equal_dict(graph.floyd(self_loop_weight=1, disconnected_edge_weight=0, path_value_func=lambda a, b: a * b, min_max_func=max), benchmark_2, resolution=resolution)
def test_eulerian_path():
    """Eulerian path detection for undirected and directed graphs.

    A ------ C
    |       /|\\
    |      / | \\
    |     /  |  \\
    |    /   |   E
    |   /    |  /
    |  /     | /
    | /      |/
    B ------ D
    """
    graph = UndirectedGraph(edges=[["A", "B"], ["A", "C"], ["B", "C"], ["B", "D"], ["C", "D"], ["C", "E"], ["D", "E"]])
    # Only the two odd-degree nodes (B and D) can start an Eulerian path.
    assert graph.eulerian_path(start_node="A") is None
    assert graph.eulerian_path(start_node="E") is None
    assert graph.eulerian_path(start_node="D") == ["D", "B", "A", "C", "D", "E", "C", "B"]
    assert graph.eulerian_path() == ["B", "A", "C", "B", "D", "C", "E", "D"]
    """
    A -- B
    | \\
    |  \\
    D   C
    """
    # A star with three odd-degree leaves has no Eulerian path.
    graph = UndirectedGraph(edges=[["A", "B"], ["A", "C"], ["A", "D"]])
    assert graph.eulerian_path() is None
    """
    A <--- B
    |      ^
    |      |
    v      |
    D ---> C <--- E
           |
           v
           F
    """
    # Directed case: only E (out-degree exceeds in-degree by one) can start.
    graph = DirectedGraph(edges=[["B", "A"], ["A", "D"], ["D", "C"], ["C", "B"], ["E", "C"], ["C", "F"]])
    assert graph.eulerian_path(start_node="A") is None
    assert graph.eulerian_path(start_node="B") is None
    assert graph.eulerian_path(start_node="C") is None
    assert graph.eulerian_path(start_node="D") is None
    assert graph.eulerian_path(start_node="F") is None
    assert graph.eulerian_path(start_node="E") == ["E", "C", "B", "A", "D", "C", "F"]
    assert graph.eulerian_path(start_node="E") == graph.eulerian_path()
    """
    A <--- B ---> F
    |      ^
    |      |
    v      |
    D ---> C <--- E
    """
    # Two sources (E and B's extra out-edge) -> no Eulerian path exists.
    graph = DirectedGraph(edges=[["B", "A"], ["A", "D"], ["D", "C"], ["C", "B"], ["E", "C"], ["B", "F"]])
    assert graph.eulerian_path() is None
| 10,697 | 4,686 |
from .abc_coffee import AbcCoffeeProgram
from .ingredients import Milk, Coffee
from .programs import (Cappuccino, Doppio, Espresso, Latte, Lungo, Macchiato)
| 157 | 61 |
#coding=utf8
import time, sys, Queue
import MySQLdb
from MySQLdb import cursors
from multiprocessing.managers import BaseManager
from multiprocessing.sharedctypes import RawArray
from multiprocessing import Process, freeze_support, Array
reload(sys)
sys.setdefaultencoding('utf8')
def work(server_addr):
    """Worker-process loop (Python 2): pull (nn, ii, jj, name) tasks from the
    remote task queue, look up candidates in the Baidu DB and push the
    results back on the result queue."""
    # Database connection (imported here so every worker process opens its own).
    from database.db import baidu_db

    # Network connection to the manager's shared queues.
    class QueueManager(BaseManager):
        pass
    QueueManager.register('get_task_queue')
    QueueManager.register('get_result_queue')
    print('Connect to server %s...' % server_addr)
    m = QueueManager(address=(server_addr, 5000), authkey='abc')
    m.connect()
    task = m.get_task_queue()
    result = m.get_result_queue()
    while True:
        try:
            (nn, ii, jj, name) = task.get(timeout=100)
            candidates = baidu_db.getCandidates(name)
            result.put((nn, ii, jj, candidates))
        except Queue.Empty:
            print 'queue is empty'
            continue
    # NOTE(review): the loop above never breaks, so this line is unreachable.
    print 'worker exit.'
if __name__ == '__main__':
    freeze_support()  # required for frozen Windows multiprocessing executables
    # Number of worker processes comes from argv[1]; default 3.
    if len(sys.argv) > 1:
        num = int(sys.argv[1])
    else:
        num = 3
    print 'total process number is %d'%num
    processes = []
    # Manager host address is hard-coded -- TODO make configurable.
    for i in xrange(num):
        processes.append(Process(target=work, args = ('192.168.1.104',)))
    for p in processes:
        p.start()
    for p in processes:
        p.join()
from invoke.exceptions import UnexpectedExit
from utils import execute_command, is_file_exists
import logging
'''
is_genkey_unique
'''
def is_genkey_unique(config):
    """Check that every masternode ``private_key`` in *config* is unique.

    Entries without a ``private_key`` are ignored.

    Returns:
        tuple: ``(is_unique, duplicates)`` where ``is_unique`` is False when
        at least one key is shared, and ``duplicates`` lists the connection
        strings involved (for each clash: the newly seen entry first, then
        the first holder of the same key).
    """
    is_unique = True
    duplicates = []
    seen = {}
    for conf in config["masternodes"]:
        if "private_key" not in conf:
            continue
        key = conf["private_key"]
        # Idiom fix: was `tmp.get(...) == None`; identity test is correct here.
        if seen.get(key) is None:
            seen[key] = conf["connection_string"]
        else:
            duplicates.append(conf["connection_string"])
            duplicates.append(seen.get(key))
            is_unique = False
    return (is_unique, duplicates)
'''
is_vps_installed
'''
def is_vps_installed(connection):
    """Return True when the libdb4.8-dev package is installed on the VPS.

    Uses dpkg-query's status output; grep -c yields '1\\n' exactly when the
    package reports "install ok installed".
    """
    is_installed = False
    try:
        # Search for libdb4.8-dev package,
        result = execute_command(connection, 'dpkg-query -W --showformat=\'${Status}\n\' libdb4.8-dev | grep -c "install ok installed"')
        if result.stdout == '1\n':
            is_installed = True
    except UnexpectedExit:
        # BUG FIX: the original message formatted the builtin `dir` (there is
        # no local of that name), producing '<built-in function dir> does not
        # exist !' in the log.
        logging.info('libdb4.8-dev is not installed on the target host !')
    return is_installed
'''
is_polis_installed
'''
def is_polis_installed(connection, dir):
    """Return True when the polisd binary exists inside *dir* on the host."""
    # Insert a path separator only when *dir* does not already end with one.
    separator = '' if dir.endswith('/') else '/'
    return is_file_exists(connection, '{}{}{}'.format(dir, separator, 'polisd'))
'''
is_monitoring_script_installed
'''
def is_monitoring_script_installed(connection):
    """Return True when polischk.sh is referenced exactly once in the
    remote user's crontab."""
    try:
        # Search for Polis/sentinel in crontable
        result = execute_command(connection, 'crontab -l | grep -c "polischk.sh"')
    except UnexpectedExit:
        logging.info('Monitoring script is not installed !')
        return False
    return result.stdout == '1\n'
'''
BUG : Must be logged in root !
TODO : add an interactive shell to ask user for credentials
'''
def install_vps(connection, swap_supported = False):
    """Provision a Debian/Ubuntu VPS with the polisd build dependencies.

    NOTE(review): commands are run without sudo, so the connection must be
    logged in as root (see the BUG note above this function).
    """
    try:
        # Create and register a 2 GB swap file (for low-memory VPSes).
        cmds_create_swap = [ "touch /var/swap.img",
                             "chmod 600 /var/swap.img",
                             "dd if=/dev/zero of=/var/swap.img bs=1024k count=2000",
                             "mkswap /var/swap.img",
                             "swapon /var/swap.img",
                             "echo \"/var/swap.img none swap sw 0 0\" >> /etc/fstab" ]
        # System upgrade plus the toolchain and Berkeley DB 4.8 from the
        # bitcoin PPA (required for wallet compatibility).
        cmds_apt_get = [ "apt-get update -y",
                         "apt-get upgrade -y",
                         "apt-get dist-upgrade -y",
                         "apt-get install nano htop git -y",
                         "apt-get install build-essential libtool autotools-dev automake pkg-config libssl-dev libevent-dev bsdmainutils software-properties-common -y",
                         "apt-get install libboost-all-dev -y",
                         "add-apt-repository ppa:bitcoin/bitcoin -y",
                         "apt-get update -y",
                         "apt-get install libdb4.8-dev libdb4.8++-dev -y" ]
        if swap_supported :
            logging.info("Create SWAP file !")
            for cmd in cmds_create_swap :
                execute_command(connection, '{}'.format(cmd))
        logging.info("Download dependencies !")
        for cmd in cmds_apt_get:
            execute_command(connection, '{}'.format(cmd))
    except Exception as e:
        # Best-effort provisioning: log and continue rather than crash callers.
        logging.error('Could not install vps', exc_info=e)
| 3,474 | 1,013 |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
# pylint: disable=too-many-statements
from azure.cli.core.commands.parameters import (
get_enum_type, get_three_state_flag, file_type)
from azure.cli.core.commands.validators import validate_file_or_dict
def load_arguments(self, _):
    """Register CLI arguments for the `cloud-service create` command group.

    Auto-generated by AutoRest; edits may be overwritten on regeneration.
    """
    with self.argument_context('cloud-service create') as c:
        c.argument('upgrade_mode', arg_type=get_enum_type(['Auto', 'Manual', 'Simultaneous']), help='Update mode for '
                   'the cloud service. Role instances are allocated to update domains when the service is deployed. '
                   'Updates can be initiated manually in each update domain or initiated automatically in all update '
                   'domains. Possible Values are Auto, Manual, Simultaneous. '
                   'If not specified, the default value is Auto. If set to Manual, PUT '
                   'UpdateDomain must be called to apply the update. If set to Auto, the update is automatically '
                   'applied to each update domain in sequence.')
        c.argument('roles', nargs='+', help='List of roles separated by space for the cloud service. Format: '
                   'RoleName:SkuName:SkuCapacity:SkuTier.',
                   arg_group='Role Profile')
        c.argument('load_balancer_configurations', nargs='+', arg_group='Network Profile',
                   options_list=['--load-balancer-configurations', '--lb'],
                   help='The list of load balancer configurations separated by space for the cloud service. '
                   'The public IP is a mandatory field. Format: '
                   'LBName:FrontendIPConfiguration:PublicIPAddress:Subnet:PrivateIP.')
        c.argument('secrets', nargs='+', arg_group='Os Profile',
                   help='Specify certificates separated by space that should be installed onto the role instances. '
                   'Format: KeyVaultName:CertificateUrl:CertificateUrl2:...:CertificateUrlN')
        # file_type enables path completion; the value may be inline XML or @file.
        c.argument('configuration', type=file_type, help='Specify the XML service configuration (.cscfg) '
                   'for the cloud service. Expected value: xml-string/@xml-file.')
        c.argument('configuration_url', type=str, help='Specify a URL that refers to the location of the service '
                   'configuration in the Blob service. The service package URL can be Shared Access Signature (SAS) '
                   'URI from any storage account. This is a write-only property and is not returned in GET calls.')
        c.argument('package_url', type=str, help='Specify a URL that refers to the location of the service package '
                   'in the Blob service. The service package URL can be Shared Access Signature (SAS) URI from any '
                   'storage account. This is a write-only property and is not returned in GET calls.')
        c.argument('start_cloud_service', arg_type=get_three_state_flag(), help='Indicate whether to start '
                   'the cloud service immediately after it is created. The default value is `true`. If false, the '
                   'service model is still deployed, but the code is not run immediately. Instead, the service is '
                   'PoweredOff until you call Start, at which time the service will be started. A deployed service '
                   'still incurs charges, even if it is poweredoff.')
        c.argument('extensions', type=validate_file_or_dict, arg_group='Extension Profile',
                   help='List of extensions for the cloud service. Expected value: json-string/@json-file. Example: '
                   '[{"properties": {"type": "RDP", "autoUpgradeMinorVersion": false, "protectedSettings": "settings",'
                   '"publisher": "Microsoft.Windows.Azure.Extensions", "settings": "settings", '
                   '"typeHandlerVersion": "1.2.1"}, "name": "RDPExtension"}]')
| 4,330 | 1,093 |
#! /usr/bin/env python
import logging
from tornado.ioloop import IOLoop
from stormed import Connection, Message

# Message published once to the "hello" queue.
msg = Message('Hello World!')


def on_connect():
    """Declare the queue, publish the message, then close the connection."""
    ch = conn.channel()
    ch.queue_declare(queue='hello')
    ch.publish(msg, exchange='', routing_key='hello')
    conn.close(callback=done)


def done():
    """Report success and stop the IO loop so the script exits."""
    # BUG FIX: the original used a Python 2 print statement, which is a
    # syntax error under Python 3; print() works on both.
    print(" [x] Sent 'Hello World!'")
    io_loop.stop()


logging.basicConfig()
conn = Connection(host='localhost')
conn.connect(on_connect)
io_loop = IOLoop.instance()
io_loop.start()
| 504 | 167 |
from abc import ABCMeta, abstractmethod
class IOperator(metaclass=ABCMeta):
    """Common interface shared by components and their wrappers (decorator pattern)."""
    @abstractmethod
    def operator(self):
        """Return this object's numeric value."""
        pass
class Component(IOperator):
    """Concrete component that yields a fixed base value."""

    def operator(self):
        base_value = 10.0
        return base_value
class Wrapper(IOperator):
    """Decorator that adds 5.0 on top of the wrapped object's value."""

    def __init__(self, obj):
        # The wrapped object; anything exposing operator() works.
        self.obj = obj

    def operator(self):
        return 5.0 + self.obj.operator()
# Demo: wrap a Component and print the decorated result (10.0 + 5.0 = 15.0).
comp = Wrapper(Component())
print(comp.operator())
| 420 | 136 |
from .metadata_parser import parse_metadata
from nmfamv2.metadata import Metadata
def read_metafile(metafile_path):
    """Parse *metafile_path* and return the validated Metadata object.

    Metadata's constructor performs validation, so constructing the object
    doubles as the validation step.
    """
    parsed_metadata = parse_metadata(metafile_path)
    # BUG FIX: the constructed Metadata was previously discarded, so this
    # function always returned None; return it to the caller instead
    # (backward compatible — callers that ignored the result are unaffected).
    return Metadata(parsed_metadata)
# Parser
# Validator
# Object
| 281 | 86 |
"""Daily clean up of DB tables."""
import logging
from helpers.report_helper import ReportHelper
logger = logging.getLogger(__file__)
def main():
    """Regular clean up of database tables.

    Raises:
        Exception: re-raises whatever ReportHelper.cleanup_db_tables()
        raised, after logging it with a traceback.
    """
    r = ReportHelper()
    try:
        r.cleanup_db_tables()
    except Exception:
        logger.exception("Exception encountered when trying to clean up DB tables")
        # Bare `raise` preserves the original traceback; `raise e`
        # re-anchored it at this line.
        raise
# Run the cleanup when executed as a script.
if __name__ == '__main__':
    main()
| 428 | 125 |
"""
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# import keras
import math
from tensorflow import keras
import tensorflow as tf
import tensorflow_addons as tfa
import numpy as np
from utils.anchors import anchors_for_shape
from layers import RegressBoxes
def focal(alpha=0.25, gamma=1.5):
    """
    Create a functor for computing the focal loss.

    Args
        alpha: Scale the focal weight with alpha.
        gamma: Take the power of the focal weight with gamma.

    Returns
        A functor that computes the focal loss using the alpha and gamma.
    """
    def _focal(y_true, y_pred):
        """
        Compute the focal loss given the target tensor and the predicted tensor.

        As defined in https://arxiv.org/abs/1708.02002

        Args
            y_true: Tensor of target data from the generator with shape (B, N, num_classes).
            y_pred: Tensor of predicted data from the network with shape (B, N, num_classes).

        Returns
            The focal loss of y_pred w.r.t. y_true.
        """
        labels = y_true[:, :, :-1]
        # -1 for ignore, 0 for background, 1 for object
        anchor_state = y_true[:, :, -1]
        classification = y_pred

        # filter out "ignore" anchors (state == -1)
        indices = tf.where(keras.backend.not_equal(anchor_state, -1))
        labels = tf.gather_nd(labels, indices)
        classification = tf.gather_nd(classification, indices)

        # compute the focal loss:
        # alpha for positive labels, (1 - alpha) for the rest
        alpha_factor = keras.backend.ones_like(labels) * alpha
        alpha_factor = tf.where(keras.backend.equal(labels, 1), alpha_factor, 1 - alpha_factor)
        # focal weight down-weights confidently-correct ("easy") examples:
        # (1 - 0.99) ** 2 = 1e-4, (1 - 0.9) ** 2 = 1e-2
        focal_weight = tf.where(keras.backend.equal(labels, 1), 1 - classification, classification)
        focal_weight = alpha_factor * focal_weight ** gamma
        cls_loss = focal_weight * keras.backend.binary_crossentropy(labels, classification)

        # compute the normalizer: the number of positive anchors
        # (floored at 1 so batches with no positives don't divide by zero)
        normalizer = tf.where(keras.backend.equal(anchor_state, 1))
        normalizer = keras.backend.cast(keras.backend.shape(normalizer)[0], keras.backend.floatx())
        normalizer = keras.backend.maximum(keras.backend.cast_to_floatx(1.0), normalizer)

        return keras.backend.sum(cls_loss) / normalizer
        # Alternative NaN-safe formulation, kept for reference:
        #loss = tf.math.divide_no_nan(keras.backend.sum(cls_loss), normalizer)
        #return tf.where(tf.math.is_nan(loss), 0., loss)
    return _focal
def smooth_l1(sigma=3.0):
    """
    Create a smooth L1 loss functor.

    Args
        sigma: This argument defines the point where the loss changes from L2 to L1.

    Returns
        A functor for computing the smooth L1 loss given target data and predicted data.
    """
    sigma_squared = sigma ** 2

    def _smooth_l1(y_true, y_pred):
        """ Compute the smooth L1 loss of y_pred w.r.t. y_true.
        Args
            y_true: Tensor from the generator of shape (B, N, 5). The last value for each box is the state of the anchor (ignore, negative, positive).
            y_pred: Tensor from the network of shape (B, N, 4).
        Returns
            The smooth L1 loss of y_pred w.r.t. y_true.
        """
        # separate target and state
        regression = y_pred
        regression_target = y_true[:, :, :-1]
        anchor_state = y_true[:, :, -1]

        # keep only positive anchors (state == 1); ignore/negative anchors
        # do not contribute to the regression loss
        indices = tf.where(keras.backend.equal(anchor_state, 1))
        regression = tf.gather_nd(regression, indices)
        regression_target = tf.gather_nd(regression_target, indices)

        # compute smooth L1 loss
        # f(x) = 0.5 * (sigma * x)^2 if |x| < 1 / sigma / sigma
        #        |x| - 0.5 / sigma / sigma otherwise
        regression_diff = regression - regression_target
        regression_diff = keras.backend.abs(regression_diff)
        regression_loss = tf.where(
            keras.backend.less(regression_diff, 1.0 / sigma_squared),
            0.5 * sigma_squared * keras.backend.pow(regression_diff, 2),
            regression_diff - 0.5 / sigma_squared
        )

        # normalize by the number of positive anchors (at least 1 so an
        # empty batch does not divide by zero)
        normalizer = keras.backend.maximum(1, keras.backend.shape(indices)[0])
        normalizer = keras.backend.cast(normalizer, dtype=keras.backend.floatx())
        return keras.backend.sum(regression_loss) / normalizer
    return _smooth_l1
def smooth_l1_quad(sigma=3.0):
    """
    Create a smooth L1 loss functor for quadrangle regression.

    The 9 regressed values per anchor are: 4 box offsets, 4 "alpha" values
    and 1 ratio value (alphas and ratio are squashed through a sigmoid).

    Args
        sigma: This argument defines the point where the loss changes from L2 to L1.

    Returns
        A functor for computing the smooth L1 loss given target data and predicted data.
    """
    sigma_squared = sigma ** 2

    def _smooth_l1(y_true, y_pred):
        """ Compute the smooth L1 loss of y_pred w.r.t. y_true.
        Args
            y_true: Tensor from the generator of shape (B, N, 10). The last value for each box is the state of the anchor (ignore, negative, positive).
            y_pred: Tensor from the network of shape (B, N, 9).
        Returns
            The smooth L1 loss of y_pred w.r.t. y_true.
        """
        # separate target and state
        regression = y_pred
        # squash the alpha/ratio channels (indices 4-8) into (0, 1)
        regression = tf.concat([regression[..., :4], tf.sigmoid(regression[..., 4:9])], axis=-1)
        regression_target = y_true[:, :, :-1]
        anchor_state = y_true[:, :, -1]

        # keep only positive anchors (state == 1)
        indices = tf.where(keras.backend.equal(anchor_state, 1))
        regression = tf.gather_nd(regression, indices)
        regression_target = tf.gather_nd(regression_target, indices)

        # compute smooth L1 loss
        # f(x) = 0.5 * (sigma * x)^2 if |x| < 1 / sigma / sigma
        #        |x| - 0.5 / sigma / sigma otherwise
        regression_diff = regression - regression_target
        regression_diff = keras.backend.abs(regression_diff)
        # box offsets (channels 0-3)
        box_regression_loss = tf.where(
            keras.backend.less(regression_diff[..., :4], 1.0 / sigma_squared),
            0.5 * sigma_squared * keras.backend.pow(regression_diff[..., :4], 2),
            regression_diff[..., :4] - 0.5 / sigma_squared
        )
        # alpha values (channels 4-7)
        alpha_regression_loss = tf.where(
            keras.backend.less(regression_diff[..., 4:8], 1.0 / sigma_squared),
            0.5 * sigma_squared * keras.backend.pow(regression_diff[..., 4:8], 2),
            regression_diff[..., 4:8] - 0.5 / sigma_squared
        )
        # ratio (channel 8)
        ratio_regression_loss = tf.where(
            keras.backend.less(regression_diff[..., 8], 1.0 / sigma_squared),
            0.5 * sigma_squared * keras.backend.pow(regression_diff[..., 8], 2),
            regression_diff[..., 8] - 0.5 / sigma_squared
        )
        # compute the normalizer: the number of positive anchors (min 1)
        normalizer = keras.backend.maximum(1, keras.backend.shape(indices)[0])
        normalizer = keras.backend.cast(normalizer, dtype=keras.backend.floatx())

        box_regression_loss = tf.reduce_sum(box_regression_loss) / normalizer
        alpha_regression_loss = tf.reduce_sum(alpha_regression_loss) / normalizer
        ratio_regression_loss = tf.reduce_sum(ratio_regression_loss) / normalizer
        # the single ratio term is up-weighted (x16) relative to the others
        return box_regression_loss + alpha_regression_loss + 16 * ratio_regression_loss
    return _smooth_l1
''' ProbIoU '''
# Small epsilon guarding the logs, divisions and sqrt in the ProbIoU helpers below.
EPS = 1e-3
def helinger_dist(x1,y1,a1,b1, x2,y2,a2,b2, freezed=False):
    '''
    Hellinger distance between two axis-aligned Gaussians ("helinger"
    spelling kept for compatibility with existing callers).

    Dh = sqrt(1 - exp(-Db))
    Db = 1/4*((x1-x2)²/(a1+a2) + (y1-y2)²/(b1+b2))-ln2 \
         1/2*ln((a1+a2)*(b1+b2)) - 1/4*ln(a1*a2*b1*b2)

    (x, y) are the Gaussian centres, (a, b) the per-axis variances.
    '''
    if freezed:
        # full Bhattacharyya distance between the two Gaussians
        B1 = 1/4.*(tf.math.pow(x1-x2, 2.)/(a1+a2+EPS) + tf.math.pow(y1-y2, 2.)/(b1+b2+EPS))
        B2 = 1/2.*tf.math.log((a1+a2)*(b1+b2)+EPS)
        B3 = 1/4.*tf.math.log(a1*a2*b1*b2+EPS)
        Db = B1 + B2 - B3 - tf.math.log(2.)
    else:
        # simplified distance using only the first Gaussian's variances
        Db = tf.math.pow(x1-x2, 2.)/(2*a1+EPS) + tf.math.pow(y1-y2, 2.)/(2*b1+EPS)
    # clip before exp so the sqrt argument stays well-defined
    Db = tf.clip_by_value(Db, EPS, 100.)
    return tf.math.sqrt(1 - tf.math.exp(-Db) + EPS)
def get_probiou_values(array):
    """Convert boxes (xmin, ymin, xmax, ymax) to ProbIoU Gaussian
    parameters: centre (x, y) and per-axis variances (a, b)."""
    xmin, ymin = array[:, 0], array[:, 1]
    xmax, ymax = array[:, 2], array[:, 3]
    centre_x = (xmin + xmax) / 2.
    centre_y = (ymin + ymax) / 2.
    # variance of a uniform distribution over the box extent: w^2 / 12
    var_x = tf.math.pow((xmax - xmin), 2.) / 12.
    var_y = tf.math.pow((ymax - ymin), 2.) / 12.
    return centre_x, centre_y, var_x, var_y
def calc_probiou(mode, target, pred, freezed=False):
    """Hellinger-distance-based ProbIoU loss between target and pred boxes.

    'probioul1' returns the distance itself; any other probiou mode returns
    the L2 variant -log(1 - Dh^2).
    """
    dist = helinger_dist(
        *get_probiou_values(target),
        *get_probiou_values(pred),
        freezed=freezed
    )
    if mode=='probioul1':
        return dist
    squared = tf.math.pow(dist, 2.)
    return - tf.math.log(1. - squared + EPS)
def calc_diou_ciou(mode, bboxes1, bboxes2):
    """Compute the DIoU ('diou') or CIoU (any other mode) loss between two
    paired sets of boxes in (xmin, ymin, xmax, ymax) format.

    Returns 1 - DIoU (or 1 - CIoU) clipped so the loss stays in [0, 2].
    """
    rows = tf.cast(tf.shape(bboxes1)[0], 'float32')
    cols = tf.cast(tf.shape(bboxes2)[0], 'float32')
    # NOTE(review): rows/cols are float32 tensors here; tf.zeros with a
    # float shape may error in eager mode — confirm this path is exercised.
    cious = tf.zeros((rows, cols), dtype='float32')
    dious = tf.zeros((rows, cols), dtype='float32')
    if rows * cols == 0:
        # no boxes on either side -> empty result
        return cious
    exchange = False
    if rows > cols:
        # operate with the smaller set first; undone via transpose below
        bboxes1, bboxes2 = bboxes2, bboxes1
        cious = tf.zeros((cols, rows), dtype='float32')
        dious = tf.zeros((cols, rows), dtype='float32')
        exchange = True
    # widths, heights and areas of both sets
    w1 = bboxes1[:, 2] - bboxes1[:, 0]
    h1 = bboxes1[:, 3] - bboxes1[:, 1]
    w2 = bboxes2[:, 2] - bboxes2[:, 0]
    h2 = bboxes2[:, 3] - bboxes2[:, 1]
    area1 = w1 * h1
    area2 = w2 * h2
    # box centres
    center_x1 = (bboxes1[:, 2] + bboxes1[:, 0]) / 2.
    center_y1 = (bboxes1[:, 3] + bboxes1[:, 1]) / 2.
    center_x2 = (bboxes2[:, 2] + bboxes2[:, 0]) / 2.
    center_y2 = (bboxes2[:, 3] + bboxes2[:, 1]) / 2.
    # intersection rectangle and smallest enclosing rectangle
    inter_max_xy = tf.math.minimum(bboxes1[:, 2:],bboxes2[:, 2:])
    inter_min_xy = tf.math.maximum(bboxes1[:, :2],bboxes2[:, :2])
    out_max_xy = tf.math.maximum(bboxes1[:, 2:],bboxes2[:, 2:])
    out_min_xy = tf.math.minimum(bboxes1[:, :2],bboxes2[:, :2])
    inter = inter_max_xy - inter_min_xy
    inter = tf.where(inter<0., 0., inter)
    inter_area = inter[:, 0] * inter[:, 1]
    # squared centre distance and squared enclosing-box diagonal
    inter_diag = (center_x2 - center_x1)**2. + (center_y2 - center_y1)**2.
    outer = out_max_xy - out_min_xy
    outer = tf.where(outer<0., 0., outer)
    outer_diag = (outer[:, 0] ** 2.) + (outer[:, 1] ** 2.)
    union = area1+area2-inter_area
    if mode=='diou':
        # DIoU = IoU - d^2 / c^2
        dious = inter_area / union - (inter_diag) / outer_diag
        dious = tf.clip_by_value(dious, -1.0, 1.0)
        if exchange:
            dious = tf.transpose(dious)
        return 1. - dious
    # CIoU adds an aspect-ratio consistency term v, weighted by alpha.
    # NOTE(review): tf.math.atan(w / h) divides by the box height; degenerate
    # boxes with h == 0 produce inf/NaN here — confirm inputs are non-empty.
    u = (inter_diag) / outer_diag
    iou = inter_area / union
    v = (4. / (math.pi ** 2.)) * tf.math.pow((tf.math.atan(w2 / h2) - tf.math.atan(w1 / h1)), 2.)
    # alpha is treated as a constant w.r.t. gradients (stop_gradient)
    S = tf.stop_gradient(1. - iou)
    alpha = tf.stop_gradient(v / (S + v))
    cious = iou - (u + alpha * v)
    cious = tf.clip_by_value(cious, -1.0, 1.0)
    if exchange:
        cious = tf.transpose(cious)
    return 1. - cious
def iou_loss(mode, phi, weight, anchor_parameters=None, freeze_iterations=0):
    """Create an IoU-family regression loss functor.

    Args
        mode: 'diou', 'ciou', a mode containing 'probiou', or a mode
            forwarded to tfa.losses.GIoULoss (e.g. 'giou', 'iou').
        phi: EfficientDet scaling coefficient (0-6); selects the input size.
        weight: scalar multiplier applied to the final loss.
        anchor_parameters: optional anchor configuration forwarded to
            anchors_for_shape.
        freeze_iterations: number of initial calls for which the ProbIoU
            distance uses its "freezed" formulation.
    """
    assert phi in range(7)
    image_sizes = [512, 640, 768, 896, 1024, 1280, 1408]
    input_size = float(image_sizes[phi])
    it = 0
    def _iou(y_true, y_pred):
        # NOTE(review): `it` advances once per *call* of this function; under
        # tf.function/graph execution that happens only at trace time, so the
        # freeze_iterations countdown may not behave as intended — confirm.
        nonlocal it
        # separate target and state
        regression = y_pred
        regression_target = y_true[:, :, :-1]
        anchor_state = y_true[:, :, -1]
        # decode offsets against the anchors into absolute boxes:
        # xmin, ymin, xmax, ymax
        anchors = anchors_for_shape((input_size, input_size), anchor_params=anchor_parameters)
        anchors_input = np.expand_dims(anchors, axis=0)
        regression = RegressBoxes(name='boxes')([anchors_input, regression[..., :4]])
        regression_target = RegressBoxes(name='boxes')([anchors_input, regression_target[..., :4]])
        # keep only positive anchors (state == 1)
        indices = tf.where(keras.backend.equal(anchor_state, 1))
        regression = tf.gather_nd(regression, indices)
        regression_target = tf.gather_nd(regression_target, indices)
        if 'probiou' in mode:
            loss = calc_probiou(mode, regression_target, regression, freezed=freeze_iterations>it)
            it += 1
        elif mode in ('diou', 'ciou'):
            loss = calc_diou_ciou(mode, regression, regression_target)
        else:
            # tfa.losses.GIoULoss requires y_min, x_min, y_max, x_max ordering
            xmin, ymin, xmax, ymax = tf.unstack(regression, axis=-1)
            regression = tf.stack([ymin,xmin,ymax,xmax], axis=-1)
            xmin, ymin, xmax, ymax = tf.unstack(regression_target, axis=-1)
            regression_target = tf.stack([ymin,xmin,ymax,xmax], axis=-1)
            loss = tfa.losses.GIoULoss(mode=mode, reduction=tf.keras.losses.Reduction.NONE) (regression_target, regression)
        return tf.cast(weight, 'float32') * loss
    return _iou
import PySimpleGUI as pg
import time
import sys
from pygame import mixer
# Section Popup
def win2m():
    """Build the borderless popup window shown between pomodoro sections."""
    popup_layout = [[pg.T(f'', key='T')], [pg.OK()]]
    popup = pg.Window('Popup', popup_layout, location=(250, 0), no_titlebar=True)
    return popup
def sound():
    """Initialise pygame's mixer and play the notification sound once."""
    mixer.init()
    track = mixer.music
    track.load("notification.mp3")
    track.set_volume(0.7)
    track.play()
def main():
    """Run the Pomodoro timer UI: pick a (work, break) pair, count up through
    the work section, then the break section, then reset the window."""
    # Color thingy
    pg.theme('dark amber')
    # Main Window
    layout = [
        [pg.Text('Timer = 0', key='timer', visible=False),
         pg.DropDown([(0.05, 0.05), (25, 5), (15, 2)], key='drop')],
        [pg.B('CLOSE'), pg.B('START')]
    ]
    win = pg.Window('Pomodoro', layout, location=(0, 0), finalize=True, no_titlebar=True)
    while True:
        # Reads for events and values
        e, v = win.read()
        # Closes the program
        if e == pg.WINDOW_CLOSED or e == 'CLOSE':
            win.close()
            sys.exit()
        # Starts the counter upon pressing START
        if e == 'START':
            # Defines how long each section is: (work minutes, break minutes)
            # NOTE(review): if START is pressed with nothing selected,
            # v['drop'] is '' and this unpack raises — confirm desired.
            WORK_T, BREAK_T = v['drop']
            # Hide the selection widgets while the timer runs
            win['drop'].update(visible=False)
            win['START'].hide_row()
            win['timer'].update(visible=True)
            # Count up from 0.00 minutes to WORK_T
            M = 0
            T = time.time()
            while M < WORK_T:
                M = round((time.time() - T) / 60, 2)
                win['timer'].update(M)
                win.refresh()
            # Popup window to indicate break time
            sound()
            win2 = win2m()
            win2.finalize()
            win2['T'].update(f'GOOD JOB!\nENJOY YOUR {BREAK_T} MINUTE BREAK NOW!')
            e2, v2 = win2.read()
            # BUG FIX: original tested `e2 == pg.WINDOW_CLOSED or 'OK'`,
            # which is always truthy; test membership instead.
            if e2 in (pg.WINDOW_CLOSED, 'OK'):
                win2.close()
            # Count up from 0.00 minutes to BREAK_T
            M = 0
            win['timer'].update(M)
            win.refresh()
            T = time.time()
            while M < BREAK_T:
                M = round((time.time() - T) / 60, 2)
                win['timer'].update(M)
                win.refresh()
            # Section over: notify and restore the initial window state
            sound()
            win2 = win2m()
            win2.finalize()
            win2['T'].update('GOOD JOB!\nSECTION IS OVER.')
            win2.refresh()
            e2, v2 = win2.read()
            if e2 in (pg.WINDOW_CLOSED, 'OK'):
                win2.close()
            win['drop'].update(visible=True)
            win['START'].unhide_row()
            win['timer'].update(visible=False)
            # BUG FIX: removed a stray trailing win.read() that silently
            # swallowed the next user event (START had to be clicked twice).
# Launch the UI when executed as a script.
if __name__ == '__main__':
    main()
| 3,093 | 1,033 |
class Data(object):
    """Lightweight attribute bag built from a dict of attributes."""

    def __init__(self, attributes):
        # Remember insertion order so __str__ reports fields predictably.
        self._keys = list(attributes.keys())
        for name, value in attributes.items():
            setattr(self, name, value)

    def __str__(self):
        fields = ", ".join(
            "{0}={1!r}".format(name, getattr(self, name)) for name in self._keys
        )
        return "Data({0})".format(fields)

    def __repr__(self):
        # repr mirrors str for readability in containers and debuggers.
        return str(self)
def data(**kwargs):
    """Convenience factory: build a Data object from keyword arguments."""
    return Data(kwargs)
| 449 | 138 |
import bpy
from .main import ToolPanel
from ..operators import retargeting, detector
from ..core.icon_manager import Icons
from ..core.retargeting import get_target_armature
from bpy.types import PropertyGroup, UIList
from bpy.props import StringProperty
# Retargeting panel
class RetargetingPanel(ToolPanel, bpy.types.Panel):
    """N-panel UI for retargeting an animation from one armature to another."""
    bl_idname = 'VIEW3D_PT_rsl_retargeting_v2'
    bl_label = 'Retargeting'

    def draw(self, context):
        layout = self.layout
        layout.use_property_split = False
        row = layout.row(align=True)
        row.label(text='Select the armatures:')
        row = layout.row(align=True)
        row.prop(context.scene, 'rsl_retargeting_armature_source', icon='ARMATURE_DATA')
        row = layout.row(align=True)
        row.prop(context.scene, 'rsl_retargeting_armature_target', icon='ARMATURE_DATA')

        # Bail out early when no object in the file carries an animation.
        anim_exists = False
        for obj in bpy.data.objects:
            if obj.animation_data and obj.animation_data.action:
                anim_exists = True
        if not anim_exists:
            row = layout.row(align=True)
            row.label(text='No animated armature found!', icon='INFO')
            return

        # Both armatures must be chosen before the bone list can be built.
        if not context.scene.rsl_retargeting_armature_source or not context.scene.rsl_retargeting_armature_target:
            self.draw_import_export(layout)
            return

        # No bone list yet: offer to build it.
        if not context.scene.rsl_retargeting_bone_list:
            row = layout.row(align=True)
            row.scale_y = 1.2
            row.operator(retargeting.BuildBoneList.bl_idname, icon_value=Icons.CALIBRATE.get_icon())
            self.draw_import_export(layout)
            return

        # Bone list exists: rebuild/clear controls ...
        subrow = layout.row(align=True)
        row = subrow.row(align=True)
        row.scale_y = 1.2
        row.operator(retargeting.BuildBoneList.bl_idname, text='Rebuild Bone List', icon_value=Icons.CALIBRATE.get_icon())
        row = subrow.row(align=True)
        row.scale_y = 1.2
        row.alignment = 'RIGHT'
        row.operator(retargeting.ClearBoneList.bl_idname, text="", icon='X')

        layout.separator()
        # ... the editable source -> target bone mapping ...
        row = layout.row(align=True)
        row.template_list("RSL_UL_BoneList", "Bone List", context.scene, "rsl_retargeting_bone_list", context.scene, "rsl_retargeting_bone_list_index", rows=1, maxrows=10)
        row = layout.row(align=True)
        row.prop(context.scene, 'rsl_retargeting_auto_scaling')
        row = layout.row(align=True)
        row.label(text='Use Pose:')
        row.prop(context.scene, 'rsl_retargeting_use_pose', expand=True)
        # ... and the main retarget action plus VRM renaming helpers.
        row = layout.row(align=True)
        row.scale_y = 1.4
        row.operator(retargeting.RetargetAnimation.bl_idname, icon_value=Icons.CALIBRATE.get_icon())
        self.draw_import_export(layout)
        row = layout.row(align=True)
        row.scale_y = 1.4
        row.operator(retargeting.RenameVRMBones.bl_idname, text='Rename VRM Bones', icon_value=Icons.CALIBRATE.get_icon())
        row = layout.row(align=True)
        row.scale_y = 1.4
        row.operator(retargeting.RenameVRMBonesStandard.bl_idname, text='Rename VRM Bones to Standard', icon_value=Icons.CALIBRATE.get_icon())

    def draw_import_export(self, layout):
        """Draw the save/import/export/clear row for custom naming schemes."""
        layout.separator()
        row = layout.row(align=True)
        row.label(text='Custom Naming Schemes:')
        row.operator(detector.SaveCustomBonesRetargeting.bl_idname, text='Save')
        subrow = layout.row(align=True)
        row = subrow.row(align=True)
        row.scale_y = 0.9
        row.operator(detector.ImportCustomBones.bl_idname, text='Import')
        row.operator(detector.ExportCustomBones.bl_idname, text='Export')
        row = subrow.row(align=True)
        row.scale_y = 0.9
        row.alignment = 'RIGHT'
        row.operator(detector.ClearCustomBones.bl_idname, text='', icon='X')
class BoneListItem(PropertyGroup):
    """Properties of the bone list items"""
    # Name of the bone on the source (animated) armature.
    bone_name_source: StringProperty(
        name="Source Bone",
        description="The source bone name",
        default="Undefined")
    # Name of the matching bone on the target armature (may be left empty).
    bone_name_target: StringProperty(
        name="Target Bone",
        description="The target bone name",
        default="")
    # Key produced by the automatic bone-name detector.
    bone_name_key: StringProperty(
        name="Auto Detection Key",
        description="The automatically detected bone key",
        default="")
class RSL_UL_BoneList(UIList):
    """Bone-list row: source bone label plus a searchable target-bone field."""
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        armature_target = get_target_armature()
        # Fixed split keeps the source-name column aligned across rows.
        layout = layout.split(factor=0.36, align=True)
        layout.label(text=item.bone_name_source)
        if armature_target:
            layout.prop_search(item, 'bone_name_target', armature_target.pose, "bones", text='')
| 4,703 | 1,558 |
from no_imports import *
| 25 | 9 |
import os
import pytest
from leapp.libraries.stdlib import api, CalledProcessError, run
from leapp.models import SELinuxModule, SELinuxModules
from leapp.reporting import Report
from leapp.snactor.fixture import current_actor_context
# [priority, name] pairs of mock SELinux modules installed during the test.
TEST_MODULES = [
    ['400', 'mock1'],
    ['99', 'mock1'],
    ['300', 'mock1'],
    ['400', 'mock2'],
    ['999', 'mock3'],
]
# [priority, name] pairs of mock template modules.
TEST_TEMPLATES = [
    ['200', 'base_container']
]
# semanage sub-commands (without the -a/-d flag) applied as local customizations.
SEMANAGE_COMMANDS = [
    ['fcontext', '-t', 'httpd_sys_content_t', '"/web(/.*)?"'],
    ['fcontext', '-t', 'cgdcbxd_var_run_t', '"/ganesha(/.*)?"'],
    ['fcontext', '-t', 'mock_file_type_t', '"/mock_directory(/.*)?"'],
    ['port', '-t', 'http_port_t', '-p', 'udp', '81'],
    ['permissive', 'abrt_t']
]
# Directory (relative to the test file) holding the mock .cil modules.
testmoduledir = 'tests/mock_modules/'
def _run_cmd(cmd, logmsg='', split=False):
    """Run *cmd* and return its stdout, or None on failure.

    When the command fails and *logmsg* is given, the error is logged as a
    warning prefixed by *logmsg*.
    """
    try:
        result = run(cmd, split=split)
    except CalledProcessError as err:
        if logmsg:
            api.current_logger().warning('{}: {}'.format(logmsg, err.stderr))
        return None
    return result.get('stdout', '')
@pytest.fixture(scope='module')
def semodule_lfull_initial():
    """Module-scoped snapshot of `semodule -lfull` taken before any test runs."""
    yield _run_cmd(['semodule', '-lfull'], logmsg='Error listing SELinux customizations')
@pytest.fixture(scope='module')
def semanage_export_initial():
    """Module-scoped snapshot of `semanage export` taken before any test runs."""
    yield _run_cmd(['semanage', 'export'], logmsg='Error listing SELinux customizations')
@pytest.fixture(scope='function')
def destructive_selinux_env():
    """Install mock SELinux modules and customizations, then remove them.

    DESTRUCTIVE: modifies the host's SELinux policy store for the duration
    of the test, undoing the changes on teardown.
    """
    tests_dir = os.path.join(os.getenv('PYTEST_CURRENT_TEST').rsplit(os.path.sep, 2)[0], testmoduledir)
    # try to install compatibility module - needed on newer systems - failure to install is expected on rhel 7
    _run_cmd(['semodule', '-X', '100', '-i', os.path.join(tests_dir, 'compat.cil')])

    # install all mock modules and templates in one semodule invocation
    semodule_command = ['semodule']
    for priority, module in TEST_MODULES + TEST_TEMPLATES:
        semodule_command.extend(['-X', priority, '-i', os.path.join(tests_dir, module + '.cil')])
    _run_cmd(semodule_command, logmsg='Error installing mock modules')

    for command in SEMANAGE_COMMANDS:
        _run_cmd(['semanage', command[0], '-a'] + command[1:], logmsg='Error applying selinux customizations')

    yield

    # teardown: drop the customizations and remove the modules (including the
    # compat and permissive helpers) in reverse order of installation
    for command in SEMANAGE_COMMANDS:
        _run_cmd(['semanage', command[0], '-d'] + command[1:])
    semodule_command = ['semodule']
    for priority, module in reversed(TEST_MODULES + TEST_TEMPLATES +
                                     [['400', 'permissive_abrt_t'], ['100', 'compat']]):
        semodule_command.extend(['-X', priority, '-r', module])
    _run_cmd(semodule_command)
@pytest.mark.skipif(os.getenv('DESTRUCTIVE_TESTING', False) in [False, '0'],
                    reason='Test disabled by default because it would modify the system')
def test_SELinuxPrepare(current_actor_context, semodule_lfull_initial, semanage_export_initial,
                        destructive_selinux_env):
    """Verify the actor removes the fed modules/customizations, restoring the
    pre-test SELinux state captured by the module-scoped fixtures."""
    before_test = []
    for cmd in (['semodule', '-lfull'], ['semanage', 'export']):
        res = _run_cmd(cmd, 'Error listing SELinux customizations')
        before_test.append(res)
        # XXX still not sure about logging in tests
        api.current_logger().info('Before test: {}'.format(res))
    # Make sure that initial semodule/semanage commands don't match before tests ones
    assert before_test != [semodule_lfull_initial, semanage_export_initial]

    # feed the actor the full list of modules (plus the compat/permissive
    # helpers) and templates that should be removed
    semodule_list = [SELinuxModule(name=module, priority=int(prio), content='', removed=[])
                     for (prio, module) in TEST_MODULES + [['400', 'permissive_abrt_t'], ['100', 'compat']]]
    template_list = [SELinuxModule(name=module, priority=int(prio), content='', removed=[])
                     for (prio, module) in TEST_TEMPLATES]

    current_actor_context.feed(SELinuxModules(modules=semodule_list, templates=template_list))
    current_actor_context.run()

    # check if all given modules and local customizations where removed
    semodule_res = _run_cmd(['semodule', '-lfull'], 'Error listing SELinux modules')
    assert semodule_lfull_initial == semodule_res
    semanage_res = _run_cmd(['semanage', 'export'], 'Error listing SELinux customizations')
    assert semanage_export_initial == semanage_res
| 4,128 | 1,422 |
#!/usr/bin/env python3
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import os
import subprocess
from glob import glob
import pandas as pd
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import base64
import json
import numpy as np
import re
from io import open # pylint: disable=W0622
import jinja2
# Read the version string stored in the `version` file next to this script.
# BUG FIX: use a context manager so the file handle is closed promptly
# (the original `open(...).read()` leaked the handle until GC).
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                       'version')) as _version_file:
    __version__ = _version_file.read()
class Template(object):
    """
    Utility class for generating a config file from a jinja template.
    https://github.com/oesteban/endofday/blob/f2e79c625d648ef45b08cc1f11fd0bd84342d604/endofday/core/template.py
    """

    def __init__(self, template_str):
        # Templates are resolved against the filesystem root, so absolute
        # template paths can be passed straight through.
        self.template_str = template_str
        self.env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(searchpath='/'),
            trim_blocks=True, lstrip_blocks=True)

    def compile(self, configs):
        """Render the template with *configs* and return the result string."""
        return self.env.get_template(self.template_str).render(configs)

    def generate_conf(self, configs, path):
        """Render the template with *configs* and write the outcome to *path*."""
        rendered = self.compile(configs)
        with open(path, 'w+') as output_file:
            output_file.write(rendered)
class IndividualTemplate(Template):
    """Specific template for the individual report"""
    def __init__(self):
        # Path is baked into the container image.
        #super(IndividualTemplate, self).__init__(pkgrf('mriqc', 'data/reports/individual.html'))
        super(IndividualTemplate, self).__init__('/code/reports/individual.html')
class GroupTemplate(Template):
    """Specific template for the group report"""
    def __init__(self):
        # Path is baked into the container image.
        #super(GroupTemplate, self).__init__(pkgrf('mriqc', 'data/reports/group.html'))
        super(GroupTemplate, self).__init__('/code/reports/group.html')
def read_report_snippet(in_file):
    """Return the contents of *in_file* for embedding in a report.

    Non-SVG files are returned verbatim. For SVGs, fixed height/width
    attributes are stripped from <svg> tags and everything before the first
    <svg> tag is dropped, so the snippet scales inside the report page.
    """
    import os.path as op
    import re
    from io import open  # pylint: disable=W0622

    if op.splitext(op.basename(in_file))[1] != '.svg':
        with open(in_file) as handle:
            return handle.read()

    with open(in_file) as handle:
        lines = handle.read().split('\n')

    start = 0
    cleaned = []
    for idx, text in enumerate(lines):
        if "<svg " in text:
            # drop hard-coded sizing so the SVG scales with the page
            text = re.sub(' height="[0-9.]+[a-z]*"', '', text)
            text = re.sub(' width="[0-9.]+[a-z]*"', '', text)
            if start == 0:
                start = idx
        cleaned.append(text)
    return '\n'.join(cleaned[start:])
def make_montage(prefix, ulay=None, olay=None, cbar='FreeSurfer_Seg_i255',
                 opacity=4, montx=3, monty=1, blowup=1, delta_slices='-1 -1 -1',
                 func_range_perc=100):
    """Build the @chauffeur_afni command line for a montage image.

    At least one of *ulay*/*olay* must be given; with only an overlay, it is
    promoted to the underlay and overlay rendering is switched off. Returns
    the command as a single string.
    """
    if ulay is None and olay is None:
        raise Exception("overlay and underlay can't both be undefined")
    elif ulay is None and olay is not None:
        # promote the overlay to underlay; no overlay will be drawn
        ulay, olay = olay, None

    parts = ['/code/@chauffeur_afni', '-ulay ' + ulay]
    if olay is not None:
        parts.append('-olay ' + olay)
        # centre the view on the overlay's centre of mass
        parts.append('-set_dicom_xyz `3dCM {i}`'.format(i=olay))
        parts.append('-cbar ' + cbar)
        parts.append('-opacity %d' % opacity)
    else:
        parts.append('-olay_off')
        # centre the view on the underlay's centre of mass
        parts.append('-set_dicom_xyz `3dCM {i}`'.format(i=ulay))
    parts.append('-prefix ' + prefix)
    parts.append('-do_clean')
    parts.append('-delta_slices ' + delta_slices)
    parts.append('-montx %d' % montx)
    parts.append('-monty %d' % monty)
    parts.append('-blowup %d' % blowup)
    parts.append('-func_range_perc %f' % func_range_perc)
    parts.append('-save_ftype JPEG')
    return ' '.join(parts)
def make_motion_plot(subj_dir, subj_id):
    """Plot outlier fraction, enorm and the six motion parameters for a subject.

    Reads AFNI's dfile_rall.1D, motion_<subj_id>_enorm.1D and outcount_rall.1D
    from *subj_dir* and writes <subj_dir>/qc/img/motion_plot.svg, returning
    that output path.
    """
    # Read the three files in
    motion_file = os.path.join(subj_dir,'dfile_rall.1D')
    motion = pd.read_csv(motion_file, sep='\s*', engine = 'python', names = ['$\Delta$A-P [mm]','$\Delta$L-R [mm]','$\Delta$I-S [mm]','Yaw [$^\circ$]','Pitch [$^\circ$]','Roll [$^\circ$]'])
    enorm_file = os.path.join(subj_dir,'motion_{subj_id}_enorm.1D'.format(subj_id=subj_id))
    enorm = pd.read_csv(enorm_file, sep='\s*', engine = 'python', names = ['enorm'])
    outlier_file = os.path.join(subj_dir,'outcount_rall.1D')
    outliers = pd.read_csv(outlier_file, sep='\s*', engine = 'python', names = ['outliers'])
    # make a dataframe: one column per measure, one row per volume
    mot_df = pd.concat([outliers,enorm,motion], axis = 1)
    # Plot the dataframe, one subplot per column
    axs = mot_df.plot(subplots = True, figsize = (4,5))
    ldgs = []
    for ax in axs:
        box = ax.get_position()
        ax.legend()
        # shrink each axes so the legend fits to the right of the plot
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        ldgs.append(ax.legend(loc='center left', bbox_to_anchor=(1, 0.5)))
    plt.tight_layout()
    # save the figure under <subj_dir>/qc/img/, creating directories as needed
    qc_dir = os.path.join(subj_dir,'qc')
    img_dir = os.path.join(qc_dir,'img')
    if not os.path.exists(qc_dir):
        os.mkdir(qc_dir)
    if not os.path.exists(img_dir):
        os.mkdir(img_dir)
    out_path = os.path.join(img_dir,'motion_plot.svg')
    plt.savefig(out_path, tight_layout = True, bbox_extra_artists=ldgs, bbox_inches='tight')
    return out_path
def run(command, env=None, shell=False):
    """Run *command* via subprocess, streaming its combined stdout/stderr
    to our stdout line by line.

    command -- argument list (or a string when shell=True)
    env     -- optional extra environment variables layered over os.environ
    Raises Exception when the process exits with a non-zero code.
    """
    # Copy first: the original assigned os.environ directly, so update()
    # leaked the extra variables into this process's environment.
    merged_env = os.environ.copy()
    merged_env.update(env or {})
    process = subprocess.Popen(command, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT, shell=shell,
                               env=merged_env)
    while True:
        line = process.stdout.readline()
        line = str(line, 'utf-8')[:-1]
        print(line)
        # An empty read plus a finished poll() means EOF and exit.
        if line == '' and process.poll() is not None:
            break
    if process.returncode != 0:
        raise Exception("Non zero return code: %d" % process.returncode)
# Pulls the task label out of a BIDS filename (...task-<label>_...).
task_re = re.compile('.*task-([^_]*)_.*')

parser = argparse.ArgumentParser(description='Example BIDS App entrypoint script.')
parser.add_argument('bids_dir', help='The directory with the input dataset '
                                     'formatted according to the BIDS standard.')
parser.add_argument('output_dir', help='The directory where the output files '
                                       'should be stored. If you are running group level analysis '
                                       'this folder should be prepopulated with the results of the'
                                       'participant level analysis.')
parser.add_argument('analysis_level', help='Level of the analysis that will be performed. '
                                           'Multiple participant level analyses can be run independently '
                                           '(in parallel) using the same output_dir.'
                                           'Only "participant" is currently supported.',
                    choices=['participant', 'group'])
parser.add_argument('--participant_label', help='The label(s) of the participant(s) that should be analyzed. The label '
                                                'corresponds to sub-<participant_label> from the BIDS spec '
                                                '(so it does not include "sub-"). If this parameter is not '
                                                'provided all subjects should be analyzed. Multiple '
                                                'participants can be specified with a space separated list.',
                    nargs="+")
parser.add_argument('--session_label', help='The label(s) of the sessions(s) that should be analyzed. The label '
                                            'corresponds to ses-<session_label> from the BIDS spec '
                                            '(so it does not include "ses-"). If this parameter is not '
                                            'provided all sessions should be analyzed. Multiple '
                                            'sessions can be specified with a space separated list.',
                    nargs="+")
parser.add_argument('--task_label', help='The label(s) of the tasks(s) that should be analyzed. The label '
                                         'corresponds to task-<task_label> from the BIDS spec '
                                         '(so it does not include "task-"). If this parameter is not '
                                         'provided all tasks will be analyzed. Multiple '
                                         'tasks can be specified with a space separated list.',
                    nargs="+")
# Custom afni_proc command template; the value is later str.format()-ed, so
# the help shows doubled braces for the placeholders users must include.
parser.add_argument('--afni_proc', help='Optional: command string for afni proc. '
                                        'Parameters that vary by subject '
                                        'should be encapsulated in curly braces and must all be included '
                                        '{{subj_id}}, {{out_dir}}, {{anat_path}}, or {{epi_paths}}.'
                                        'The first _T1w for each subject will currently be used as the anat.'
                                        'All of the _bold will be used as the functionals.'
                                        'Example:'
                                        '-subj_id {subj_id} '
                                        '-scr_overwrite -out_dir {{out_dir}} '
                                        '-blocks tshift align tlrc volreg blur mask scale '
                                        '-copy_anat {{anat_path}} -tcat_remove_first_trs 0 '
                                        '-dsets {{epi_paths}} -volreg_align_to MIN_OUTLIER '
                                        '-volreg_align_e2a -volreg_tlrc_warp -blur_size 4.0 -bash')
# Skip running afni_proc / tcsh and only (re)build the QC reports.
parser.add_argument('--report_only', dest='report_only', action='store_true')
parser.add_argument('-v', '--version', action='version',
                    version='afni_proc BIDS-App {}'.format(__version__))
args = parser.parse_args()
# Characters that could be abused for shell injection in a user-supplied
# afni_proc command.  NOTE(review): r'\.' is a two-character string
# (backslash + dot), so a plain '.' is NOT rejected — e.g. '-blur_size 4.0'
# passes; kept as-is since legitimate commands contain dots.  The raw
# string only silences the invalid-escape warning; the value is unchanged.
bad_chars = ['`', '|', '&', ';', '>', '<', '$', '?', r'\.', ':', '[', ']']
if args.afni_proc is not None:
    cmd_skeleton = args.afni_proc
    for bc in bad_chars:
        if bc in cmd_skeleton:
            raise Exception("Unsafe character '%s' found in command: %s" % (bc, cmd_skeleton))
    cmd_skeleton = 'python /opt/afni/afni_proc.py -check_results_dir no -script {ses_dir}/proc.bids.{subj_id}.{ses_id}.{task_id} ' + cmd_skeleton
else:
    # Default afni_proc.py pipeline.  Built from single-spaced implicitly
    # concatenated literals so the later split(' ') yields no empty tokens;
    # also fixes the original literal, which ended with a stray '""' (an
    # empty string silently concatenated onto it).
    cmd_skeleton = (
        "python /opt/afni/afni_proc.py -check_results_dir no -subj_id {subj_id} "
        "-script {ses_dir}/proc.bids.{subj_id}.{ses_id}.{task_id} -scr_overwrite -out_dir {out_dir} "
        "-blocks tshift align tlrc volreg blur mask scale "
        "-copy_anat {anat_path} -tcat_remove_first_trs 0 "
        "-dsets {epi_paths} -align_opts_aea -cost lpc+ZZ -giant_move "
        "-tlrc_base MNI152_T1_2009c+tlrc -tlrc_NL_warp "
        "-volreg_align_to MIN_OUTLIER "
        "-volreg_align_e2a -volreg_tlrc_warp -blur_size 4.0 -bash"
    )
# Fail fast if the dataset is not valid BIDS.
run(('bids-validator %s' % args.bids_dir).split(' '))
# Get path for report directory
reports_dir = os.path.join(args.output_dir, "reports")
subjects_to_analyze = []
# only for a subset of subjects
if args.participant_label:
    subjects_to_analyze = args.participant_label[0].split(' ')
# for all subjects
else:
    subject_dirs = glob(os.path.join(args.bids_dir, "sub-*"))
    subjects_to_analyze = sorted([subject_dir.split("-")[-1] for subject_dir in subject_dirs])
# TODO: throw early error if they've specified participants, labels,
# and subjects in such a way that there is nothing to analyze
# make sessions to analyze
# make tasks to analyze
all_configs = []  # per-task QC configs gathered for the group report
report_num = 0
for subject_label in subjects_to_analyze:
    # get anatomical path: first _T1w found at subject or any session level
    anat_path = sorted(list(glob(os.path.join(args.bids_dir, "sub-%s" % subject_label,
                                              "anat", "*_T1w.nii*")) + glob(os.path.join(args.bids_dir, "sub-%s" % subject_label, "ses-*", "anat", "*_T1w.nii*"))))[0]
    subj_out_dir = os.path.join(args.output_dir, "sub-%s" % subject_label)
    # Do sessions exist
    sessions_dirs = list(glob(os.path.join(args.bids_dir, "sub-%s" % subject_label, "ses-*")))
    sessions_list = [session_dir.split("-")[-1] for session_dir in sessions_dirs]
    if len(sessions_list) > 0:
        sessions_exist = True
        if args.session_label:
            sessions_to_analyze = sorted(set(args.session_label[0].split(' ')).intersection(set(sessions_list)))
        else:
            sessions_to_analyze = sessions_list
    else:
        # No ses-* dirs: iterate once with an empty session label.
        sessions_exist = False
        sessions_to_analyze = ['']
    for session_label in sessions_to_analyze:
        if sessions_exist:
            session_out_dir = os.path.join(subj_out_dir, "ses-%s" % session_label)
        else:
            session_out_dir = subj_out_dir
        os.makedirs(session_out_dir, exist_ok=True)
        all_epi_paths = sorted(set(glob(os.path.join(args.bids_dir, "sub-%s" % subject_label,
                                                     "func", "*bold.nii*")) + glob(os.path.join(args.bids_dir, "sub-%s" % subject_label, "ses-%s" % session_label, "func", "*bold.nii*"))))
        # Which tasks to analyze
        try:
            tasks_in_session = set([task_re.findall(epi)[0] for epi in all_epi_paths])
        except:
            print("Tasks: ", [epi for epi in all_epi_paths if len(task_re.findall(epi)) == 0])
            raise Exception("A bold scan without a task label exists. Not permitted")
        if args.task_label:
            tasks_to_analyze = sorted(set(args.task_label[0].split(' ')).intersection(tasks_in_session))
        else:
            tasks_to_analyze = sorted(tasks_in_session)
        for task_label in tasks_to_analyze:
            epi_paths = ' '.join(sorted(set(glob(os.path.join(args.bids_dir, "sub-%s" % subject_label,
                                                              "func", "*%s*bold.nii*" % task_label)) + glob(os.path.join(args.bids_dir, "sub-%s" % subject_label, "ses-%s" % session_label, "func", "*%s*bold.nii*" % task_label)))))
            task_out_dir = os.path.join(session_out_dir, task_label)
            task_qc_dir = os.path.join(task_out_dir, 'qc')
            task_qc_img_dir = os.path.join(task_qc_dir, 'img')
            if args.analysis_level == 'participant':
                config = {}
                cmd = cmd_skeleton.format(subj_id=subject_label, ses_id=session_label, task_id=task_label, out_dir=task_out_dir,
                                          anat_path=anat_path, epi_paths=epi_paths, ses_dir=session_out_dir)
                # Any brace left after formatting means a placeholder was
                # not filled (or user smuggled one in).
                if '{' in cmd:
                    raise Exception("Unsafe character '{' found in command: %s" % cmd.join(' '))
                # NOTE(review): replace(' ', ' ') is a no-op as written —
                # presumably meant to collapse double spaces before the
                # split so no empty argv entries appear; confirm intent.
                cmd = cmd.replace(' ', ' ').split(' ')
                if not args.report_only:
                    print(' '.join(cmd), flush=True)
                    # afni_proc.py writes the tcsh proc script for this task
                    run(cmd)
                    print('bash -c "$(set -o pipefail && tcsh -xef {ses_dir}/proc.bids.{subj_id}.{ses_id}.{task_id} 2>&1 | tee {ses_dir}/output.proc.bids.{subj_id}.{ses_id}.{task_id})"'.format(subj_id=subject_label, ses_id=session_label, task_id=task_label, ses_dir=session_out_dir), flush=True)
                    # Execute the generated proc script, capturing its output
                    run('bash -c "set -o pipefail && tcsh -xef {ses_dir}/proc.bids.{subj_id}.{ses_id}.{task_id} 2>&1 > {ses_dir}/output.proc.bids.{subj_id}.{ses_id}.{task_id}"'.format(subj_id=subject_label, ses_id=session_label, task_id=task_label, ses_dir=session_out_dir), shell=True)
                    run("mv {ses_dir}/proc.bids.{subj_id}.{ses_id}.{task_id} {out_dir};mv {ses_dir}/output.proc.bids.{subj_id}.{ses_id}.{task_id} {out_dir}".format(subj_id=subject_label, ses_id=session_label, task_id=task_label, ses_dir=session_out_dir, out_dir=task_out_dir), shell=True)
                # Parse pb??.<subj>.r??.<block>+<orient> outputs into a table
                pbs = glob(os.path.join(task_out_dir, 'pb*'))
                if len(pbs) > 0:
                    pb_lod = []
                    for pb in pbs:
                        pbd = {}
                        pbn = pb.split('/')[-1].split('.')
                        pbd['path'] = pb
                        pbd['filename'] = pb.split('/')[-1]
                        pbd['pb'] = int(pbn[0][-2:])
                        pbd['subj'] = pbn[1]
                        pbd['run'] = int(pbn[2][-2:])
                        pbd['block'] = pbn[3].split('+')[0]
                        pbd['orientation'] = pbn[3].split('+')[-1]
                        pb_lod.append(pbd)
                    pb_df = pd.DataFrame(pb_lod)
                    config['subj_id'] = pb_df.subj.unique()[0]
                    config['task_label'] = task_label
                    config['num_runs'] = len(pb_df.run.unique())
                    config['blocks'] = ' '.join(pb_df.block.unique())
                    config['report_num'] = report_num
                    report_num += 1
                    if session_label != '':
                        config['session_label'] = session_label
                try:
                    mot_path = make_motion_plot(task_out_dir, subject_label)
                    config['motion_report'] = read_report_snippet(mot_path)
                except FileNotFoundError:
                    pass
                # Collect afni warning files that have content
                warn_list = ['3dDeconvolve.err',
                             'out.pre_ss_warn.txt',
                             'out.cormat_warn.txt']
                warns = {}
                for wf in warn_list:
                    wf_path = os.path.join(task_out_dir, wf)
                    try:
                        if os.path.getsize(wf_path) > 0:
                            with open(wf_path, 'r') as h:
                                warns[wf] = h.readlines()
                                warns[wf] = [ww.replace('\n', '') for ww in warns[wf]]
                    except FileNotFoundError:
                        pass
                if len(warns) > 0:
                    config['warnings'] = warns
                if not os.path.exists(task_qc_dir):
                    os.mkdir(task_qc_dir)
                if not os.path.exists(task_qc_img_dir):
                    os.mkdir(task_qc_img_dir)
                if not os.path.exists(reports_dir):
                    os.mkdir(reports_dir)
                # Render anatomical/functional alignment montages via afni
                try:
                    anat_out_path = os.path.join(task_out_dir, 'anat_final.%s+tlrc.HEAD' % subject_label)
                    anat_exts = np.array([float(ss) for ss in subprocess.check_output(["3dinfo", "-extent", anat_out_path]).decode().split('\t')])
                    anat_lrext = np.abs(anat_exts[0]) + np.abs(anat_exts[1])
                    # Grid sized so slices roughly cover the L-R extent
                    anat_mont_dim = np.floor(np.sqrt(anat_lrext))
                    print("#######\n mont_dim = %f \n#########" % anat_mont_dim)
                    run(make_montage(os.path.join(task_qc_img_dir, 'anatomical_montage'),
                                     ulay=anat_out_path,
                                     montx=anat_mont_dim, monty=anat_mont_dim), shell=True)
                    # NOTE(review): pb_df only exists when pb* files were
                    # found above; otherwise this raises NameError, which the
                    # except clause below does not catch — confirm intended.
                    func_path = pb_df.loc[pb_df['block'] == 'volreg', 'path'].values[0] + '[0]'
                    func_rext = float(subprocess.check_output(["3dinfo", "-Rextent", func_path]))
                    func_lext = float(subprocess.check_output(["3dinfo", "-Lextent", func_path]))
                    func_lrext = np.abs(func_lext) + np.abs(func_rext)
                    # NOTE(review): func_mont_dim is computed but unused —
                    # the functional montage below reuses anat_mont_dim.
                    func_mont_dim = np.floor(np.sqrt(func_lrext))
                    run(make_montage(os.path.join(task_qc_img_dir, 'functional_montage'),
                                     ulay=anat_out_path,
                                     olay=func_path, montx=anat_mont_dim, monty=anat_mont_dim,
                                     cbar='gray_scale', opacity=9), shell=True)
                    # Inline the sagittal montages into the report as base64
                    with open(os.path.join(task_qc_img_dir, 'anatomical_montage.sag.jpg'), 'rb') as h:
                        anat_bs = base64.b64encode(h.read()).decode()
                    with open(os.path.join(task_qc_img_dir, 'functional_montage.sag.jpg'), 'rb') as h:
                        func_bs = base64.b64encode(h.read()).decode()
                    config['volreg_report_anat'] = anat_bs
                    config['volreg_report_func'] = func_bs
                    config['anat_ap_ext'] = np.abs(anat_exts[2]) + np.abs(anat_exts[3]) + 1
                    config['anat_is_ext'] = np.abs(anat_exts[4]) + np.abs(anat_exts[5]) + 1
                    print("#######\n anat_ap_ext = %f \n#########" % config['anat_ap_ext'])
                except (FileNotFoundError, ValueError):
                    pass
                # Write the per-task HTML report and persist the QC config
                tpl = IndividualTemplate()
                if sessions_exist:
                    tpl.generate_conf(config, os.path.join(reports_dir, 'sub-%s_ses-%s_task-%s_individual.html' % (subject_label, session_label, task_label)))
                else:
                    tpl.generate_conf(config, os.path.join(reports_dir, 'sub-%s_task-%s_individual.html' % (subject_label, task_label)))
                with open(os.path.join(task_qc_dir, 'individual.json'), 'w') as h:
                    json.dump(config, h)
            elif args.analysis_level == 'group':
                # Group level just gathers the per-task configs written above
                with open(os.path.join(task_qc_dir, 'individual.json'), 'r') as h:
                    all_configs.append(json.load(h))
if args.analysis_level == 'group':
    if not os.path.exists(reports_dir):
        os.mkdir(reports_dir)
    tpl = GroupTemplate()
    # print(all_configs)
    tpl.generate_conf({'configs': all_configs}, os.path.join(reports_dir, 'group.html'))
from app import app
from flask import render_template, request, flash
from .form import *
from automlk.monitor import get_heart_beeps
from automlk.context import get_config, set_config
@app.route('/monitor', methods=['GET'])
def monitor():
    """Render the worker-monitoring page with current heart-beep data."""
    context = {
        'controller': get_heart_beeps('controller'),
        'grapher': get_heart_beeps('grapher'),
        'worker_text': get_heart_beeps('worker_text'),
        'workers': get_heart_beeps('worker'),
        'config': get_config(),
    }
    return render_template('monitor.html', **context)
@app.route('/config', methods=['GET', 'POST'])
def config():
    # view/edit configuration
    form = ConfigForm()
    if request.method == 'POST':
        # Persist the submitted settings (only when the form validates).
        if form.validate():
            try:
                set_config(data=form.data.data,
                           theme=form.theme.data,
                           bootstrap=form.bootstrap.data,
                           graph_theme=form.graph_theme.data,
                           store=form.store.data,
                           store_url=form.store_url.data)
            except Exception as e:
                # Surface the failure to the user instead of a 500 page.
                flash(str(e))
    else:
        # GET: pre-populate the form from the stored configuration.
        config = get_config()
        # copy data to form
        form.data.data = config['data']
        form.theme.data = config['theme']
        form.bootstrap.data = config['bootstrap']
        form.graph_theme.data = config['graph_theme']
        form.store.data = config['store']
        form.store_url.data = config['store_url']
    # get_config() is re-read so the page reflects any change just saved.
    return render_template('config.html', form=form, config=get_config())
| 1,547 | 423 |
import errand_boy
from errand_boy import run
from errand_boy.transports import unixsocket
from .base import mock, BaseTestCase
class MainTestCase(BaseTestCase):
    """Tests for errand_boy.run.main in client and server modes."""

    def test_client(self):
        # Extra argv entries make main() act as a client: it joins them into
        # one command string and sends it over the UNIX-socket transport.
        argv = ['/srv/errand-boy/errand_boy/run.py', 'ls', '-al']
        cmd = ' '.join(argv[1:])
        with self.UNIXSocketTransport_patcher as UNIXSocketTransport,\
                self.sys_patcher as mocked_sys:
            mock_process = mock.Mock()
            mock_process.returncode = 0
            stdout = 'foo'
            stderr = 'bar'
            transport = mock.Mock()
            transport.run_cmd.return_value = stdout, stderr, mock_process.returncode
            UNIXSocketTransport.return_value = transport
            run.main(argv)
            # One command sent; remote stdout/stderr mirrored to the local
            # streams; the child's return code propagated via sys.exit.
            self.assertEqual(transport.run_cmd.call_count, 1)
            self.assertEqual(transport.run_cmd.call_args_list[0][0][0], cmd)
            self.assertEqual(mocked_sys.stdout.write.call_count, 1)
            self.assertEqual(mocked_sys.stdout.write.call_args_list[0][0][0], stdout)
            self.assertEqual(mocked_sys.stderr.write.call_count, 1)
            self.assertEqual(mocked_sys.stderr.write.call_args_list[0][0][0], stderr)
            self.assertEqual(mocked_sys.exit.call_count, 1)
            self.assertEqual(mocked_sys.exit.call_args_list[0][0][0], mock_process.returncode)

    def test_server_no_options(self):
        # No extra argv: main() starts the server with default pool settings.
        argv = ['/srv/errand-boy/errand_boy/run.py']
        with self.UNIXSocketTransport_patcher as UNIXSocketTransport:
            transport = mock.Mock()
            UNIXSocketTransport.return_value = transport
            run.main(argv)
            self.assertEqual(transport.run_server.call_count, 1)
            self.assertEqual(transport.run_server.call_args_list[0][0], tuple())
            self.assertEqual(transport.run_server.call_args_list[0][1], {'max_accepts': 5000, 'max_child_tasks': 100, 'pool_size': 10})

    def test_server_with_options(self):
        # A command-line option overrides the matching server default only.
        argv = ['/srv/errand-boy/errand_boy/run.py', '--max-accepts', '5']
        with self.UNIXSocketTransport_patcher as UNIXSocketTransport:
            transport = mock.Mock()
            UNIXSocketTransport.return_value = transport
            run.main(argv)
            self.assertEqual(transport.run_server.call_count, 1)
            self.assertEqual(transport.run_server.call_args_list[0][0], tuple())
            self.assertEqual(transport.run_server.call_args_list[0][1], {'max_accepts': int(argv[2]), 'max_child_tasks': 100, 'pool_size': 10})
| 2,471 | 829 |
import socket
from contextlib import suppress
from os import SEEK_END, stat
from pathlib import Path
from re import search, split, findall
from sys import exc_info
from threading import Thread
from time import sleep
from traceback import format_exc
from colorama import Fore, Style
from discord import Webhook, RequestsWebhookAdapter
from components.localization import get_translation
from config.init_config import Config, BotVars
class Watcher:
    """Polls a file's mtime on a daemon thread and fires a callback when it
    changes, feeding the callback the last line processed so far."""

    # NOTE(review): class-level defaults; stop()/start() write instance
    # attributes that shadow these, so each instance gets its own state.
    _running = True
    _thread = None

    # Constructor
    def __init__(self, watch_file: Path, call_func_on_change=None, *args, **kwargs):
        self._cached_stamp = None  # last observed st_mtime; None until first look()
        self._filename: Path = watch_file
        self._call_func_on_change = call_func_on_change
        # Poll interval (seconds) from the bot's cross-platform chat settings.
        self._refresh_delay_secs = Config.get_cross_platform_chat_settings().refresh_delay_of_console_log
        self._args = args
        self._kwargs = kwargs

    # Look for changes
    def look(self):
        """Compare the file's current mtime with the cached one; on change,
        invoke the callback (skipped for the very first observation)."""
        stamp = stat(self._filename).st_mtime
        if stamp != self._cached_stamp:
            temp = self._cached_stamp
            self._cached_stamp = stamp
            # temp is None only on the first call: record the stamp without
            # firing the callback for pre-existing content.
            if self._call_func_on_change is not None and temp is not None:
                BotVars.watcher_last_line = self._call_func_on_change(file=self._filename,
                                                                      last_line=BotVars.watcher_last_line,
                                                                      *self._args, **self._kwargs)

    # Keep watching in a loop
    def watch(self):
        """Thread body: sleep, poll, and swallow/report per-iteration errors
        so a transient failure never kills the watcher."""
        while self._running:
            try:
                # Look for changes
                sleep(self._refresh_delay_secs)
                self.look()
            except FileNotFoundError:
                print(get_translation("Watcher Error: File '{0}' wasn't found!").format(self._filename.as_posix()))
            except UnicodeDecodeError:
                print(get_translation("Watcher Error: Can't decode strings from file '{0}'"
                                      ", check that minecraft server saves it in utf-8 encoding!\n"
                                      "(Ensure you have '-Dfile.encoding=UTF-8' as one of the arguments "
                                      "to start the server in start script)").format(self._filename.as_posix()))
            except BaseException:
                exc = format_exc().rstrip("\n")
                print(get_translation("Watcher Unhandled Error: {0}").format(exc_info()[0]) +
                      f"\n{Style.DIM}{Fore.RED}{exc}{Style.RESET_ALL}")

    def start(self):
        # Daemon thread: the process can exit without joining the watcher.
        self._thread = Thread(target=self.watch, daemon=True)
        self._thread.start()

    def stop(self):
        # join() may wait up to one refresh delay for the loop to notice.
        self._running = False
        if self._thread is not None:
            self._thread.join()
            self._thread = None

    def is_running(self):
        return self._running
def create_watcher():
    """(Re)create the global console-log watcher for the selected server.

    Stops any watcher that is already running, then picks the log path by
    server version: versions >= 7 use logs/latest.log, versions 0..6 use
    server.log, anything else (negative/unknown) gets no watcher at all.
    """
    if BotVars.watcher_of_log_file is not None and BotVars.watcher_of_log_file.is_running():
        BotVars.watcher_of_log_file.stop()
    # Imported here to avoid a circular import at module load time.
    from components.additional_funcs import get_server_version
    server_version = get_server_version()
    if 7 <= server_version:
        path_to_server_log = "logs/latest.log"
    elif 0 <= server_version < 7:
        path_to_server_log = "server.log"
    else:
        return
    BotVars.watcher_of_log_file = Watcher(watch_file=Path(Config.get_selected_server_from_list().working_directory,
                                                          path_to_server_log),
                                          call_func_on_change=_check_log_file)
def create_chat_webhook():
    """Initialise the global chat webhook from the configured URL; a no-op
    when no webhook URL is configured."""
    webhook_url = Config.get_cross_platform_chat_settings().webhook_url
    if not webhook_url:
        return
    BotVars.webhook_chat = Webhook.from_url(url=webhook_url,
                                            adapter=RequestsWebhookAdapter())
def _check_log_file(file: Path, last_line: str = None):
    """Forward new minecraft chat lines from the console log to Discord.

    Reads the lines added since *last_line*, extracts "<nick> message"
    chat lines, rewrites @-mentions into Discord member/role mentions,
    announces mentions back in-game over RCON, and posts each message via
    the chat webhook.  Returns the newest line processed so the caller can
    pass it back on the next file change.
    """
    if Config.get_cross_platform_chat_settings().channel_id is None:
        return
    last_lines = _get_last_n_lines(file,
                                   Config.get_cross_platform_chat_settings().number_of_lines_to_check_in_console_log,
                                   last_line)
    if len(last_lines) == 0:
        return last_line
    if last_line is None:
        # First observation: only consider the newest line.
        last_lines = last_lines[-1:]
    # Heuristic limits when matching names after an '@': up to 5 following
    # words (names may contain spaces) and up to 5 trailing symbols trimmed.
    mention_max_words = 5
    mention_max_right_symbols = 5
    for line in last_lines:
        # Chat lines look like "... INFO ... <nick> message"; a '*' before
        # the nick (server-side markers) disqualifies the line.
        if search(r"INFO", line) and "*" not in split(r"<([^>]*)>", line, maxsplit=1)[0] and \
                search(r"<([^>]*)> (.*)", line):
            player_nick, player_message = search(r"<([^>]*)>", line)[0], \
                split(r"<([^>]*)>", line, maxsplit=1)[-1].strip()
            if search(r"@[^\s]+", player_message):
                # split_arr: message text around each @token;
                # mentions[i] starts as [token-without-@] and is refined
                # in place below to [name(, target)(, cut_symbols)].
                split_arr = split(r"@[^\s]+", player_message)
                mentions = [[i[1:]] for i in findall(r"@[^\s]+", player_message)]
                for i_mention in range(len(mentions)):
                    # Try progressively longer word suffixes...
                    for words_number in range(mention_max_words + 1):
                        if len(split_arr[1 + i_mention]) < words_number:
                            break
                        found = False
                        add_string = " ".join(split_arr[1 + i_mention].lstrip(" ").split(" ")[:words_number]) \
                            if words_number > 0 else ""
                        # ...and progressively trim trailing symbols (e.g.
                        # punctuation stuck to the end of the name).
                        for symbols_number in range(mention_max_right_symbols + 1):
                            mention = f"{mentions[i_mention][0]} {add_string}".lower() \
                                if len(add_string) > 0 else mentions[i_mention][0].lower()
                            cut_right_string = None
                            if symbols_number > 0:
                                cut_right_string = mention[-symbols_number:]
                                mention = mention[:-symbols_number]
                            found = False
                            # Check mention of everyone and here
                            for mention_pattern in ["a", "e", "everyone", "p", "here"]:
                                if mention_pattern == mention:
                                    mentions[i_mention] = [mention_pattern]
                                    if cut_right_string is not None:
                                        mentions[i_mention].extend([None, cut_right_string])
                                    found = True
                                    break
                            # Check mention on user mention
                            for member in BotVars.bot_for_webhooks.guilds[0].members:
                                if member.name.lower() == mention:
                                    mentions[i_mention] = [member.name if len(add_string) == 0
                                                           else [member.name, add_string], member]
                                    if cut_right_string is not None:
                                        mentions[i_mention].append(cut_right_string)
                                    found = True
                                    break
                                elif member.display_name.lower() == mention:
                                    mentions[i_mention] = [member.display_name if len(add_string) == 0
                                                           else [member.display_name, add_string], member]
                                    if cut_right_string is not None:
                                        mentions[i_mention].append(cut_right_string)
                                    found = True
                                    break
                            if found:
                                break
                            # Check mention on role mention
                            for role in BotVars.bot_for_webhooks.guilds[0].roles:
                                if role.name.lower() == mention:
                                    mentions[i_mention] = [role.name if len(add_string) == 0
                                                           else [role.name, add_string], role]
                                    if cut_right_string is not None:
                                        mentions[i_mention].append(cut_right_string)
                                    found = True
                                    break
                            if found:
                                break
                            # Check mention on minecraft nick mention
                            for user in Config.get_settings().known_users:
                                if user.user_minecraft_nick.lower() == mention:
                                    if len(mentions[i_mention]) == 1:
                                        mentions[i_mention] = [user.user_minecraft_nick if len(add_string) == 0
                                                               else [user.user_minecraft_nick, add_string], []]
                                        if cut_right_string is not None:
                                            mentions[i_mention].append(cut_right_string)
                                    # Several known users may share a nick,
                                    # so collect every matching member.
                                    if isinstance(mentions[i_mention][1], list):
                                        mentions[i_mention][1] += [m for m in BotVars.bot_for_webhooks.guilds[0].members
                                                                   if m.id == user.user_discord_id]
                                    found = True
                            if found:
                                break
                        if found:
                            break
                insert_numb = 1  # index in split_arr where the next mention goes
                mention_nicks = []  # in-game names to announce to over RCON
                for mention in mentions:
                    # mention[0]: name str, or [name, trailing-words] list;
                    # mention[1] (optional): Member/Role or list of Members;
                    # mention[2] (optional): symbols cut from the right edge.
                    if isinstance(mention[0], str):
                        is_list = False
                    elif isinstance(mention[0], list):
                        is_list = True
                    else:
                        raise ValueError("mention[0] is not string or list!")
                    if (mention[0] if not is_list else mention[0][0]) in ["a", "e", "everyone"]:
                        if len(mention) == 3:
                            split_arr[insert_numb] = f"{mention[2]}{split_arr[insert_numb]}"
                        split_arr.insert(insert_numb, f"@everyone")
                        if "@a" not in mention_nicks:
                            mention_nicks.append("@a")
                    elif (mention[0] if not is_list else mention[0][0]) in ["p", "here"]:
                        if len(mention) == 3:
                            split_arr[insert_numb] = f"{mention[2]}{split_arr[insert_numb]}"
                        split_arr.insert(insert_numb, f"@here")
                        if "@a" not in mention_nicks:
                            mention_nicks.append("@a")
                    elif len(mention) > 1 and isinstance(mention[1], list):
                        # Minecraft-nick mention: may expand to many members.
                        if not is_list:
                            if len(mention) == 3:
                                split_arr[insert_numb] = f"{mention[2]}{split_arr[insert_numb]}"
                            split_arr.insert(insert_numb,
                                             f"@{mention[0]} ({', '.join([mn.mention for mn in mention[1]])})")
                        else:
                            # Consumed extra words: strip them from the text.
                            split_arr[insert_numb] = split_arr[insert_numb][1:].lstrip(mention[0][1])
                            if len(mention) == 3:
                                split_arr[insert_numb] = f"{mention[2]}{split_arr[insert_numb]}"
                            split_arr.insert(insert_numb,
                                             f"@{mention[0][0]} ({', '.join([mn.mention for mn in mention[1]])})")
                        if "@a" not in mention_nicks:
                            mention_nicks.append(mention[0] if not is_list else mention[0][0])
                    else:
                        # Discord member/role mention (or unresolved token).
                        if not is_list:
                            if len(mention) == 3:
                                split_arr[insert_numb] = f"{mention[2]}{split_arr[insert_numb]}"
                            split_arr.insert(insert_numb,
                                             mention[1].mention if len(mention) > 1 and
                                             mention[1] is not None else f"@{mention[0]}")
                        else:
                            split_arr[insert_numb] = split_arr[insert_numb][1:].lstrip(mention[0][1])
                            if len(mention) == 3:
                                split_arr[insert_numb] = f"{mention[2]}{split_arr[insert_numb]}"
                            split_arr.insert(insert_numb,
                                             mention[1].mention if len(mention) > 1 and
                                             mention[1] is not None else f"@{mention[0][0]}")
                    insert_numb += 2
                player_message = "".join(split_arr)
                if len(mention_nicks) > 0:
                    # Imported lazily to avoid a circular import.
                    from components.additional_funcs import announce, connect_rcon, times
                    with suppress(ConnectionError, socket.error):
                        with connect_rcon() as cl_r:
                            with times(0, 60, 20, cl_r):
                                for nick in mention_nicks:
                                    announce(nick, f"@{player_nick[1:-1]} -> @{nick if nick != '@a' else 'everyone'}",
                                             cl_r)
            BotVars.webhook_chat.send(f"**{player_nick}** {player_message}")
    return last_lines[-1]
def _get_last_n_lines(file, number_of_lines, last_line):
list_of_lines = []
with open(file, 'rb') as read_obj:
read_obj.seek(-2, SEEK_END)
buffer = bytearray()
pointer_location = read_obj.tell()
while pointer_location >= 0:
read_obj.seek(pointer_location)
pointer_location = pointer_location - 1
new_byte = read_obj.read(1)
if new_byte == b'\n':
decoded_line = buffer[::-1].decode().strip()
if decoded_line == last_line:
return list(reversed(list_of_lines))
list_of_lines.append(decoded_line)
if len(list_of_lines) == number_of_lines:
return list(reversed(list_of_lines))
buffer = bytearray()
else:
buffer.extend(new_byte)
if len(buffer) > 0:
list_of_lines.append(buffer[::-1].decode().strip())
return list(reversed(list_of_lines))
| 14,591 | 3,841 |
#!/usr/bin/python3.7
"""
Set all playlist descriptions.
Example result:
Resident Advisor Archive www.residentarchive.com @residentarchive
"""
import boto3
import spotipy
from pprint import pprint
dynamodb = boto3.resource("dynamodb", region_name='eu-west-1')
ra_playlists = dynamodb.Table('ra_playlists')

# Editing playlist details requires the modify scopes.
scope = 'playlist-modify-public playlist-modify-private'
sp = spotipy.Spotify(auth_manager=spotipy.SpotifyOAuth(scope=scope))

# Get all stored playlist records.  NOTE(review): scan() returns a single
# page; tables larger than 1 MB would need LastEvaluatedKey pagination.
playlists = ra_playlists.scan()
pprint(len(playlists['Items']))

desc = "Resident Advisor Archive www.residentarchive.com @residentarchive"
for p in playlists['Items']:
    print(p.get('spotify_playlist'), desc)
    # Fixed: the playlist id is the FIRST positional argument of
    # playlist_change_details().  The old call passed None as the id and
    # the id as the second positional (name), which would have failed the
    # request / renamed the playlist to its own id.
    sp.playlist_change_details(p.get('spotify_playlist'), description=desc)
import pandas as pd
# Widen pandas' console rendering limits so the flops report table is
# printed in full rather than truncated/elided.
pd.set_option('display.width', 1000)
pd.set_option('display.max_rows', 10000)
pd.set_option('display.max_columns', 10000)
def round_value(value, binary=False):
    """Format *value* with a T/G/M/K suffix (powers of 1024 when *binary*,
    otherwise powers of 1000); values below one K are returned unchanged."""
    base = 1024. if binary else 1000.
    for power, suffix in ((4, 'T'), (3, 'G'), (2, 'M'), (1, 'K')):
        unit = base ** power
        if value // unit > 0:
            return str(round(value / unit, 2)) + suffix
    return str(value)
def report_format(collected_nodes):
    """Render a per-module Flops table followed by a grand total line.

    collected_nodes -- iterable of objects exposing .name and .Flops
    Returns the formatted report as a string.
    """
    data = [[node.name, node.Flops] for node in collected_nodes]
    df = pd.DataFrame(data, columns=['module name', 'Flops'])
    total_flops = df['Flops'].sum()
    # Add Total row.  Fixed: DataFrame.append was removed in pandas 2.0 —
    # build the 'total' row as a one-row frame and pd.concat it instead.
    total_df = pd.Series([total_flops],
                         index=['Flops'],
                         name='total')
    df = pd.concat([df, total_df.to_frame().T])
    df = df.fillna(' ')
    # Thousands separators for readability.
    df['Flops'] = df['Flops'].apply(lambda x: '{:,}'.format(x))
    summary = str(df) + '\n'
    # Rule lines sized to the rendered table width.
    width = len(str(df).split('\n')[0])
    summary += "=" * width
    summary += '\n'
    summary += "-" * width
    summary += '\n'
    summary += "Total Flops: {}Flops\n".format(round_value(total_flops))
    return summary
| 1,428 | 532 |
class Region(object):
    """A group of puzzle cells (e.g. a sudoku row, column, or box)."""

    def __init__(self, cells):
        self.cells = cells

    def print(self):
        # Dump the raw cell list (method intentionally shadows the builtin
        # name only as an attribute; the call below still hits builtins).
        print(self.cells)

    def get_missing_numbers(self):
        """Return the digits 1-9 not present among this region's cells."""
        present = self.get_cell_values()
        return [digit for digit in range(1, 10) if digit not in present]

    def get_cell_values(self):
        """Return every cell's value, in cell order."""
        return [cell.value for cell in self.cells]
| 513 | 151 |
# django-ipware moved get_client_ip between modules across versions; fall
# back to the legacy ipware.ip2 location when the modern import fails.
try:
    from ipware.ip import get_client_ip
except ImportError:
    from ipware.ip2 import get_client_ip
| 106 | 36 |
from setuptools import setup, find_packages

# See:
# https://packaging.python.org/guides/distributing-packages-using-setuptools
setup(
    name="cigarbox",
    version="0.1.0",
    description="utility libraries",
    long_description="utility libraries",
    long_description_content_type="text/plain",
    url="https://github.com/smherwig/cigarbox",
    author="Stephen M. Herwig",
    author_email="smherwig@cs.umd.edu",
    classifiers=[
        # Fixed: "Developement Status" (typo) is not a valid trove
        # classifier and is rejected on upload to PyPI.
        "Development Status :: 2 - Pre-Alpha",
        "License :: OSI Approved :: BSD License",
        "Natural Language :: English",
        "Programming Language :: Python :: 2.7",
    ],
    keywords="data structures",
    packages=find_packages(),
    python_requires='>=2.7',
)
| 736 | 237 |
# -*- coding: utf-8 -*-
import types
from crossover import Client
from crossover import _Requester
def test_client_attributes():
    """Smoke-check the public surface of a freshly constructed Client."""
    redis_url = "redis://localhost:6379/0"
    api = Client(redis_url)
    assert isinstance(api, Client)
    assert isinstance(api.test, _Requester)
    assert isinstance(api.call_task, types.MethodType)
| 322 | 104 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2020 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE.txt or http://www.opensource.org/licenses/mit-license.php.
import os
import sys
import time
import signal
import logging
import subprocess
from io import StringIO
from unittest.mock import patch
from xmrswap.rpc import callrpc, callrpc_xmr, callrpc_xmr_na
from xmrswap.util import dumpje
from xmrswap.contrib.rpcauth import generate_salt, password_to_hmac
import bin.xmrswaptool as swapTool
# Root directory for all per-node test datadirs (override via TEST_DATADIRS).
TEST_DATADIRS = os.path.expanduser(os.getenv('TEST_DATADIRS', '/tmp/xmrswap'))

# Particl regtest node count and port bases (node i listens on base + i).
NUM_NODES = 3
BASE_PORT = 14792
BASE_RPC_PORT = 19792

# Monero regtest node count and port bases (node i listens on base + i).
XMR_NUM_NODES = 3
XMR_BASE_P2P_PORT = 17792
XMR_BASE_RPC_PORT = 21792
XMR_BASE_ZMQ_PORT = 22792
XMR_BASE_WALLET_RPC_PORT = 23792

bin_suffix = ('.exe' if os.name == 'nt' else '')

# Daemon/CLI binary locations, all overridable through the environment.
PARTICL_BINDIR = os.path.expanduser(os.getenv('PARTICL_BINDIR', '.'))
PARTICLD = os.getenv('PARTICLD', 'particld' + bin_suffix)
PARTICL_CLI = os.getenv('PARTICL_CLI', 'particl-cli' + bin_suffix)
PARTICL_TX = os.getenv('PARTICL_TX', 'particl-tx' + bin_suffix)
BITCOIN_BINDIR = os.path.expanduser(os.getenv('BITCOIN_BINDIR', ''))
BITCOIND = os.getenv('BITCOIND', 'bitcoind' + bin_suffix)
BITCOIN_CLI = os.getenv('BITCOIN_CLI', 'bitcoin-cli' + bin_suffix)
BITCOIN_TX = os.getenv('BITCOIN_TX', 'bitcoin-tx' + bin_suffix)
XMR_BINDIR = os.path.expanduser(os.getenv('XMR_BINDIR', ''))
XMRD = os.getenv('XMRD', 'monerod' + bin_suffix)
XMR_WALLET_RPC = os.getenv('XMR_WALLET_RPC', 'monero-wallet-rpc' + bin_suffix)
def prepareXmrDataDir(datadir, node_id, conf_file):
    """Create the datadir and monerod regtest config for XMR node *node_id*;
    a no-op when the config file already exists."""
    node_dir = os.path.join(datadir, 'xmr' + str(node_id))
    if not os.path.exists(node_dir):
        os.makedirs(node_dir)
    cfg_file_path = os.path.join(node_dir, conf_file)
    if os.path.exists(cfg_file_path):
        return

    settings = [
        'regtest=1',
        'keep-fakechain=1',
        'data-dir={}'.format(node_dir),
        'fixed-difficulty=1',
        # 'offline=1',
        'p2p-bind-port={}'.format(XMR_BASE_P2P_PORT + node_id),
        'rpc-bind-port={}'.format(XMR_BASE_RPC_PORT + node_id),
        'p2p-bind-ip=127.0.0.1',
        'rpc-bind-ip=127.0.0.1',
        'zmq-rpc-bind-port={}'.format(XMR_BASE_ZMQ_PORT + node_id),
        'zmq-rpc-bind-ip=127.0.0.1',
    ]
    # Exclusively peer with every other local regtest node.
    settings.extend('add-exclusive-node=127.0.0.1:{}'.format(XMR_BASE_P2P_PORT + i)
                    for i in range(XMR_NUM_NODES) if i != node_id)
    with open(cfg_file_path, 'w+') as fp:
        fp.write('\n'.join(settings) + '\n')
def prepareDataDir(datadir, node_id, conf_file):
    """Create the datadir and particl/bitcoin regtest config for node
    *node_id*; a no-op when the config file already exists.

    A fresh rpcauth salt is generated each time the config is (re)created,
    matching the test{n}/test_pass{n} credentials used by callnoderpc.
    """
    node_dir = os.path.join(datadir, str(node_id))
    # exist_ok replaces the racy exists()+makedirs() pair.
    os.makedirs(node_dir, exist_ok=True)
    cfg_file_path = os.path.join(node_dir, conf_file)
    if os.path.exists(cfg_file_path):
        return
    # Plain 'w' — nothing is read back, so 'w+' was unnecessary.
    with open(cfg_file_path, 'w') as fp:
        fp.write('regtest=1\n')
        fp.write('[regtest]\n')
        fp.write('port=' + str(BASE_PORT + node_id) + '\n')
        fp.write('rpcport=' + str(BASE_RPC_PORT + node_id) + '\n')
        salt = generate_salt(16)
        fp.write('rpcauth={}:{}${}\n'.format('test' + str(node_id), salt, password_to_hmac(salt, 'test_pass' + str(node_id))))
        fp.write('daemon=0\n')
        fp.write('printtoconsole=0\n')
        fp.write('server=1\n')
        fp.write('discover=0\n')
        fp.write('listenonion=0\n')
        fp.write('bind=127.0.0.1\n')
        fp.write('debug=1\n')
        fp.write('debugexclude=libevent\n')
        fp.write('fallbackfee=0.01\n')
        fp.write('acceptnonstdtxn=0\n')
        fp.write('txindex=1\n')
        fp.write('findpeers=0\n')
        # minstakeinterval=5 # Using walletsettings stakelimit instead
        # Point every node at each of the other local regtest nodes.
        for i in range(NUM_NODES):
            if node_id == i:
                continue
            fp.write('addnode=127.0.0.1:{}\n'.format(BASE_PORT + i))
def startXmrDaemon(node_dir, bin_dir, daemon_bin, opts=[]):
    """Launch monerod for *node_dir* and return its Popen handle."""
    binary_path = os.path.expanduser(os.path.join(bin_dir, daemon_bin))
    config_arg = '--config-file=' + os.path.join(os.path.expanduser(node_dir), 'monerod.conf')
    logging.info('Starting node {} --data-dir={}'.format(binary_path, node_dir))
    return subprocess.Popen([binary_path, config_arg] + opts,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def startXmrWalletRPC(node_dir, bin_dir, wallet_bin, node_id, opts=None):
    """Launch a monero-wallet-rpc process bound to this node's daemon.

    :param node_dir: node data directory (wallets/ and logs live under it)
    :param bin_dir: directory holding the wallet binary
    :param wallet_bin: wallet RPC binary name
    :param node_id: index used to derive ports and RPC credentials
    :param opts: optional extra command-line arguments
    :returns: the subprocess.Popen handle; stdout/stderr go to log files
    """
    # Fix: a mutable default argument ([]) is shared between calls; use None.
    daemon_bin = os.path.expanduser(os.path.join(bin_dir, wallet_bin))
    data_dir = os.path.expanduser(node_dir)
    args = [daemon_bin]
    args += ['--daemon-address=localhost:{}'.format(XMR_BASE_RPC_PORT + node_id)]
    args += ['--no-dns']
    args += ['--rpc-bind-port={}'.format(XMR_BASE_WALLET_RPC_PORT + node_id)]
    args += ['--wallet-dir={}'.format(os.path.join(data_dir, 'wallets'))]
    args += ['--log-file={}'.format(os.path.join(data_dir, 'wallet.log'))]
    args += ['--rpc-login=test{0}:test_pass{0}'.format(node_id)]
    args += ['--shared-ringdb-dir={}'.format(os.path.join(data_dir, 'shared-ringdb'))]
    args += (opts or [])
    logging.info('Starting daemon {} --wallet-dir={}'.format(daemon_bin, node_dir))
    # Capture output in files; stopNodes() later closes the process handles.
    wallet_stdout = open(os.path.join(data_dir, 'wallet_stdout.log'), 'w')
    wallet_stderr = open(os.path.join(data_dir, 'wallet_stderr.log'), 'w')
    return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=wallet_stdout, stderr=wallet_stderr, cwd=data_dir)
def startDaemon(node_dir, bin_dir, daemon_bin, opts=None):
    """Launch a bitcoind-style daemon with -datadir pointing at node_dir.

    :param node_dir: node data directory
    :param bin_dir: directory holding the daemon binary
    :param daemon_bin: daemon binary name
    :param opts: optional extra command-line arguments
    :returns: the subprocess.Popen handle with piped stdio
    """
    # Fix: a mutable default argument ([]) is shared between calls; use None.
    daemon_bin = os.path.expanduser(os.path.join(bin_dir, daemon_bin))
    args = [daemon_bin, '-datadir=' + os.path.expanduser(node_dir)] + (opts or [])
    logging.info('Starting node {} -datadir={}'.format(daemon_bin, node_dir))
    return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def callnoderpc(node_id, method, params=None, wallet=None):
    """Call an RPC method on the given test node with its test credentials.

    :param node_id: index used to derive the RPC port and auth
    :param method: RPC method name
    :param params: optional parameter list (defaults to empty)
    :param wallet: optional wallet name
    :returns: whatever callrpc returns
    """
    # Fix: a mutable default argument ([]) is shared between calls; use None
    # and substitute a fresh empty list per call.
    auth = 'test{0}:test_pass{0}'.format(node_id)
    return callrpc(BASE_RPC_PORT + node_id, auth, method, [] if params is None else params, wallet)
def make_rpc_func(node_id):
    """Build a closure that issues RPC calls as this node's test user."""
    auth = 'test{0}:test_pass{0}'.format(node_id)

    def rpc_func(method, params=None, wallet=None):
        # node_id and auth are captured read-only from the enclosing scope.
        return callrpc(BASE_RPC_PORT + node_id, auth, method, params, wallet)

    return rpc_func
def checkSoftForks(ro):
    """Assert that the csv and segwit soft forks are reported as active."""
    if 'bip9_softforks' in ro:
        # Older daemons report a status string under bip9_softforks.
        for fork in ('csv', 'segwit'):
            assert ro['bip9_softforks'][fork]['status'] == 'active'
    else:
        # Newer daemons report a boolean under softforks.
        for fork in ('csv', 'segwit'):
            assert ro['softforks'][fork]['active']
def callSwapTool(swap_file, method=None, json_params=None, str_param=None):
    """Invoke xmrswaptool's main() in-process and return its captured stdout.

    :param swap_file: swap state file passed as the first argument
    :param method: optional sub-command name
    :param json_params: optional object serialized via dumpje and quoted
    :param str_param: optional plain string argument
    :returns: everything the tool printed to stdout
    :raises Exception: re-raises whatever swapTool.main() raised, after
        logging the captured output for diagnosis
    """
    testargs = ['xmrswaptool.py', swap_file]
    if method:
        testargs.append(method)
    if json_params is not None:
        testargs.append('"' + dumpje(json_params) + '"')
    if str_param is not None:
        testargs.append(str_param)
    print('testargs', ' '.join(testargs))
    with patch.object(sys, 'argv', testargs):
        with patch('sys.stdout', new=StringIO()) as fake_out:
            try:
                swapTool.main()
            except Exception:
                logging.info('swapTool failed: stdout: %s', fake_out.getvalue())
                # Fix: bare raise preserves the original traceback;
                # `raise e` re-raised from here and obscured the origin.
                raise
            return fake_out.getvalue()
def waitForXMRNode(rpc_offset, max_tries=7):
    """Poll the monerod RPC port until it answers, with linear backoff.

    :raises ValueError: if the node is still unreachable after max_tries retries
    """
    for attempt in range(max_tries + 1):
        try:
            callrpc_xmr_na(XMR_BASE_RPC_PORT + rpc_offset, 'get_block_count')
            return
        except Exception as ex:
            if attempt >= max_tries:
                break  # out of retries; fall through to the failure below
            logging.warning('Can\'t connect to XMR RPC: %s. Retrying in %d second/s.', str(ex), (attempt + 1))
            time.sleep(attempt + 1)
    raise ValueError('waitForXMRNode failed')
def waitForXMRWallet(rpc_offset, auth, max_tries=7):
    """Poll the monero wallet RPC port until it answers, with linear backoff.

    :raises ValueError: if the wallet RPC is still unreachable after max_tries retries
    """
    for attempt in range(max_tries + 1):
        try:
            callrpc_xmr(XMR_BASE_WALLET_RPC_PORT + rpc_offset, auth, 'get_languages')
            return
        except Exception as ex:
            if attempt >= max_tries:
                break  # out of retries; fall through to the failure below
            logging.warning('Can\'t connect to XMR wallet RPC: %s. Retrying in %d second/s.', str(ex), (attempt + 1))
            time.sleep(attempt + 1)
    raise ValueError('waitForXMRWallet failed')
def _interruptAndCloseDaemons(daemons):
    """SIGINT each daemon process, then wait for exit and close its stdio pipes."""
    for d in daemons:
        logging.info('Interrupting %d', d.pid)
        try:
            d.send_signal(signal.SIGINT)
        except Exception as e:
            logging.info('Interrupting %d, error %s', d.pid, str(e))
    for d in daemons:
        try:
            d.wait(timeout=20)
            if d.stdout:
                d.stdout.close()
            if d.stderr:
                d.stderr.close()
            if d.stdin:
                d.stdin.close()
        except Exception as e:
            logging.info('Closing %d, error %s', d.pid, str(e))


def stopNodes(self):
    """Stop the update thread and shut down all XMR and coin daemons.

    Sets self.stop_nodes so background loops exit, joins the update thread,
    then interrupts and reaps every tracked daemon process.
    The two identical shutdown loops were factored into
    _interruptAndCloseDaemons.
    """
    self.stop_nodes = True
    if self.update_thread is not None:
        try:
            self.update_thread.join()
        except Exception:
            logging.info('Failed to join update_thread')
    self.update_thread = None
    _interruptAndCloseDaemons(self.xmr_daemons)
    self.xmr_daemons = []
    _interruptAndCloseDaemons(self.daemons)
    self.daemons = []
| 9,707 | 3,816 |
from random import randint
def sort(lista):
    """Append 5 random values in [1, 10] to *lista* and report them.

    Bug fix: the summary previously printed the global ``numeros`` instead of
    the ``lista`` argument, so the function only worked when called with that
    one specific global list.
    """
    print('SORTEANDO OS VALORES DA LISTA: ', end='')
    for _ in range(0, 5):
        lista.append(randint(1, 10))
    print(f'Os valores são {lista}.')
    print('Pronto!')
def somapar(lista):
    """Print the sum of the even values contained in *lista*."""
    soma = sum(valor for valor in lista if valor % 2 == 0)
    print(f'A soma dos valores pares entre {lista} é {soma}.')
# Driver: fill a fresh list with 5 random values, then sum its even entries.
numeros = list()
sort(numeros)
somapar(numeros)
| 440 | 177 |
if isinstance(other, float):
other = Measurement(other, 0)
| 71 | 23 |
# -*- coding: utf-8 -*-
# Copyright 2016 Open Permissions Platform Coalition
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
#
"""
Python Behave configuration file
"""
import functools
import os
import json
import urlparse
from collections import defaultdict, deque
from contextlib import contextmanager
from copy import deepcopy
import requests
from chub import API
from chub.api import API_VERSION
import config
def default_request_data():
    """Return (name, default) pairs for the per-request context attributes.

    Returned as a generator of pairs so callers can iterate or build a dict.
    Fix: ``dict.iteritems()`` is Python 2 only; ``items()`` behaves the same
    here and also works on Python 3.
    """
    defaults = {
        'base_url': None,
        'service_name': None,
        'endpoint_name': None,
        'endpoint': None,
        'params': {},
        'body': None,
        'headers': {}
    }
    return ((k, v) for k, v in defaults.items())
class NotExists(object):
    """Sentinel type marking attributes that were absent when snapshotted."""
    pass
def preserve_attributes(obj, attributes):
    """Snapshot deep copies of *attributes* from *obj*.

    Attributes missing on *obj* are recorded as NotExists instances so
    restore_attributes can delete them again.
    """
    snapshot = {}
    for attr in attributes:
        snapshot[attr] = deepcopy(getattr(obj, attr, NotExists()))
    return snapshot
def restore_attributes(obj, attr_to_value):
    """Write saved values back onto *obj*; NotExists markers delete the attribute."""
    for attr, value in attr_to_value.items():
        if not isinstance(value, NotExists):
            setattr(obj, attr, value)
            continue
        # The attribute did not exist when snapshotted: remove it if present.
        try:
            delattr(obj, attr)
        except AttributeError:
            pass
@contextmanager
def keep_request_data(context):
    """Snapshot the request-building attributes, yield, then restore them."""
    saved = preserve_attributes(
        context, [name for name, _ in default_request_data()])
    yield
    restore_attributes(context, saved)
@contextmanager
def keep_attributes(context, *attributes):
    """Snapshot the named attributes of *context* before the body runs and
    restore them afterwards."""
    saved = preserve_attributes(context, attributes)
    yield
    restore_attributes(context, saved)
def clean_step(step_impl):
    """Decorate a step definition so it runs with the request data preserved."""
    @functools.wraps(step_impl)
    def wrapped(context, *args, **kwargs):
        with keep_request_data(context):
            return step_impl(context, *args, **kwargs)
    return wrapped
def reset_request_data(context):
    """Reset every request-building attribute on *context* to its default."""
    for attr_name, default_value in default_request_data():
        setattr(context, attr_name, default_value)
def clean_execute_steps(context, steps_text, save_response_data=None):
    """Execute steps while preserving, then restoring, the request data.

    Useful for calling execute_steps during request build up.

    :param context: the context object
    :param steps_text: the steps to be executed
    :param save_response_data: (optional) context attribute name which will be
        assigned the response object's data
    """
    with keep_request_data(context):
        context.reset_request_data()
        context.execute_steps(steps_text)
        if save_response_data:
            setattr(context, save_response_data,
                    context.response_object.get('data'))
def make_session(verify):
    """Create a requests Session with SSL verification set to *verify*."""
    session = requests.Session()
    session.verify = verify
    return session
def make_keychain():
    """Map every key-file name under steps/data/ to its path, plus the CA cert."""
    keychain = {
        'CA_CRT': config.CA_CRT
    }
    key_dir = os.path.join(os.path.dirname(__file__), 'steps/data/')
    # next(os.walk(...))[2] lists the plain files in key_dir (non-recursive).
    for key_name in next(os.walk(key_dir))[2]:
        keychain[key_name] = os.path.join(key_dir, key_name)
    return keychain
def set_services(context):
    """Discover the test organisation's registered services and store them on *context*.

    Logs into the accounts service, fetches the organisation's services,
    groups them by type and hands the repositories to
    set_repository_services.
    """
    context.organisation_services = defaultdict(lambda: defaultdict(deque))
    context.services = config.SERVICES.copy()
    sess = make_session(config.CA_CRT)
    login_response = sess.post(
        '{}/login'.format(config.SERVICES['accounts']),
        data=json.dumps({'email': 'opp@example.com',
                         'password': 'password'}))
    token = login_response.json()['data']['token']
    services_response = sess.get(
        '{}/services?organisation_id={}'.format(
            config.SERVICES['accounts'], config.test_org),
        headers={'Authorization': token})
    registered_services = services_response.json()['data']
    repos = []
    for service in registered_services:
        service_type = service['service_type']
        context.organisation_services[config.test_org][service_type].append(service)
        if service_type == 'repository':
            repos.append(service)
    set_repository_services(context, repos)
def set_repository_services(context, repositories):
    """Register repository endpoints on *context* and pick the default repository."""
    endpoint = '{}/{}/repository'
    for repo in repositories:
        context.services[repo['name']] = endpoint.format(repo['location'], API_VERSION)
    # TODO THE REPOSITORY is the term used before
    # we have multiple repositories as default
    # the term needs updating and we should assume
    # the new default of at least two repositories
    context.the_repository = repositories[0]
    context.repository_services = repositories
    context.services['repository'] = endpoint.format(repositories[0]['location'], API_VERSION)
def before_scenario(context, scenario):
    """Attach a fresh HTTP session, keychain and request helpers to *context*."""
    context.keychain = make_keychain()
    context.http_client = make_session(context.keychain['CA_CRT'])
    # Bind the module-level helpers to the behave context as bound methods.
    for helper in (keep_request_data, reset_request_data, clean_execute_steps):
        setattr(context, helper.__name__, helper.__get__(context))
    context.reset_request_data()
    context.keep_attributes = keep_attributes.__get__(context)
def before_all(context):
"""
Executes the code before all the tests are run
"""
set_services(context)
context.api = {}
context.repositories = {}
for service, location in context.services.items():
url = urlparse.urlparse(location)
api = API(url.scheme + '://' + url.netloc, async=False)
try:
context.api[service] = getattr(api, url.path.split('/')[2])
except:
context.api[service] = getattr(api, service)
| 6,412 | 1,829 |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
# Script constants; only FRAMERATE is used below (to derive the ms-per-frame
# animation interval). DT, N_ROWS and SECONDS are unused in this script —
# presumably they mirror the simulation parameters; confirm against the
# producer of the *.txt dumps.
DT = 0.01
FRAMERATE = 60
N_ROWS = 64
SECONDS = 10
def read_field_file(file_path, type):
    """Parse a simulation field dump into a numpy array of frames.

    Frames are separated by blank lines, rows by newlines, items by spaces.

    :param file_path: path of the text dump to read
    :param type: 'scalar' (plain floats) or 'vector' ('(x,y)' items)
    :returns: ndarray shaped (frames, rows, cols) for scalar data,
        (frames, rows, cols, 2) for vector data
    :raises ValueError: if *type* is neither 'scalar' nor 'vector'
    """
    if type != 'scalar' and type != 'vector':
        raise ValueError('type must be scalar or vector')
    # Fix: the file handle was previously leaked (open(...).read() with no
    # close); the context manager closes it deterministically.
    with open(file_path, 'r') as fh:
        file_str = fh.read()
    frame_arr = file_str.split('\n\n')
    frame_arr = [frame for frame in frame_arr if frame]
    frame_arr = [frame.split('\n') for frame in frame_arr]
    frame_arr = [[row.split(' ') for row in frame] for frame in frame_arr]
    if type == 'scalar':
        frame_arr = [[[float(item) for item in row] for row in frame] for frame in frame_arr]
    elif type == 'vector':
        def string_to_vector(string):
            # '(x,y)' -> (float(x), float(y))
            string = string.replace('(', '')
            string = string.replace(')', '')
            pair = tuple(string.split(','))
            return (float(pair[0]), float(pair[1]))
        frame_arr = [[[string_to_vector(item) for item in row] for row in frame] for frame in frame_arr]
    return np.array(frame_arr)
def read_velocity():
    """Load the velocity field frames from velocity.txt as (x, y) vectors."""
    return read_field_file('velocity.txt', 'vector')
def read_pressure():
    """Load the scalar pressure field frames from pressure.txt."""
    return read_field_file('pressure.txt', 'scalar')
def read_divergence(absolute=True):
    """Load divergence frames from divergence.txt, as magnitudes by default."""
    divergence = read_field_file('divergence.txt', 'scalar')
    return np.abs(divergence) if absolute else divergence
def read_color():
    """Load the scalar color (dye) field frames from color.txt."""
    return read_field_file('color.txt', 'scalar')
# Load every field dump produced by the simulation (reads ./*.txt files).
velocity_frames = read_velocity()
pressure_frames = read_pressure()
color_frames = read_color()
divergence_frames = read_divergence()
# Animation timing: interval is in milliseconds per frame.
frame_interval = 1000//FRAMERATE
frame_count = velocity_frames.shape[0]
fig, (ax1, ax2, ax3) = plt.subplots(1, 3)
ax1.set_title('Pressure and Velocity')
ax2.set_title('Color')
ax3.set_title('Absolute Divergence (Bad!)')
artists = []
# Seed the artists with placeholder random data; update() replaces the data
# on every animation frame.
foo = np.random.random(size=(64, 64))
artists.append(ax1.quiver(foo, foo, scale=100, scale_units='inches', color='blue'))
artists.append(ax1.imshow(foo, cmap='hot', interpolation='nearest', vmin=-2, vmax=2, animated=True))
artists.append(ax2.imshow(foo, interpolation='nearest', vmin=0, vmax=1, animated=True))
artists.append(ax3.imshow(foo, cmap='hot', interpolation='nearest', vmin=0, vmax=1, animated=True))
def update(i):
    """Animation callback: push frame *i*'s data into the four artists.

    Returns the artists so FuncAnimation can blit only what changed.
    """
    u = velocity_frames[i, :, :, 0]
    v = velocity_frames[i, :, :, 1]
    pressure_frame = pressure_frames[i, :, :]
    color_frame = color_frames[i, :, :]
    divergence_frame = divergence_frames[i, :, :]
    artists[0].set_UVC(u, v)
    artists[1].set_array(pressure_frame)
    artists[2].set_array(color_frame)
    artists[3].set_array(divergence_frame)
    return artists
# blit=True requires update() to return the artists it modified.
ani = animation.FuncAnimation(fig, update, frames=frame_count, interval=frame_interval, blit=True)
plt.show()
# Lookup tables mapping output FITS header keywords to RTS2 data-source keys,
# one table per test stand. Sub-dicts: 0 = primary header, 'TEST_COND' = test
# conditions, 'CCD_COND' = per-channel CCD bias/clock voltages and currents.
RTS2_FITS_LUTs = {}
RTS2_FITS_LUTs['BNL'] = {
    0: {
        # 'MJD' : 'JD',
        # 'MONDIODE' : 'AMP0.CURRENT.MIN',
        'MONOWL': 'MONOCH.WAVELENG',
        'FILTER': 'MONOCH.FILT_1',
        'CONTROLL': 'INSTRUME',
        'CCDTEMP': 'CRYO.C.TEMP',
        'IMGTYPE': 'TESTTYPE',
        'TEMP_SET': 'CRYO.2.SETPT',
        'CTLRCFG': 'CONFIG',
        'TSTAND': 'TELESCOP',
        'CCD_SERN': 'CCD_SER'
    },
    'TEST_COND': {
        'MONOWL': 'MONOCH.WAVELENG',
        'FILTER': 'MONOCH.FILT_1',
        'CCDTEMP': 'CRYO.C.TEMP',
        'TEMP_SET': 'CRYO.2.SETPT'
    },
    'CCD_COND': {
        'V_OD1': 'BIAS_1.OD1_Vmeas',
        'V_OD2': 'BIAS_1.OD2_Vmeas',
        'V_OD3': 'BIAS_1.OD3_Vmeas',
        'V_OD4': 'BIAS_1.OD4_Vmeas',
        'V_OD5': 'BIAS_1.OD5_Vmeas',
        'V_OD6': 'BIAS_1.OD6_Vmeas',
        'V_OD7': 'BIAS_1.OD7_Vmeas',
        'V_OD8': 'BIAS_1.OD8_Vmeas',
        # NOTE(review): channels 9-16 reuse measurements 1-8 — presumably the
        # stand only has 8 OD measurement channels; confirm this is intended.
        'V_OD9': 'BIAS_1.OD1_Vmeas',
        'V_OD10': 'BIAS_1.OD2_Vmeas',
        'V_OD11': 'BIAS_1.OD3_Vmeas',
        'V_OD12': 'BIAS_1.OD4_Vmeas',
        'V_OD13': 'BIAS_1.OD5_Vmeas',
        'V_OD14': 'BIAS_1.OD6_Vmeas',
        'V_OD15': 'BIAS_1.OD7_Vmeas',
        'V_OD16': 'BIAS_1.OD8_Vmeas',
        'V_RD1': 'BIAS_2.RD_Vmeas',
        'V_RD2': 'BIAS_2.RD_Vmeas',
        'V_RD3': 'BIAS_2.RD_Vmeas',
        'V_RD4': 'BIAS_2.RD_Vmeas',
        'V_RD5': 'BIAS_2.RD_Vmeas',
        'V_RD6': 'BIAS_2.RD_Vmeas',
        'V_RD7': 'BIAS_2.RD_Vmeas',
        'V_RD8': 'BIAS_2.RD_Vmeas',
        'V_RD9': 'BIAS_2.RD_Vmeas',
        'V_RD10': 'BIAS_2.RD_Vmeas',
        'V_RD11': 'BIAS_2.RD_Vmeas',
        'V_RD12': 'BIAS_2.RD_Vmeas',
        'V_RD13': 'BIAS_2.RD_Vmeas',
        'V_RD14': 'BIAS_2.RD_Vmeas',
        'V_RD15': 'BIAS_2.RD_Vmeas',
        'V_RD16': 'BIAS_2.RD_Vmeas',
        'V_OG1': 'BIAS_1.OG_Vmeas',
        'V_OG2': 'BIAS_1.OG_Vmeas',
        'V_OG3': 'BIAS_1.OG_Vmeas',
        'V_OG4': 'BIAS_1.OG_Vmeas',
        'V_OG5': 'BIAS_1.OG_Vmeas',
        'V_OG6': 'BIAS_1.OG_Vmeas',
        'V_OG7': 'BIAS_1.OG_Vmeas',
        'V_OG8': 'BIAS_1.OG_Vmeas',
        'V_OG9': 'BIAS_1.OG_Vmeas',
        'V_OG10': 'BIAS_1.OG_Vmeas',
        'V_OG11': 'BIAS_1.OG_Vmeas',
        'V_OG12': 'BIAS_1.OG_Vmeas',
        'V_OG13': 'BIAS_1.OG_Vmeas',
        'V_OG14': 'BIAS_1.OG_Vmeas',
        'V_OG15': 'BIAS_1.OG_Vmeas',
        'V_OG16': 'BIAS_1.OG_Vmeas',
        'V_S1L': 'DRV_1.S1_low',
        'V_S1H': 'DRV_1.S1_high',
        'V_S2L': 'DRV_1.S2_low',
        'V_S2H': 'DRV_1.S2_high',
        'V_S3L': 'DRV_1.S3_low',
        'V_S3H': 'DRV_1.S3_high',
        'V_RGL': 'DRV_1.RG_low',
        'V_RGH': 'DRV_1.RG_high',
        'V_P1L': 'DRV_1.P1_low',
        'V_P1H': 'DRV_1.P1_high',
        'V_P2L': 'DRV_1.P2_low',
        'V_P2H': 'DRV_1.P2_high',
        'V_P3L': 'DRV_1.P3_low',
        'V_P3H': 'DRV_1.P3_high',
        'V_P4L': 'DRV_1.P4_low',
        'V_P4H': 'DRV_1.P4_high',
        'I_OD1': 'BIAS_1.OD1_Cmeas',
        'I_OD2': 'BIAS_1.OD2_Cmeas',
        'I_OD3': 'BIAS_1.OD3_Cmeas',
        'I_OD4': 'BIAS_1.OD4_Cmeas',
        'I_OD5': 'BIAS_1.OD5_Cmeas',
        'I_OD6': 'BIAS_1.OD6_Cmeas',
        'I_OD7': 'BIAS_1.OD7_Cmeas',
        'I_OD8': 'BIAS_1.OD8_Cmeas',
        'I_OD9': 'BIAS_1.OD1_Cmeas',
        'I_OD10': 'BIAS_1.OD2_Cmeas',
        'I_OD11': 'BIAS_1.OD3_Cmeas',
        'I_OD12': 'BIAS_1.OD4_Cmeas',
        'I_OD13': 'BIAS_1.OD5_Cmeas',
        'I_OD14': 'BIAS_1.OD6_Cmeas',
        'I_OD15': 'BIAS_1.OD7_Cmeas',
        'I_OD16': 'BIAS_1.OD8_Cmeas',
        'I_RD1': 'BIAS_2.RD_Cmeas',
        'I_RD2': 'BIAS_2.RD_Cmeas',
        'I_RD3': 'BIAS_2.RD_Cmeas',
        'I_RD4': 'BIAS_2.RD_Cmeas',
        'I_RD5': 'BIAS_2.RD_Cmeas',
        'I_RD6': 'BIAS_2.RD_Cmeas',
        'I_RD7': 'BIAS_2.RD_Cmeas',
        'I_RD8': 'BIAS_2.RD_Cmeas',
        'I_RD9': 'BIAS_2.RD_Cmeas',
        'I_RD10': 'BIAS_2.RD_Cmeas',
        'I_RD11': 'BIAS_2.RD_Cmeas',
        'I_RD12': 'BIAS_2.RD_Cmeas',
        'I_RD13': 'BIAS_2.RD_Cmeas',
        'I_RD14': 'BIAS_2.RD_Cmeas',
        'I_RD15': 'BIAS_2.RD_Cmeas',
        'I_RD16': 'BIAS_2.RD_Cmeas',
        'I_OG1': 'BIAS_1.OG_Cmeas',
        'I_OG2': 'BIAS_1.OG_Cmeas',
        'I_OG3': 'BIAS_1.OG_Cmeas',
        'I_OG4': 'BIAS_1.OG_Cmeas',
        'I_OG5': 'BIAS_1.OG_Cmeas',
        'I_OG6': 'BIAS_1.OG_Cmeas',
        'I_OG7': 'BIAS_1.OG_Cmeas',
        'I_OG8': 'BIAS_1.OG_Cmeas',
        'I_OG9': 'BIAS_1.OG_Cmeas',
        'I_OG10': 'BIAS_1.OG_Cmeas',
        'I_OG11': 'BIAS_1.OG_Cmeas',
        'I_OG12': 'BIAS_1.OG_Cmeas',
        'I_OG13': 'BIAS_1.OG_Cmeas',
        'I_OG14': 'BIAS_1.OG_Cmeas',
        'I_OG15': 'BIAS_1.OG_Cmeas',
        'I_OG16': 'BIAS_1.OG_Cmeas'
    }
}
# Harvard test-stand variant of the FITS keyword -> RTS2 key lookup table.
# All per-channel voltages map to a single shared measurement per rail; the
# per-channel current entries are commented out (not measured on this stand).
RTS2_FITS_LUTs['HARVARD'] = {
    0: {
        # 'MJD' : 'JD',
        'MONDIODE': 'K_PHOT.CURRENT',
        'MONOWL': 'MONO.WAVELENG',
        'FILTER': 'MONO.FILT',
        'CONTROLL': 'INSTRUME',
        'CCDTEMP': 'LAKESHORE.A.TEMP',
        'IMGTYPE': 'TESTTYPE',
        'TEMP_SET': 'LAKESHORE.SETPOINT',
        'CTLRCFG': 'SIGFILE',  # don't know what you want here
        'TSTAND': 'TELESCOP',
        'CCD_SERN': 'CCD_SER'
    },
    'TEST_COND': {
        'MONOWL': 'MONO.WAVELENG',
        'FILTER': 'MONO.FILTER',
        'CCDTEMP': 'LAKESHORE.A.TEMP',
        'TEMP_SET': 'LAKESHORE.SETPOINT'
    },
    'CCD_COND': {
        'V_OD1': 'OD1_R',
        'V_OD2': 'OD1_R',
        'V_OD3': 'OD1_R',
        'V_OD4': 'OD1_R',
        'V_OD5': 'OD1_R',
        'V_OD6': 'OD1_R',
        'V_OD7': 'OD1_R',
        'V_OD8': 'OD1_R',
        'V_OD9': 'OD1_R',
        'V_OD10': 'OD1_R',
        'V_OD11': 'OD1_R',
        'V_OD12': 'OD1_R',
        'V_OD13': 'OD1_R',
        'V_OD14': 'OD1_R',
        'V_OD15': 'OD1_R',
        'V_OD16': 'OD1_R',
        'V_RD1': 'RD',
        'V_RD2': 'RD',
        'V_RD3': 'RD',
        'V_RD4': 'RD',
        'V_RD5': 'RD',
        'V_RD6': 'RD',
        'V_RD7': 'RD',
        'V_RD8': 'RD',
        'V_RD9': 'RD',
        'V_RD10': 'RD',
        'V_RD11': 'RD',
        'V_RD12': 'RD',
        'V_RD13': 'RD',
        'V_RD14': 'RD',
        'V_RD15': 'RD',
        'V_RD16': 'RD',
        'V_OG1': 'OG1_R',
        'V_OG2': 'OG1_R',
        'V_OG3': 'OG1_R',
        'V_OG4': 'OG1_R',
        'V_OG5': 'OG1_R',
        'V_OG6': 'OG1_R',
        'V_OG7': 'OG1_R',
        'V_OG8': 'OG1_R',
        'V_OG9': 'OG1_R',
        'V_OG10': 'OG1_R',
        'V_OG11': 'OG1_R',
        'V_OG12': 'OG1_R',
        'V_OG13': 'OG1_R',
        'V_OG14': 'OG1_R',
        'V_OG15': 'OG1_R',
        'V_OG16': 'OG1_R',
        'V_S1L': 'SLO',
        'V_S1H': 'SHI',
        'V_S2L': 'SLO',
        'V_S2H': 'SHI',
        'V_S3L': 'SLO',
        'V_S3H': 'SHI',
        'V_RGL': 'RLO',
        'V_RGH': 'RHI',
        'V_P1L': 'PLO',
        'V_P1H': 'PHI',
        'V_P2L': 'PLO',
        'V_P2H': 'PHI',
        'V_P3L': 'PLO',
        'V_P3H': 'PHI',
        'V_P4L': 'PLO',
        'V_P4H': 'PHI',
        # 'I_OD1' : 'BIAS_1.OD1_Cmeas',
        # 'I_OD2' : 'BIAS_1.OD2_Cmeas',
        # 'I_OD3' : 'BIAS_1.OD3_Cmeas',
        # 'I_OD4' : 'BIAS_1.OD4_Cmeas',
        # 'I_OD5' : 'BIAS_1.OD5_Cmeas',
        # 'I_OD6' : 'BIAS_1.OD6_Cmeas',
        # 'I_OD7' : 'BIAS_1.OD7_Cmeas',
        # 'I_OD8' : 'BIAS_1.OD8_Cmeas',
        # 'I_OD9' : 'BIAS_1.OD1_Cmeas',
        # 'I_OD10' : 'BIAS_1.OD2_Cmeas',
        # 'I_OD11' : 'BIAS_1.OD3_Cmeas',
        # 'I_OD12' : 'BIAS_1.OD4_Cmeas',
        # 'I_OD13' : 'BIAS_1.OD5_Cmeas',
        # 'I_OD14' : 'BIAS_1.OD6_Cmeas',
        # 'I_OD15' : 'BIAS_1.OD7_Cmeas',
        # 'I_OD16' : 'BIAS_1.OD8_Cmeas',
        # 'I_RD1' : 'BIAS_2.RD_Cmeas',
        # 'I_RD2' : 'BIAS_2.RD_Cmeas',
        # 'I_RD3' : 'BIAS_2.RD_Cmeas',
        # 'I_RD4' : 'BIAS_2.RD_Cmeas',
        # 'I_RD5' : 'BIAS_2.RD_Cmeas',
        # 'I_RD6' : 'BIAS_2.RD_Cmeas',
        # 'I_RD7' : 'BIAS_2.RD_Cmeas',
        # 'I_RD8' : 'BIAS_2.RD_Cmeas',
        # 'I_RD9' : 'BIAS_2.RD_Cmeas',
        # 'I_RD10' : 'BIAS_2.RD_Cmeas',
        # 'I_RD11' : 'BIAS_2.RD_Cmeas',
        # 'I_RD12' : 'BIAS_2.RD_Cmeas',
        # 'I_RD13' : 'BIAS_2.RD_Cmeas',
        # 'I_RD14' : 'BIAS_2.RD_Cmeas',
        # 'I_RD15' : 'BIAS_2.RD_Cmeas',
        # 'I_RD16' : 'BIAS_2.RD_Cmeas',
        # 'I_OG1' : 'BIAS_1.OG_Cmeas',
        # 'I_OG2' : 'BIAS_1.OG_Cmeas',
        # 'I_OG3' : 'BIAS_1.OG_Cmeas',
        # 'I_OG4' : 'BIAS_1.OG_Cmeas',
        # 'I_OG5' : 'BIAS_1.OG_Cmeas',
        # 'I_OG6' : 'BIAS_1.OG_Cmeas',
        # 'I_OG7' : 'BIAS_1.OG_Cmeas',
        # 'I_OG8' : 'BIAS_1.OG_Cmeas',
        # 'I_OG9' : 'BIAS_1.OG_Cmeas',
        # 'I_OG10' : 'BIAS_1.OG_Cmeas',
        # 'I_OG11' : 'BIAS_1.OG_Cmeas',
        # 'I_OG12' : 'BIAS_1.OG_Cmeas',
        # 'I_OG13' : 'BIAS_1.OG_Cmeas',
        # 'I_OG14' : 'BIAS_1.OG_Cmeas',
        # 'I_OG15' : 'BIAS_1.OG_Cmeas',
        # 'I_OG16' : 'BIAS_1.OG_Cmeas'
    }
}
# Per-vendor CCD segment geometry: segment width/height in pixels (nx, ny)
# and the number of prescan columns.
sensor_geom = {'ITL': {'nx': 509,
                       'ny': 2000,
                       'prescan': 3,
                       'vendor': 'ITL'},
               'E2V': {'nx': 512,
                       'ny': 2002,
                       'prescan': 10,
                       'vendor': 'E2V'}
               }
| 9,519 | 5,280 |
"""
Given two non-empty binary trees s and t, check whether tree t has exactly the same structure and node values with a subtree of s. A subtree of s is a tree consists of a node in s and all of this node's descendants. The tree s could also be considered as a subtree of itself
"""
def isSubTree(self, s, t):
from hashlib import sha256
def hash_(x):
S = sha256()
S.update()
return S.hexdigest()
def merkle(node):
if not node:
return '#'
m_left = merkle(node.left)
m_right = merkle(node.right)
node.merkle = hash_(m_left + str(node.val) +m_right)
return node.merkle
merkle(s)
merkle(t)
def dfs(node):
if not node:
return False
return (node.merkle == t.merkle or dfs(node.left) or dfs(node.right))
return dfs(s)
| 850 | 287 |
from argparse import ArgumentParser
from contextlib import redirect_stderr
from io import StringIO
from re import escape as re_escape
from unittest import TestCase
from argparse_utils import python_literal_action
class TestPythonLiteralAction(TestCase):
    """Tests for argparse_utils.python_literal_action (ast.literal_eval-style parsing)."""

    def test_basic_python_literal_action(self):
        """Valid Python literals are parsed into the matching Python objects."""
        parser = ArgumentParser()
        parser.add_argument('-a', action=python_literal_action())
        tests = [
            ('[1, 2, 3]', [1, 2, 3]),
            ('{"a": 1, "b": 2}', {"a": 1, "b": 2}),
            ('None', None),
            ('{"nested": {"Python": ["objects"]}}', {"nested": {"Python": ["objects"]}}),
            ('("some", "tuple")', ("some", "tuple")),
            ("'Single quotes'", 'Single quotes'),
        ]
        for literal_str, literal_obj in tests:
            with self.subTest(literal_obj=literal_obj):
                args = parser.parse_args(['-a', literal_str])
                self.assertEqual(args.a, literal_obj)

    def test_invalid_python_literals(self):
        """Non-literals (names, expressions, malformed syntax) abort parsing with an error."""
        invalid_python_literals = [
            'variable_name',
            'not a literal',
            '{"incomplete": "dict"',
            'null',
            '2 * 3'
        ]
        parser = ArgumentParser()
        parser.add_argument('-a', action=python_literal_action())
        for invalid_python_literal in invalid_python_literals:
            with self.subTest(invalid_python_literal=invalid_python_literal):
                error_message = StringIO()
                # argparse prints to stderr and exits on an invalid value.
                with redirect_stderr(error_message), self.assertRaises(SystemExit):
                    parser.parse_args(['-a', invalid_python_literal])
                self.assertRegex(
                    error_message.getvalue(),
                    re_escape("invalid Python literal: '{}'".format(invalid_python_literal))
                )

    def test_python_literal_action_help(self):
        """The generated help text mentions that a Python literal is expected."""
        parser = ArgumentParser()
        parser.add_argument('-a', action=python_literal_action())
        self.assertRegex(parser.format_help(), "Python literal")
| 2,039 | 587 |
import logging
from joblib import Parallel, delayed
from typing import Iterable
from emissor.persistence import ScenarioStorage
from emissor.processing.api import DataPreprocessor, ScenarioInitializer, SignalProcessor
from emissor.representation.scenario import Modality
logger = logging.getLogger(__name__)
class DataProcessing:
    """Pipeline driver: preprocess raw data, initialize scenarios, run signal processors."""

    def __init__(self, storage: ScenarioStorage, preprocessors: Iterable[DataPreprocessor],
                 scenario_initializer: ScenarioInitializer, signal_processors: Iterable[SignalProcessor],
                 num_jobs: int = 1):
        """Store the pipeline components; num_jobs caps joblib parallelism."""
        self._storage = storage
        self._preprocessors = preprocessors
        self._scenario_initializer = scenario_initializer
        self._signal_processors = signal_processors
        self._num_jobs = num_jobs

    def run(self):
        """Run the full pipeline: preprocessing, initialization, processing."""
        self.run_preprocessing()
        self.run_init()
        self.run_process()

    def run_preprocessing(self):
        """Run each preprocessor over the dataset (each used as a context manager)."""
        for preprocessor in self._preprocessors:
            with preprocessor:
                logger.info("Preprocessing dataset with %s to %s", preprocessor.name, self._storage.base_path)
                preprocessor.preprocess()
                logger.info("Finished preprocessing dataset with %s", preprocessor.name)

    def run_init(self):
        """Initialize every scenario with the configured initializer, if any."""
        if not self._scenario_initializer:
            return
        logger.info("Initialize scenarios %s with %s", self._storage.base_path, self._scenario_initializer.name)
        with self._scenario_initializer:
            self.execute_for_scenarios(_initialize, self._scenario_initializer)

    def run_process(self):
        """Run every signal processor over all scenarios, if any are configured."""
        if not self._signal_processors:
            return
        logger.info("Processing scenarios with processors %s", [processor.name for processor in self._signal_processors])
        for processor in self._signal_processors:
            with processor:
                self.execute_for_scenarios(_process, processor)

    def execute_for_scenarios(self, function, task):
        """Apply *function*(base_path, task, scenario_id) to each scenario.

        Scenarios are ordered by the task's scenario_key; runs sequentially
        unless the task opts into parallel execution via joblib.
        """
        scenario_ids = tuple(sorted(self._storage.list_scenarios(), key=task.scenario_key(self._storage)))
        if not task.parallel:
            for scenario_id in scenario_ids:
                function(self._storage.base_path, task, scenario_id)
        else:
            scenario_ids = tuple(scenario_ids)
            num_jobs = min(self._num_jobs, len(scenario_ids))
            Parallel(n_jobs=num_jobs)(
                delayed(function)(self._storage.base_path, task, scenario_id)
                for scenario_id in scenario_ids)
def _initialize(base_path, scenario_initializer, scenario_id):
    """Initialize one scenario and its missing modalities, then persist it.

    A fresh ScenarioStorage is built from base_path so this can run inside a
    joblib worker process.
    """
    storage = ScenarioStorage(base_path)
    try:
        storage.load_scenario(scenario_id)
        logger.debug("Scenario %s already initialized", scenario_id)
        return
    except ValueError:
        # load_scenario raising ValueError signals the scenario isn't
        # initialized yet; fall through and create it.
        pass

    scenario_initializer.initialize_scenario(scenario_id, storage)
    logger.info("Initialized scenario %s", scenario_id)

    scenario = storage.load_scenario(scenario_id)
    for modality in Modality:
        if modality in scenario.signals:
            logger.debug("Modality %s for scenario %s already initialized", modality, scenario_id)
            continue
        scenario_initializer.initialize_modality(scenario, modality)
        logger.info("Initialized modality %s for scenario %s", modality.name, scenario_id)
    storage.save_scenario(scenario)
def _process(base_path, processor, scenario_id):
    """Load one scenario, run *processor* over it and persist the result."""
    scenario_storage = ScenarioStorage(base_path)
    logger.info("Processing scenario %s with processor %s", scenario_id, processor.name)
    scenario = scenario_storage.load_scenario(scenario_id)
    processor.process_scenario(scenario)
    scenario_storage.save_scenario(scenario)
# TODO
def _signal_generator(scenario_id, modality, processor, storage):
    """Yield the scenario's signals for *modality*, ordered by the processor's key."""
    signals = storage.load_modality(scenario_id, Modality[modality.upper()])
    for signal in sorted(signals, key=processor.signal_key(storage)):
        yield signal
from django import template
from powers.models import Power
register = template.Library()
@register.simple_tag
def player_can_edit_power(power, player):
    """Template tag: whether *player* may edit this power's parent power."""
    return power.parent_power.player_can_edit(player)
@register.inclusion_tag('powers/power_badge_snippet.html')
def power_badge(power_full):
    """Render context for the power badge snippet: the power and its latest revision."""
    latest_revision = power_full.latest_revision()
    return {
        'power_full': power_full,
        'latest_revision': latest_revision,
    }
| 466 | 155 |
import numpy as np
from matplotlib import pyplot as plt
from pylab import rcParams
def plot_buoyancy(cwd=''):
    """
    Plotting routine for the cross section of the buoyancy.

    Loads the reference and scheme solutions from data/*.npy, checks their
    relative inf-norm errors against fixed regression bounds, and saves a
    comparison plot to data/boussinesq.png.

    Args:
        cwd (string): current working directory

    Raises:
        AssertionError: if any scheme's error exceeds its regression bound
    """
    xx = np.load(cwd + 'data/xaxis.npy')
    uend = np.load(cwd + 'data/sdc.npy')
    udirk = np.load(cwd + 'data/dirk.npy')
    uimex = np.load(cwd + 'data/rkimex.npy')
    uref = np.load(cwd + 'data/uref.npy')
    usplit = np.load(cwd + 'data/split.npy')

    # Relative discretisation errors in the inf-norm against the reference run.
    err_split = np.linalg.norm(usplit.flatten() - uref.flatten(), np.inf) / np.linalg.norm(uref.flatten(), np.inf)
    err_dirk = np.linalg.norm(udirk.flatten() - uref.flatten(), np.inf) / np.linalg.norm(uref.flatten(), np.inf)
    err_imex = np.linalg.norm(uimex.flatten() - uref.flatten(), np.inf) / np.linalg.norm(uref.flatten(), np.inf)
    err_sdc = np.linalg.norm(uend.flatten() - uref.flatten(), np.inf) / np.linalg.norm(uref.flatten(), np.inf)
    # Regression guards: bounds are the previously observed error levels.
    assert err_split < 4.821E-02, 'ERROR: split error is too high, got %s' % err_split
    assert err_dirk < 1.495e-01, 'ERROR: dirk error is too high, got %s' % err_dirk
    assert err_imex < 1.305e-01, 'ERROR: imex error is too high, got %s' % err_imex
    assert err_sdc < 9.548e-02, 'ERROR: sdc error is too high, got %s' % err_sdc
    print("Estimated discretisation error split explicit: %5.3e" % err_split)
    print("Estimated discretisation error of DIRK: %5.3e" % err_dirk)
    print("Estimated discretisation error of RK-IMEX: %5.3e" % err_imex)
    print("Estimated discretisation error of SDC: %5.3e" % err_sdc)

    # Plot a single cross section (column index 5) of the buoyancy component.
    fs = 8
    rcParams['figure.figsize'] = 5.0, 2.5
    plt.figure()
    plt.plot(xx[:, 5], udirk[2, :, 5], '--', color='g', markersize=fs - 2, label='DIRK(4)', dashes=(3, 3))
    plt.plot(xx[:, 5], uend[2, :, 5], '-', color='b', label='SDC(4)')
    plt.plot(xx[:, 5], uimex[2, :, 5], '--', color='r', markersize=fs - 2, label='IMEX(4)', dashes=(3, 3))
    plt.legend(loc='lower left', fontsize=fs, prop={'size': fs})
    plt.yticks(fontsize=fs)
    plt.xticks(fontsize=fs)
    plt.xlabel('x [km]', fontsize=fs, labelpad=0)
    plt.ylabel('Bouyancy', fontsize=fs, labelpad=1)
    filename = 'data/boussinesq.png'
    plt.savefig(filename, bbox_inches='tight')
if __name__ == "__main__":
plot_buoyancy()
| 2,298 | 993 |
#Requires the modules SpeechRecognition and pyaudio
import speech_recognition as sr
import sys
sys.path.insert(1, "..")
from camera.camera import Camera
from widefind.widefindScript import WidefindTracker
def recognizeSpeech(recognizer, microphone):
    """Record one utterance from *microphone* and transcribe it via Google's API.

    Returns a dict with keys:
        success: False only when the recognition API was unreachable
        error: None, or an error description string
        transcription: None, or the recognized text
    Raises TypeError if the arguments are not the expected types.
    """
    #Check that recognizer and microphone arguments are appropriate type
    if not isinstance(recognizer, sr.Recognizer):
        raise TypeError("'recognizer' must be 'Recognizer' instance")
    if not isinstance(microphone, sr.Microphone):
        raise TypeError("'microphone' must be 'Microphone' instance")
    with microphone as source:
        # Calibrate for background noise before capturing the utterance.
        recognizer.adjust_for_ambient_noise(source)
        audio = recognizer.listen(source)
    response = {
        "success": True, #Boolean for success true/false
        "error": None, #None if no errors, otherwise returns error message from speech recognition API
        "transcription": None #None if speech recognition failed, otherwise returns a transcription of input speech
    }
    try:
        print("Analysing voice sample...")
        response["transcription"] = recognizer.recognize_google(audio)
    except sr.RequestError:
        # API unreachable/quota: the request itself failed.
        response["success"] = False
        response["error"] = "API unavailable"
    except sr.UnknownValueError:
        # Audio captured but not intelligible; success stays True by design.
        response["error"] = "Unable to recognize speech"
    return response
def recordAudio(recognizer, microphone):
    """Repeatedly listen for speech and dispatch recognized commands.

    Bug fix: the original called itself recursively after every utterance,
    which grows the call stack without bound during a long session; an
    explicit loop has identical behaviour without the stack growth.
    """
    while True:
        print("\nListening for input, say something!")
        audio = recognizeSpeech(recognizer, microphone)
        print("Success: " + str(audio["success"]))
        print("Error: " + str(audio["error"]))
        print("Transcription: " + str(audio["transcription"]))
        handleTranscription(audio["transcription"])
#Handle transcriptions here
def handleTranscription(transcription):
    """Dispatch voice commands found in *transcription* to the tracker."""
    if not transcription:
        return
    if "help" in transcription:
        print("Helping")
        sensor.help()
    if "follow" in transcription:
        print("Follow command recognized!")
        print("Following")
        sensor.follow()
    if "stop" in transcription:
        print("Stop command recognized!")
        sensor.stop()
    # Two examples of easily recognizing transcript commands.
    # Substring match: fires whenever "example" appears anywhere, so a
    # transcription of "examples" also triggers it — convenient, but longer
    # words containing a command can misfire.
    if "example" in transcription:
        print("example command recognized! (partial match)")
        #Call function
    # Exact match: fires only when the transcription is precisely "example";
    # misses "examples" or sentences that merely contain the command.
    if transcription == "example":
        print("example command recognized! (exact match)")
        #Call function
if __name__ == "__main__":
# create recognizer and mic instances
recognizer = sr.Recognizer()
microphone = sr.Microphone()
c = Camera()
sensor = WidefindTracker()
sensor.start()
recordAudio(recognizer, microphone)
| 3,297 | 860 |
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.model_selection import learning_curve
class BlockingTimeSeriesSplit:
    """Cross-validator yielding disjoint, contiguous train/test folds in time order.

    The samples are cut into n_splits equal contiguous blocks; within each
    block the first 80% of indices form the training set and the remainder
    the test set, so folds never share samples.
    """

    def __init__(self, n_splits):
        self.n_splits = n_splits

    def get_n_splits(self, X, y, groups):
        """Return the number of splits (sklearn-compatible signature)."""
        return self.n_splits

    def split(self, X, y=None, groups=None):
        """Yield (train_indices, test_indices) for each contiguous block."""
        indices = np.arange(len(X))
        fold_size = len(X) // self.n_splits
        margin = 0  # optional gap between train and test; kept at zero
        for fold in range(self.n_splits):
            lo = fold * fold_size
            hi = lo + fold_size
            cut = lo + int(0.8 * (hi - lo))
            yield indices[lo:cut], indices[cut + margin:hi]
def _save_fig(
    fig_id: str,  # corrected annotation: used as a string filename stem below
    folder: str,
    WF: str,
    tight_layout=True,
    fig_extension="png",
    resolution=300,
):
    """Save the current matplotlib figure to <folder><WF>/<fig_id>.<fig_extension>.

    Creates the directory if needed. Note: *folder* and *WF* are concatenated
    directly (no separator inserted), so *folder* should end with one.
    """
    os.makedirs(folder + WF, exist_ok=True)
    path = os.path.join(folder + WF, fig_id + "." + fig_extension)
    if tight_layout:
        plt.tight_layout()
    plt.savefig(path, format=fig_extension, dpi=resolution)
def export_reports(name, reports, loc):
    """Write every report in `reports` to an HTML file under `loc`.

    Each report is saved as "<loc><name>_NWP<key>.html". A failed export
    prints a warning and does not stop the remaining exports.
    """
    for key, report in reports.items():
        target = loc + "{}_NWP{}.html".format(name, key)
        try:
            report.to_file(output_file=target)
        except Exception:
            print("WARN: Exportation failed for NWP{}".format(key))
def plot_learning_curve(
    estimator,
    title,
    X,
    y,
    axes=None,
    ylim=None,
    cv=None,
    n_jobs=None,
    train_sizes=np.linspace(0.1, 1.0, 5),
):
    """Draw three diagnostic plots for `estimator` fitted on (X, y).

    Axis 0 — learning curve: training vs cross-validation score as a
    function of the number of training examples.
    Axis 1 — scalability: fit time vs number of training examples.
    Axis 2 — performance: cross-validation score vs fit time.

    Parameters
    ----------
    estimator : object implementing "fit" and "predict"
        Cloned for each validation round by ``learning_curve``.
    title : str
        Title for the learning-curve axis.
    X : array-like, shape (n_samples, n_features)
        Training vectors.
    y : array-like, shape (n_samples,) or (n_samples, n_features), optional
        Targets; None for unsupervised learning.
    axes : array of 3 matplotlib axes, optional
        Axes to draw on; when omitted a new 1x3 figure is created.
    ylim : (ymin, ymax) tuple, optional
        y-limits applied to the learning-curve axis.
    cv : int, CV splitter, iterable, or None
        Cross-validation strategy, forwarded to sklearn's ``learning_curve``
        (None means the library default 5-fold).
    n_jobs : int or None
        Parallelism forwarded to ``learning_curve``.
    train_sizes : array-like of float or int
        Relative (0, 1] fractions or absolute training-set sizes.

    Returns
    -------
    The ``matplotlib.pyplot`` module, so the caller can show or save.
    """
    if axes is None:
        _, axes = plt.subplots(1, 3, figsize=(20, 5))

    axes[0].set_title(title)
    if ylim is not None:
        axes[0].set_ylim(*ylim)
    axes[0].set_xlabel("Training examples")
    axes[0].set_ylabel("Score")

    sizes, train_scores, test_scores, fit_times, _ = learning_curve(
        estimator,
        X,
        y,
        cv=cv,
        n_jobs=n_jobs,
        train_sizes=train_sizes,
        return_times=True,
    )
    train_mean = np.mean(train_scores, axis=1)
    train_std = np.std(train_scores, axis=1)
    test_mean = np.mean(test_scores, axis=1)
    test_std = np.std(test_scores, axis=1)
    fit_mean = np.mean(fit_times, axis=1)
    fit_std = np.std(fit_times, axis=1)

    # Learning curve: +/- one std bands first, then the mean lines on top.
    axes[0].grid()
    axes[0].fill_between(
        sizes,
        train_mean - train_std,
        train_mean + train_std,
        alpha=0.1,
        color="r",
    )
    axes[0].fill_between(
        sizes,
        test_mean - test_std,
        test_mean + test_std,
        alpha=0.1,
        color="g",
    )
    axes[0].plot(sizes, train_mean, "o-", color="r", label="Training score")
    axes[0].plot(sizes, test_mean, "o-", color="g", label="Cross-validation score")
    axes[0].legend(loc="best")

    # Scalability: training-set size vs fit time.
    axes[1].grid()
    axes[1].plot(sizes, fit_mean, "o-")
    axes[1].fill_between(
        sizes,
        fit_mean - fit_std,
        fit_mean + fit_std,
        alpha=0.1,
    )
    axes[1].set_xlabel("Training examples")
    axes[1].set_ylabel("fit_times")
    axes[1].set_title("Scalability of the model")

    # Performance: fit time vs cross-validation score.
    axes[2].grid()
    axes[2].plot(fit_mean, test_mean, "o-")
    axes[2].fill_between(
        fit_mean,
        test_mean - test_std,
        test_mean + test_std,
        alpha=0.1,
    )
    axes[2].set_xlabel("fit_times")
    axes[2].set_ylabel("Score")
    axes[2].set_title("Performance of the model")
    return plt
| 6,432 | 2,101 |
import glob
import os
import shutil
import sys
from zipfile import ZipFile
import django
from internetarchive import upload
sys.path.append("/var/projects/museum/")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()  # must run before importing any Django model below
from museum_site.models import File
# Filesystem roots for the museum's downloadable zips and the bundled engine bases.
ZGAMES_PATH = "/var/projects/museum"
BASE_PATH = "/var/projects/museum/museum_site/static/data/base/"
# Selectable engine packages. Each entry describes one ZZT base:
#   directory   - subdirectory of BASE_PATH holding the base files
#   use_cfg     - whether a ZZT.CFG launch file should be generated
#   registered  - whether the CFG gets the REGISTERED flag appended
#   prefix      - prefix for the uploaded archive identifier
#   executable  - DOS executable used in the emulator_start metadata
BASES = {
    "A": {
        "name": "ZZT v3.2 Registered",
        "directory": "ZZT32-REG",
        "use_cfg": True,
        "registered": True,
        "prefix": "zzt_",
        "executable": "ZZT.EXE",
    },
    "B": {
        "name": "ZZT v2.0 Shareware",
        "directory": "ZZT20-SW",
        "use_cfg": True,
        "registered": False,
        "prefix": "zzt_",
        "executable": "ZZT.EXE",
    }
}
def main():
    """Interactively package museum files and upload them to the Internet Archive.

    Prompts for a museum File id in a loop; for each one, copies its zip,
    injects the selected ZZT base files (and an optional ZZT.CFG), then
    uploads the result with DOSBox emulation metadata. An empty File ID
    exits the loop. Returns True.
    """
    print("Internet Archive Publisher")
    while True:
        file_id = input("File ID: ")
        if not file_id:
            break

        # Load file
        f = File.objects.get(pk=int(file_id))
        print("Selected:", f, "(" + f.filename + ")")

        for base_key in BASES.keys():
            print("[" + base_key + "]", BASES[base_key]["name"])
        selected_base = input("Select package base: ").upper()
        base = BASES[selected_base]

        # Copy the zip
        zip_name = "zzt_" + f.filename
        shutil.copy(
            ZGAMES_PATH + f.download_url(),
            zip_name
        )

        # Bug fix: launch_file was only assigned inside the use_cfg branch
        # but is read unconditionally when building `meta` below, raising
        # NameError for any base without a CFG. Default to "no file".
        launch_file = ""

        # Open the WIP zip
        with ZipFile(zip_name, "a") as z:
            # Insert the base files
            to_add = glob.glob(
                os.path.join(BASE_PATH, base["directory"], "*")
            )
            for a in to_add:
                z.write(a, arcname=os.path.basename(a))

            # Create ZZT.CFG if needed
            if base["use_cfg"]:
                # Let the operator pick which world the emulator should launch
                file_list = z.namelist()
                for idx, name in enumerate(file_list, start=1):
                    print(idx, name)
                selected_idx = int(input("Launch which file? ")) - 1
                launch_file = file_list[selected_idx]
                config_content = launch_file[:-4]  # Remove .ZZT extension
                if base["registered"]:
                    config_content += "\r\nREGISTERED"
                z.writestr("ZZT.CFG", config_content)

        # Zip file is completed, prepare the upload
        meta = {
            "title": f.title,
            "mediatype": "software",
            "collection": "open_source_software",
            "emulator": "dosbox",
            "emulator_ext": "zip",
            # strip() drops the trailing space when no launch file was chosen
            "emulator_start": (base["executable"] + " " + launch_file).strip(),
            "year": str(f.release_date)[:4],
            "subject": ["zzt"] + f.genre.split("/"),
            "creator": f.author.split("/"),
            "description": "World created using the ZZT engine."
        }
        print("Uploading to Internet Archive...")
        r = upload(
            base["prefix"] + f.filename[:-4],
            files=[zip_name],
            metadata=meta
        )
        if r[0].status_code == 200:
            print("Upload successful!")
            f.archive_name = base["prefix"] + f.filename[:-4]
            f.save()
            print("https://archive.org/details/" + f.archive_name)
            os.remove(zip_name)  # clean up the local working copy
        else:
            print("Upload failed!")
            print(r)
    return True
if __name__ == "__main__":
    main()  # run the interactive publisher only when executed as a script
| 3,458 | 1,078 |
import ffmpeg
import os
import tempfile
import re
from pydub import AudioSegment
import math
# printf-style frame filename used for ffmpeg output; must stay in sync
# with the "frame-{:08d}.jpg" format used by remove_frame below.
FRAME_NAME_PATTERN = "frame-%08d.jpg"
def get_filename_from_path(path):
    """Return the file name from `path`, without directory or final extension."""
    stem, _ext = os.path.splitext(os.path.basename(path))
    return stem
# Matches a fraction string such as "30000/1001" (ffprobe time_base format).
FRACTION_PATTERN = r"(\d+)/(\d+)"
FRACTION_RE = re.compile(FRACTION_PATTERN)
def convert_fraction(frac):
    """Convert a "num/den" fraction string to a float.

    Raises:
        ValueError: if `frac` is not of the form "<digits>/<digits>".
            (Previously a non-matching string crashed with an opaque
            TypeError when the None match object was indexed.)
        ZeroDivisionError: if the denominator is zero.
    """
    match = FRACTION_RE.match(frac)
    if match is None:
        raise ValueError("not a fraction string: {!r}".format(frac))
    return float(match[1]) / float(match[2])
def get_video_duration(path):
    """Return the duration, in seconds, of the first video stream in `path`.

    Computed from ffprobe metadata as time_base * duration_ts.

    Raises:
        ValueError: if the file contains no video stream. (Previously this
            crashed with TypeError when subscripting the None fallback of
            next().)
    """
    probe = ffmpeg.probe(path)
    video_stream = next(
        (stream for stream in probe['streams'] if stream['codec_type'] == 'video'),
        None,
    )
    if video_stream is None:
        raise ValueError("no video stream found in {!r}".format(path))
    time_base = video_stream["time_base"]
    duration_ts = video_stream["duration_ts"]
    return convert_fraction(time_base) * float(duration_ts)
def extract_frames(path, frame_dir_name):
    """Dump every frame of the video at `path` as numbered JPEGs into `frame_dir_name`."""
    output_pattern = os.path.join(frame_dir_name, FRAME_NAME_PATTERN)
    ffmpeg.output(ffmpeg.input(path), output_pattern).run()
def extract_audio(path, audio_dir_name):
    """Extract the audio track of `path` to `<audio_dir_name>/audio.mp3`; return that path."""
    save_path = os.path.join(audio_dir_name, "audio.mp3")
    source = ffmpeg.input(path)
    ffmpeg.output(source, save_path, acodec="libmp3lame", f="mp3").run()
    return save_path
def translate(value, from_min, from_max, to_min, to_max):
    """Linearly map `value` from [from_min, from_max] onto [to_min, to_max].

    Results below 0.0001 (including all negatives) and infinities collapse
    to 0, matching the original clamping behaviour.
    """
    fraction = float(value - from_min) / float(from_max - from_min)
    mapped = to_min + fraction * (to_max - to_min)
    return 0 if (mapped < 0.0001 or math.isinf(mapped)) else mapped
SILENCE = -99.5  # NOTE(review): not referenced in this chunk — presumably a floor for to_db values; confirm
LOUDEST = 99.5   # ceiling of the volume scale; extract() derives its threshold as LOUDEST * threshold_ratio
def to_db(amplitude):
    """Convert an amplitude to a decibel-like value, 10 * ln(amplitude).

    NOTE(review): this uses the natural log rather than log10, so the
    result is not true decibels; kept as-is because the LOUDEST-derived
    thresholds elsewhere are calibrated against it.

    Returns 0 when the value cannot be converted (zero, negative, or
    non-numeric input). Fix: the previous bare `except:` swallowed every
    exception, including KeyboardInterrupt; it is narrowed here to the
    exceptions math.log can actually raise for bad input.
    """
    try:
        return 10 * math.log(amplitude)
    except (ValueError, TypeError):
        return 0
def delete_file(filepath):
    """Remove `filepath` if it exists; return True only when a file was deleted."""
    if not os.path.exists(filepath):
        return False
    os.remove(filepath)
    return True
def remove_frame(frame_number, frame_dir_path):
    """Delete the extracted frame image with index `frame_number`, if it exists."""
    target = os.path.join(frame_dir_path, "frame-{:08d}.jpg".format(frame_number))
    delete_file(target)
def extract(input_path, output_path, threshold_ratio=0.7, invert=False):
    """Re-encode a video keeping only the frames whose audio is quiet (or loud).

    Splits the input into frames and audio, measures each frame-length audio
    clip with to_db, drops frames whose volume is >= threshold (or <= when
    `invert` is True) together with their audio, and re-muxes the survivors.

    Args:
        input_path: source video file.
        output_path: destination video file.
        threshold_ratio: fraction of LOUDEST used as the volume cutoff.
        invert: keep loud sections instead of quiet ones.

    Returns:
        True-ish (output file exists) on success, False when ffmpeg failed.
    """
    video_name = get_filename_from_path(input_path)
    temp_dir = tempfile.TemporaryDirectory(suffix="_"+video_name)
    temp_dir_name = temp_dir.name
    duration = get_video_duration(input_path)
    duration_millis = duration*1000
    extract_frames(input_path,temp_dir_name)
    # Count the frame files just written (audio is extracted afterwards,
    # so only frame-*.jpg files exist in the temp dir at this point).
    framecount = len([name for name in os.listdir(temp_dir_name) if os.path.isfile(os.path.join(temp_dir_name, name))])
    fps = framecount/duration
    millis_per_frame = duration_millis/framecount
    audio_path = extract_audio(input_path,temp_dir_name)
    audio = AudioSegment.from_file(audio_path)
    threshold = LOUDEST*threshold_ratio
    new_audio = AudioSegment.empty()
    # NOTE(review): frames are numbered from 1 but this loop stops at
    # framecount-1, so the last frame is never considered — confirm intent.
    for i in range(1,framecount):
        start = (i-1) * millis_per_frame
        end = i * millis_per_frame
        clip = audio[start:end]
        # clip.max is the clip's peak sample amplitude (pydub) — presumably
        # comparable across clips; converted to the module's pseudo-dB scale.
        volume = to_db(clip.max)
        if ((not invert) and volume >= threshold) or (invert and volume <= threshold):
            remove_frame(i,temp_dir_name)
        else:
            new_audio += clip
    new_audio_path = os.path.join(temp_dir_name,"new_audio.mp3")
    new_audio.export(new_audio_path, format="mp3")
    # Re-mux the surviving frames (glob order) with the stitched audio.
    frames_stream = ffmpeg.input(temp_dir_name+ "/*.jpg", pattern_type='glob', framerate=fps)
    audio_stream = ffmpeg.input(new_audio_path)
    stream = ffmpeg.output(frames_stream,audio_stream,output_path)
    # NOTE(review): the bare except hides the ffmpeg error details; it only
    # converts any failure into a False return.
    try:
        stream.run()
    except:
        return False
    else:
        return os.path.exists(output_path)
#! /usr/bin/python3
# 20180726 - wiki.binefa.cat
# Based on a code from Tony DiCola (AdaFruit)
# License: Public Domain
# Reads four channels of an ADS1115 ADC and mirrors each reading onto a
# GPIO PWM duty cycle, until interrupted with Ctrl-C.
import time
import Adafruit_ADS1x15
import time  # NOTE(review): duplicate import of time (harmless)
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
# Four PWM-capable output pins, one per ADC channel.
GPIO.setup(32, GPIO.OUT)
GPIO.setup(33, GPIO.OUT)
GPIO.setup(12, GPIO.OUT)
GPIO.setup(35, GPIO.OUT)
p = [0]*4
p[0] = GPIO.PWM(32, 50)	  # channel=32 frequency=50Hz
p[1] = GPIO.PWM(33, 50)	  # channel=33 frequency=50Hz
p[2] = GPIO.PWM(12, 50)	  # channel=12 frequency=50Hz
p[3] = GPIO.PWM(35, 50)	  # channel=35 frequency=50Hz
# Start all channels at 0% duty cycle.
p[0].start(0)
p[1].start(0)
p[2].start(0)
p[3].start(0)
adc = Adafruit_ADS1x15.ADS1115()
GAIN = 1
#VPS = 4.096 / 32768.0 #volts per step
# Scale factor mapping raw ADC counts (0..26600) to a 0..100 duty cycle.
VPS = 100.0 / 26600.0
print('-' * 46)
try:
    values = [0]*4
    # Poll each ADC channel and forward the scaled value as PWM duty cycle.
    while 1:
        for i in range(4):
            values[i] = adc.read_adc(i, gain=GAIN)
            #print('ADC{:01d}: '.format(i)+'HEX 0x{:04x} '.format(values[i])+'DEC {:05d} '.format(values[i])+'reading {:2.3f} %'.format(values[i]*VPS))
            p[i].ChangeDutyCycle(values[i]*VPS)
        #print('-' * 46)
        #time.sleep(0.5)
        time.sleep(0.1)
except KeyboardInterrupt:
    pass
# Ctrl-C: stop PWM on all channels and release the GPIO pins.
p[0].stop()
p[1].stop()
p[2].stop()
p[3].stop()
GPIO.cleanup()
| 1,159 | 647 |
from .users import *
from .summaries import *
from .keys import * | 65 | 20 |
import sys
import warnings
import numpy as np
from scipy.stats import rankdata
TAB = ' '  # NOTE(review): single-space "tab" token; its usage is not visible in this chunk
maxfloat = np.float128 if hasattr(np, 'float128') else np.longdouble  # widest float dtype available on this platform
class ReprMixin:
    """Mixin whose repr is the class name followed by one tab-indented `name: value` line per attribute."""

    def __repr__(self):
        attr_lines = [f'\t{name}: {value}' for name, value in self.__dict__.items()]
        return f'{self.__class__.__name__}\n' + '\n'.join(attr_lines)
def _check_param(x):
if hasattr(x, '__len__'):
if len(x) == 2:
return x
elif len(x) == 1:
return [x[0], x[0]]
else:
print(f'Something went wrong, parameter array has {len(x)} values')
else:
return [x, x]
def _check_criteria(x):
if hasattr(x[0], '__len__'):
return x
else:
return [x, x]
def pearson2d(x, y):
    """Pearson correlation of `x` and `y` along the last axis, NaN-tolerant."""
    x = np.asarray(x)
    y = np.asarray(y)
    xc = x - np.nanmean(x, axis=-1)[..., None]
    yc = y - np.nanmean(y, axis=-1)[..., None]
    cov = np.nansum(xc * yc, axis=-1)
    norm = np.sqrt(np.nansum(xc ** 2, axis=-1) * np.nansum(yc ** 2, axis=-1))
    return cov / norm
def spearman2d(x, y, axis=0):
    """Spearman correlation along the last axis: Pearson correlation of ranks, NaN-tolerant.

    `axis` selects the axis along which scipy's rankdata computes ranks;
    the correlation itself is always taken over the last axis.
    """
    ranks_x = rankdata(np.asarray(x), axis=axis)
    ranks_y = rankdata(np.asarray(y), axis=axis)
    xc = ranks_x - np.nanmean(ranks_x, axis=-1)[..., None]
    yc = ranks_y - np.nanmean(ranks_y, axis=-1)[..., None]
    cov = np.nansum(xc * yc, axis=-1)
    norm = np.sqrt(np.nansum(xc ** 2, axis=-1) * np.nansum(yc ** 2, axis=-1))
    return cov / norm
def weighted_pearson(x, y, w):
    """Weighted Pearson correlation of flattened `x` and `y`, NaN-tolerant.

    Weights are normalized to sum to 1 before use.
    """
    xf = np.asarray(x).flatten()
    yf = np.asarray(y).flatten()
    wf = np.asarray(w).flatten() / np.nansum(w)
    dx = xf - np.nansum(wf * xf)
    dy = yf - np.nansum(wf * yf)
    cov = np.nansum(wf * dx * dy)
    var_x = np.nansum(wf * dx ** 2)
    var_y = np.nansum(wf * dy ** 2)
    return cov / np.sqrt(var_x * var_y)
def print_warnings(w):
    """Print each distinct warning message recorded in `w`, one per line.

    The noisy scipy 'delta_grad == 0.0' message is suppressed.
    """
    unique_messages = {record.message.args[0] for record in w}
    for message in unique_messages:
        if 'delta_grad == 0.0' not in message:
            print('\tWarning: ' + message)
def raise_warning_in_catch_block(msg, category, w):
    """Issue a warning while inside a `warnings.catch_warnings(record=True)` block.

    Recording suppresses normal warning display, so the most recently
    recorded warning (if any) is formatted and written to stderr by hand.
    """
    warnings.warn(msg, category=category)
    if not len(w):
        return
    last = w[-1]
    formatted = warnings.formatwarning(
        last.message, last.category, last.filename, last.lineno, line=last.line
    )
    sys.stderr.write(formatted)
def type2roc(correct, conf, nbins=5):
    """Area under the type-2 ROC curve (metacognitive sensitivity).

    Args:
        correct: numpy array of 0/1 per trial (1 = correct).
        conf: per-trial confidence ratings between 0 and 1.
        nbins: number of equal-width confidence bins.

    Returns:
        Scalar area under the type-2 ROC (0.5 = chance).
    """
    # Calculate area under type 2 ROC
    #
    # correct - vector of 1 x ntrials, 0 for error, 1 for correct
    # conf - vector of continuous confidence ratings between 0 and 1
    # nbins - how many bins to use for discretization
    bs = 1 / nbins
    # h2/fa2: per-bin counts of correct ("hits") and incorrect ("false
    # alarms") trials, stored from highest-confidence bin to lowest;
    # the +0.5 acts as a smoothing constant so no bin is empty.
    h2, fa2 = np.full(nbins, np.nan), np.full(nbins, np.nan)
    for c in range(nbins):
        if c:
            h2[nbins - c - 1] = np.sum((conf > c*bs) & (conf <= (c+1)*bs) & correct.astype(bool)) + 0.5
            fa2[nbins - c - 1] = np.sum((conf > c*bs) & (conf <= (c+1)*bs) & ~correct.astype(bool)) + 0.5
        else:
            # lowest bin is closed on the left so conf == 0 is counted
            h2[nbins - c - 1] = np.sum((conf >= c * bs) & (conf <= (c + 1) * bs) & correct.astype(bool)) + 0.5
            fa2[nbins - c - 1] = np.sum((conf >= c * bs) & (conf <= (c + 1) * bs) & ~correct.astype(bool)) + 0.5
    # Normalize to rates and build the cumulative type-2 ROC curve.
    h2 /= np.sum(h2)
    fa2 /= np.sum(fa2)
    cum_h2 = np.hstack((0, np.cumsum(h2)))
    cum_fa2 = np.hstack((0, np.cumsum(fa2)))
    # Accumulate the signed area between the cumulative curves segment by segment.
    k = np.full(nbins, np.nan)
    for c in range(nbins):
        k[c] = (cum_h2[c+1] - cum_fa2[c])**2 - (cum_h2[c] - cum_fa2[c+1])**2
    auroc2 = 0.5 + 0.25*np.sum(k)
    return auroc2
| 3,423 | 1,523 |
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Qotto, 2019
""" BaseCommandHandler Module
All command handler must be inherit from this class. Execute function was called by consumer on each received command.
For make an transaction in execute function return 'transaction' as string after end transaction otherwise return none.
"""
from typing import Union
from tonga.models.handlers.base import BaseHandler
from tonga.models.records.command.command import BaseCommand
__all__ = [
'BaseCommandHandler'
]
class BaseCommandHandler(BaseHandler):
    """ Base class for all command handlers.

    Subclasses must implement both `handler_name` and `execute`.
    """
    @classmethod
    def handler_name(cls) -> str:
        """ Return the handler name, used by the serializer.

        Raises:
            NotImplementedError: abstract method, must be overridden

        Returns:
            str: the handler name (provided by subclasses)
        """
        raise NotImplementedError
    async def execute(self, event: BaseCommand) -> Union[str, None]:
        """ Called automatically by Tonga when a command with the matching name is received by the consumer.

        Args:
            event (BaseCommand): command event received by the consumer

        Notes:
            If execute performs a transaction, return the string 'transaction' when the transaction ends; otherwise return None.

        Raises:
            NotImplementedError: abstract method, must be overridden

        Returns:
            Union[str, None]: 'transaction' or None (provided by subclasses)
        """
        raise NotImplementedError
| 1,372 | 347 |
from django.contrib import admin
from django.urls import path,re_path
from . import views
from rest_framework.authtoken.views import obtain_auth_token
from rest_framework_simplejwt import views as jwt_views
from django.conf import settings
from django.conf.urls.static import static
# URL routes for the app; names are used by reverse()/{% url %} lookups.
urlpatterns = [
    path('', views.home, name='index'),
    # NOTE(review): 'sigxnup' looks like a typo for 'signup'; renaming would
    # break any reverse('sigxnup') callers, so it is only flagged here.
    path('c/', views.posted, name='sigxnup'),
    path('signup/', views.signup, name='signup'),
    # Account activation link with uid + token from the signup email flow.
    re_path(r'^activate/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        views.activate, name='activate'),
    path('profile/', views.myprofile, name='profile'),
    re_path(r'^update/profile', views.updatemyprofile, name='update_profile'),
    # Token endpoints: DRF TokenAuth plus SimpleJWT obtain/refresh pair.
    re_path(r'^api-token-auth/', obtain_auth_token),
    path('api/token/', jwt_views.TokenObtainPairView.as_view(), name='token_obtain_pair'),
    path('api/token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'),
    re_path(r'^update/(\d+)', views.comment, name='comment'),
    re_path(r'^updates/(\d+)', views.updates, name='updates'),
    # NOTE(review): 'updatesds' names BOTH of the next two routes;
    # reverse('updatesds') resolves only the last one registered.
    re_path(r'^business/(\d+)', views.business, name='updatesds'),
    re_path(r'^g/(\d+)', views.get_business, name='updatesds'),
    path('search/', views.search_business, name='search_results'),
]
if settings.DEBUG:
    # Serve uploaded media directly — development only.
    urlpatterns += static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
| 1,373 | 503 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import argparse
import codecs
def arguments():
    """Parse the command line; requires -f/--file pointing at the puzzle input."""
    parser = argparse.ArgumentParser(description='Adventofcode.')
    parser.add_argument('-f', '--file', required=True)
    return parser.parse_args()
class Matchsticks:
    """AoC day 8: compare a string literal's source length with its decoded length.

    `whole_string` is one puzzle line, including its surrounding quotes.
    """

    def __init__(self, whole_string):
        self.whole_string = whole_string
        self.converted_string = None
        self.length_whole_string = None
        self.length_converted_string = None

    def calc_length_whole_string(self):
        """Record the raw source length, quotes and escape characters included."""
        self.length_whole_string = len(self.whole_string)

    def calc_length_converted_string(self):
        """Record the in-memory length after stripping quotes and decoding escapes."""
        inner = self.whole_string[1:-1]
        self.converted_string = codecs.getdecoder("unicode_escape")(inner)[0]
        self.length_converted_string = len(self.converted_string)
def main():
    """Solve both parts of the puzzle using the file given on the command line.

    Part 1: total source-code length minus total decoded length.
    Part 2: total re-encoded length minus total source length, which per
    the puzzle reduces to 2 + count('\\') + count('"') per line.
    """
    args = arguments()
    with open(args.file) as file:
        rows = file.read().strip().splitlines()

    result = []
    for row in rows:
        part1 = Matchsticks(row)
        part1.calc_length_whole_string()
        part1.calc_length_converted_string()
        result.append(part1)

    print("Part1:", sum(x.length_whole_string for x in result) - sum(x.length_converted_string for x in result))
    # Bug fix: Part 2 previously re-read a hard-coded file named 'input'
    # (ignoring the -f argument, and leaking the file handle). Reuse the
    # lines already read from args.file instead.
    print("Part2:", sum(2 + s.count('\\') + s.count('"') for s in rows))


if __name__ == '__main__':
    main()
| 1,517 | 502 |
from . import polygon_geo_cpu
def polygon_iou(poly1, poly2):
    """Compute the IoU of polygons.

    Thin wrapper delegating to the CPU implementation in
    `polygon_geo_cpu`; the expected polygon format is whatever that
    extension accepts (not visible from here).
    """
    return polygon_geo_cpu.polygon_iou(poly1, poly2)
| 155 | 60 |
class CarrinhodeCompras:
    """Shopping cart aggregating product objects that expose `nome` and `preco`."""

    def __init__(self):
        # Aggregation: the cart only holds references to externally created products.
        self.produtos = []

    def inserir_produtos(self, produto):
        """Add a product to the cart."""
        self.produtos.append(produto)

    def lista_produto(self):
        """Print each product's name and price, one per line."""
        for item in self.produtos:
            print(item.nome, item.preco)

    def soma_total(self):
        """Return the sum of the prices of all products in the cart."""
        return sum(item.preco for item in self.produtos)
class Produto:
    """A product with a display name (`nome`) and a price (`preco`)."""

    def __init__(self, nome, preco):
        self.nome = nome
        self.preco = preco
| 536 | 175 |
from ..app.crud import CRUDBase
from .models import (
User,
UserGroup,
UserGroupMember,
UserGroupPermission,
UserPermission,
)
class UserCRUD(CRUDBase):
    """CRUD operations for `User`, plus lookup and activity helpers."""

    model = User

    @classmethod
    def get_by_uuid(cls, session, uuid: str):
        """Return the user with the given uuid, or None.

        Fix: the classmethod parameter was previously named `Cls`, breaking
        the lowercase convention used by every other method in this module.
        """
        return User.query.filter_by(uuid=uuid).first()

    @classmethod
    def get_by_login(cls, session, login: str) -> User:
        """Return the user with the given login, or None."""
        return User.query.filter_by(login=login).first()

    @classmethod
    def is_active(cls, session, id: int) -> bool:
        """Whether the user is active; None when the user does not exist.

        The existence test now uses `is not None`, consistent with
        is_global_active below (avoids relying on model truthiness).
        """
        user: User = cls.get(session, id)
        if user is not None:
            return user.is_active

    @classmethod
    def is_global_active(cls, session, id: int) -> bool:
        """Whether both the user and its domain are active; None when the user does not exist."""
        user: User = cls.get(session, id)
        if user is not None:
            return user.domain.is_active and user.is_active
class UserGroupCRUD(CRUDBase):
    """CRUD operations for `UserGroup`."""

    model = UserGroup

    @classmethod
    def is_active(cls, session, id: int) -> bool:
        """Whether the group is active; None when the group does not exist."""
        group: UserGroup = cls.get(session, id)
        if not group:
            return None
        return group.is_active
class UserGroupMemberCRUD(CRUDBase):
    """CRUD operations for `UserGroupMember` (group <-> user link rows)."""

    model = UserGroupMember

    @classmethod
    def get_by_pair(cls, session, group_id: int, user_id: int):
        """Return the membership row for (group_id, user_id), or None."""
        return UserGroupMember.query.filter_by(
            group_id=group_id, user_id=user_id
        ).first()

    @classmethod
    def is_unique(cls, session, group_id: int, user_id: int):
        """True when no membership row exists yet for (group_id, user_id).

        Bug fix: this previously returned `not (count() == True)`, i.e. it
        only detected a duplicate when the count was exactly 1 — with two
        or more existing rows it wrongly reported the pair as unique.
        Compare the count against zero instead.
        """
        return (
            UserGroupMember.query.filter_by(group_id=group_id, user_id=user_id).count()
            == 0
        )
class UserGroupPermissionCRUD(CRUDBase):
    """CRUD operations for `UserGroupPermission` (group <-> permission link rows)."""

    model = UserGroupPermission

    @classmethod
    def get_by_pair(cls, session, group_id: int, permission_id: int):
        """Return the link row for (group_id, permission_id), or None."""
        return UserGroupPermission.query.filter_by(
            group_id=group_id, permission_id=permission_id
        ).first()

    @classmethod
    def is_unique(cls, session, group_id: int, permission_id: int):
        """True when no link row exists yet for (group_id, permission_id).

        Bug fix: `count() == True` compared the count with 1, so any pair
        already duplicated (count >= 2) was wrongly reported unique.
        Compare the count against zero instead.
        """
        return (
            UserGroupPermission.query.filter_by(
                group_id=group_id, permission_id=permission_id
            ).count()
            == 0
        )
class UserPermissionCRUD(CRUDBase):
    """CRUD operations for `UserPermission` (user <-> permission link rows)."""

    model = UserPermission

    @classmethod
    def get_by_pair(cls, session, user_id: int, permission_id: int):
        """Return the link row for (user_id, permission_id), or None."""
        return UserPermission.query.filter_by(
            user_id=user_id, permission_id=permission_id
        ).first()

    @classmethod
    def is_unique(cls, session, user_id: int, permission_id: int):
        """True when no link row exists yet for (user_id, permission_id).

        Bug fix: `count() == True` compared the count with 1, so any pair
        already duplicated (count >= 2) was wrongly reported unique.
        Compare the count against zero instead.
        """
        return (
            UserPermission.query.filter_by(
                user_id=user_id, permission_id=permission_id
            ).count()
            == 0
        )
| 2,632 | 830 |
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, Optional, Set
from .alibaba_compute_source import AlibabaComputeSource
from .alibaba_instance_charge_type import AlibabaInstanceChargeType
from .alibaba_spot_strategy import AlibabaSpotStrategy
from .compute_source_exhaustion_status import ComputeSourceExhaustionStatus
from .compute_source_status import ComputeSourceStatus
from .compute_source_traits import ComputeSourceTraits
@dataclass
class AlibabaInstancesComputeSource(AlibabaComputeSource):
    """Defines a source of compute composed of Alibaba Cloud ECS instances."""
    # Fully-qualified platform type discriminator; fixed and not settable by callers.
    type: str = field(default="co.yellowdog.platform.model.AlibabaInstancesComputeSource", init=False)
    # The following fields are populated by the platform rather than by the
    # constructor (all declared with init=False).
    traits: Optional[ComputeSourceTraits] = field(default=None, init=False)
    credentials: Optional[Set[str]] = field(default=None, init=False)
    id: Optional[str] = field(default=None, init=False)
    createdFromId: Optional[str] = field(default=None, init=False)
    status: Optional[ComputeSourceStatus] = field(default=None, init=False)
    statusMessage: Optional[str] = field(default=None, init=False)
    exhaustionStatus: Optional[ComputeSourceExhaustionStatus] = field(default=None, init=False)
    expectedExhaustionTermination: Optional[datetime] = field(default=None, init=False)
    name: str
    """The name of the compute source. This must be unique within a compute requirement."""
    # Name of the credential used to provision instances for this source.
    credential: str
    region: str
    """The Alibaba Cloud region where instances will be provisioned."""
    securityGroupId: str
    """The ID of the Alibaba Cloud Security Group for the provisioned instances."""
    vswitchId: str
    """The ID of the virtual switch to use for the provisioned instances."""
    instanceType: str
    """The Alibaba Cloud instance type for the provisioned instances."""
    imageId: str
    """The region-specific Alibaba Cloud ID for the image to use for the provisioned instances."""
    availabilityZone: Optional[str] = None
    """The Alibaba Cloud availability zone within the region where instances will be provisioned."""
    instanceChargeType: Optional[AlibabaInstanceChargeType] = None
    """The Alibaba Cloud charge type to use for the provisioned instances."""
    spotStrategy: Optional[AlibabaSpotStrategy] = None
    """The Alibaba Cloud spot strategy to use when provisioning instances."""
    spotPriceLimit: Optional[float] = None
    """The Alibaba Cloud spot price limit to use with SPOT_WITH_PRICE_LIMIT spot strategy."""
    # Maximum number of instances to request from this source.
    # NOTE(review): the semantics of 0 are not visible here — presumably
    # "no per-source limit"; confirm against the platform documentation.
    limit: int = 0
    specifyMinimum: bool = False
    """
    Indicates if YellowDog Compute should specify the minimum when requesting instances from Alibaba Cloud.
    If true, then no instances are provisioned unless all requested instances are available;
    otherwise, if false, YellowDog Compute will provision as many instances as possible up to the number requested from this compute source.
    """
    assignPublicIp: bool = True
    """Indicates if provisioned instances should be assigned public IP addresses."""
    keyName: Optional[str] = None
    """The name of the Alibaba Cloud key pair to use when logging into any instances provisioned from this source."""
    ramRoleName: Optional[str] = None
    """The name of the RAM Role to use for the provisioned instances."""
    # NOTE(review): presumably a cloud-init / user-data script passed to the
    # instances at boot — confirm.
    userData: Optional[str] = None
    # Optional tags; presumably applied to the provisioned instances — confirm.
    instanceTags: Optional[Dict[str, str]] = None
| 3,392 | 913 |
from flask import Flask, render_template, request, redirect, url_for, session
from flask_session import Session
from tempfile import mkdtemp
from cs50 import SQL
from random import randint
from werkzeug.security import check_password_hash, generate_password_hash
from functions import get_coms_count, get_pages_count
import pagination
app = Flask(__name__)
# Configure session to use filesystem
app.config["SESSION_FILE_DIR"] = mkdtemp()  # throwaway dir: sessions do not survive restarts
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///problem.db")
# Set a constant number of problems per page as 10
PER_PAGE = 10
# Main page of the website
@app.route("/", defaults={'page': 1})
@app.route('/page/<int:page>')
def index(page):
# Return error if user wants to view page with a negative id
if page <= 0:
return render_template("404.html")
# Find a database id of the first problem on the page
row = PER_PAGE * (page - 1)
# Find how many problems are there in the database
problems_count = db.execute("SELECT COUNT(*) AS length FROM problems")[0]["length"]
# Select nessesary problems from database
problems = db.execute("SELECT * FROM problems ORDER BY id DESC LIMIT :row_number, :count", row_number=row, count=PER_PAGE)
# Return error if there are no problems on the page with given id
if not problems and page != 1:
return render_template("404.html")
# Find how many comments are there under each problem
for problem in problems:
get_coms_count(problem)
# Create pagination
pag = pagination.Pagination(page, PER_PAGE, problems_count)
pages_count = get_pages_count(problems_count)
return render_template("index.html", pagination=pag, problems=problems, pages_count=pages_count, current_page=page, from_where="index")
# Add a problem
@app.route("/add", methods=["GET", "POST"])
def add():
cap_1 = randint(1, 10)
cap_2 = 10 - cap_1
try:
user_id = session["user_id"]
except:
user_id = None
if request.method == "POST":
email = request.form.get("email")
problem = request.form.get("problem")
captcha = request.form.get("captcha")
if not email and not user_id:
return render_template("add.html", error=1, problem=problem, cap_1=cap_1, cap_2=cap_2, user_id=user_id)
if not problem:
return render_template("add.html", error=2, email=email, cap_1=cap_1, cap_2=cap_2, user_id=user_id)
if not captcha:
return render_template("add.html", error=3, problem=problem, email=email, cap_1=cap_1, cap_2=cap_2, user_id=user_id)
if captcha != "10":
return render_template("add.html", error=4, problem=problem, email=email, cap_1=cap_1, cap_2=cap_2, user_id=user_id)
db.execute("INSERT INTO 'problems' (text) VALUES (:problem)", problem=problem)
return redirect("/")
return render_template("add.html", error=None, cap_1=cap_1, cap_2=cap_2, user_id=user_id)
# View one problem with comments
@app.route("/post/<int:id>", methods=["GET", "POST"])
def post(id):
message = request.form.get("message")
if request.method == "POST" and message:
user_id = session["user_id"]
sql = "INSERT INTO comments (text, post_id, user_id) VALUES (:message, :post_id, :user_id)"
db.execute(sql, message=message, post_id=id, user_id=user_id)
return redirect("/post/{}".format(id))
else:
problem = db.execute("SELECT * FROM problems WHERE id=:id", id=id)
comments = db.execute("SELECT * FROM comments WHERE post_id=:id", id=id)
print(session)
if session.get("user_id") is None:
username = None
else:
user = db.execute("SELECT username FROM users WHERE id=:id", id=session["user_id"])
username = user[0]["username"]
if len(comments) > 0:
for comment in comments:
commentator_name = db.execute("SELECT username FROM users WHERE id=:user_id", user_id=comment["user_id"])
comment["username"] = commentator_name[0]["username"]
com_count = len(comments)
else:
comments = None
com_count = 0
if request.method == "GET":
error = None
else:
error = 1
return render_template("post.html", problem=problem[0], comments=comments, com_count=com_count, username=username, error=error)
# Like a problem
@app.route("/like/<int:post_id>/<from_where>")
def like(post_id, from_where):
likes = db.execute("SELECT likes FROM problems WHERE id=:post_id", post_id=post_id)[0]["likes"]
db.execute("UPDATE problems SET likes = :likes WHERE id = :post_id", likes=likes + 1, post_id=post_id)
if from_where == "post":
return redirect("/post/{}".format(post_id))
elif from_where == "index":
return redirect("/")
else:
return redirect("/{}".format(from_where))
# Dislike a problem
@app.route("/dislike/<int:post_id>/<from_where>")
def dislike(post_id, from_where):
likes = db.execute("SELECT likes FROM problems WHERE id=:post_id", post_id=post_id)[0]["likes"]
db.execute("UPDATE problems SET likes = :likes WHERE id = :post_id", likes=likes - 1, post_id=post_id)
if from_where == "post":
return redirect("/post/{}".format(post_id))
elif from_where == "index":
return redirect("/")
else:
return redirect("/{}".format(from_where))
# Like a comment
@app.route("/com_like/<int:com_id>")
def com_like(com_id):
comment = db.execute("SELECT likes, post_id FROM comments WHERE id=:com_id", com_id=com_id)[0]
post_id = comment["post_id"]
likes = comment["likes"]
db.execute("UPDATE comments SET likes = :likes WHERE id = :com_id", likes=likes + 1, com_id=com_id)
return redirect("/post/{}".format(post_id))
# Dislike a comment
@app.route("/com_dislike/<int:com_id>")
def com_dislike(com_id):
comment = db.execute("SELECT likes, post_id FROM comments WHERE id=:com_id", com_id=com_id)[0]
post_id = comment["post_id"]
likes = comment["likes"]
db.execute("UPDATE comments SET likes = :likes WHERE id = :com_id", likes=likes - 1, com_id=com_id)
return redirect("/post/{}".format(post_id))
# Top section
@app.route("/top", defaults={'page': 1})
@app.route('/top/page/<int:page>')
def top(page):
print("page = {}".format(page))
if page <= 0:
return render_template("404.html")
row = PER_PAGE * (page - 1)
problems_count = db.execute("SELECT COUNT(*) AS length FROM problems")[0]["length"]
problems = db.execute("SELECT * FROM problems ORDER BY likes DESC LIMIT :row_number, :count", row_number=row, count=PER_PAGE)
if not problems and page != 1:
return render_template("404.html")
for problem in problems:
get_coms_count(problem)
pag = pagination.Pagination(page, PER_PAGE, problems_count)
pages_count = get_pages_count(problems_count)
return render_template("index.html", pagination=pag, problems=problems, pages_count=pages_count, current_page=page, from_where="top")
# Login to the website
@app.route("/login", methods=["GET", "POST"])
def login():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Get email
email = request.form.get("email")
# Ensure email was submitted
if not email:
return render_template("login.html", error=1)
# Ensure password was submitted
elif not request.form.get("password"):
return render_template("login.html", error=2, email=email)
# Query database for email
rows = db.execute("SELECT * FROM users WHERE email = :email", email=request.form.get("email"))
# Ensure email exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return render_template("login.html", error=3)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to main page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
return render_template("login.html", error=None)
# Logout from the website
@app.route("/logout")
def logout():
session.clear()
return redirect("/")
# Sign up on the website
@app.route("/signup", methods=["GET", "POST"])
def signup():
    """Register a new account and immediately log the user in."""
    session.clear()
    if request.method != "POST":
        return render_template("signup.html", error=None)
    email = request.form.get("email")
    password = request.form.get("password")
    username = request.form.get("username")
    confirmation = request.form.get("confirmation")
    # Presence checks, each with its own error code for the template.
    if not email:
        return render_template("signup.html", error=1, username=username, text="Please provide email")
    if not username:
        return render_template("signup.html", error=2, email=email, text="Please provide username")
    if not password:
        return render_template("signup.html", error=3, email=email, username=username, text="Please provide password")
    if not confirmation:
        return render_template("signup.html", error=4, email=email, username=username, text="Please provide password twice")
    # Uniqueness checks: username first, then email.
    if len(db.execute("SELECT * FROM users WHERE username = :username", username=username)) == 1:
        return render_template("signup.html", error=2, email=email, text="Username taken")
    if len(db.execute("SELECT * FROM users WHERE email = :email", email=email)) == 1:
        return render_template("signup.html", error=1, username=username, text="There's already an account with this email")
    if password != confirmation:
        return render_template("signup.html", error=4, username=username, email=email, text="Passwords don't match")
    # Store only the password hash; the INSERT returns the new row id.
    new_id = db.execute(
        "INSERT INTO users (username, email, hash) VALUES (:username, :email, :password_hash)",
        username=username, email=email, password_hash=generate_password_hash(password))
    session["user_id"] = new_id
    return redirect("/")
# Search a problem with particular words
@app.route("/search", methods=["GET"])
@app.route('/search/<int:page>')
def search(page=1):
    """Search problems whose text contains the query string.

    BUGFIX: the '/search/<int:page>' route passes a ``page`` keyword argument,
    but the function previously accepted no parameters, so that route always
    raised TypeError. ``page`` is now accepted (currently unused) with a
    default so both routes keep working.
    """
    subject = request.args.get("subject")
    if not subject:
        return render_template("search_failure.html", text="Empty search query")
    # Parameterized LIKE query (safe against injection); newest problems first.
    response = db.execute("SELECT * FROM problems WHERE text like :text1 ORDER BY id DESC", text1='%' + subject + '%')
    if response == []:
        return render_template("search_failure.html", text="There were no results matching the query")
    for problem in response:
        get_coms_count(problem)
    # BUGFIX: from_where was a plain string containing a literal '{subject}';
    # it must be an f-string so pagination links preserve the query.
    return render_template("index.html", problems=response, pages_count=1,
                           from_where=f"search?subject={subject}")
# Info about the website
@app.route("/about")
def about():
    """Show site-wide statistics (problem, user and comment totals)."""
    def _count(table):
        # Ask the database for COUNT(*) instead of fetching every row just
        # to len() it in Python (same style the `top` view already uses).
        # `table` is always one of the constants below, never user input.
        return db.execute("SELECT COUNT(*) AS length FROM {}".format(table))[0]["length"]
    return render_template("about.html",
                           problems_count=_count("problems"),
                           users_count=_count("users"),
                           comments_count=_count("comments"))
import json
from asyncy.hub.sdk.db.Service import Service
from playhouse.shortcuts import dict_to_model
class ConstServiceHub():
    """Serve a constant, pre-defined set of services.

    Unlike a live hub, the service map is frozen at construction time.
    """

    def __init__(self, services):
        # Mapping of alias (or "owner/name") -> service model.
        self.services = services

    @classmethod
    def from_json(cls, path):
        """Build a hub from a JSON file mapping names to service dicts."""
        with open(path, 'r') as f:
            raw = json.load(f)
        return cls({key: dict_to_model(Service, value)
                    for key, value in raw.items()})

    def get_all_service_names(self):
        """Return the names of every registered service."""
        return self.services.keys()

    def get(self, alias=None, owner=None, name=None):
        """Look up a service by alias, or by an owner/name pair."""
        key = alias if alias else f'{owner}/{name}'
        return self.services[key]
| 826 | 249 |
# -*- coding: utf-8 -*-
from QAutoLibrary.extension import TESTDATA
from selenium.webdriver.common.by import By
from QAutoLibrary.QAutoSelenium import *
from time import sleep
from pagemodel.ss_version import Ss_version
class Component_ss_version(CommonUtils):
    """Components common to the security server version view.

    Changelog:

    * 11.07.2017
        | Documentation updated
    """
    ss_version = Ss_version()

    def verify_version(self, text=u'Security Server version 6'):
        """Check that the version view contains the expected version text.

        :param text: String value for text
        """
        expected = text
        self.ss_version.verify_version_text(expected)
        print("Version text contains '{}'".format(expected))
| 721 | 212 |
import pytest
from django.db import connection, transaction, ProgrammingError
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.dummy.base import DatabaseWrapper as DummyDatabaseWrapper
from django.db.backends.postgresql.schema import (
DatabaseSchemaEditor as PostgresqlDatabaseSchemaEditor,
)
from baserow.contrib.database.db.schema import (
lenient_schema_editor,
PostgresqlLenientDatabaseSchemaEditor,
safe_django_schema_editor,
)
from baserow.contrib.database.table.models import Table
@pytest.mark.django_db
def test_lenient_schema_editor():
    """lenient_schema_editor must reject non-postgres connections, swap in
    the lenient editor class only inside the context, restore the original
    class afterwards, and forward its prepare/force arguments."""
    # A non-postgres (dummy) connection is rejected with ValueError.
    dummy = DummyDatabaseWrapper({})
    with pytest.raises(ValueError):
        with lenient_schema_editor(dummy):
            pass
    assert connection.SchemaEditorClass == PostgresqlDatabaseSchemaEditor
    with lenient_schema_editor(connection) as schema_editor:
        # Inside the context the lenient subclass is active.
        assert isinstance(schema_editor, PostgresqlLenientDatabaseSchemaEditor)
        assert isinstance(schema_editor, BaseDatabaseSchemaEditor)
        # Defaults: no prepare statements, no forced column alteration.
        assert schema_editor.alter_column_prepare_old_value == ""
        assert schema_editor.alter_column_prepare_new_value == ""
        assert not schema_editor.force_alter_column
        # The connection's editor class is swapped while inside the context.
        assert connection.SchemaEditorClass != PostgresqlDatabaseSchemaEditor
    # Leaving the context restores the original editor class.
    assert connection.SchemaEditorClass == PostgresqlDatabaseSchemaEditor
    with lenient_schema_editor(
        connection,
        "p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');",
        "p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');",
        True,
    ) as schema_editor:
        # Custom prepare statements and the force flag are passed through.
        assert schema_editor.alter_column_prepare_old_value == (
            "p_in = REGEXP_REPLACE(p_in, '', 'test', 'g');"
        )
        assert schema_editor.alter_column_prepare_new_value == (
            "p_in = REGEXP_REPLACE(p_in, 'test', '', 'g');"
        )
        assert schema_editor.force_alter_column
# Test provided as an example of how to trigger the django bug. However disabled from CI
# as it will break the connection!
@pytest.mark.django_db
@pytest.mark.slow
# You must add --runslow -s to pytest to run this test, you can do this in intellij by
# editing the run config for this test and adding --runslow -s to additional args.
def test_showing_how_djangos_schema_editor_is_broken(data_fixture):
    """Demonstrate that Django's stock schema editor leaks an open savepoint
    when its deferred SQL fails inside the editor's own atomic block."""
    cxn = transaction.get_connection()
    starting_savepoints = list(cxn.savepoint_ids)
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    other_table = data_fixture.create_database_table(database=database)
    table = Table.objects.create(database=database, order=0)
    # Setup an existing index which will collide with the one that we will make later
    # to ensure the `schema_editor.create_model` will fail in the deferred sql section.
    with connection.cursor() as cursor:
        cursor.execute(
            f"CREATE index {table.get_collision_safe_order_id_idx_name()} on "
            f'"database_table_{other_table.id}"("id", "order")'
        )
    cxn = transaction.get_connection()
    assert cxn.savepoint_ids == starting_savepoints
    # Create the table schema in the database database.
    with pytest.raises(
        ProgrammingError, match='relation "tbl_order_id_2_idx" already exists'
    ):
        with connection.schema_editor() as schema_editor:
            # Django only creates indexes when the model is managed.
            model = table.get_model(managed=True)
            schema_editor.create_model(model)
    # Due to the bug in django.db.backends.base.schema.BaseDatabaseSchemaEditor.__exit__
    # we are still in an atomic block even though we weren't in one before!!
    cxn = transaction.get_connection()
    assert cxn.savepoint_ids[0] == starting_savepoints[0]
    # There is still an inner atomic transaction that has not been rolled back!
    assert len(cxn.savepoint_ids) == 2
@pytest.mark.django_db
def test_safe_schema_editor(data_fixture):
    """safe_django_schema_editor must roll its savepoint back cleanly when
    create_model's deferred SQL fails (unlike the stock editor above)."""
    cxn = transaction.get_connection()
    starting_savepoints = list(cxn.savepoint_ids)
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    other_table = data_fixture.create_database_table(database=database)
    table = Table.objects.create(database=database, order=0)
    # Setup an existing index which will collide with the one that we will make later
    # to ensure the `schema_editor.create_model` will fail in the deferred sql section.
    with connection.cursor() as cursor:
        cursor.execute(
            f"CREATE index {table.get_collision_safe_order_id_idx_name()} on "
            f'"database_table_{other_table.id}"("id", "order")'
        )
    cxn = transaction.get_connection()
    assert cxn.savepoint_ids == starting_savepoints
    # Create the table schema in the database database.
    with pytest.raises(
        ProgrammingError, match=f'relation "tbl_order_id_{table.id}_idx" already exists'
    ):
        with safe_django_schema_editor() as schema_editor:
            # Django only creates indexes when the model is managed.
            model = table.get_model(managed=True)
            schema_editor.create_model(model)
    # Assert because we are using the safe schema editor the transaction was rolled back
    # successfully!
    cxn = transaction.get_connection()
    assert cxn.savepoint_ids == starting_savepoints
@pytest.mark.django_db
def test_lenient_schema_editor_is_also_safe(data_fixture):
    """The lenient schema editor must share the safe editor's rollback
    behavior: a failure in deferred SQL leaves no dangling savepoint."""
    cxn = transaction.get_connection()
    starting_savepoints = list(cxn.savepoint_ids)
    user = data_fixture.create_user()
    database = data_fixture.create_database_application(user=user)
    other_table = data_fixture.create_database_table(database=database)
    table = Table.objects.create(database=database, order=0)
    # Setup an existing index which will collide with the one that we will make later
    # to ensure the `schema_editor.create_model` will fail in the deferred sql section.
    with connection.cursor() as cursor:
        cursor.execute(
            f"CREATE index {table.get_collision_safe_order_id_idx_name()} on "
            f'"database_table_{other_table.id}"("id", "order")'
        )
    cxn = transaction.get_connection()
    assert cxn.savepoint_ids == starting_savepoints
    # Create the table schema in the database database.
    with pytest.raises(
        ProgrammingError, match=f'relation "tbl_order_id_{table.id}_idx" already exists'
    ):
        with lenient_schema_editor(
            connection,
            None,
            None,
            False,
        ) as schema_editor:
            # Django only creates indexes when the model is managed.
            model = table.get_model(managed=True)
            schema_editor.create_model(model)
    # Assert because we are using the safe schema editor the transaction was rolled back
    # successfully!
    cxn = transaction.get_connection()
    assert cxn.savepoint_ids == starting_savepoints
| 6,938 | 1,982 |
#!/usr/bin/python
import numpy as np
import math
import cv2
import config as cf
class Line():
    """State and fitting logic for one lane line in a road image.

    Holds the sliding-window pixel search, the polynomial fits, a
    confidence score and the derived curvature for a single lane boundary.
    NOTE(review): source indentation was reconstructed; block nesting in the
    polyFit*/updatePolyFit* methods follows the parallel methods' structure.
    """

    def __init__(self, side, img_shape,
                 polyDelta=cf.POLYFIT_MARGIN,
                 nWindow=cf.N_WINDOWS, windowMargin=cf.WINDOW_MARGIN,
                 reThresh=cf.RECENTER_WINDOW_THRESH):
        # Side identifier (which lane boundary this instance tracks).
        self.side = side
        # Shape of image; x = width, y = height.
        self.img_shape, self.x, self.y = img_shape, img_shape[1], img_shape[0]
        # Sliding-window parameters.
        self.nWindow = nWindow
        self.w_height = self.y // self.nWindow
        # BUGFIX: windowMargin was previously ignored in favor of its own
        # default cf.WINDOW_MARGIN, making the parameter impossible to override.
        self.w_width = windowMargin
        self.reBaseThresh = reThresh
        # Detection status in the last iteration.
        self.isDetected = False
        # Confidence of the line (pixel-count based).
        self.maxConfidence = 0.0
        self.confidence = 0.0
        # Current pixel base position from the histogram.
        self.pixelBase = None
        # Distance of vehicle from center line.
        self.lineBase = None
        # Curvature of the last detection.
        self.curvature = 0.0
        # Detected pixel indices.
        self.x_inds = None
        self.y_inds = None
        # Mask width for polyfit.
        self.polyDelta = polyDelta
        # BUGFIX: self.maskDelta is read by polyFitLeft/Right and the
        # updatePolyFit* methods but was never initialized (AttributeError at
        # runtime). Assumed equal to the polyfit mask width -- TODO confirm.
        self.maskDelta = polyDelta
        # Difference between last and new fit coefficients.
        self.diffFit = np.array([0, 0, 0], dtype='float')
        # Poly coefficients of the most recent fit.
        self.currentFit = None
        # Poly coefficients of the best (smoothed) fit.
        self.bestFit = None
        # Radius of curvature.
        self.roc = None
        # Mask for the lane.
        self.lanemask = np.zeros(self.img_shape, dtype=np.uint8)
        # Precomputed y samples (one per row) and their fitted x values.
        self.linspace_y = np.linspace(0, self.y - 1, self.y)
        self.linspace_x = np.zeros_like(self.linspace_y)
        # Temporary variables:
        # x of the current fitted line.
        self.currentX = None
        # Polygon outline of the line mask, used for drawing.
        self.linePoly = None

    def setBase(self, base):
        """Record the histogram-derived base x position of the line."""
        self.pixelBase = base

    def findLinesPoints(self, base, nonzero_x, nonzero_y, outMask, nWindow=cf.N_WINDOWS):
        """Collect lane pixels with a sliding-window search starting at `base`.

        Draws every search window onto `outMask` (returned) and stores the
        matched pixel coordinates in self.x_inds / self.y_inds.
        """
        lane_inds = []
        cur_base = base
        for window in range(nWindow):
            # Identify window boundaries in x and y (and right and left).
            win_y_low = self.y - (window + 1) * self.w_height
            win_y_high = self.y - window * self.w_height
            win_x_low = cur_base - self.w_width
            win_x_high = cur_base + self.w_width
            good_inds = ((nonzero_y >= win_y_low) & (nonzero_y < win_y_high) &
                         (nonzero_x >= win_x_low) & (nonzero_x < win_x_high)).nonzero()[0]
            lane_inds.append(good_inds)
            cv2.rectangle(outMask, (win_x_low, win_y_low), (win_x_high, win_y_high), (0, 255, 0), 2)
            # If found > minpix pixels, recenter next window on their mean position.
            if len(good_inds) > self.reBaseThresh:
                # BUGFIX: np.int was removed in NumPy >= 1.24; use builtin int.
                cur_base = int(np.mean(nonzero_x[good_inds]))
        try:
            lane_inds = np.concatenate(lane_inds)
        except ValueError:
            # Avoids an error if the above is not implemented fully.
            pass
        xval = nonzero_x[lane_inds]
        yval = nonzero_y[lane_inds]
        self.x_inds = np.array(xval)
        self.y_inds = np.array(yval)
        return outMask

    def findLinesPointsAroundPoly(self, nonzero_x, nonzero_y, margin=cf.POLY_SEARCH_MARGIN):
        """Collect lane pixels lying within `margin` of the best-fit curve."""
        lane_inds = ((nonzero_x > (self.bestFit[0] * (nonzero_y ** 2) +
                                   self.bestFit[1] * nonzero_y +
                                   self.bestFit[2] - margin)) &
                     (nonzero_x < (self.bestFit[0] * (nonzero_y ** 2) +
                                   self.bestFit[1] * nonzero_y +
                                   self.bestFit[2] + margin)))
        xval = nonzero_x[lane_inds]
        yval = nonzero_y[lane_inds]
        self.x_inds = np.array(xval)
        self.y_inds = np.array(yval)

    # use existing lane on the right to create adjacent lane lines
    def polyFitLeft(self, curImg, rightLane):
        """Create this line by extrapolating from the lane to the right."""
        diff = np.polysub(rightLane.lines[rightLane.left].currentFit,
                          rightLane.lines[rightLane.right].currentFit)
        # BUGFIX: 'rightlane' (lowercase L) was a NameError; must be rightLane.
        self.currentFit = np.polyadd(rightLane.lines[rightLane.left].currentFit, diff)
        poly = np.poly1d(self.currentFit)
        self.y_inds = rightLane.lines[rightLane.left].y_inds
        self.currentX = poly(self.y_inds)
        self.x_inds = self.currentX
        if len(self.y_inds) > cf.LINE_CREATE_THRESH:
            self.maxConfidence = len(self.y_inds) * 2
            self.confidence = 0.5
            self.isDetected = True
            # create mask
            xy1 = np.column_stack(
                (self.currentX + self.maskDelta, self.y_inds)).astype(np.int32)
            xy2 = np.column_stack(
                (self.currentX - self.maskDelta, self.y_inds)).astype(np.int32)
            self.linePoly = np.concatenate((xy1, xy2[::-1]), axis=0)
            self.lanemask = np.zeros_like(self.lanemask)
            cv2.fillConvexPoly(self.lanemask, self.linePoly, 64)
            # add bottom point
            self.y_inds = np.append(self.y_inds, cf.IMG_HEIGHT - 1)
            self.x_inds = poly(self.y_inds)
            self.XYPolyLine = np.column_stack((self.x_inds, self.y_inds)).astype(np.int32)
            self.bestFit = self.currentFit
            x = poly([cf.IMG_HEIGHT - 1])
            self.pixelBase = x[0]

    # use existing lane on the left to create adjacent lane lines
    def polyFitRight(self, curImg, leftLane):
        """Create this line by extrapolating from the lane to the left."""
        diff = np.polysub(leftLane.lines[leftLane.left].currentFit,
                          leftLane.lines[leftLane.right].currentFit)
        # BUGFIX: 'leftLane.lines[right]' was a NameError; indexed with
        # leftLane.right to match updatePolyFitRight -- TODO confirm intent.
        self.currentFit = np.polyadd(leftLane.lines[leftLane.right].currentFit, diff)
        poly = np.poly1d(self.currentFit)
        self.y_inds = leftLane.lines[leftLane.left].y_inds
        self.currentX = poly(self.y_inds)
        self.x_inds = self.currentX
        if len(self.y_inds) > cf.LINE_CREATE_THRESH:
            self.maxConfidence = len(self.y_inds) * 2
            self.confidence = 0.5
            self.isDetected = True
            # create mask
            xy1 = np.column_stack(
                (self.currentX + self.maskDelta, self.y_inds)).astype(np.int32)
            xy2 = np.column_stack(
                (self.currentX - self.maskDelta, self.y_inds)).astype(np.int32)
            self.linePoly = np.concatenate((xy1, xy2[::-1]), axis=0)
            self.lanemask = np.zeros_like(self.lanemask)
            cv2.fillConvexPoly(self.lanemask, self.linePoly, 64)
            # add bottom point
            self.y_inds = np.append(self.y_inds, cf.IMG_HEIGHT - 1)
            self.x_inds = poly(self.y_inds)
            self.XYPolyLine = np.column_stack((self.x_inds, self.y_inds)).astype(np.int32)
            self.bestFit = self.currentFit
            x = poly([cf.IMG_HEIGHT - 1])
            self.pixelBase = x[0]

    # use existing lane one the right to update adjacent lane lines
    def updatePolyFitLeft(self, curImg, rightLane):
        """Refresh this line from the lane to the right's current fit."""
        diff = np.polysub(rightLane.lines[rightLane.left].currentFit,
                          rightLane.lines[rightLane.right].currentFit)
        self.currentFit = np.polyadd(
            rightLane.lines[rightLane.left].currentFit, diff)
        poly = np.poly1d(self.currentFit)
        self.y_inds = rightLane.lines[rightLane.left].y_inds
        self.currentX = poly(self.y_inds)
        self.x_inds = self.currentX
        if len(self.y_inds) > cf.LINE_UPDATE_THRESH:
            self.confidence = len(self.y_inds) / self.maxConfidence
            if self.confidence >= 0.5:
                self.isDetected = True
                if self.confidence > 1:
                    self.confidence = 1
            else:
                self.isDetected = False
        # create mask
        xy1 = np.column_stack(
            (self.currentX + self.maskDelta, self.y_inds)).astype(np.int32)
        xy2 = np.column_stack(
            (self.currentX - self.maskDelta, self.y_inds)).astype(np.int32)
        self.linePoly = np.concatenate((xy1, xy2[::-1]), axis=0)
        self.lanemask = np.zeros_like(self.lanemask)
        cv2.fillConvexPoly(self.lanemask, self.linePoly, 64)
        # add bottom point
        y_inds = np.append(self.y_inds, cf.IMG_HEIGHT - 1)
        x_inds = poly(y_inds)
        self.XYPolyLine = np.column_stack((x_inds, y_inds)).astype(np.int32)
        self.bestFit = self.currentFit

    # use existing lane on the left to update adjacent lane lines
    def updatePolyFitRight(self, curImg, leftLane):
        """Refresh this line from the lane to the left's current fit."""
        diff = np.polysub(leftLane.lines[leftLane.left].currentFit,
                          leftLane.lines[leftLane.right].currentFit)
        self.currentFit = np.polyadd(
            leftLane.lines[leftLane.right].currentFit, diff)
        poly = np.poly1d(self.currentFit)
        self.y_inds = leftLane.lines[leftLane.right].y_inds
        self.currentX = poly(self.y_inds)
        self.x_inds = self.currentX
        if len(self.y_inds) > cf.LINE_UPDATE_THRESH:
            self.confidence = len(self.y_inds) / self.maxConfidence
            if self.confidence >= 0.5:
                self.isDetected = True
                if self.confidence > 1:
                    self.confidence = 1
            else:
                self.isDetected = False
        # create mask
        xy1 = np.column_stack(
            (self.currentX + self.maskDelta, self.y_inds)).astype(np.int32)
        xy2 = np.column_stack(
            (self.currentX - self.maskDelta, self.y_inds)).astype(np.int32)
        self.linePoly = np.concatenate((xy1, xy2[::-1]), axis=0)
        self.lanemask = np.zeros_like(self.lanemask)
        cv2.fillConvexPoly(self.lanemask, self.linePoly, 64)
        # add bottom point
        y_inds = np.append(self.y_inds, cf.IMG_HEIGHT - 1)
        x_inds = poly(y_inds)
        self.XYPolyLine = np.column_stack((x_inds, y_inds)).astype(np.int32)
        self.bestFit = self.currentFit

    def fitPolyPrior(self, deg=2):
        """Fit a fresh degree-`deg` polynomial to the detected pixels."""
        if len(self.y_inds) > cf.LINE_CREATE_THRESH:
            self.confidence = 0.5
            self.maxConfidence = len(self.y_inds) * 2
            self.isDetected = True
            self.currentFit = np.polyfit(self.y_inds, self.x_inds, deg)
            poly = np.poly1d(self.currentFit)
            self.y_inds = self.y_inds[::-1]
            self.currentX = poly(self.y_inds)
            self.bestFit = self.currentFit
            self.linspace_x = (self.bestFit[0] * self.linspace_y ** 2 +
                               self.bestFit[1] * self.linspace_y + self.bestFit[2])
        else:
            self.confidence = 0.0
            self.isDetected = False

    def fitPolySecond(self, deg=2):
        """Refit the polynomial and blend it with the previous best fit,
        rejecting fits that jump too far from the last one."""
        if len(self.y_inds) > cf.LINE_UPDATE_THRESH:
            self.currentFit = np.polyfit(self.y_inds, self.x_inds, deg)
            self.diffFit = self.currentFit - self.bestFit
            if abs(sum(self.diffFit)) < 20:
                poly = np.poly1d(self.currentFit)
                x = poly([cf.IMG_HEIGHT - 1])
                self.y_inds = np.append(self.y_inds, cf.IMG_HEIGHT - 1)
                self.x_inds = np.append(self.x_inds, x[0])
                # BUGFIX: was abs(self.pixelBase - x[0] > 50) -- abs() of a
                # boolean, so the jump test never worked as intended.
                if abs(self.pixelBase - x[0]) > 50:
                    self.confidence = 0.0
                    self.isDetected = False
                    return
                self.pixelBase = x[0]
                self.currentX = poly(self.y_inds)
                # Smooth: average the new fit with the previous best fit.
                self.bestFit = (self.currentFit + self.bestFit) / 2
                self.linspace_x = (self.bestFit[0] * self.linspace_y ** 2 +
                                   self.bestFit[1] * self.linspace_y + self.bestFit[2])
                self.confidence = len(self.y_inds) / self.maxConfidence
                if self.confidence >= 0.5:
                    self.isDetected = True
                    if self.confidence > 1:
                        self.confidence = 1
                else:
                    self.confidence = 0.0
                    self.isDetected = False
            else:
                self.confidence = 0.0
                self.isDetected = False
        else:
            self.confidence = 0.0
            self.isDetected = False

    def measureCurvature(self):
        """Return the radius of curvature (meters) at 3/4 of image height,
        or 0 when the line is not currently detected."""
        y = int(self.y * 3 / 4)
        if self.isDetected:
            # Refit in world units (meters per pixel from config).
            fitCurvature = np.polyfit(self.y_inds * cf.YM_PER_PIX, self.currentX * cf.XM_PER_PIX, 2)
            self.curvature = ((1 + (2 * fitCurvature[0] * y * cf.YM_PER_PIX + fitCurvature[1]) ** 2) ** 1.5) / np.absolute(2 * fitCurvature[0])
        else:
            self.curvature = 0
        return self.curvature
from PyObjCTools.TestSupport import *
from Quartz.PDFKit import *
class TestPDFAnnotation (TestCase):
    def testMethods(self):
        """Verify BOOL bridging for PDFAnnotation's display/print API."""
        for getter in (PDFAnnotation.shouldDisplay,
                       PDFAnnotation.shouldPrint,
                       PDFAnnotation.hasAppearanceStream):
            self.assertResultIsBOOL(getter)
        for setter in (PDFAnnotation.setShouldDisplay_,
                       PDFAnnotation.setShouldPrint_):
            self.assertArgIsBOOL(setter, 0)
if __name__ == "__main__":
    # Run the PyObjC test suite when executed directly.
    main()
| 485 | 142 |
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import zipfile
# Proxy endpoint and credentials baked into the generated Chrome extension.
ip = '127.0.0.1'
port = 9743
username = 'foo'
password = 'bar'
# Manifest (MV2) requesting the proxy/webRequest permissions the helper needs.
manifest_json = """
{
    "version": "1.0.0",
    "manifest_version": 2,
    "name": "Chrome Proxy",
    "permissions": [
        "proxy",
        "tabs",
        "unlimitedStorage",
        "storage",
        "<all_urls>",
        "webRequest",
        "webRequestBlocking"
    ],
    "background": {
        "scripts": ["background.js"]
    }
}
"""
# Background script: set the fixed proxy and answer its auth challenges.
background_js = """
var config = {
        mode: "fixed_servers",
        rules: {
        singleProxy: {
            scheme: "http",
            host: "%(ip)s",
            port: %(port)s
        }
    }
}
chrome.proxy.settings.set({value: config, scope: "regular"}, function() {});
function callbackFn(details) {
    return {
        authCredentials: {
            username: "%(username)s",
            password: "%(password)s"
        }
    }
}
chrome.webRequest.onAuthRequired.addListener(
    callbackFn,
    {urls: ["<all_urls>"]},
    ['blocking']
)
""" % {'ip': ip, 'port': port, 'username': username, 'password': password}
plugin_file = 'proxy_auth_plugin.zip'
# Package the two files into a loadable extension archive.
with zipfile.ZipFile(plugin_file, 'w') as zp:
    zp.writestr("manifest.json", manifest_json)
    zp.writestr("background.js", background_js)
chrome_options = Options()
chrome_options.add_argument("--start-maximized")
chrome_options.add_extension(plugin_file)
# BUGFIX: the `chrome_options=` keyword is deprecated and removed in
# Selenium 4; `options=` is the supported parameter.
browser = webdriver.Chrome(options=chrome_options)
browser.get('http://httpbin.org/get')
| 1,585 | 507 |
'''
Created on 2012-10-23
@author: hzzhoushaoyu
'''
import webob.exc
import json
from umbrella.common import wsgi
import umbrella.common.log as logging
from umbrella.common import cfg
import umbrella.context
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
# Options controlling how request contexts are built from auth headers.
context_opts = [
    cfg.BoolOpt('owner_is_tenant', default=True),
    cfg.StrOpt('admin_role', default='admin'),
    cfg.BoolOpt('allow_anonymous_access', default=False),
]
CONF.register_opts(context_opts)
class ContextMiddleware(wsgi.Middleware):
    """Attach a RequestContext to each request and echo its id on responses."""

    def process_response(self, resp):
        """Stamp the response with the originating request id, if present.

        :param resp: webob response whose request may carry a context
        :returns: the (possibly annotated) response
        """
        try:
            request_id = resp.request.context.request_id
            # BUGFIX: message typo ("responsing" -> "responding").
            LOG.debug(_("req-%s is responding") % request_id)
        except AttributeError:
            # No context on the request (e.g. an error before middleware ran).
            LOG.warn(_('Unable to retrieve request id from context'))
        else:
            resp.headers['x-openstack-request-id'] = 'req-%s' % request_id
        return resp

    def process_request(self, req):
        """Build a bare RequestContext, carrying the auth token if supplied."""
        token = req.headers.get('X-Auth-Token')
        kwargs = {'auth_tok': token} if token is not None else {}
        req.context = umbrella.context.RequestContext(**kwargs)
class AuthContextMiddleware(ContextMiddleware):
    def process_request(self, req):
        """Convert authentication information into a request context.

        Builds a context from the available authentication headers and
        stores it on the request's ``context`` attribute.

        :param req: wsgi request object that will be given the context object
        :raises webob.exc.HTTPUnauthorized: when the X-Identity-Status header
                                            is not 'Confirmed' and anonymous
                                            access is disallowed
        """
        if req.headers.get('X-Identity-Status') == 'Confirmed':
            req.context = self._get_authenticated_context(req)
        elif req.headers.get('X-Auth-Token') is not None:
            req.context = self._get_auth_token_context(req)
        elif CONF.allow_anonymous_access:
            req.context = self._get_anonymous_context()
        else:
            raise webob.exc.HTTPUnauthorized()

    def _get_anonymous_context(self):
        # Read-only context with no identity attached.
        return umbrella.context.RequestContext(
            user=None, tenant=None, roles=[], is_admin=False, read_only=True)

    def _get_auth_token_context(self, req):
        # Token present but identity not confirmed: carry the token only.
        return umbrella.context.RequestContext(
            auth_tok=req.headers.get('X-Auth-Token'))

    def _get_authenticated_context(self, req):
        """Build a full context from keystone-confirmed identity headers."""
        # X-Roles is a comma-separated string; normalize to lowercase names.
        roles = [role.strip().lower()
                 for role in req.headers.get('X-Roles', '').split(',')]
        # X-Storage-Token is a deprecated fallback for X-Auth-Token.
        deprecated_token = req.headers.get('X-Storage-Token')
        catalog_header = req.headers.get('X-Service-Catalog')
        service_catalog = None
        if catalog_header is not None:
            try:
                service_catalog = json.loads(catalog_header)
            except ValueError:
                raise webob.exc.HTTPInternalServerError(
                    _('Invalid service catalog json.'))
        return umbrella.context.RequestContext(
            user=req.headers.get('X-User-Id'),
            tenant=req.headers.get('X-Tenant-Id'),
            roles=roles,
            is_admin=CONF.admin_role.strip().lower() in roles,
            auth_tok=req.headers.get('X-Auth-Token', deprecated_token),
            owner_is_tenant=CONF.owner_is_tenant,
            service_catalog=service_catalog,
        )
| 3,925 | 1,133 |
# coding: utf-8
from __future__ import absolute_import
from ibutsu_server import util
from ibutsu_server.models.base_model_ import Model
class Group(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, id=None, name=None):
        """Group - a model defined in OpenAPI

        :param id: The id of this Group.
        :type id: str
        :param name: The name of this Group.
        :type name: str
        """
        # OpenAPI type/attribute metadata used by the serializer.
        self.openapi_types = {"id": str, "name": str}
        self.attribute_map = {"id": "id", "name": "name"}
        self._id = id
        self._name = name

    @classmethod
    def from_dict(cls, dikt) -> "Group":
        """Deserialize a plain dict into a Group model.

        :param dikt: A dict.
        :type: dict
        :return: The Group of this Group.
        :rtype: Group
        """
        return util.deserialize_model(dikt, cls)

    @property
    def id(self):
        """Gets the id of this Group.

        Unique ID of the project

        :return: The id of this Group.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this Group.

        Unique ID of the project

        :param id: The id of this Group.
        :type id: str
        """
        self._id = id

    @property
    def name(self):
        """Gets the name of this Group.

        The name of the group

        :return: The name of this Group.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this Group.

        The name of the group

        :param name: The name of this Group.
        :type name: str
        """
        self._name = name
| 1,813 | 557 |
# x_7_2
#
#
import re
pattern = 'い'
# Read a word and replace every hiragana 'い' with 'レヽ'.
# BUGFIX: the input was previously bound to the name `str`,
# shadowing the builtin type.
word = input('ひらがなの「い」のつく言葉を入力してください:')
print(re.sub(pattern, 'レヽ', word))
| 112 | 72 |
from clipper_admin import ClipperConnection, DockerContainerManager
from clipper_admin.deployers import keras as keras_deployer
import keras
clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.connect()  # if cluster exists

# Toy model: f(x) = x * x, built as a Keras graph.
inpt = keras.layers.Input(shape=(1,))
out = keras.layers.multiply([inpt, inpt])
model = keras.models.Model(inputs=inpt, outputs=out)
# docker build -f dockerfiles/KerasDockerfile -t keras-container .

def predict(model, inputs):
    """Run the model on each input individually and collect the outputs."""
    return [model.predict(x) for x in inputs]

# Best-effort cleanup of a previous deployment.
# BUGFIX: narrowed from bare `except:` so KeyboardInterrupt/SystemExit
# are no longer swallowed; the best-effort behavior is preserved.
try:
    clipper_conn.delete_application(name="keras-pow")
except Exception:
    pass

clipper_conn.register_application(name="keras-pow", input_type="ints", default_output="-1.0", slo_micros=1000000)

try:
    clipper_conn.stop_models('pow')
except Exception:
    pass

keras_deployer.deploy_keras_model(clipper_conn=clipper_conn, name="pow", version="1", input_type="ints",
                                  func=predict,
                                  model_path_or_object=model,
                                  base_image='keras-container')

clipper_conn.link_model_to_app(app_name="keras-pow", model_name="pow")
| 1,172 | 392 |
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from utils.io import get_host_config
from utils.params import MessageType, Mode, Topology
class CommandUI(QMainWindow):
"""控制台主界面。"""
def __init__(self) -> None:
super().__init__()
self.__mode = Mode.UNICAST
self.__src = ""
self.__dst = ""
self.__msgtype = MessageType.TEXT
self.__text = ""
self.__filepath = ""
self.__hosts = get_host_config()
self.__init_ui()
    def __init_ui(self):
        """Create and lay out all widgets of the console window."""
        # Window appearance.
        self.setFixedSize(300, 200)
        self.setWindowTitle(" ")
        self.setFont(QFont("Microsoft YaHei UI", pointSize=11))
        # Center the window on the screen.
        screen = QDesktopWidget().screenGeometry()
        size = self.frameGeometry()
        size.moveCenter(screen.center())
        self.move(size.topLeft())
        # Layout containers.
        self.__central = QWidget()
        self.setCentralWidget(self.__central)
        self.__Hwidget_1 = QWidget(self.__central)
        self.__Hwidget_1.setGeometry(QRect(140, 0, 150, 40))
        self.__Hlayout_1 = QHBoxLayout(self.__Hwidget_1)
        self.__Hlayout_1.setContentsMargins(0, 0, 0, 0)
        self.__Hwidget_2 = QWidget(self.__central)
        self.__Hwidget_2.setGeometry(QRect(10, 40, 280, 40))
        self.__Hlayout_2 = QHBoxLayout(self.__Hwidget_2)
        self.__Hlayout_2.setContentsMargins(0, 0, 0, 0)
        self.__Vwidget = QWidget(self.__central)
        self.__Vwidget.setGeometry(QRect(10, 80, 60, 80))
        self.__Vlayout = QVBoxLayout(self.__Vwidget)
        self.__Vlayout.setContentsMargins(0, 0, 0, 0)
        # Title label.
        self.__title = QLabel(self.__central)
        self.__title.setGeometry(QRect(10, 0, 130, 40))
        self.__title.setFont(QFont("Microsoft YaHei UI", pointSize=12, weight=75))
        self.__title.setText("💻 控制台")
        # Unicast radio button.
        self.__unicast_radio = QRadioButton(self.__Hwidget_1)
        self.__unicast_radio.setText("单播")
        self.__unicast_radio.setChecked(True)
        self.__unicast_radio.clicked.connect(self.__onclick_unicast_radio)
        # Broadcast radio button.
        self.__broadcast_radio = QRadioButton(self.__Hwidget_1)
        self.__broadcast_radio.setText("广播")
        self.__broadcast_radio.clicked.connect(self.__onclick_broadcast_radio)
        # Source label.
        self.__src_label = QLabel(self.__Hwidget_2)
        self.__src_label.setAlignment(Qt.AlignCenter)
        self.__src_label.setText("源")
        # Source combo box.
        self.__src_combo = QComboBox(self.__Hwidget_2)
        self.__src_combo.addItems(self.__hosts)
        self.__src_combo.setCurrentIndex(-1)
        self.__src_combo.activated.connect(self.__onactivate_src_combo)
        # Destination label.
        self.__dst_label = QLabel(self.__Hwidget_2)
        self.__dst_label.setAlignment(Qt.AlignCenter)
        self.__dst_label.setText("目标")
        # Destination combo box.
        self.__dst_combo = QComboBox(self.__Hwidget_2)
        self.__dst_combo.addItems(self.__hosts)
        self.__dst_combo.setCurrentIndex(-1)
        self.__dst_combo.activated.connect(self.__onactivate_dst_combo)
        # Text-message radio button.
        self.__text_radio = QRadioButton(self.__Vwidget)
        self.__text_radio.setText("文本")
        self.__text_radio.setChecked(True)
        self.__text_radio.clicked.connect(self.__onclick_text_radio)
        # Text input box.
        self.__text_edit = QLineEdit(self.__central)
        self.__text_edit.setGeometry(QRect(80, 85, 210, 30))
        self.__text_edit.textChanged.connect(self.__onedit_text_edit)
        # File (image) radio button.
        self.__file_radio = QRadioButton(self.__Vwidget)
        self.__file_radio.setText("图片")
        self.__file_radio.clicked.connect(self.__onclick_file_radio)
        # File chooser button.
        self.__file_btn = QPushButton(self.__central)
        self.__file_btn.setGeometry(QRect(80, 125, 210, 30))
        self.__file_btn.setText("选择文件")
        self.__file_btn.clicked.connect(self.__onclick_file_btn)
        # Send button.
        self.__send_btn = QPushButton(self.__central)
        self.__send_btn.setGeometry(QRect(10, 160, 280, 35))
        self.__send_btn.setText("发送")
        self.__send_btn.clicked.connect(self._onclick_send_btn)
        # Add the widgets into their layouts.
        self.__Hlayout_1.addWidget(self.__unicast_radio)
        self.__Hlayout_1.addWidget(self.__broadcast_radio)
        self.__Hlayout_2.addWidget(self.__src_label)
        self.__Hlayout_2.addWidget(self.__src_combo)
        self.__Hlayout_2.addWidget(self.__dst_label)
        self.__Hlayout_2.addWidget(self.__dst_combo)
        self.__Vlayout.addWidget(self.__text_radio)
        self.__Vlayout.addWidget(self.__file_radio)
def __onclick_unicast_radio(self) -> None:
    """Handle a click on the unicast radio button: switch mode and re-enable the destination picker."""
    self.__mode = Mode.UNICAST
    combo = self.__dst_combo
    if not combo.isEnabled():
        # Unicast needs an explicit destination, so the picker must be usable.
        combo.setEnabled(True)
def __onclick_broadcast_radio(self) -> None:
    """Handle a click on the broadcast radio button: switch mode and disable the destination picker."""
    self.__mode = Mode.BROADCAST
    combo = self.__dst_combo
    if combo.isEnabled():
        # Broadcast goes to everyone; a specific destination makes no sense.
        combo.setEnabled(False)
def __onactivate_src_combo(self) -> None:
    """Handle activation of the source combo box: remember the chosen source."""
    chosen = self.__src_combo.currentText()
    self.__src = chosen
def __onactivate_dst_combo(self) -> None:
    """Handle activation of the destination combo box: remember the chosen destination."""
    chosen = self.__dst_combo.currentText()
    self.__dst = chosen
def __onclick_text_radio(self) -> None:
    """Handle a click on the text radio button: the outgoing message will be text."""
    self.__msgtype = MessageType.TEXT
def __onclick_file_radio(self) -> None:
    """Handle a click on the file radio button: the outgoing message will be a file."""
    self.__msgtype = MessageType.FILE
def __onedit_text_edit(self) -> None:
    """Handle edits to the text box: capture the text and force text mode."""
    self.__text = self.__text_edit.text()
    # Typing implies a text message, so make the radio selection follow along.
    radio = self.__text_radio
    if not radio.isChecked():
        radio.setChecked(True)
    self.__msgtype = MessageType.TEXT
def __onclick_file_btn(self) -> None:
    """Handle a click on the file-chooser button: pick an image and switch to file mode."""
    # QFileDialog returns (path, selected_filter); forward slashes on all platforms.
    path, _selected_filter = QFileDialog.getOpenFileName(
        self, "打开", "", "Image files (*.jpg *.png)"
    )
    basename = path.split("/")[-1]
    if not basename:
        # Dialog was cancelled: keep the previous selection untouched.
        return
    self.__filepath = path
    self.__file_btn.setText(basename)
    self.__file_radio.setChecked(True)
    self.__msgtype = MessageType.FILE
def __is_valid(self) -> bool:
    """Check that the current inputs form a sendable request.

    Pops an error dialog describing the first problem found.

    Returns:
        True when every input is usable, False otherwise.
    """
    if not self.__mode:
        CommandUI.__raise_critical("请选择发送模式!")
        return False
    if self.__src_combo.currentIndex() == -1:
        CommandUI.__raise_critical("请选择源设备号!")
        return False
    if self.__mode == Mode.UNICAST and self.__dst_combo.currentIndex() == -1:
        CommandUI.__raise_critical("请选择目标设备号!")
        return False
    if (
        self.__mode == Mode.UNICAST
        and self.__src_combo.currentText() == self.__dst_combo.currentText()
    ):
        CommandUI.__raise_critical("源与目标不能相同!")
        return False
    if not self.__msgtype:
        CommandUI.__raise_critical("请选择消息类型!")
        return False
    if self.__msgtype == MessageType.TEXT and not self.__text:
        CommandUI.__raise_critical("请输入文本!")
        return False
    if self.__msgtype == MessageType.FILE and not self.__filepath:
        CommandUI.__raise_critical("请选择文件!")
        return False
    return True
def _onclick_send_btn(self) -> None:
    """Handle a click on the send button: validate the form, then stash the request data."""
    if not self.__is_valid():
        return
    # Unicast targets a specific port; broadcast uses the well-known broadcast port.
    if self.__mode == Mode.UNICAST:
        dst = f"1{self.__dst}300"
    else:
        dst = Topology.BROADCAST_PORT
    self._user_data = {
        "src": f"1{self.__src}300",
        "dst": dst,
        "msgtype": self.__msgtype,
        "text": self.__text,
        "file": self.__filepath,
    }
    print(self._user_data)
@staticmethod
def __raise_critical(message: str):
    """Pop up a modal critical-error dialog.

    Args:
        message: error text shown to the user.
    """
    dialog = QMessageBox(QMessageBox.Critical, "错误", message)
    dialog.addButton("确定", QMessageBox.ButtonRole.YesRole)
    dialog.exec_()
| 8,017 | 2,971 |
"""
-file concerned with implementation of GET /scans
-should return as many scans as possible starting from newest
-return size must be capped at 6mb
"""
from boto3.dynamodb.conditions import Key
from lib.dynamodb import scans_table
from lib.lambda_decorator.decorator import api_decorator, format_result
BYTE_LIMIT = 5000000
def determine_bytes(target: dict) -> int:
    """Return the size in bytes of *target* once serialized by format_result."""
    return len(format_result(target).encode('utf-8'))
def make_result(records: list) -> dict:
    """Wrap *records* for the API response, stripping the internal 'scan' key in place."""
    for entry in records:
        # Partition-key bookkeeping; not part of the public payload.
        entry.pop('scan', None)
    return {'scans': records}
def make_max_return(records: list, byte_limit: int) -> dict:
    """Trim *records* until the formatted response fits within *byte_limit*.

    Records are assumed to be sorted newest-first, so popping from the end
    drops the oldest entries.

    Args:
        records: list of scan record dicts (mutated in place).
        byte_limit: maximum allowed size in bytes of the formatted result.

    Returns:
        The dict produced by make_result ({'scans': [...]}).  The original
        annotation claimed ``list`` but the function has always returned the
        make_result dict — annotation fixed.
    """
    count_bytes = determine_bytes(make_result(records))
    # Guard on `records` so a byte_limit smaller than the empty payload
    # cannot drive records.pop() into an IndexError.
    while records and count_bytes > byte_limit:
        records.pop()
        count_bytes = determine_bytes(make_result(records))
    return make_result(records)
@api_decorator
def scans_handler(event, context):
    """Handle GET /scans: return scan records newest-first, trimmed to the byte cap."""
    # ScanIndexForward=False reverses the sort-key order so newest come first.
    all_records = scans_table.query_all(
        KeyConditionExpression=Key('scan').eq(scans_table.SCAN),
        ScanIndexForward=False
    )
    return make_max_return(all_records, BYTE_LIMIT)
| 1,166 | 378 |
import logging
from sparrow_cloud.restclient import rest_client
from sparrow_cloud.restclient.exception import HTTPException
from sparrow_cloud.utils.get_cm_value import get_cm_value
logger = logging.getLogger(__name__)
def access_verify(user_id, app_name, resource_code):
    """
    access control verify

    Ask the access-control service whether `user_id` may use `resource_code`
    under `app_name`.  The service location comes from the
    SC_ACCESS_CONTROL_SVC / SC_ACCESS_CONTROL_API config-map values.

    NOTE(review): the flow is fail-open.  If the service answers with a falsy
    `has_perm`, or raises an HTTPException whose status is neither 400 nor
    403, the function still returns True.  Only an explicit 400/403 denial
    (or a missing argument) yields False — confirm this is the intended
    policy before relying on it for enforcement.
    """
    if all([user_id, app_name, resource_code]):
        sc_access_control_svc = get_cm_value("SC_ACCESS_CONTROL_SVC")
        sc_access_control_api = get_cm_value("SC_ACCESS_CONTROL_API")
        params = {
            "user_id": user_id,
            "app_name": app_name,
            "resource_code": resource_code
        }
        try:
            response = rest_client.get(sc_access_control_svc, api_path=sc_access_control_api, params=params)
            if response['has_perm']:
                return True
        except HTTPException as ex:
            # Explicit denial from the service (bad request / forbidden).
            if ex.status_code == 400 or ex.status_code == 403:
                logger.info("sparrow_cloud log : access verify failed. user:{}, message:{}".format(user_id, ex.detail))
                return False
        # Reached when has_perm was falsy, or a non-400/403 HTTPException occurred.
        return True
    # Any missing argument means the check cannot be performed.
    return False
| 1,119 | 339 |
"""
Open Test Arbatrage System USE AT YOUR OWN RISK!
MULTIPLE EXCHANGE ACCEPTANCE
ALLOWS 'USERS' TO CHOOSE THEIR OWN STRATEGY AS WELL AS TRADING COINS
PROVIDES DETAILED INFORMATION ABOUT EXCHANGES & PLATFORMS
ALLOWS USERS TO CONNECT IN A FRIENDLY ENVIRONMENT
Author: ~Skrypt~
"""
import sys
import os
import time
import shlex
import random
import sha3
import pickle
import hashlib
#from web3 import Web3
#from web3.providers.rpc import HTTPProvider
#from ecdsa import SigningKey, SECP256k1
#web3 = Web3(HTTPProvider('https://mainnet.infura.io/M4QNeQhVp2x0Lm0OxNvW'))
#true = True
#false = False
#from resources import *
from resources.TheCoreData.RawData import *
from resources.PersonaData.PersonaRaw import *
from resources.TheCoreData.CoreResponse import *
#from resources.TheCoreData.CoreSyntax import CoreSyntaxSets
from resources.worlditems import worldItems
from resources.worldnotice import worldNotice
from resources.worldLists import worldLists
from resources.useroptions import userOptions
from resources.worldBoolean import worldBoolean
# import the Environment server class
from resources.server import C0reServer
from resources.TheCoreData.CoreItems import *
from resources.TheCoreData.C0reRoomSystem.C0reRooms import TheC0reRooms
from resources.TheCoreData.C0reUserSystem.UserObject import *
# --- Module state and bootstrap for the C0re chat/"world" server. ---
# NOTE(review): `global` at module level is a no-op; these two lines can go.
global C0RESUBPATH
global C0RESYNTAXPATH
# Pickled syntax tables: alias -> command name, and command -> permission metadata.
C0RESUBPATH = './resources/TheCoreData/C0reSyntaxSystem/C0reSubSyntax.cmf'
C0RESYNTAXPATH = './resources/TheCoreData/C0reSyntaxSystem/C0reSyntax.cmf'
users = {}  # connection id -> C0reGuest
Total_Users = 0  # ever-increasing count, used to mint unique guest names
# starts C0re Server (username + sha256 of the second prompt; both prompts share the same label)
C0re = C0reServer(input('|[USERNAME]>>: '),hashlib.sha256(input('|[USERNAME]>>: ').encode()).hexdigest())
INTERVAL = 0.2  # NOTE(review): defined but the loop below sleeps with the literal 0.2
# SECURITY NOTE(review): pickle.load executes arbitrary code if these .cmf
# files are ever attacker-controlled.
C0reSub = pickle.load(open(C0RESUBPATH,'rb'))
C0reSyntax = pickle.load(open(C0RESYNTAXPATH,'rb'))
print('Starting C0re Server And Hosting Platform Please Wait A Moment')
# Main server loop: poll the C0re server, then service joins, quits and commands.
while True:
    time.sleep(0.2)
    # 'update' must be called in the loop to keep the environment running and give
    # us up-to-date information
    C0re.update()
    for id in C0re.get_new_users():
        # User IS DEFINED HERE FROM - [Resources.PersonaData.UserObject.py]
        users[id] = C0reGuest()
        Total_Users += 1
        C0re.send_message(id, Eresp["E_Notice_Welcome"].title())
    for id in C0re.get_disconnected_users():
        if id not in users: continue
        for pid,pl in users.items():
            C0re.send_message(pid, Eresp["E_Notice_Quit"].format(users[id].Name))
        del(users[id])
    for id,gak,params in C0re.get_commands():
        if id not in users: continue
        if users[id].Started == False:
            # First command from a fresh connection: name the guest and show the room.
            users[id].Name = 'GuestWallet'+str(hex(Total_Users))
            users[id].Started = True
            for pid,pl in users.items():
                # NOTE(review): usershere is re-created on every pass of this loop, so
                # after the loop it only reflects the last user examined — likely a bug
                # (it should be initialized once, before the loop).
                usershere = []
                if users[pid].Room == users[id].Room:
                    usershere.append(users[pid].Name)
                    C0re.send_message(pid, Eresp["E_Notice_Public_Symbol"].title())
                    C0re.send_message(pid, Eresp["E_Notice_Enter_Message"].format(users[id].Name))
            rm = rooms[users[id].Room]
            C0re.send_message(id, Eresp["E_Notice_Welcome_0"].format(users[id].Name))
            C0re.send_message(id, Eresp['E_Notice_Welcome_1'].format())
            C0re.send_message(id, Eresp['E_Notice_Have_Entered'].format(users[id].Room))
            C0re.send_message(id, Eresp['E_Notice_Current_Room_Id'].format(users[id].RoomID))
            C0re.send_message(id, Eresp['E_Notice_Open_Room_Format'].format(rm["description"]))
            C0re.send_message(id, Eresp['E_Notice_Also_Here']+"%s" % ", ".join(usershere).title())
            C0re.send_message(id, Eresp['E_Notice_Visible_Exits']+"%s" % ", ".join(rm["exits"]).title())
        #Conditions To The Alive Player Below
        # Refer to Ess.py for gak list referrences. ~Skrypt
        elif gak.lower() in C0reSub:
            par = params
            par_split = par.split(' ')
            Command = C0reSub[gak.lower()]
            # Room gate: command must be usable globally or in the user's current room.
            if 'global' in C0reSyntax[Command]['Rooms'] or users[id].Room_id in C0reSyntax[Command]['Rooms']:
                # Privilege gate: user level must be allowed for this command.
                if users[id].User_Level in C0reSyntax[Command]['Users'] or 'global' in C0reSyntax[Command]['Users']:
                    # Assert Command Match Here
                    if users[id].C0reTime == 0:
                        ########### Start Of 'Say' Syntax #################################
                        if Command == 'say':
                            if TheC0reRooms[users[id].Room].Private == False and TheC0reRooms[users[id].Room].Whisper_Only == False:
                                if users[id].Special_Speech == False:
                                    C0re.send_message(id, '{0}, \'{1}\''.format(users[id].Speech,params),users[id].Name) # Alert User
                                elif users[id].Special_Speech == True:
                                    # NOTE(review): `user[id]` is undefined (should be `users[id]`) —
                                    # this branch raises NameError whenever Special_Speech is True.
                                    C0re.send_message(id, '{0} and Say, \'{1}\''.format(users[id].Speech,params),user[id].Name) # Alert User
                                # go through every user in the game
                                for pid,pl in users.items():
                                    # if they're in the same room as the user
                                    if users[pid].Room == users[id].Room and users[pid].Name != users[id].Name:
                                        if users[id].Special_Speech == False:
                                            # NOTE(review): format indices {1}/{2} with only two positional
                                            # args (valid indices 0 and 1) — raises IndexError; should be {0}/{1}.
                                            C0re.send_message(pid, '{1}s, \'{2}\''.format(users[id].Speech, params),users[id].Name) # Tell Everyone In Room Except User
                                        elif users[id].Special_Speech == True:
                                            # NOTE(review): same out-of-range {1}/{2} indices as above.
                                            C0re.send_message(pid, '{1} and Says, \'{2}\''.format(users[id].Speech,params),users[id].Name)
                            elif TheC0reRooms[users[id].Room].Private == True or TheC0reRooms[users[id].Room].Whisper_Only == True:
                                if TheC0reRooms[users[id].Room].Private == True:
                                    C0re.send_message(id, 'I\'m Sorry [{}] This Is A Private Area No Speaking Or Whispering Allowed.'.format(users[id].Name),'Vivian')
                                    for pid,pl in users.items():
                                        if users[pid].Room == users[id].Room and users[pid].Name != users[id].Name:
                                            C0re.send_message(pid, 'Tries But Fails To Speak.',users[id].Name)
                                elif TheC0reRooms[users[id].Room].Whisper_Only == True:
                                    # NOTE(review): the message has a {} placeholder but .format() is
                                    # never called — the literal braces are sent to the user.
                                    C0re.send_message(id, 'I\'m Sorry [{}], This Is A Whisper Only Area')
                                    for pid,pl in users.items():
                                        if users[pid].Room == users[id].Room and users[pid].Name != users[id].Name:
                                            C0re.send_message(pid,'Fails To Whisper.',users[id].Name)
                        ######## End Of Say Syntax ###################################################
                    elif users[id].C0reTime > 0:
                        users[id].C0reTime += C0reTimeMap[gak.lower()] # Adds Additional C0re-Time To Prevent Server Spam
                        C0re.send_message(id,'...Please Wait ({}) C0re-Time...'.format(users[id].C0reTime),'Vivian')
                elif users[id].User_Level not in C0reSyntax[Command]['Users'] and 'global' not in C0reSyntax[Command]['Users']:
                    # Assert Command Level Failure
                    C0re.send_message(id, '[{}] Syntax Is Not A [{}] Privlidge At This Time.'.format(Command,users[id].User_Level),'Vivian')
            elif 'global' not in C0reSyntax[Command]['Rooms'] or users[id].Room_id not in C0reSyntax[Command]['Rooms']:
                # Assert Command Room Failure
                C0re.send_message(id, '[{}] Syntax Is Not Available Within Room [{}] At This Time.'.format(Command,users[id].Room),'Vivian')
            else:
                # NOTE(review): unreachable — the elif above is the logical negation of
                # the leading if, so this else can never run; the message also has a {}
                # placeholder with no .format() call.
                C0re.send_message(id, '[{}] Is A Unknown Command Please \'Submit\' A Support Ticket If You Feel This Is A Mistake.')
        # 'get' command (Object Initiated) Inventory active needs room objects initiated and sub-surface objects (table items etc.)
        elif gak.lower() in Ess["get"]["sets"]:
            pa = params.lower()
            pa_sub = pa.split(' ')
            user_has_item = False
            taken = False
            try:
                if pa_sub[0] == 'my' and pa_sub[1] != '':
                    # NOTE(review): users[id] is indexed like a dict here but accessed via
                    # attributes (users[id].Name) in other branches — one of the two access
                    # styles must be wrong for C0reGuest; confirm against UserObject.py.
                    for i in users[id]["inventory"]:
                        if i.Name == pa_sub[1].title():
                            user_has_item = True
                            taken = False
                        elif i.Name == pa_sub[1].title()+' '+pa_sub[2].title():
                            user_has_item = True
                            taken = False
                    if user_has_item == True and taken == False:
                        if users[id]["left hand"] != [] and users[id]["right hand"] != []:
                            C0re.send_message(id, "Your Hands Are Full. Maybe STOW Something And Try Again?")
                        elif users[id]["right hand"] == []:
                            users[id]["inventory"].remove(i)
                            users[id]["right hand"].append(i)
                            taken = True
                            C0re.send_message(id, "You Get Your {0} From Your Inventory With Your Right Hand.".format(i.Name))
                        elif users[id]["left hand"] == []:
                            users[id]["inventory"].remove(i)
                            users[id]["left hand"].append(i)
                            taken = True
                            C0re.send_message(id, "You Get Your {0} From Your Inventory With Your Left Hand.".format(i.Name))
                    else:
                        # NOTE(review): `i` is whatever the loop last bound and is unbound
                        # when the inventory is empty (swallowed by the broad except below).
                        C0re.send_message(id, "Sorry {0}, You Do Not Have {1}.".format(users[id]["name"].title(), i.Name))
                else:
                    C0re.send_message(id, 'What Are You Trying To Get?')
            except Exception as Get_What:
                # Broad catch doubles as the handler for too-short input (IndexError on pa_sub).
                C0re.send_message(id, 'What Are You Trying To Get?')
            #elif paramTitle in wi: #needs work on this block (items need to be added to WI)
            #Evm.send_message(id, "Sorry {0}, You Cannot Get This Period.".format(users[id]["name"].title()))
        # 'stow' command (Object Initiated)
        elif gak in Ess["stow"]["sets"]:
            pa = params.lower()
            if users[id]["right hand"] == [] and users[id]["left hand"] == []:
                C0re.send_message(id, "You Have Nothing To Stow In Your Hands.")
            elif users[id]["right hand"] != [] and pa == "right":
                item_to_stow = users[id]["right hand"][0]
                users[id]["right hand"].remove(item_to_stow)
                users[id]["inventory"].append(item_to_stow)
                C0re.send_message(id, "You Put Your {} From Your Right Hand In Your Inventory.".format(item_to_stow.Name))
            elif users[id]["left hand"] != [] and pa == "left":
                item_to_stow = users[id]["left hand"][0]
                users[id]["left hand"].remove(item_to_stow)
                users[id]["inventory"].append(item_to_stow)
                C0re.send_message(id, "You Put Your {} From Your Left Hand In Your Inventory.".format(item_to_stow.Name))
            elif users[id]["right hand"] == [] and pa == "right":
                C0re.send_message(id, "You Have Nothing In Your Right Hand To Put In Your Inventory.")
            elif users[id]["left hand"] == [] and pa == "left":
                C0re.send_message(id, "You Have Nothing In Your Left Hand To Put In Your Inventory.")
            elif params == "":
                C0re.send_message(id, "Usage Is STOW RIGHT/LEFT.".format())
        # 'swap' command (Object Initiated)
        elif gak in Ess["swap"]["sets"]:
            if users[id]["right hand"] == [] and users[id]["left hand"] == []:
                C0re.send_message(id, "You Have Nothing To Swap In Your Hands.")
            elif users[id]["right hand"] != [] and users[id]["left hand"] == []:
                item_to_swap = users[id]["right hand"][0]
                users[id]["right hand"].remove(item_to_swap)
                users[id]["left hand"].append(item_to_swap)
                C0re.send_message(id, "You Swap Your {} From Your Right Hand To Your Left Hand.".format(item_to_swap.Name))
            elif users[id]["right hand"] == [] and users[id]["left hand"] != []:
                item_to_swap = users[id]["left hand"][0]
                users[id]["left hand"].remove(item_to_swap)
                users[id]["right hand"].append(item_to_swap)
                C0re.send_message(id, "You Swap Your {} From Your Left Hand To Your Right Hand.".format(item_to_swap.Name))
            elif users[id]["right hand"] != [] and users[id]["left hand"] != []:
                # NOTE(review): this branch switches to attribute access
                # (users[id].Body.RightHand) while the rest of the command uses
                # dict-style keys — one of the two styles must be wrong.
                r = users[id].Body.RightHand[0]
                l = users[id].Body.LeftHand[0]
                users[id].Body.RightHand.remove(r)
                users[id].Body.RightHand.append(l)
                users[id].Body.LeftHand.remove(l)
                users[id].Body.LeftHand.append(r)
                C0re.send_message(id, 'You carefully swap {0} and {1} between your hands.'.format(users[id].Body.LeftHand,users[id].Body.RightHand))
        # 'glance' command (Object Initiated)
        elif gak.lower() in Ess["glance"]["sets"]:
            if users[id]["right hand"] == [] and users[id]["left hand"] == []:
                C0re.send_message(id, "You Glance Down At Your Empty Hands.")
            elif users[id]["right hand"] != [] and users[id]["left hand"] == []:
                C0re.send_message(id, "You Glance Down And See Nothing In Your Left Hand And {} In Your Right Hand.".format(users[id]["right hand"][0].Name))
            elif users[id]["right hand"] == [] and users[id]["left hand"] != []:
                C0re.send_message(id, "You Glance Down And See Nothing In Your Right Hand And {} In Your Left Hand.".format(users[id]["left hand"][0].Name))
            elif users[id]["right hand"] != [] and users[id]["left hand"] != []:
                C0re.send_message(id, "You Glance Down And See {0} In Your Right Hand And {1} In Your Left Hand.".format(users[id]["right hand"][0].Name, users[id]["left hand"][0].Name))
        # '*say' command
        elif gak.lower() in Ess["say change"]["sets"]:
            pa = params.lower()
            poa = po["user_actions"]
            empty = ""
            if pa == empty:
                # NOTE(review): `Evm` is not defined anywhere in this module — this call
                # raises NameError; it almost certainly means `C0re`.
                Evm.send_message(id, "Please Use *say "+"STYLE ".upper()+"For More Information On "+"STYLE".upper()+" use /*say".title())
            if pa not in poa["say_options"] and pa != empty:
                if pa not in poa["plural_options"]:
                    C0re.send_message(id, "that is not an option")
            if pa in poa["say_options"]:
                C0re.send_message(id, "thank you for choosing ".title()+pa.title())
                C0re.send_message(id, "please remember to use *say in the future to alter your options".title())
                users[id]["special speech"] = False
                users[id]["speech"] = pa
            elif pa in poa["plural_options"]:
                C0re.send_message(id, "thank you for choosing ".title()+pa.title())
                C0re.send_message(id, "please remember to use *say in the future to alter your options".title())
                users[id]["special speech"] = True
                users[id]["speech"] = pa
        # 'inv' command (Object Initiated)
        elif gak.lower() in Ess["inventory"]["sets"]:
            name = users[id]["name"].title()
            # send a message to user with the inventory
            if users[id]["inventory"] == []:
                C0re.send_message(id, "Sorry {0} Your Inventory Is Empty".format(name))
            else:
                inventory_list = []
                for i in users[id]["inventory"]:
                    inventory_list.append(i.Name)
                real_inventory_list = '%s' % ", ".join(inventory_list)
                C0re.send_message(id, "{0} Your Inventory Consists Of:> ~>{1}<~".format(name, real_inventory_list))
        # 'stand' command
        elif gak.lower() == "stand" and params == "":
            if users[id]["userdown"] == True:
                C0re.send_message(id, "You Stand Back Up.")
                users[id]["userdown"] = False
                users[id]["standing"] = True
                # NOTE(review): playershere is created but never appended to or read.
                playershere = []
                for pid,pl in users.items():
                    # if user is in the same room and isn't the user sending the command
                    if users[pid]["room"] == users[id]["room"] and pid!=id:
                        # tell everyone else in the room
                        C0re.send_message(pid, "{0} Stands Up.".format(users[id]["name"].title()) )
            elif users[id]["userdown"] == False:
                C0re.send_message(id, "You Are Already Standing")
        # 'say' command
        elif gak.lower() in Ess["says"]["sets"]:
            name = users[id]["name"]
            nameTitle = name.title()
            speech = users[id]["speech"]
            speechTitle = speech.title()
            # go through every user in the game
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid]["room"] == users[id]["room"]:
                    if users[id]["special speech"] == False:
                        # send them a message telling them what the user said
                        C0re.send_message(pid, nameTitle + " {0}s, \"{1}\"".format(users[id]["speech"], params))
                    elif users[id]["special speech"] == True:
                        C0re.send_message(pid, nameTitle+" "+speechTitle+" And Says, \"{0}\"".format(params))
        # 'wave' command
        elif gak == "wave":
            # go through every user in the game
            C0re.send_message(id,'You Wave.')
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid]["room"] == users[id]["room"] and users[pid]["name"] != users[id]["name"]:
                    # send them a message telling them what the user did
                    C0re.send_message(pid,"%s waves" % (users[id]["name"]))
        # 'snort' command
        elif gak.lower() == "snort":
            # go through every user in the game (sender included)
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid]["room"] == users[id]["room"]:
                    # send them a message telling them what the user did
                    C0re.send_message(pid,"{0} snorts".format(users[id]["name"]))
        # 'shake' command
        elif gak == "shake":
            pa = params.lower()
            pa_sub = pa.split(' ')
            try:
                if pa_sub[0] == 'fist':
                    # go through every user in the game
                    for pid,pl in users.items():
                        # if they're in the same room as the user
                        if users[pid]["room"] == users[id]["room"]:
                            # send them a message telling them what the user did
                            C0re.send_message(pid,"%s Shakes Thier Fist." % (users[id]["name"]))
            except Exception as e:
                # NOTE(review): `'list index' in e` tests membership on the exception
                # object itself (TypeError) — should be `in str(e)`.
                if 'list index' in e:
                    C0re.send_message(id, 'Shake What?')
                else:
                    C0re.send_message(id, str(e))
        # 'cough' command
        elif gak == "cough":
            # go through every user in the game
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid]["room"] == users[id]["room"]:
                    # send them a message telling them what the user did
                    C0re.send_message(pid,"%s coughs" % (users[id]["name"]))
        # 'sigh' command
        elif gak == "sigh":
            # go through every user in the game
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid]["room"] == users[id]["room"]:
                    # send them a message telling them what the user did
                    C0re.send_message(pid,"%s sighs" % (users[id]["name"]))
        # 'giggle' command
        elif gak == "giggle":
            # go through every user in the game
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid]["room"] == users[id]["room"]:
                    # send them a message telling them what the user did
                    C0re.send_message(pid,"%s giggles" % (users[id]["name"]))
        # 'emote' command
        elif gak == "emote":
            # go through every user in the game
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid]["room"] == users[id]["room"]:
                    # send them a message telling them what the user did
                    C0re.send_message(pid,"%s %s" % (users[id]["name"],params))
        # 'touch' command
        elif gak == "touch":
            # store the user's current room
            rm = rooms[users[id]["room"]]
            # stores params and checks to see if applicable
            # NOTE(review): "roomid" here vs "room id" in the poke/read/go branches —
            # the two key spellings cannot both be right.
            rx = params.lower()+users[id]["roomid"]
            if rx not in rm["objects"] and wi:
                rx = params.lower()
                for pid,pl in users.items():
                    rx = params.lower() # if user is in the same room and isn't the user sending the command
                    if users[pid]["room"] == users[id]["room"] and pid!=id:
                        if rx == users[pid]["name"] and rx != users[id]["name"]:
                            C0re.send_message(id, "You Reach Out And Touch %s" % rx.title())
                            C0re.send_message(pid, "%s Reaches out and touches you." % users[id]["name"].title())
                        elif rx != users[pid]["name"] or rx != users[id]["name"]:
                            C0re.send_message(id, "I Cannot Find Who You Are Referring To.")
                    elif users[pid]["room"] == users[id]["room"] and rx == users[id]["name"]:
                        C0re.send_message(id, "You Touch Yourself... That's A Little Creepy...")
            elif rx in rm["objects"] and wi:
                si = wi[rx]
                C0re.send_message(id, si["touchdesc"])
                for pid,pl in users.items():
                    # if user is in the same room and isn't the user sending the command
                    if users[pid]["room"] == users[id]["room"] and pid!=id:
                        if rx in rm["objects"] and wi:
                            si = wi[rx]
                            C0re.send_message(pid, si["othertouchdesc"].format(users[id]["name"]))
        # 'look' command
        elif gak == "look":
            # store the user's current room
            rm = rooms[users[id]["room"]]
            # stores params and checks to see if applicable
            rx = params.lower()+users[id]["roomid"]
            pa = params.lower()
            name = users[id]['name']
            if pa == name.lower():
                C0re.send_message(id, str(users[id]))
            if rx not in rm["objects"] and wi:
                rx = params.lower()
                C0re.send_message(id, "I Cannot Find {0}".format(rx) )
            elif rx in rm["objects"] and wi:
                si = wi[rx]
                C0re.send_message(id, si["shortdesc"])
                # go through all the users in the game
                for pid,pl in users.items():
                    # if user is in the same room and isn't the user sending the command
                    if users[pid]["room"] == users[id]["room"] and pid!=id:
                        if rx in rm["objects"] and wi:
                            si = wi[rx]
                            # tell them the user is examining the object
                            C0re.send_message(pid, si["othershortdesc"].format(users[id]["name"]) )
        # 'poke' command
        elif gak == "poke":
            # store the user's current room
            rm = rooms[users[id]["room"]]
            # stores params and checks to see if applicable
            rx = params.lower()+users[id]["room id"]
            if rx not in rm["objects"] and wi:
                rx = params.lower()
                C0re.send_message(id, "I Cannot Find {0}".format(rx) )
            elif rx in rm["objects"] and wi:
                si = wi[rx]
                C0re.send_message(id, si["pokedesc"])
                # go through all the users in the game
                for pid,pl in users.items():
                    # if user is in the same room and isn't the user sending the command
                    if users[pid]["room"] == users[id]["room"] and pid!=id:
                        if rx in rm["objects"] and wi:
                            si = wi[rx]
                            # tell them the user poked the object
                            C0re.send_message(pid, si["otherpokedesc"].format(users[id]["name"]))
        # 'read' command
        elif gak == "read":
            # store the user's current room
            rm = rooms[users[id]["room"]]
            # stores params and checks to see if applicable
            pa = params.lower()
            rx = params.lower()+users[id]["room id"]
            # Check false first.
            if rx not in rm["objects"] and wi:
                rx = params.lower()
                C0re.send_message(id, "I Cannot Find {0}".format(rx))
            elif rx in rm["objects"] and wi:
                si = wi[rx]
                C0re.send_message(id, si["readdesc"])
        # 'area' command
        elif gak == "area":
            # store the user's current room
            # NOTE(review): attribute access (users[id].Room) here, dict access
            # (users[id]["room"]) in the movement commands — confirm which is real.
            rm = rooms[users[id].Room]
            # send the user back the description of their current room
            usershere = []
            # go through every user in the game
            for pid,pl in users.items():
                # if they're in the same room as the user
                if users[pid].Room == users[id].Room:
                    # add their name to the list
                    usershere.append(users[pid].Name)
            # send user a message containing the list of users in the room
            C0re.send_message(id, Eresp['E_Notice_Current_Location'].format(users[id].Room))
            C0re.send_message(id, Eresp['E_Notice_Current_Room_Id'].format(users[id].RoomID))
            C0re.send_message(id, Eresp['E_Notice_Open_Room_Format'].format(rm["description"]))
            C0re.send_message(id, Eresp['E_Notice_Also_Here']+"%s" % ", ".join(usershere).title())
            C0re.send_message(id, Eresp['E_Notice_Visible_Exits']+"%s" % ", ".join(rm["exits"]).title())
        # 'go' command
        elif gak == "go":
            name = users[id]["name"]
            nameTitle = name.title()
            move = users[id]["move"]
            # store the exit name
            ex = params
            ext = ex.title()
            # store the user's current room & newly added id
            rm = rooms[users[id]["room"]]
            # if the specified exit is found in the room's exits list)
            if ex in rm["exits"]:
                # go through all the users in the game
                for pid,pl in users.items():
                    # if user is in the same room and isn't the user sending the command
                    if users[pid]["room"] == users[id]["room"] and pid!=id:
                        # send them a message telling them that the user left the room
                        C0re.send_message(pid,nameTitle+" "+move+"ed Away via: "+ext.format())
                # update the user's current room to the one the exit leads to and update ID
                users[id]["room"] = rm["exits"][ex]
                rm = rooms[users[id]["room"]]
                users[id]["room id"] = rm["room id"]
                current = worldRooms[users[id]["room"]]
                # go through all the users in the game
                for pid,pl in users.items():
                    # if user is in the same (new) room and isn't the user sending the command
                    if users[pid]["room"] == users[id]["room"] and pid!=id:
                        # send them a message telling them that the user entered the room
                        C0re.send_message(pid,nameTitle+" "+move+"'s into the area from the "+ext.format())
                #build usershere
                usershere = []
                # go through every user in the game
                for pid,pl in users.items():
                    # if they're in the same room as the user
                    if users[pid]["room"] == users[id]["room"]:
                        # add their name to the list
                        usershere.append(users[pid]["name"])
                # send the user a message telling them where they are now
                roomti = users[id]["room"]
                roomtitle = roomti.title()
                C0re.send_message(id,nameTitle+" "+move+"ed"+" :"+roomtitle.format())
                C0re.send_message(id, Eresp['E_Notice_Have_Entered'].format(users[id]["room"]))
                C0re.send_message(id, Eresp['E_Notice_Current_Room_Id'].format(users[id]["room id"]))
                C0re.send_message(id, Eresp['E_Notice_Open_Room_Format'].format(rm["description"]))
                C0re.send_message(id, Eresp['E_Notice_Also_Here']+"%s" % ", ".join(usershere).title())
                C0re.send_message(id, Eresp['E_Notice_Visible_Exits']+"%s" % ", ".join(rm["exits"]).title())
            # the specified exit wasn't found in the current room
            else:
                # send back an 'unknown exit' message
                # NOTE(review): `Evm` is undefined — NameError; should be `C0re`.
                Evm.send_message(id, "Unknown exit '%s'" % ex)
        # 'exit' command
        elif gak in Ess["exit"]["sets"]:
            dummy = users[id]["name"]
            # send message to user that socket is being exited
            C0re.send_message(id, Vresp['Viv_Exit_Response_0'].format(users[id]['name']))
            C0re.send_message(id, Vresp['Viv_Exit_Response_1'])
            del(users[id])
            for pid,pl in users.items():
                C0re.send_message(pid,Eresp['E_Notice_Quit'].format(dummy.title()))
        # some other, bullshit command
        else:
            gak = gak.upper()
            # send back an 'unknown command' message
            C0re.send_message(id, "I'm Sorry {0} {1} Is A Unknown/Broken Syntax. If You Feel This Might Be A Error Please CONTACT The Devs For Support.".format(users[id].Name,gak))
| 29,735 | 9,440 |
from datetime import datetime
from django.core.exceptions import ValidationError
from fms_core.models import Container
from ..containers import CONTAINER_KIND_SPECS
def get_container(barcode):
    """Fetch a Container by its barcode.

    Args:
        barcode: barcode string identifying the container.

    Returns:
        Tuple (container, errors, warnings); container is None when it was
        not found or when no barcode was given.
    """
    container = None
    errors = []
    warnings = []
    if not barcode:
        # Nothing to look up without a barcode (was an f-string with no
        # placeholders — ruff F541).
        errors.append("Barcode is required to get a container.")
        return (container, errors, warnings)
    try:
        container = Container.objects.get(barcode=barcode)
    except Container.DoesNotExist:
        errors.append(f"Could not find Container with barcode {barcode}")
    return (container, errors, warnings)
def get_or_create_container(barcode,
                            kind=None, name=None, coordinates=None,
                            container_parent=None, creation_comment=None):
    """Fetch the container matching *barcode*, creating it when absent.

    When the container already exists, every provided optional field (kind,
    name, parent, coordinates) is checked against the stored record and a
    mismatch is reported as an error; omitted fields are accepted as-is.

    Returns:
        Tuple (container, created_entity, errors, warnings) where
        created_entity is True only when a new Container row was created.
    """
    container = None
    created_entity = False
    errors = []
    warnings = []
    if barcode:
        # Build the kwargs for a potential create, including only the fields
        # that were actually provided; name defaults to the barcode.
        container_data = dict(
            **(dict(location=container_parent) if container_parent else dict()),
            **(dict(barcode=barcode) if barcode is not None else dict()),
            **(dict(name=name) if name is not None else dict(name=barcode)),  # By default, a container name will be his barcode
            **(dict(coordinates=coordinates) if coordinates is not None else dict()),
            **(dict(kind=kind) if kind is not None else dict()),
        )
        #TODO: check sample or container creation templates where only barcode OR name is required
        # NOTE(review): datetime.utcnow() is naive and deprecated in 3.12;
        # datetime.now(timezone.utc) is the modern equivalent.
        comment = creation_comment or (f"Automatically generated on {datetime.utcnow().isoformat()}Z")
        try:
            container = Container.objects.get(barcode=barcode)
            # Validate that the retrieved container is the right one
            if kind and kind != container.kind:
                errors.append(f"Provided container kind {kind} does not match the container kind {container.kind} of the container retrieved using the barcode {barcode}.")
            if name and name != container.name:
                errors.append(f"Provided container name {name} does not match the container name {container.name} of the container retrieved using the barcode {barcode}.")
            if container_parent and container_parent.id != container.location.id:
                errors.append(f"Provided parent container {container_parent.barcode} does not match the parent container {container.location.barcode} of the container retrieved using the barcode {barcode}.")
            if coordinates and coordinates != container.coordinates:
                errors.append(f"Provided container coordinates {coordinates} do not match the container coordinates {container.coordinates} of the container retrieved using the barcode {barcode}.")
        except Container.DoesNotExist:
            # Creation path: some parent kinds require a position inside them.
            if container_parent and CONTAINER_KIND_SPECS[container_parent.kind].requires_coordinates and not coordinates:
                errors.append(f"Parent container kind {container_parent.kind} requires that you provide coordinates.")
            else:
                try:
                    container = Container.objects.create(**container_data, comment=comment)
                    created_entity = True
                # Pile up all validation error raised during the creation of the container
                except ValidationError as e:
                    errors.append(';'.join(e.messages))
    else:
        errors.append(f"Barcode is required to get or create a container.")
    return (container, created_entity, errors, warnings)
def create_container(barcode, kind,
                     name=None, coordinates=None, container_parent=None, creation_comment=None):
    """Create a new Container; fail when the barcode is already in use.

    Returns:
        Tuple (container, errors, warnings); container is None on any failure.
    """
    container = None
    errors = []
    warnings = []
    if barcode:
        if Container.objects.filter(barcode=barcode).exists():
            errors.append(f"Container with barcode {barcode} already exists.")
        else:
            # Include only the fields that were provided; name defaults to barcode.
            container_data = dict(
                **(dict(location=container_parent) if container_parent else dict()),
                **(dict(barcode=barcode) if barcode is not None else dict()),
                **(dict(name=name) if name is not None else dict(name=barcode)),  # By default, a container name will be his barcode
                **(dict(coordinates=coordinates) if coordinates is not None else dict()),
                **(dict(kind=kind) if kind is not None else dict()),
            )
            comment = creation_comment or (f"Automatically generated on {datetime.utcnow().isoformat()}Z")
            # Some parent kinds require a position inside them.
            if container_parent and CONTAINER_KIND_SPECS[container_parent.kind].requires_coordinates and not coordinates:
                errors.append(f"Parent container kind {container_parent.kind} requires that you provide coordinates.")
            else:
                try:
                    container = Container.objects.create(**container_data, comment=comment)
                # Pile up all validation error raised during the creation of the container
                except ValidationError as e:
                    errors.append(';'.join(e.messages))
    else:
        errors.append(f"Barcode is required to create a container.")
    return (container, errors, warnings)
def rename_container(container_to_update, barcode=None, name=None, update_comment=None):
    """
    Update the barcode and/or name of an existing container.

    Args:
        container_to_update: Container instance to modify.
        barcode: Optional new barcode.
        name: Optional new name.
        update_comment: Optional comment recorded on the container.

    Returns:
        Tuple (container_to_update, errors, warnings); at least one of
        barcode or name must be provided, otherwise an error is returned
        and nothing is saved.
    """
    errors = []
    warnings = []
    if not any([barcode, name]):
        # Plain string: no placeholders, so no f-prefix needed.
        errors.append('Either New Barcode or New Name are required.')
        return (container_to_update, errors, warnings)
    if barcode:
        container_to_update.barcode = barcode
    if name:
        container_to_update.name = name
    if update_comment:
        container_to_update.update_comment = update_comment
    try:
        container_to_update.save()
    except Exception as e:
        # Surface any persistence/validation failure as an error string.
        errors.append(str(e))
    return (container_to_update, errors, warnings)
def move_container(container_to_move, destination_barcode,
                   destination_coordinates=None, update_comment=None):
    """
    Move a container to a destination container (optionally at coordinates).

    Args:
        container_to_move: Container instance to relocate.
        destination_barcode: Barcode of the destination container (required).
        destination_coordinates: Optional coordinates inside the destination.
        update_comment: Optional comment recorded on the container.

    Returns:
        Tuple (container_to_move, errors, warnings).
    """
    errors = []
    warnings = []
    if not destination_barcode:
        errors.append('Destination location barcode is required.')
        return (container_to_move, errors, warnings)
    try:
        # Test for container barcode to provide a better error message.
        destination_container = Container.objects.get(barcode=destination_barcode)
    except Container.DoesNotExist:
        errors.append(f"Destination Container barcode {destination_barcode} does not exist.")
        # Bug fix: previously execution fell through with destination_container
        # left as None, which could clear the container's location and save it.
        return (container_to_move, errors, warnings)
    if container_to_move.location == destination_container and container_to_move.coordinates == destination_coordinates:
        errors.append(f"Container {container_to_move.name} already is at container {destination_barcode} at coordinates {destination_coordinates}.")
        return (container_to_move, errors, warnings)
    container_to_move.location = destination_container
    container_to_move.coordinates = destination_coordinates if destination_coordinates else ""
    container_to_move.update_comment = update_comment
    try:
        container_to_move.save()
    except Exception as e:
        # Surface any persistence/validation failure as an error string.
        errors.append(str(e))
    return (container_to_move, errors, warnings)
import pytest
import time
from datetime import timedelta
from typing import Optional, Dict, Any
from whendo.core.util import Rez, SystemInfo, Now, KeyTagMode, DateTime, Rez
from whendo.core.action import Action
from whendo.core.server import Server
from whendo.core.actions.list_action import (
UntilFailure,
All,
Terminate,
IfElse,
RaiseCmp,
Result,
)
from whendo.core.schedulers.timed_scheduler import Timely
from whendo.core.scheduler import Immediately
from whendo.core.dispatcher import Dispatcher
from whendo.core.programs.simple_program import PBEProgram
from whendo.core.actions.dispatch_action import (
UnscheduleProgram,
ScheduleAction,
DeferAction,
ExpireAction,
)
from whendo.core.timed import Timed
from .fixtures import port, host
pause = 3
def test_server_all_1(friends, servers):
    """ALL key-tag mode with {"foo": ["bar", "baz"]} is expected to match both servers."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar", "baz"]}, key_tag_mode=mode
    )
    assert len(result) == 2
def test_server_all_2(friends, servers):
    """ALL key-tag mode with {"foo": ["bar"]} is expected to match exactly one server."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar"]}, key_tag_mode=mode
    )
    assert len(result) == 1
def test_server_all_3(friends, servers):
    """ALL key-tag mode with an empty tag list is expected to match no server."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(key_tags={"foo": []}, key_tag_mode=mode)
    assert len(result) == 0
def test_server_all_4(friends, servers):
    """ALL key-tag mode with a tag no server carries is expected to match no server."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ALL
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["clasp"]}, key_tag_mode=mode
    )
    assert len(result) == 0
def test_server_any_1(friends, servers):
    """ANY key-tag mode with {"foo": ["bar", "baz"]} is expected to match both servers."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar", "baz"]}, key_tag_mode=mode
    )
    assert len(result) == 2
def test_server_any_2(friends, servers):
    """ANY key-tag mode with {"foo": ["bar"]} is expected to match both servers."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["bar"]}, key_tag_mode=mode
    )
    assert len(result) == 2
def test_server_any_3(friends, servers):
    """ANY key-tag mode with an empty tag list is expected to match no server."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(key_tags={"foo": []}, key_tag_mode=mode)
    assert len(result) == 0
def test_server_any_4(friends, servers):
    """ANY key-tag mode with a tag no server carries is expected to match no server."""
    dispatcher, scheduler, action = friends()
    aqua, teal = servers()
    dispatcher.add_server(server_name="aqua", server=aqua)
    dispatcher.add_server(server_name="teal", server=teal)
    mode = KeyTagMode.ANY
    result = dispatcher.get_servers_by_tags(
        key_tags={"foo": ["clasp"]}, key_tag_mode=mode
    )
    assert len(result) == 0
def test_schedule_action(friends):
    """
    Tests Dispatcher and Timed objects running a scheduled action.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.run_jobs()
    time.sleep(pause)  # let the Timely(interval=1) scheduler fire at least once
    dispatcher.stop_jobs()
    dispatcher.clear_jobs()
    assert action.flea_count > 0
def test_schedule_action_action(friends):
    """
    Tests scheduling an action by executing a ScheduleAction action
    (rather than calling dispatcher.schedule_action directly).
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    schedule_action = ScheduleAction(scheduler_name="bar", action_name="foo")
    schedule_action.execute()
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.run_jobs()
    time.sleep(pause)  # let the Timely(interval=1) scheduler fire at least once
    dispatcher.stop_jobs()
    dispatcher.clear_jobs()
    assert action.flea_count > 0
# def test_dispatcher_action_args_1(friends):
# """
# Tests computation of args based on fields, data and mode (=field).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.FIELD
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"action_name": "flea"},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "foo"
# def test_dispatcher_action_args_2(friends):
# """
# Tests computation of args based on fields, data and mode (=data).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.DATA
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"action_name": "flea"},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "flea"
# def test_dispatcher_action_args_3(friends):
# """
# Tests computation of args based on fields, data and mode (=field).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.FIELD
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"result": {"action_name": "flea"}},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "foo"
# def test_dispatcher_action_args_4(friends):
# """
# Tests computation of args based on fields, data and mode (=data).
# """
# dispatcher, scheduler, action = friends()
# action2 = FleaCount(flea_count=100)
# dispatcher.add_action("foo", action)
# dispatcher.add_action("flea", action2)
# dispatcher.add_scheduler("bar", scheduler)
# schedule_action = ScheduleAction(
# scheduler_name="bar", action_name="foo", mode=DispActionMode.DATA
# )
# args = schedule_action.compute_args(
# args={"scheduler_name": "bar", "action_name": "foo"},
# data={"result": {"action_name": "flea"}},
# )
# assert args["scheduler_name"] == "bar"
# assert args["action_name"] == "flea"
def test_schedule_action_action_data_1(friends):
    """
    Tests that a ScheduleAction constructed with an explicit action_name
    schedules that action ("foo") and not the action named in the Rez
    fields ("flea" never runs: its count stays at 100).
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    dispatcher.add_action("foo", action)
    dispatcher.add_action("flea", action2)
    dispatcher.add_scheduler("bar", scheduler)
    schedule_action = ScheduleAction(scheduler_name="bar", action_name="foo")
    schedule_action.execute(rez=Rez(flds={"action_name": "flea"}))
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.run_jobs()
    time.sleep(pause)  # let the Timely(interval=1) scheduler fire
    dispatcher.stop_jobs()
    dispatcher.clear_jobs()
    assert action.flea_count > 1
    assert action2.flea_count == 100
def test_schedule_action_action_data_2(friends):
    """
    Tests that a ScheduleAction without an action_name picks it up from the
    Rez fields: "flea" runs (count rises above 100) while "foo" never runs.
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    dispatcher.add_action("foo", action)
    dispatcher.add_action("flea", action2)
    dispatcher.add_scheduler("bar", scheduler)
    schedule_action = ScheduleAction(scheduler_name="bar")
    schedule_action.execute(rez=Rez(flds={"action_name": "flea"}))
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.run_jobs()
    time.sleep(pause)  # let the Timely(interval=1) scheduler fire
    dispatcher.stop_jobs()
    dispatcher.clear_jobs()
    assert action.flea_count == 0
    assert action2.flea_count > 100
def test_unschedule_scheduler(friends):
    """
    Tests unscheduling a scheduler: its job disappears but the scheduler
    and action definitions themselves are kept.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    dispatcher.unschedule_scheduler("bar")
    assert dispatcher.job_count() == 0
    assert dispatcher.get_scheduled_action_count() == dispatcher.job_count()
    # make sure that bar and foo remain
    assert dispatcher.get_scheduler("bar")
    assert dispatcher.get_action("foo")
def test_unschedule_all(friends):
    """
    Tests unscheduling all schedulers: both scheduled actions under "bar"
    are removed along with their jobs.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.clear_jobs()
    assert dispatcher.job_count() == 0
    # a second scheduled action under the same scheduler
    dispatcher.add_action("foo2", action)
    dispatcher.schedule_action("bar", "foo2")
    assert dispatcher.get_scheduled_action_count() == 2
    dispatcher.unschedule_all_schedulers()
    assert dispatcher.job_count() == 0
    assert dispatcher.get_scheduled_action_count() == 0
def test_reschedule_all(friends):
    """
    Tests rescheduling all schedulers after jobs were cleared: the two
    scheduled actions share one scheduler, so one job comes back.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.clear_jobs()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo2", action)
    dispatcher.schedule_action("bar", "foo2")
    assert dispatcher.get_scheduled_action_count() == 2
    dispatcher.reschedule_all_schedulers()
    assert dispatcher.job_count() == 1
    assert dispatcher.get_scheduled_action_count() == 2
def test_clear_dispatcher(friends):
    """
    Tests clearing a dispatcher: unlike unschedule_scheduler, clear_all
    also removes the action and scheduler definitions.
    """
    dispatcher, scheduler, action = friends()
    assert dispatcher.job_count() == 0
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action("bar", "foo")
    assert dispatcher.job_count() == 1
    dispatcher.clear_all()
    assert dispatcher.job_count() == 0
    assert dispatcher.get_scheduled_action_count() == dispatcher.job_count()
    # make sure that bar and foo are Gone
    assert dispatcher.get_scheduler("bar") is None
    assert dispatcher.get_action("foo") is None
def test_scheduled_action_count(friends):
    """
    Tests that scheduling one action yields exactly one scheduled action
    and one job.
    """
    # original
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action(action_name="foo", scheduler_name="bar")
    assert 1 == dispatcher.get_scheduled_action_count()
    assert 1 == dispatcher.job_count()
def test_jobs_are_running(friends):
    """jobs_are_running() reports True after run_jobs()."""
    dispatcher, scheduler, action = friends()
    try:
        dispatcher.run_jobs()
        assert dispatcher.jobs_are_running()
    finally:
        # best-effort cleanup so a failed assertion doesn't leave jobs running
        try:
            dispatcher.stop_jobs()
        except:
            pass
def test_jobs_are_not_running(friends):
    """jobs_are_running() reports False again after stop_jobs()."""
    dispatcher, scheduler, action = friends()
    try:
        dispatcher.run_jobs()
        assert dispatcher.jobs_are_running()
        dispatcher.stop_jobs()
        assert not dispatcher.jobs_are_running()
    finally:
        # best-effort cleanup so a failed assertion doesn't leave jobs running
        try:
            dispatcher.stop_jobs()
        except:
            pass
def test_replace_dispatcher(friends):
    """
    Tests replacing a dispatcher: replace_all swaps in the replacement's
    actions, schedulers and scheduled actions wholesale.
    """
    # original
    dispatcher, scheduler, action = friends()
    saved_dir = dispatcher.get_saved_dir()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action(action_name="foo", scheduler_name="bar")
    # replacement
    replacement = Dispatcher()  # no saved_dir
    replacement.add_action("flea", action)
    replacement.add_scheduler("bath", scheduler)
    replacement.schedule_action(action_name="flea", scheduler_name="bath")
    # do the business
    dispatcher.replace_all(replacement)
    # is everyone okay?
    assert not dispatcher.get_action("foo")
    assert not dispatcher.get_scheduler("bar")
    assert dispatcher.get_action("flea")
    assert dispatcher.get_scheduler("bath")
    assert {"bath"} == set(k for k in dispatcher.get_schedulers())
    assert {"flea"} == set(k for k in dispatcher.get_actions())
    assert {"bath"} == set(
        k for k in dispatcher.get_scheduled_actions().scheduler_names()
    )
    assert {"flea"} == dispatcher.get_scheduled_actions().actions("bath")
def test_load_dispatcher(friends):
    """
    Tests loading a dispatcher: load_current reads back the state that was
    persisted under saved_dir, matching actions, schedulers and schedule.
    """
    dispatcher, scheduler, action = friends()
    saved_dir = dispatcher.get_saved_dir()
    assert saved_dir is not None
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.schedule_action(action_name="foo", scheduler_name="bar")
    dispatcher2 = dispatcher.load_current()
    assert dispatcher2 is not None, f"saved_dir({saved_dir})"
    assert set(k for k in dispatcher.get_actions()) == set(
        k for k in dispatcher2.get_actions()
    )
    assert set(k for k in dispatcher.get_schedulers()) == set(
        k for k in dispatcher2.get_schedulers()
    )
    assert set(k for k in dispatcher.get_scheduled_actions().action_names()) == set(
        k for k in dispatcher2.get_scheduled_actions().action_names()
    )
def test_saved_dir_1(tmp_path):
    """saved_dir can be set after construction via set_saved_dir."""
    saved_dir = str(tmp_path)
    dispatcher = Dispatcher()
    dispatcher.set_saved_dir(saved_dir=saved_dir)
    assert dispatcher.get_saved_dir() == saved_dir
def test_saved_dir_2(tmp_path):
    """saved_dir can be supplied directly to the Dispatcher constructor."""
    saved_dir = str(tmp_path)
    dispatcher = Dispatcher(saved_dir=saved_dir)
    assert dispatcher.get_saved_dir() == saved_dir
def test_defer_action(friends):
    """
    Want to observe the scheduling move from deferred state to ready state.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    dispatcher.defer_action(
        scheduler_name="bar", action_name="foo", wait_until=Now.dt()
    )
    # deferred state -- can run jobs and actions will _not_ be executed
    assert 1 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    time.sleep(6)  # the out-of-band job runs every five seconds
    # ready state -- can run jobs and actions will be executed
    assert 0 == dispatcher.get_deferred_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
def test_defer_action_action(friends):
    """
    Want to observe the scheduling move from deferred state to ready state,
    this time driven by executing a DeferAction action.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    defer_action = DeferAction(
        scheduler_name="bar",
        action_name="foo",
        wait_until=DateTime(dt=Now.dt()),
    )
    defer_action.execute()
    # deferred state -- can run jobs and actions will _not_ be executed
    assert 1 == dispatcher.get_deferred_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    time.sleep(6)  # the out-of-band job runs every five seconds
    # ready state -- can run jobs and actions will be executed
    assert 0 == dispatcher.get_deferred_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
def test_expire_action(friends):
    """
    Want to observe a scheduled action being removed once its expiration
    time passes.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    dispatcher.schedule_action(scheduler_name="bar", action_name="foo")
    assert 0 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    dispatcher.expire_action(
        scheduler_name="bar",
        action_name="foo",
        expire_on=Now.dt() + timedelta(seconds=1),
    )
    assert 1 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    time.sleep(6)  # the out-of-band job runs every 2-5 seconds
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
def test_expire_action_action(friends):
    """
    Want to observe a scheduled action being removed once its expiration
    time passes, this time driven by executing an ExpireAction action.
    """
    dispatcher, scheduler, action = friends()
    dispatcher.add_action("foo", action)
    dispatcher.add_scheduler("bar", scheduler)
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
    dispatcher.schedule_action(scheduler_name="bar", action_name="foo")
    assert 0 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    expire_action = ExpireAction(
        scheduler_name="bar",
        action_name="foo",
        expire_on=DateTime(dt=Now.dt() + timedelta(seconds=2)),
    )
    expire_action.execute()
    assert 1 == dispatcher.get_expiring_action_count()
    assert 1 == dispatcher.get_scheduled_action_count()
    time.sleep(6)  # the out-of-band job runs every 2-5 seconds
    assert 0 == dispatcher.get_expiring_action_count()
    assert 0 == dispatcher.get_scheduled_action_count()
def test_immediately(friends):
    """
    Want to observe that action get executed immediately and that schedulers_actions
    is not impacted.
    """
    dispatcher, scheduler, action = friends()

    class TestAction(Action):
        # counts how many times execute() ran
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result(result=self.fleas)

    test_action = TestAction()
    assert dispatcher.get_scheduled_action_count() == 0
    assert test_action.fleas == 0
    dispatcher.add_action("foo", test_action)
    dispatcher.add_scheduler("imm", Immediately())
    dispatcher.schedule_action(scheduler_name="imm", action_name="foo")
    # the action ran once right away and nothing stayed scheduled
    assert dispatcher.get_scheduled_action_count() == 0
    assert test_action.fleas == 1
def test_program_1(friends):
    """
    Want to observe that a Program's actions are executed:
    prologue (foo1) once, body (foo2) repeatedly, epilogue (foo3) once.
    """
    dispatcher, scheduler, action = friends()

    class TestAction1(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction2(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction3(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    start = Now().dt() + timedelta(seconds=1)
    stop = start + timedelta(seconds=4)
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    # prologue has not fired yet; the sleeps walk through the program phases
    assert action1.fleas == 0
    time.sleep(3)
    assert action1.fleas == 1
    time.sleep(4)
    assert action2.fleas >= 2
    time.sleep(2)
    assert action3.fleas == 1
def test_unschedule_program(friends):
    """
    Want to observe that a Program's actions are not executed
    after being unscheduled prior to the deferral time.
    """
    dispatcher, scheduler, action = friends()

    class TestAction1(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction2(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction3(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    start = Now().dt() + timedelta(seconds=4)
    stop = start + timedelta(seconds=4)
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    assert dispatcher.get_deferred_program_count() == 1
    assert dispatcher.get_scheduled_action_count() == 0
    dispatcher.unschedule_program("baz")
    # the program definition remains; only its deferral is gone
    assert len(dispatcher.get_programs()) == 1
    assert dispatcher.get_deferred_program_count() == 0
    # none of the program's actions should ever fire
    assert action1.fleas == 0
    time.sleep(3)
    assert action1.fleas == 0
    time.sleep(4)
    assert action2.fleas == 0
    time.sleep(2)
    assert action3.fleas == 0
def test_unschedule_program_action(friends):
    """
    Want to observe that a Program's actions are not executed
    after being unscheduled prior to the deferral time, this time
    driven by executing an UnscheduleProgram action.
    """
    dispatcher, scheduler, action = friends()

    class TestAction1(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction2(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction3(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    start = Now().dt() + timedelta(seconds=4)
    stop = start + timedelta(seconds=4)
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    assert dispatcher.get_deferred_program_count() == 1
    assert dispatcher.get_scheduled_action_count() == 0
    unschedule_program = UnscheduleProgram(program_name="baz")
    unschedule_program.execute()
    time.sleep(1)
    # the program definition remains; only its deferral is gone
    assert len(dispatcher.get_programs()) == 1
    assert dispatcher.get_deferred_program_count() == 0
    # none of the program's actions should ever fire
    assert action1.fleas == 0
    time.sleep(3)
    assert action1.fleas == 0
    time.sleep(4)
    assert action2.fleas == 0
    time.sleep(2)
    assert action3.fleas == 0
def test_delete_program(friends):
    """
    Want to observe that a Program's actions are not executed
    after being deleted prior to the deferral time.
    """
    dispatcher, scheduler, action = friends()

    class TestAction1(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction2(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    class TestAction3(Action):
        fleas: int = 0

        def execute(self, tag: str = None, rez: Rez = None):
            self.fleas += 1
            return self.action_result()

    action1 = TestAction1()
    action2 = TestAction2()
    action3 = TestAction3()
    dispatcher.add_action("foo1", action1)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_action("foo3", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.add_scheduler("immediately", Immediately())
    program = PBEProgram().prologue("foo1").body_element("bar", "foo2").epilogue("foo3")
    dispatcher.add_program("baz", program)
    start = Now().dt() + timedelta(seconds=4)
    stop = start + timedelta(seconds=4)
    assert len(dispatcher.get_programs()) == 1
    dispatcher.run_jobs()
    dispatcher.schedule_program("baz", start, stop)
    assert dispatcher.get_deferred_program_count() == 1
    assert dispatcher.get_scheduled_action_count() == 0
    dispatcher.delete_program("baz")
    # unlike unschedule, delete removes the program definition itself
    assert len(dispatcher.get_programs()) == 0
    assert dispatcher.get_deferred_program_count() == 0
    # none of the program's actions should ever fire
    assert action1.fleas == 0
    time.sleep(3)
    assert action1.fleas == 0
    time.sleep(4)
    assert action2.fleas == 0
    time.sleep(2)
    assert action3.fleas == 0
def test_execute_with_rez(friends):
    """
    Want to see execute work with supplied dictionary: the Rez passed in is
    carried through to the action result.
    """
    dispatcher, scheduler, action = friends()
    result = action.execute(rez=Rez(result={"fleacount": "infinite"}))
    assert result.rez.result == {"fleacount": "infinite"}
def test_terminate_scheduler(friends):
    """
    Want to terminate a scheduler by scheduling a Terminate action on it:
    once "terminate" fires, both scheduled actions disappear.
    """
    dispatcher, scheduler, action = friends()
    action2 = Terminate()
    dispatcher.add_action("foo", action)
    dispatcher.add_action("terminate", action2)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.run_jobs()
    dispatcher.schedule_action("bar", "foo")
    time.sleep(2)
    assert action.flea_count >= 1
    assert dispatcher.get_scheduled_action_count() == 1
    dispatcher.schedule_action("bar", "terminate")
    assert dispatcher.get_scheduled_action_count() == 2
    time.sleep(2)
    assert dispatcher.get_scheduled_action_count() == 0
def test_terminate_scheduler_and(friends):
    """
    Want to observe that a Terminate inside an UntilFailure action list stops
    execution: the first action runs once and the action after Terminate
    never runs (its count stays at 100).
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    actions = [action, Terminate(), action2]
    action3 = (
        UntilFailure()
    )  # add actions on next line to use them directly below; pydantic deep copies field values
    action3.actions = actions
    dispatcher.add_action("foo", action3)
    dispatcher.add_scheduler("bar", scheduler)
    dispatcher.run_jobs()
    dispatcher.schedule_action("bar", "foo")
    time.sleep(3)
    assert action.flea_count == 1
    assert action2.flea_count == 100
def test_if_else_1(friends):
    """
    Tests IfElse with RaiseCmp(value=1) against a Result of 2: the if-branch
    runs (foo1's count becomes 1) and the else-branch does not (foo2 stays
    at 100).
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    immediately = Immediately()
    dispatcher.add_action("foo1", action)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_scheduler("immediately", immediately)
    if_else = IfElse(
        test_action=RaiseCmp(value=1),
        if_action=ScheduleAction(scheduler_name="immediately", action_name="foo1"),
        else_action=ScheduleAction(scheduler_name="immediately", action_name="foo2"),
    )
    schedule_action = All(actions=[Result(value=2), if_else])
    dispatcher.add_action("schedule_action", schedule_action)
    dispatcher.schedule_action("immediately", "schedule_action")
    assert action.flea_count == 1
    assert action2.flea_count == 100
def test_if_else_2(friends):
    """
    Tests IfElse with RaiseCmp(value=2) against a Result of 2: the if-branch
    does not run (foo1 stays at 0) and the else-branch runs (foo2's count
    becomes 101).
    """
    dispatcher, scheduler, action = friends()
    action2 = FleaCount(flea_count=100)
    immediately = Immediately()
    dispatcher.add_action("foo1", action)
    dispatcher.add_action("foo2", action2)
    dispatcher.add_scheduler("immediately", immediately)
    if_else = IfElse(
        test_action=RaiseCmp(value=2),
        if_action=ScheduleAction(scheduler_name="immediately", action_name="foo1"),
        else_action=ScheduleAction(scheduler_name="immediately", action_name="foo2"),
    )
    schedule_action = All(actions=[Result(value=2), if_else])
    dispatcher.add_action("schedule_action", schedule_action)
    dispatcher.schedule_action("immediately", "schedule_action")
    assert action.flea_count == 0
    assert action2.flea_count == 101
# ====================================
class FleaCount(Action):
    """Test action that counts its executions in flea_count."""
    # number of times execute() has run (may be seeded, e.g. FleaCount(flea_count=100))
    flea_count: int = 0
    data: Optional[Dict[Any, Any]]

    def execute(self, tag: str = None, rez: Rez = None):
        """Increment the counter and return it, propagating the incoming Rez."""
        self.flea_count += 1
        return self.action_result(
            result=self.flea_count, rez=rez, flds=rez.flds if rez else {}
        )
@pytest.fixture
def friends(tmp_path, host, port):
    """Returns a factory producing a fresh (dispatcher, scheduler, action) tuple."""
    SystemInfo.init(host, port)

    def stuff():
        # want a fresh tuple from the fixture on every call
        dispatcher = Dispatcher(saved_dir=str(tmp_path))
        dispatcher.set_timed(Timed())
        dispatcher.initialize()
        action = FleaCount()
        scheduler = Timely(interval=1)
        return dispatcher, scheduler, action

    return stuff
@pytest.fixture
def servers():
    """Returns a factory producing two fresh tagged Server instances.

    Both share foo:bar; only server1 has foo:baz (used by the key-tag tests).
    """

    def stuff():
        server1 = Server(host="localhost", port=8000)
        server1.add_key_tag("foo", "bar")
        server1.add_key_tag("foo", "baz")
        server1.add_key_tag("fleas", "standdown")
        server1.add_key_tag("krimp", "kramp")
        server2 = Server(host="localhost", port=8000)
        server2.add_key_tag("foo", "bar")
        server2.add_key_tag("fleas", "riseup")
        server2.add_key_tag("slip", "slide")
        return (server1, server2)

    return stuff
| 32,053 | 10,610 |
import os
from functools import reduce
__here__ = os.path.dirname(__file__)
TEST_DATA = '''\
2199943210
3987894921
9856789892
8767896789
9899965678\
'''
def gen_neighbors(array, x, y):
    '''Generate the values of the north, south, east, and west neighbors of (x, y).

    On edges only valid points are generated: negative indices are skipped
    explicitly (Python would otherwise wrap around to the end of the row),
    and indices past the end are caught via IndexError.
    '''
    candidates = (
        (x - 1, y),
        (x + 1, y),
        (x, y - 1),
        (x, y + 1),
    )
    # nx/ny instead of reusing x/y: the original shadowed its own parameters.
    for nx, ny in candidates:
        if nx >= 0 and ny >= 0:
            try:
                yield array[nx][ny]
            except IndexError:
                continue
def decode_input(data):
    '''Yield each line of *data* as a list of its single-digit integers.'''
    for row in data.split('\n'):
        yield list(map(int, row))
def lowest_points(array, shape):
    '''Generate (value, (x, y)) for each point strictly lower than all its neighbors.'''
    rows, cols = shape
    for x in range(rows):
        for y in range(cols):
            value = array[x][y]
            # strictly lower than every N/S/E/W neighbor
            if value < min(gen_neighbors(array, x, y)):
                yield value, (x, y)
def calculate_1(data):
    '''Part 1: sum of risk levels (height + 1) over all low points.'''
    grid = list(decode_input(data))
    shape = (len(grid), len(grid[0]))
    return sum(height + 1 for height, _ in lowest_points(grid, shape))
def flood_fill(data, origin, shape):
    '''Return a copy of `data` flood-filled with `10` starting from `origin`, bounded by `9`.

    Four-way flood fill using an explicit stack.
    See: https://en.wikipedia.org/wiki/Flood_fill#Stack-based_recursive_implementation_(four-way)

    Things to note:
    - Since we know that boundaries of fill are `9`, we fill the points with `10`
      so that we can distinguish those filled points from unfilled or not-to-be
      filled ones.
    - We work on a copy of `data`; the input is never modified.
    - The original implementation recursed once per filled point, which can hit
      Python's recursion limit on large basins; the explicit stack below visits
      the same points in N, S, E, W order without that risk.
    '''
    x_max, y_max = shape
    array = [row[:] for row in data]
    stack = [origin]
    while stack:
        x, y = stack.pop()
        if x < 0 or y < 0 or x >= x_max or y >= y_max:
            # Bounds check.
            continue
        if array[x][y] >= 9:
            # Boundary check (also skips already-filled points, which are 10).
            continue
        array[x][y] = 10  # use this to distinguish filled points.
        stack.append((x, y + 1))  # North
        stack.append((x, y - 1))  # South
        stack.append((x + 1, y))  # East
        stack.append((x - 1, y))  # West
    return array
def calculate_2(data):
    '''Part 2: product of the sizes of the three largest basins.

    Uses math.prod for the final product instead of functools.reduce --
    `reduce` is not imported in this module's visible header, and prod is
    the idiomatic stdlib spelling (identical result, including the empty
    case where both return 1).
    '''
    from math import prod  # local import keeps the module header untouched

    array = list(decode_input(data))
    shape = (len(array), len(array[0]))
    low_coords = [coord for _, coord in lowest_points(array, shape)]
    basins = []
    for coord in low_coords:
        filled = flood_fill(array, coord, shape)
        # Every cell marked 10 belongs to the basin grown from this low point.
        basins.append(sum(row.count(10) for row in filled))
    return prod(sorted(basins)[-3:])
if __name__ == '__main__':
    # Verify both parts against the known answers for the example grid.
    assert calculate_1(TEST_DATA) == 15
    assert calculate_2(TEST_DATA) == 1134

    input_path = os.path.join(__here__, 'input.txt')
    with open(input_path, 'r') as fp:
        puzzle_input = fp.read()

    answer_1 = calculate_1(puzzle_input)
    answer_2 = calculate_2(puzzle_input)
    print(f'{answer_1=}')
    print(f'{answer_2=}')
| 3,298 | 1,195 |
#!/usr/bin/env python
"""
This is the installation script of the step module, a light and fast template engine. You can run it by typing:

    python setup.py install

You can also run the test suite by running:

    python setup.py test
"""
import sys
from distutils.core import setup
from step.tests import TestCommand

__author__ = "Daniele Mazzocchio <danix@kernel-panic.it>"
__version__ = "0.0.3"
__date__ = "Jul 25, 2019"

# Python versions prior 2.2.3 don't support 'classifiers' and 'download_url',
# so strip those attributes from the metadata class on such interpreters.
# NOTE(review): `sys.version < "2.2.3"` is a lexicographic *string* compare;
# it happens to work for the interpreter versions involved here but is not a
# general-purpose version comparison.
if sys.version < "2.2.3":
    from distutils.dist import DistributionMetadata
    DistributionMetadata.classifiers = None
    DistributionMetadata.download_url = None

# Package metadata; `test` is wired to the package's own TestCommand so that
# `python setup.py test` runs the bundled suite.
setup(name = "step-template",
      version = __version__,
      author = "Daniele Mazzocchio",
      author_email = "danix@kernel-panic.it",
      packages = ["step", "step.tests"],
      cmdclass = {"test": TestCommand},
      description = "Simple Template Engine for Python",
      download_url = "https://github.com/dotpy/step/archive/step-0.0.3.tar.gz",
      classifiers = ["Development Status :: 5 - Production/Stable",
                     "Environment :: Console",
                     "Intended Audience :: Developers",
                     "License :: OSI Approved :: BSD License",
                     "Natural Language :: English",
                     "Operating System :: OS Independent",
                     "Programming Language :: Python",
                     "Topic :: Text Processing"],
      url = "https://github.com/dotpy/step",
      license = "OSI-Approved :: BSD License",
      keywords = "templates templating template-engines",
      long_description = "step is a pure-Python module providing a very "
                         "simple template engine with minimum syntax. It "
                         "supports variable expansion, flow control and "
                         "embedding of Python code.")
| 2,046 | 541 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import functools
from flask import url_for, redirect, session
def login_required(view):
    """Decorator: redirect to the admin login page unless an admin is in session."""
    @functools.wraps(view)
    def wrapped(**kwargs):
        # An absent 'admin_name' key means no admin has authenticated yet.
        if session.get('admin_name') is None:
            return redirect(url_for('admin_bp.admin_login'))
        return view(**kwargs)
    return wrapped
| 335 | 113 |
from . import config, client, context
from .client import APIClient
from .context import ClientContext
| 104 | 25 |
# Reverse the order of the lines of input.txt and write them to output.txt.
# Context managers guarantee the handles are closed even on error.
with open("input.txt", "r") as ip:
    lst = ip.readlines()

lst.reverse()
if lst and not lst[0].endswith('\n'):
    # If the source file had no trailing newline, the (now first) former
    # last line lacks one and would run into the next line when joined.
    # The old code unconditionally did lst.insert(1, '\n'), which also
    # injected a spurious blank line for newline-terminated files.
    lst[0] += '\n'

with open("output.txt", 'w') as op:
    op.write("".join(lst))
| 158 | 71 |
from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
# Public API of this module.
__all__ = ["EffectEstimateType"]

# The CodeSystem definition is loaded from the sibling JSON file that shares
# this module's basename (``<this_module>.json``).
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class EffectEstimateType:
    """
    EffectEstimateType

    Whether the effect estimate is an absolute effect estimate (absolute
    difference) or a relative effect estimate (relative difference), and the
    specific type of effect estimate (eg relative risk or median
    difference).

    Status: draft - Version: 4.0.1

    Copyright None

    http://terminology.hl7.org/CodeSystem/effect-estimate-type
    """

    # Each class attribute below wraps one concept of the code system; the
    # dict literals mirror the entries of the backing JSON resource.
    relative_rr = CodeSystemConcept(
        {
            "code": "relative-RR",
            "definition": "relative risk (a type of relative effect estimate).",
            "display": "relative risk",
        }
    )
    """
    relative risk

    relative risk (a type of relative effect estimate).
    """

    relative_or = CodeSystemConcept(
        {
            "code": "relative-OR",
            "definition": "odds ratio (a type of relative effect estimate).",
            "display": "odds ratio",
        }
    )
    """
    odds ratio

    odds ratio (a type of relative effect estimate).
    """

    relative_hr = CodeSystemConcept(
        {
            "code": "relative-HR",
            "definition": "hazard ratio (a type of relative effect estimate).",
            "display": "hazard ratio",
        }
    )
    """
    hazard ratio

    hazard ratio (a type of relative effect estimate).
    """

    absolute_ard = CodeSystemConcept(
        {
            "code": "absolute-ARD",
            "definition": "absolute risk difference (a type of absolute effect estimate).",
            "display": "absolute risk difference",
        }
    )
    """
    absolute risk difference

    absolute risk difference (a type of absolute effect estimate).
    """

    absolute_mean_diff = CodeSystemConcept(
        {
            "code": "absolute-MeanDiff",
            "definition": "mean difference (a type of absolute effect estimate).",
            "display": "mean difference",
        }
    )
    """
    mean difference

    mean difference (a type of absolute effect estimate).
    """

    absolute_smd = CodeSystemConcept(
        {
            "code": "absolute-SMD",
            "definition": "standardized mean difference (a type of absolute effect estimate).",
            "display": "standardized mean difference",
        }
    )
    """
    standardized mean difference

    standardized mean difference (a type of absolute effect estimate).
    """

    absolute_median_diff = CodeSystemConcept(
        {
            "code": "absolute-MedianDiff",
            "definition": "median difference (a type of absolute effect estimate).",
            "display": "median difference",
        }
    )
    """
    median difference

    median difference (a type of absolute effect estimate).
    """

    class Meta:
        # Links this wrapper class back to the parsed CodeSystem resource.
        resource = _resource
| 3,006 | 794 |
from django.test import TestCase, Client
from django.urls import reverse
class TaskAboutViewsTests(TestCase):
    """View tests for the static `about` application."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Mapping: view name -> template expected to render it.
        cls.about_views = {
            'about:author': 'author.html',
            'about:tech': 'tech.html',
        }

    def setUp(self):
        self.guest_client = Client()

    # NOTE: the "avialable" typo is kept to preserve the existing test id.
    def test_about_pages_avialable_to_guest(self):
        """Every `about` page answers 200 to an anonymous client."""
        for view in self.about_views:
            with self.subTest():
                response = self.guest_client.get(reverse(view))
                self.assertEqual(
                    response.status_code, 200,
                    f'Страничка view "{view}" приложения about недоступна '
                    'гостевому пользователю'
                )

    def test_about_views_according_templates(self):
        """Each `about` view renders its expected template."""
        for view, template in self.about_views.items():
            with self.subTest():
                response = self.guest_client.get(reverse(view))
                self.assertTemplateUsed(
                    response, template,
                    f'Во view "{view}" вызывется некорректный шаблон'
                )
| 1,361 | 385 |
"""
user token
@version: v1.0.1
@Company: Thefair
@Author: Wang Yao
@Date: 2019-11-17 15:21:11
@LastEditors: Wang Yao
@LastEditTime: 2019-11-17 21:17:19
"""
from functools import wraps
from flask import request
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer, BadData
from library.response.tfexception import TfException
from library.request.tfrequest import TfRequest
class TfToken(object):
    """Issues and validates timed, signed user tokens."""

    def __init__(self, secret_key: str, expires_in: int):
        self._secret_key = secret_key
        self._expires_in = expires_in
        # A single serializer instance is reused for both dumps and loads.
        self.serializer = Serializer(secret_key, expires_in=expires_in)

    def get_token(self, user_id: str, email: str) -> str:
        """Serialize (user_id, email) into a signed, expiring token string.

        :param user_id: user id
        :param email: email address
        :return: token string
        """
        payload = {'user_id': user_id, 'email': email}
        return self.serializer.dumps(payload).decode()

    def decode_token(self, token: str) -> dict:
        """Decode *token* back into its payload dict.

        Raises TfException(-3) when the token is malformed or expired.
        """
        try:
            return self.serializer.loads(token)
        except BadData:
            raise TfException(-3, "token decoded error.")

    def check_token(self, token):
        """Validate *token* and return its payload.

        The literal string 'null' (sent by clients with no token) is
        rejected up front.
        """
        if token == 'null':
            raise TfException(-3, "please login first.")
        return self.decode_token(token)
def login_check(func):
    """Decorator for API handlers: reject requests that carry no token.

    NOTE(review): relies on TfRequest().get_params() having populated
    `request.params` as a side effect -- confirm against the TfRequest
    implementation.
    """
    @wraps(func)
    def wrapper(*args, **kw):
        TfRequest().get_params()
        token = request.params.get('token')
        if not token:
            raise TfException(-3, "please login first.")
        return func(*args, **kw)
    return wrapper
| 1,937 | 652 |
from paa191t1.pph.pph_median import pph_median
from paa191t1.tests.pph import TestPPHBase
class TestPPHMedian(TestPPHBase):
    """Binds the median-based PPH implementation into the shared base suite."""

    def setUp(self):
        # The base class exercises whatever callable is bound to self.pph.
        self.pph = pph_median
| 178 | 80 |
# -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Artifacts service tests."""
from __future__ import print_function
import json
import os
import shutil
import mock
from chromite.lib import autotest_util
from chromite.lib import build_target_lib
from chromite.lib import chroot_lib
from chromite.lib import constants
from chromite.lib import cros_build_lib
from chromite.lib import cros_test_lib
from chromite.lib import osutils
from chromite.lib import partial_mock
from chromite.lib import portage_util
from chromite.lib import sysroot_lib
from chromite.lib import toolchain_util
from chromite.lib.paygen import partition_lib
from chromite.lib.paygen import paygen_payload_lib
from chromite.lib.paygen import paygen_stateful_payload_lib
from chromite.service import artifacts
class BundleAutotestFilesTest(cros_test_lib.MockTempDirTestCase):
  """Test the Bundle Autotest Files function."""

  def setUp(self):
    # Output/archive dirs plus a chroot and two sysroots: one whose backing
    # path exists and one that deliberately does not (for the DNE test).
    self.output_dir = os.path.join(self.tempdir, 'output_dir')
    self.archive_dir = os.path.join(self.tempdir, 'archive_base_dir')
    sysroot_path = os.path.join(self.tempdir, 'sysroot')
    self.chroot = chroot_lib.Chroot(self.tempdir)
    self.sysroot = sysroot_lib.Sysroot('sysroot')
    self.sysroot_dne = sysroot_lib.Sysroot('sysroot_DNE')

    # Make sure we have the valid paths.
    osutils.SafeMakedirs(self.output_dir)
    osutils.SafeMakedirs(sysroot_path)

  def testInvalidOutputDirectory(self):
    """Test invalid output directory."""
    with self.assertRaises(AssertionError):
      artifacts.BundleAutotestFiles(self.chroot, self.sysroot, None)

  def testInvalidSysroot(self):
    """Test sysroot that does not exist."""
    with self.assertRaises(AssertionError):
      artifacts.BundleAutotestFiles(self.chroot, self.sysroot_dne,
                                    self.output_dir)

  def testArchiveDirectoryDoesNotExist(self):
    """Test archive directory that does not exist causes error."""
    self.assertEqual(
        artifacts.BundleAutotestFiles(self.chroot, self.sysroot,
                                      self.output_dir), {})

  def testSuccess(self):
    """Test a successful call handling."""
    ab_path = os.path.join(self.tempdir, self.sysroot.path,
                           constants.AUTOTEST_BUILD_PATH)
    osutils.SafeMakedirs(ab_path)

    # Makes all of the individual calls to build out each of the tarballs work
    # nicely with a single patch.
    self.PatchObject(autotest_util.AutotestTarballBuilder, '_BuildTarball',
                     side_effect=lambda _, path, **kwargs: osutils.Touch(path))

    result = artifacts.BundleAutotestFiles(self.chroot, self.sysroot,
                                           self.output_dir)

    # Every produced archive must land in the output dir and exist on disk.
    for archive in result.values():
      self.assertStartsWith(archive, self.output_dir)
      self.assertExists(archive)
class ArchiveChromeEbuildEnvTest(cros_test_lib.MockTempDirTestCase):
  """ArchiveChromeEbuildEnv tests."""

  def setUp(self):
    # Create the chroot and sysroot instances.
    self.chroot_path = os.path.join(self.tempdir, 'chroot_dir')
    self.chroot = chroot_lib.Chroot(path=self.chroot_path)
    self.sysroot_path = os.path.join(self.chroot_path, 'sysroot_dir')
    self.sysroot = sysroot_lib.Sysroot(self.sysroot_path)

    # Create the output directory.
    self.output_dir = os.path.join(self.tempdir, 'output_dir')
    osutils.SafeMakedirs(self.output_dir)

    # The sysroot's /var/db/pkg prefix for the chrome package directories.
    var_db_pkg = os.path.join(self.sysroot_path, portage_util.VDB_PATH)
    # Create the var/db/pkg dir so we have that much for no-chrome tests.
    osutils.SafeMakedirs(var_db_pkg)

    # Two versions of chrome to test the multiple version checks/handling.
    chrome_v1 = '%s-1.0.0-r1' % constants.CHROME_PN
    chrome_v2 = '%s-2.0.0-r1' % constants.CHROME_PN

    # Build the two chrome version paths.
    chrome_cat_dir = os.path.join(var_db_pkg, constants.CHROME_CN)
    self.chrome_v1_dir = os.path.join(chrome_cat_dir, chrome_v1)
    self.chrome_v2_dir = os.path.join(chrome_cat_dir, chrome_v2)

    # Directory tuple for verifying the result archive contents.
    self.expected_archive_contents = cros_test_lib.Directory('./',
                                                             'environment')

    # Create a environment.bz2 file to put into folders.
    env_file = os.path.join(self.tempdir, 'environment')
    osutils.Touch(env_file)
    cros_build_lib.run(['bzip2', env_file])
    self.env_bz2 = '%s.bz2' % env_file

  def _CreateChromeDir(self, path, populate=True):
    """Setup a chrome package directory.

    Args:
      path (str): The full chrome package path.
      populate (bool): Whether to include the environment bz2.
    """
    osutils.SafeMakedirs(path)
    if populate:
      shutil.copy(self.env_bz2, path)

  def testSingleChromeVersion(self):
    """Test a successful single-version run."""
    self._CreateChromeDir(self.chrome_v1_dir)

    created = artifacts.ArchiveChromeEbuildEnv(self.sysroot, self.output_dir)
    self.assertStartsWith(created, self.output_dir)
    cros_test_lib.VerifyTarball(created, self.expected_archive_contents)

  def testMultipleChromeVersions(self):
    """Test a successful multiple version run."""
    # Create both directories, but don't populate the v1 dir so it'll hit an
    # error if the wrong one is used.
    self._CreateChromeDir(self.chrome_v1_dir, populate=False)
    self._CreateChromeDir(self.chrome_v2_dir)

    created = artifacts.ArchiveChromeEbuildEnv(self.sysroot, self.output_dir)
    self.assertStartsWith(created, self.output_dir)
    cros_test_lib.VerifyTarball(created, self.expected_archive_contents)

  def testNoChrome(self):
    """Test no version of chrome present."""
    with self.assertRaises(artifacts.NoFilesError):
      artifacts.ArchiveChromeEbuildEnv(self.sysroot, self.output_dir)
class ArchiveImagesTest(cros_test_lib.TempDirTestCase):
  """ArchiveImages tests."""

  def setUp(self):
    self.image_dir = os.path.join(self.tempdir, 'images')
    self.output_dir = os.path.join(self.tempdir, 'output')
    osutils.SafeMakedirs(self.image_dir)
    osutils.SafeMakedirs(self.output_dir)

    # Create every image the service knows how to archive...
    self.images = [os.path.join(self.image_dir, name)
                   for name in artifacts.IMAGE_TARS]
    for image in self.images:
      osutils.Touch(image)

    # ...plus some noise files that must be ignored by the archiver.
    for noise in ('irrelevant_image.bin', 'foo.txt', 'bar'):
      osutils.Touch(os.path.join(self.image_dir, noise))

  def testNoImages(self):
    """Test an empty directory handling."""
    artifacts.ArchiveImages(self.tempdir, self.output_dir)
    self.assertFalse(os.listdir(self.output_dir))

  def testAllImages(self):
    """Test each image gets picked up."""
    created = artifacts.ArchiveImages(self.image_dir, self.output_dir)
    self.assertCountEqual(list(artifacts.IMAGE_TARS.values()), created)
class CreateChromeRootTest(cros_test_lib.RunCommandTempDirTestCase):
  """CreateChromeRoot tests."""

  def setUp(self):
    # Create the build target.
    self.build_target = build_target_lib.BuildTarget('board')

    # Create the chroot.
    self.chroot_dir = os.path.join(self.tempdir, 'chroot')
    self.chroot_tmp = os.path.join(self.chroot_dir, 'tmp')
    osutils.SafeMakedirs(self.chroot_tmp)
    self.chroot = chroot_lib.Chroot(path=self.chroot_dir)

    # Create the output directory.
    self.output_dir = os.path.join(self.tempdir, 'output_dir')
    osutils.SafeMakedirs(self.output_dir)

  def testRunCommandError(self):
    """Test handling when the run command call is not successful."""
    self.rc.SetDefaultCmdResult(
        side_effect=cros_build_lib.RunCommandError('Error'))

    with self.assertRaises(artifacts.CrosGenerateSysrootError):
      artifacts.CreateChromeRoot(self.chroot, self.build_target,
                                 self.output_dir)

  def testSuccess(self):
    """Test success case."""
    # Separate tempdir for the method itself.
    call_tempdir = os.path.join(self.chroot_tmp, 'cgs_call_tempdir')
    osutils.SafeMakedirs(call_tempdir)
    self.PatchObject(osutils.TempDir, '__enter__', return_value=call_tempdir)

    # Set up files in the tempdir since the command isn't being called to
    # generate anything for it to handle.
    files = ['file1', 'file2', 'file3']
    expected_files = [os.path.join(self.output_dir, f) for f in files]
    for f in files:
      osutils.Touch(os.path.join(call_tempdir, f))

    created = artifacts.CreateChromeRoot(self.chroot, self.build_target,
                                         self.output_dir)

    # Just test the command itself and the parameter-based args.
    self.assertCommandContains(['cros_generate_sysroot',
                                '--board', self.build_target.name])

    # Make sure we get all the expected files back, and that they exist.
    self.assertCountEqual(expected_files, created)
    for f in created:
      self.assertExists(f)
class BundleEBuildLogsTarballTest(cros_test_lib.TempDirTestCase):
  """BundleEBuildLogsTarball tests."""

  def testBundleEBuildLogsTarball(self):
    """Verifies that the correct EBuild tar files are bundled."""
    board = 'samus'
    # Create chroot object and sysroot object
    chroot_path = os.path.join(self.tempdir, 'chroot')
    chroot = chroot_lib.Chroot(path=chroot_path)
    sysroot_path = os.path.join('build', board)
    sysroot = sysroot_lib.Sysroot(sysroot_path)

    # Create parent dir for logs
    log_parent_dir = os.path.join(chroot.path, 'build')

    # Names of log files typically found in a build directory.
    # NOTE(review): the leading empty entry presumably makes the hierarchy /
    # tarball verification account for the logs directory itself -- confirm
    # against CreateOnDiskHierarchy/VerifyTarball semantics.
    log_files = (
        '',
        'x11-libs:libdrm-2.4.81-r24:20170816-175008.log',
        'x11-libs:libpciaccess-0.12.902-r2:20170816-174849.log',
        'x11-libs:libva-1.7.1-r2:20170816-175019.log',
        'x11-libs:libva-intel-driver-1.7.1-r4:20170816-175029.log',
        'x11-libs:libxkbcommon-0.4.3-r2:20170816-174908.log',
        'x11-libs:pango-1.32.5-r1:20170816-174954.log',
        'x11-libs:pixman-0.32.4:20170816-174832.log',
        'x11-misc:xkeyboard-config-2.15-r3:20170816-174908.log',
        'x11-proto:kbproto-1.0.5:20170816-174849.log',
        'x11-proto:xproto-7.0.31:20170816-174849.log',
    )
    # Inside the tarball every log is expected under a top-level logs/ dir.
    tarred_files = [os.path.join('logs', x) for x in log_files]
    log_files_root = os.path.join(log_parent_dir,
                                  '%s/tmp/portage/logs' % board)

    # Generate a representative set of log files produced by a typical build.
    cros_test_lib.CreateOnDiskHierarchy(log_files_root, log_files)

    archive_dir = self.tempdir
    tarball = artifacts.BundleEBuildLogsTarball(chroot, sysroot, archive_dir)
    self.assertEqual('ebuild_logs.tar.xz', tarball)

    # Verify the tarball contents.
    tarball_fullpath = os.path.join(self.tempdir, tarball)
    cros_test_lib.VerifyTarball(tarball_fullpath, tarred_files)
class BundleChromeOSConfigTest(cros_test_lib.TempDirTestCase):
  """BundleChromeOSConfig tests."""

  def setUp(self):
    self.board = 'samus'
    # Create chroot object and sysroot object
    chroot_path = os.path.join(self.tempdir, 'chroot')
    self.chroot = chroot_lib.Chroot(path=chroot_path)
    sysroot_path = os.path.join('build', self.board)
    self.sysroot = sysroot_lib.Sysroot(sysroot_path)

    self.archive_dir = self.tempdir

  def testBundleChromeOSConfig(self):
    """Verifies that the correct ChromeOS config file is bundled."""
    # Create parent dir for ChromeOS Config output.
    config_parent_dir = os.path.join(self.chroot.path, 'build')

    # Names of ChromeOS Config files typically found in a build directory.
    config_files = ('config.json',
                    cros_test_lib.Directory('yaml', [
                        'config.c', 'config.yaml', 'ec_config.c', 'ec_config.h',
                        'model.yaml', 'private-model.yaml'
                    ]))
    config_files_root = os.path.join(
        config_parent_dir, '%s/usr/share/chromeos-config' % self.board)
    # Generate a representative set of config files produced by a typical build.
    cros_test_lib.CreateOnDiskHierarchy(config_files_root, config_files)

    # Write a payload to the config.yaml file.  The payload is written as
    # JSON (valid YAML), which lets json.load read it back below.
    test_config_payload = {
        'chromeos': {
            'configs': [{
                'identity': {
                    'platform-name': 'Samus'
                }
            }]
        }
    }
    with open(os.path.join(config_files_root, 'yaml', 'config.yaml'), 'w') as f:
      json.dump(test_config_payload, f)

    config_filename = artifacts.BundleChromeOSConfig(self.chroot, self.sysroot,
                                                     self.archive_dir)
    self.assertEqual('config.yaml', config_filename)

    # The bundled file must carry the exact payload that was written.
    with open(os.path.join(self.archive_dir, config_filename), 'r') as f:
      self.assertEqual(test_config_payload, json.load(f))

  def testNoChromeOSConfigFound(self):
    """Verifies that None is returned when no ChromeOS config file is found."""
    self.assertIsNone(
        artifacts.BundleChromeOSConfig(self.chroot, self.sysroot,
                                       self.archive_dir))
class BundleVmFilesTest(cros_test_lib.TempDirTestCase):
  """BundleVmFiles tests."""

  def testBundleVmFiles(self):
    """Verifies that the correct files are bundled"""
    # A chroot whose test results dir mixes VM and non-VM files.
    chroot_path = os.path.join(self.tempdir, 'chroot')
    chroot = chroot_lib.Chroot(path=chroot_path)
    test_results_dir = 'test/results'

    # Only the chromiumos_qemu_* files should be picked up; one of them
    # carries a suffix (123) after the VM pattern prefix.
    vm_files = (
        'file1.txt',
        'file2.txt',
        'chromiumos_qemu_disk.bin' + '123',
        'chromiumos_qemu_mem.bin',
    )
    cros_test_lib.CreateOnDiskHierarchy(
        os.path.join(chroot_path, test_results_dir), vm_files)

    output_dir = os.path.join(self.tempdir, 'output_dir')
    osutils.SafeMakedirs(output_dir)

    archives = artifacts.BundleVmFiles(
        chroot, test_results_dir, output_dir)

    expected_archive_files = [
        output_dir + '/chromiumos_qemu_disk.bin' + '123.tar',
        output_dir + '/chromiumos_qemu_mem.bin.tar',
    ]
    self.assertCountEqual(archives, expected_archive_files)
class BuildFirmwareArchiveTest(cros_test_lib.TempDirTestCase):
  """BuildFirmwareArchive tests."""

  def testBuildFirmwareArchive(self):
    """Verifies that firmware archiver includes proper files"""
    # Assorted set of file names, some of which are supposed to be included in
    # the archive.
    fw_files = (
        'dts/emeraldlake2.dts',
        'image-link.rw.bin',
        'nv_image-link.bin',
        'pci8086,0166.rom',
        'seabios.cbfs',
        'u-boot.elf',
        'u-boot_netboot.bin',
        'updater-link.rw.sh',
        'x86-memtest',
    )
    board = 'link'
    fw_test_root = self.tempdir
    fw_files_root = os.path.join(fw_test_root,
                                 'chroot/build/%s/firmware' % board)

    # Generate a representative set of files produced by a typical build.
    cros_test_lib.CreateOnDiskHierarchy(fw_files_root, fw_files)

    # Create the chroot and sysroot instances.
    chroot_path = os.path.join(self.tempdir, 'chroot')
    chroot = chroot_lib.Chroot(path=chroot_path)
    sysroot = sysroot_lib.Sysroot('/build/link')

    # Create an archive from the simulated firmware directory.
    # BuildFirmwareArchive returns the archive's basename; join it back onto
    # the root to get the full path.
    tarball = os.path.join(
        fw_test_root,
        artifacts.BuildFirmwareArchive(chroot, sysroot, fw_test_root))

    # Verify the tarball contents.
    cros_test_lib.VerifyTarball(tarball, fw_files)
class BundleFpmcuUnittestsTest(cros_test_lib.TempDirTestCase):
  """BundleFpmcuUnittests tests."""

  def testBundleFpmcuUnittests(self):
    """Verifies that the resulting tarball includes proper files"""
    board = 'hatch'
    unittest_files = (
        'bloonchipper/test_rsa.bin',
        'dartmonkey/test_utils.bin',
    )
    # Lay the unittest binaries out where the board sysroot would hold them.
    cros_test_lib.CreateOnDiskHierarchy(
        os.path.join(
            self.tempdir,
            'chroot/build/%s/firmware/chromeos-fpmcu-unittests' % board),
        unittest_files)

    chroot = chroot_lib.Chroot(path=os.path.join(self.tempdir, 'chroot'))
    sysroot = sysroot_lib.Sysroot('/build/%s' % board)

    tarball = os.path.join(
        self.tempdir,
        artifacts.BundleFpmcuUnittests(chroot, sysroot, self.tempdir))

    # The tarball holds the binaries plus their directory entries.
    cros_test_lib.VerifyTarball(
        tarball,
        unittest_files + ('bloonchipper/', 'dartmonkey/'))
class BundleAFDOGenerationArtifacts(cros_test_lib.MockTempDirTestCase):
  """BundleAFDOGenerationArtifacts tests."""
  # NOTE(review): class name is missing the conventional "Test" suffix.

  def setUp(self):
    # Create the build target.
    self.build_target = build_target_lib.BuildTarget('board')

    # Create the chroot.
    self.chroot_dir = os.path.join(self.tempdir, 'chroot')
    self.chroot_tmp = os.path.join(self.chroot_dir, 'tmp')
    osutils.SafeMakedirs(self.chroot_tmp)
    self.chroot = chroot_lib.Chroot(path=self.chroot_dir)

    # Create the output directory.
    self.output_dir = os.path.join(self.tempdir, 'output_dir')
    osutils.SafeMakedirs(self.output_dir)

    self.chrome_root = os.path.join(self.tempdir, 'chrome_root')

  def testRunSuccess(self):
    """Generic function for testing success cases for different types."""
    # Separate tempdir for the method itself.
    call_tempdir = os.path.join(self.chroot_tmp, 'call_tempdir')
    osutils.SafeMakedirs(call_tempdir)
    self.PatchObject(osutils.TempDir, '__enter__', return_value=call_tempdir)

    mock_orderfile_generate = self.PatchObject(
        toolchain_util, 'GenerateChromeOrderfile',
        autospec=True)

    mock_afdo_generate = self.PatchObject(
        toolchain_util, 'GenerateBenchmarkAFDOProfile',
        autospec=True)

    # Test both orderfile and AFDO.
    for is_orderfile in [False, True]:
      # Set up files in the tempdir since the command isn't being called to
      # generate anything for it to handle.
      files = ['artifact1', 'artifact2']
      expected_files = [os.path.join(self.output_dir, f) for f in files]
      for f in files:
        osutils.Touch(os.path.join(call_tempdir, f))

      created = artifacts.BundleAFDOGenerationArtifacts(
          is_orderfile, self.chroot, self.chrome_root,
          self.build_target, self.output_dir)

      # Test right class is called with right arguments
      if is_orderfile:
        mock_orderfile_generate.assert_called_once_with(
            board=self.build_target.name,
            chrome_root=self.chrome_root,
            output_dir=call_tempdir,
            chroot_path=self.chroot.path,
            chroot_args=self.chroot.get_enter_args()
        )
      else:
        mock_afdo_generate.assert_called_once_with(
            board=self.build_target.name,
            output_dir=call_tempdir,
            chroot_path=self.chroot.path,
            chroot_args=self.chroot.get_enter_args(),
        )

      # Make sure we get all the expected files
      self.assertCountEqual(expected_files, created)
      for f in created:
        self.assertExists(f)
        # Remove the outputs so the second loop iteration starts clean.
        os.remove(f)
class FetchPinnedGuestImagesTest(cros_test_lib.TempDirTestCase):
  """FetchPinnedGuestImages tests."""

  def setUp(self):
    self.chroot = chroot_lib.Chroot(self.tempdir)
    self.sysroot = sysroot_lib.Sysroot('/sysroot')
    sysroot_path = os.path.join(self.tempdir, 'sysroot')
    osutils.SafeMakedirs(sysroot_path)
    self.pin_dir = os.path.join(sysroot_path, constants.GUEST_IMAGES_PINS_PATH)
    osutils.SafeMakedirs(self.pin_dir)

  def _WritePin(self, name, pin):
    """Write *pin* as JSON to <pin_dir>/<name>.json."""
    with open(os.path.join(self.pin_dir, name + '.json'), 'w') as f:
      json.dump(pin, f)

  def testSuccess(self):
    """Tests that generating a guest images tarball."""
    for filename in ('file1', 'file2'):
      self._WritePin(filename, {
          'filename': filename + '.tar.gz',
          'gsuri': 'gs://%s' % filename,
      })

    expected = [
        artifacts.PinnedGuestImage(filename='file1.tar.gz', uri='gs://file1'),
        artifacts.PinnedGuestImage(filename='file2.tar.gz', uri='gs://file2'),
    ]
    self.assertCountEqual(
        expected, artifacts.FetchPinnedGuestImages(self.chroot, self.sysroot))

  def testBadPin(self):
    """Tests that generating a guest images tarball with a bad pin file."""
    # A pin missing the 'filename' key must be skipped entirely.
    self._WritePin('file1', {'gsuri': 'gs://%s' % 'file1'})

    self.assertFalse(
        artifacts.FetchPinnedGuestImages(self.chroot, self.sysroot))

  def testNoPins(self):
    """Tests that generating a guest images tarball with no pins."""
    self.assertFalse(
        artifacts.FetchPinnedGuestImages(self.chroot, self.sysroot))
class GeneratePayloadsTest(cros_test_lib.MockTempDirTestCase):
  """Test cases for the payload generation functions.

  BUG FIX: three tests previously called ``assert_call_once_with`` -- that
  method does not exist on mock.Mock, and plain (non-autospecced) mocks
  silently accept any ``assert_*``-looking attribute access as a no-op, so
  those assertions never actually ran.  They now use the real
  ``assert_called_once_with``.
  """

  def setUp(self):
    self.target_image = os.path.join(
        self.tempdir,
        'link/R37-5952.0.2014_06_12_2302-a1/chromiumos_test_image.bin')
    osutils.Touch(self.target_image, makedirs=True)
    self.dummy_dlc_image = os.path.join(
        self.tempdir,
        'link/R37-5952.0.2014_06_12_2302-a1/dlc/dummy-dlc/package/dlc.img')
    osutils.Touch(self.dummy_dlc_image, makedirs=True)

  def testGenerateFullTestPayloads(self):
    """Verifies correctly generating full payloads."""
    paygen_mock = self.PatchObject(paygen_payload_lib, 'GenerateUpdatePayload')

    artifacts.GenerateTestPayloads(self.target_image, self.tempdir, full=True)

    payload_path = os.path.join(
        self.tempdir,
        'chromeos_R37-5952.0.2014_06_12_2302-a1_link_full_dev.bin')
    paygen_mock.assert_called_once_with(self.target_image, payload_path)

  def testGenerateDeltaTestPayloads(self):
    """Verifies correctly generating delta payloads."""
    paygen_mock = self.PatchObject(paygen_payload_lib, 'GenerateUpdatePayload')

    artifacts.GenerateTestPayloads(self.target_image, self.tempdir, delta=True)

    payload_path = os.path.join(
        self.tempdir,
        'chromeos_R37-5952.0.2014_06_12_2302-a1_R37-'
        '5952.0.2014_06_12_2302-a1_link_delta_dev.bin')
    paygen_mock.assert_called_once_with(self.target_image, payload_path,
                                        src_image=self.target_image)

  def testGenerateFullDummyDlcTestPayloads(self):
    """Verifies correctly generating full payloads for dummy-dlc."""
    paygen_mock = self.PatchObject(paygen_payload_lib, 'GenerateUpdatePayload')
    self.PatchObject(portage_util, 'GetBoardUseFlags',
                     return_value=['dlc_test'])

    artifacts.GenerateTestPayloads(self.target_image, self.tempdir, full=True,
                                   dlc=True)

    rootfs_payload = 'chromeos_R37-5952.0.2014_06_12_2302-a1_link_full_dev.bin'
    dlc_payload = ('dlc_dummy-dlc_package_R37-5952.0.2014_06_12_2302-a1_link_'
                   'full_dev.bin')
    paygen_mock.assert_has_calls([
        mock.call(self.target_image,
                  os.path.join(self.tempdir, rootfs_payload)),
        mock.call(self.dummy_dlc_image,
                  os.path.join(self.tempdir, dlc_payload)),
    ])

  def testGenerateDeltaDummyDlcTestPayloads(self):
    """Verifies correctly generating delta payloads for dummy-dlc."""
    paygen_mock = self.PatchObject(paygen_payload_lib, 'GenerateUpdatePayload')
    self.PatchObject(portage_util, 'GetBoardUseFlags',
                     return_value=['dlc_test'])

    artifacts.GenerateTestPayloads(self.target_image, self.tempdir, delta=True,
                                   dlc=True)

    rootfs_payload = ('chromeos_R37-5952.0.2014_06_12_2302-a1_R37-'
                      '5952.0.2014_06_12_2302-a1_link_delta_dev.bin')
    dlc_payload = ('dlc_dummy-dlc_package_R37-5952.0.2014_06_12_2302-a1_R37-'
                   '5952.0.2014_06_12_2302-a1_link_delta_dev.bin')
    paygen_mock.assert_has_calls([
        mock.call(self.target_image,
                  os.path.join(self.tempdir, rootfs_payload),
                  src_image=self.target_image),
        mock.call(self.dummy_dlc_image,
                  os.path.join(self.tempdir, dlc_payload),
                  src_image=self.dummy_dlc_image),
    ])

  def testGenerateStatefulTestPayloads(self):
    """Verifies correctly generating stateful payloads."""
    paygen_mock = self.PatchObject(paygen_stateful_payload_lib,
                                   'GenerateStatefulPayload')

    artifacts.GenerateTestPayloads(self.target_image, self.tempdir,
                                   stateful=True)

    paygen_mock.assert_called_once_with(self.target_image, self.tempdir)

  def testGenerateQuickProvisionPayloads(self):
    """Verifies correct files are created for quick_provision script."""
    extract_kernel_mock = self.PatchObject(partition_lib, 'ExtractKernel')
    extract_root_mock = self.PatchObject(partition_lib, 'ExtractRoot')
    compress_file_mock = self.PatchObject(cros_build_lib, 'CompressFile')

    artifacts.GenerateQuickProvisionPayloads(self.target_image, self.tempdir)

    extract_kernel_mock.assert_called_once_with(
        self.target_image, partial_mock.HasString('kernel.bin'))
    extract_root_mock.assert_called_once_with(
        self.target_image, partial_mock.HasString('rootfs.bin'),
        truncate=False)

    calls = [mock.call(partial_mock.HasString('kernel.bin'),
                       partial_mock.HasString(
                           constants.QUICK_PROVISION_PAYLOAD_KERNEL)),
             mock.call(partial_mock.HasString('rootfs.bin'),
                       partial_mock.HasString(
                           constants.QUICK_PROVISION_PAYLOAD_ROOTFS))]
    compress_file_mock.assert_has_calls(calls)
class GenerateCpeExportTest(cros_test_lib.RunCommandTempDirTestCase):
  """GenerateCpeExport tests."""

  def setUp(self):
    self.sysroot = sysroot_lib.Sysroot('/build/board')
    self.chroot = chroot_lib.Chroot(self.tempdir)
    self.chroot_tempdir = osutils.TempDir(base_dir=self.tempdir)
    # Make the service use a predictable temp dir inside our test tempdir.
    self.PatchObject(self.chroot, 'tempdir', return_value=self.chroot_tempdir)

    self.output_dir = os.path.join(self.tempdir, 'output_dir')
    osutils.SafeMakedirs(self.output_dir)

    # Expected report/warnings file names for the 'board' build target.
    result_file = artifacts.CPE_RESULT_FILE_TEMPLATE % 'board'
    self.result_file = os.path.join(self.output_dir, result_file)
    warnings_file = artifacts.CPE_WARNINGS_FILE_TEMPLATE % 'board'
    self.warnings_file = os.path.join(self.output_dir, warnings_file)

  def testSuccess(self):
    """Test success handling."""
    # Set up warning output and the file the command would be making.
    report = 'Report.'
    warnings = 'Warnings.'
    self.rc.SetDefaultCmdResult(returncode=0, output=report, error=warnings)

    result = artifacts.GenerateCpeReport(self.chroot, self.sysroot,
                                         self.output_dir)

    expected_cmd = ['cros_extract_deps', '--sysroot', '/build/board',
                    '--format', 'cpe', 'virtual/target-os', '--output-path',
                    self.result_file]
    self.assertCommandCalled(expected_cmd, capture_output=True,
                             chroot_args=['--chroot', mock.ANY],
                             enter_chroot=True)

    self.assertEqual(self.result_file, result.report)
    self.assertEqual(self.warnings_file, result.warnings)

    # We cannot assert that self.result_file exists and check contents since we
    # are mocking cros_extract_deps, but we verified the args to
    # cros_extract_deps.
    self.assertFileContents(self.warnings_file, warnings)
| 27,910 | 9,680 |
"""Helper for evaluation on the Labeled Faces in the Wild dataset
"""
# MIT License
#
# Copyright (c) 2016 David Sandberg
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import facenet
import random
def evaluate(embeddings, actual_issame, nrof_folds=10):
    """Compute verification metrics over interleaved pair embeddings.

    Embeddings come interleaved: even rows are the first image of each
    pair, odd rows the second, so embeddings[2i] is compared with
    embeddings[2i + 1].

    Args:
        embeddings: Array of embeddings, two consecutive rows per pair.
        actual_issame: Ground-truth same-person label per pair.
        nrof_folds: Number of cross-validation folds.

    Returns:
        Tuple (tpr, fpr, accuracy, val, val_std, far) from
        facenet.calculate_roc / facenet.calculate_val.
    """
    issame = np.asarray(actual_issame)
    first_of_pair = embeddings[0::2]
    second_of_pair = embeddings[1::2]
    # ROC sweep uses a coarser threshold grid than the validation-rate
    # sweep (0.01 vs 0.001 steps over [0, 4)).
    tpr, fpr, accuracy = facenet.calculate_roc(
        np.arange(0, 4, 0.01), first_of_pair, second_of_pair,
        issame, nrof_folds=nrof_folds)
    val, val_std, far = facenet.calculate_val(
        np.arange(0, 4, 0.001), first_of_pair, second_of_pair,
        issame, 1e-3, nrof_folds=nrof_folds)
    return tpr, fpr, accuracy, val, val_std, far
def get_paths(lfw_dir, pairs, file_ext, timestep_size):
    """Resolve video pair records into on-disk image paths.

    Each pair record has 5 fields (same person: name, vid-idx1, count,
    vid-idx2, count) or 6 fields (different people: name1, vid-idx1,
    count, name2, vid-idx2, count).  For every pair, ``timestep_size``
    frames are sampled from the first video (one random frame per
    equal-length segment, frames iterated in numeric order) and a single
    random frame from the second video, so each pair contributes
    ``timestep_size + 1`` entries to the returned path list.

    Args:
        lfw_dir: Root directory containing one subdirectory per person,
            each holding per-video subdirectories of frame images.
        pairs: Iterable of split pair lines (see read_pairs).
        file_ext: Image extension; unused here but kept for interface
            compatibility with callers.
        timestep_size: Number of frames to sample from the first video.

    Returns:
        (path_list, issame_list) where issame_list has one bool per
        valid pair.
    """

    def _sample_segments(video_dir):
        # One random frame per equal-length segment. Frame names are
        # assumed to look like 'xx0001.ext' so x[2:-4] is the frame
        # number used for sorting -- TODO confirm against the dataset.
        frames = os.listdir(video_dir)
        frames.sort(key=lambda x: int(x[2:-4]))
        nrof_frames = len(frames)
        segment = int(nrof_frames / timestep_size)
        picks = []
        for i in range(timestep_size):
            start = i * segment
            end = min(nrof_frames - 1, (i + 1) * segment)
            picks.append(
                os.path.join(video_dir, frames[random.randint(start, end)]))
        return picks

    def _sample_one(video_dir):
        # A single uniformly random frame (directory order, unsorted,
        # matching the original behavior).
        frames = os.listdir(video_dir)
        return os.path.join(video_dir,
                            frames[random.randint(0, len(frames) - 1)])

    nrof_skipped_pairs = 0
    path_list = []
    issame_list = []
    for pair in pairs:
        if len(pair) == 5:  # Same person, two of their videos.
            people_dir = os.path.join(lfw_dir, pair[0])
            videos = os.listdir(people_dir)
            video1_dir = os.path.join(people_dir, videos[int(pair[1]) - 1])
            video2_dir = os.path.join(people_dir, videos[int(pair[3]) - 1])
            issame = True
        elif len(pair) == 6:  # Two different people, one video each.
            videos1 = os.listdir(os.path.join(lfw_dir, pair[0]))
            videos2 = os.listdir(os.path.join(lfw_dir, pair[3]))
            video1_dir = os.path.join(lfw_dir, pair[0],
                                      videos1[int(pair[1]) - 1])
            video2_dir = os.path.join(lfw_dir, pair[3],
                                      videos2[int(pair[4]) - 1])
            issame = False
        else:
            # Bug fix: the original appended a stale (or unbound) `issame`
            # for malformed pairs; skip and count them instead.
            nrof_skipped_pairs += 1
            continue
        path_list.extend(_sample_segments(video1_dir))
        path_list.append(_sample_one(video2_dir))
        issame_list.append(issame)
    if nrof_skipped_pairs > 0:
        print('Skipped %d image pairs' % nrof_skipped_pairs)
    return path_list, issame_list
def get_video_paths(lfw_dir, pairs, file_ext, timestep_size):
    """Build frame image paths directly from pair fields (no disk access).

    Frame file names are assumed to follow '<video>_%04d.<ext>' with frame
    numbers 1..count, where the count is the pair's third/last field.
    ``timestep_size`` random frames are drawn (with replacement) from each
    of the two videos, so each pair contributes ``2 * timestep_size``
    entries to the returned path list.

    Args:
        lfw_dir: Root directory containing one subdirectory per person.
        pairs: Iterable of split pair lines; 5 fields for a same-person
            pair, 6 for a different-person pair (see read_pairs).
        file_ext: Image file extension, e.g. 'jpg'.
        timestep_size: Number of frames to sample per video.

    Returns:
        (path_list, issame_list) where issame_list has one bool per
        valid pair.
    """
    nrof_skipped_pairs = 0
    path_list = []
    issame_list = []
    for pair in pairs:
        if len(pair) == 5:  # Same person: (name, vid1, count1, vid2, count2).
            person_dir = os.path.join(lfw_dir, pair[0])
            video_specs = [
                (os.path.join(person_dir, pair[1]), pair[1], int(pair[2])),
                (os.path.join(person_dir, pair[3]), pair[3], int(pair[4])),
            ]
            issame = True
        elif len(pair) == 6:  # Different people, one video each.
            video_specs = [
                (os.path.join(lfw_dir, pair[0], pair[1]), pair[1],
                 int(pair[2])),
                (os.path.join(lfw_dir, pair[3], pair[4]), pair[4],
                 int(pair[5])),
            ]
            issame = False
        else:
            # Bug fix: the original appended a stale (or unbound) `issame`
            # for malformed pairs; skip and count them instead.
            nrof_skipped_pairs += 1
            continue
        for video_dir, video_name, nrof_frames in video_specs:
            for _ in range(timestep_size):
                frame = random.randint(1, nrof_frames)
                path_list.append(os.path.join(
                    video_dir, video_name + '_%04d' % frame + '.' + file_ext))
        issame_list.append(issame)
    if nrof_skipped_pairs > 0:
        print('Skipped %d image pairs' % nrof_skipped_pairs)
    return path_list, issame_list
def read_pairs(pairs_filename):
    """Read a pairs file, skipping its header line.

    Returns a numpy array with one row of whitespace-split tokens per
    remaining line.
    """
    with open(pairs_filename, 'r') as f:
        rows = [line.strip().split() for line in f]
    # The first line is a header (pair counts), not a pair record.
    return np.array(rows[1:])