blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 4
721
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 5
91
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 321
values | visit_date
timestamp[ns]date 2016-08-12 09:31:09
2023-09-06 10:45:07
| revision_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| committer_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| github_id
int64 426
681M
| star_events_count
int64 101
243k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[ns]date 2012-06-28 18:51:49
2023-09-14 21:59:16
⌀ | gha_created_at
timestamp[ns]date 2008-02-11 22:55:26
2023-08-10 11:14:58
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 26
values | language
stringclasses 2
values | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 6
10.2M
| extension
stringclasses 115
values | filename
stringlengths 3
113
| content
stringlengths 6
10.2M
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cf2c071c87c51dc93900f956778f5e796168e66f
|
5ef6c8d47864f471e26b9902d61f8c687e941f05
|
/src/genie/libs/parser/junos/tests/ShowInterfacesQueue/cli/equal/golden_output_expected.py
|
64209483091384e99e9b442f39b6baf6646b4da7
|
[
"Apache-2.0"
] |
permissive
|
CiscoTestAutomation/genieparser
|
169c196558f1c1a0f0d10650876096f993224917
|
b531eff760b2e44cd69d7a2716db6f866907c239
|
refs/heads/master
| 2023-09-03T08:56:18.831340
| 2023-08-29T22:32:02
| 2023-08-29T22:32:02
| 131,621,824
| 247
| 409
|
Apache-2.0
| 2023-08-29T22:32:04
| 2018-04-30T16:51:50
|
Python
|
UTF-8
|
Python
| false
| false
| 11,427
|
py
|
golden_output_expected.py
|
expected_output = {
"interface-information": {
"physical-interface": {
"description": "to_ixia_2/4",
"local-index": "143",
"name": "ge-0/0/2",
"oper-status": "Up",
"queue-counters": {
"interface-cos-summary": {
"intf-cos-forwarding-classes-in-use": "5",
"intf-cos-forwarding-classes-supported": "16",
"intf-cos-num-queues-in-use": "5",
"intf-cos-num-queues-supported": "8",
"intf-cos-queue-type": "Egress queues",
},
"queue": [
{
"forwarding-class-name": "Bronze-FC",
"queue-counters-queued-bytes": "564883280956",
"queue-counters-queued-bytes-rate": "0",
"queue-counters-queued-packets": "1470816406",
"queue-counters-queued-packets-rate": "0",
"queue-counters-red-bytes": "0",
"queue-counters-red-bytes-high": "0",
"queue-counters-red-bytes-low": "0",
"queue-counters-red-bytes-medium-high": "0",
"queue-counters-red-bytes-medium-low": "0",
"queue-counters-red-bytes-rate": "0",
"queue-counters-red-bytes-rate-high": "0",
"queue-counters-red-bytes-rate-low": "0",
"queue-counters-red-bytes-rate-medium-high": "0",
"queue-counters-red-bytes-rate-medium-low": "0",
"queue-counters-red-packets": "0",
"queue-counters-red-packets-high": "0",
"queue-counters-red-packets-low": "0",
"queue-counters-red-packets-medium-high": "0",
"queue-counters-red-packets-medium-low": "0",
"queue-counters-red-packets-rate": "0",
"queue-counters-red-packets-rate-high": "0",
"queue-counters-red-packets-rate-low": "0",
"queue-counters-red-packets-rate-medium-high": "0",
"queue-counters-red-packets-rate-medium-low": "0",
"queue-counters-tail-drop-packets": "0",
"queue-counters-tail-drop-packets-rate": "0",
"queue-counters-trans-bytes": "564883280956",
"queue-counters-trans-bytes-rate": "0",
"queue-counters-trans-packets": "1470816406",
"queue-counters-trans-packets-rate": "0",
"queue-number": "0",
},
{
"forwarding-class-name": "Platinum-FC",
"queue-counters-queued-bytes": "0",
"queue-counters-queued-bytes-rate": "0",
"queue-counters-queued-packets": "0",
"queue-counters-queued-packets-rate": "0",
"queue-counters-red-bytes": "0",
"queue-counters-red-bytes-high": "0",
"queue-counters-red-bytes-low": "0",
"queue-counters-red-bytes-medium-high": "0",
"queue-counters-red-bytes-medium-low": "0",
"queue-counters-red-bytes-rate": "0",
"queue-counters-red-bytes-rate-high": "0",
"queue-counters-red-bytes-rate-low": "0",
"queue-counters-red-bytes-rate-medium-high": "0",
"queue-counters-red-bytes-rate-medium-low": "0",
"queue-counters-red-packets": "0",
"queue-counters-red-packets-high": "0",
"queue-counters-red-packets-low": "0",
"queue-counters-red-packets-medium-high": "0",
"queue-counters-red-packets-medium-low": "0",
"queue-counters-red-packets-rate": "0",
"queue-counters-red-packets-rate-high": "0",
"queue-counters-red-packets-rate-low": "0",
"queue-counters-red-packets-rate-medium-high": "0",
"queue-counters-red-packets-rate-medium-low": "0",
"queue-counters-tail-drop-packets": "0",
"queue-counters-tail-drop-packets-rate": "0",
"queue-counters-trans-bytes": "0",
"queue-counters-trans-bytes-rate": "0",
"queue-counters-trans-packets": "0",
"queue-counters-trans-packets-rate": "0",
"queue-number": "1",
},
{
"forwarding-class-name": "Gold-FC",
"queue-counters-queued-bytes": "0",
"queue-counters-queued-bytes-rate": "0",
"queue-counters-queued-packets": "0",
"queue-counters-queued-packets-rate": "0",
"queue-counters-red-bytes": "0",
"queue-counters-red-bytes-high": "0",
"queue-counters-red-bytes-low": "0",
"queue-counters-red-bytes-medium-high": "0",
"queue-counters-red-bytes-medium-low": "0",
"queue-counters-red-bytes-rate": "0",
"queue-counters-red-bytes-rate-high": "0",
"queue-counters-red-bytes-rate-low": "0",
"queue-counters-red-bytes-rate-medium-high": "0",
"queue-counters-red-bytes-rate-medium-low": "0",
"queue-counters-red-packets": "0",
"queue-counters-red-packets-high": "0",
"queue-counters-red-packets-low": "0",
"queue-counters-red-packets-medium-high": "0",
"queue-counters-red-packets-medium-low": "0",
"queue-counters-red-packets-rate": "0",
"queue-counters-red-packets-rate-high": "0",
"queue-counters-red-packets-rate-low": "0",
"queue-counters-red-packets-rate-medium-high": "0",
"queue-counters-red-packets-rate-medium-low": "0",
"queue-counters-tail-drop-packets": "0",
"queue-counters-tail-drop-packets-rate": "0",
"queue-counters-trans-bytes": "0",
"queue-counters-trans-bytes-rate": "0",
"queue-counters-trans-packets": "0",
"queue-counters-trans-packets-rate": "0",
"queue-number": "2",
},
{
"forwarding-class-name": "Network-Control-FC",
"queue-counters-queued-bytes": "0",
"queue-counters-queued-bytes-rate": "0",
"queue-counters-queued-packets": "0",
"queue-counters-queued-packets-rate": "0",
"queue-counters-red-bytes": "0",
"queue-counters-red-bytes-high": "0",
"queue-counters-red-bytes-low": "0",
"queue-counters-red-bytes-medium-high": "0",
"queue-counters-red-bytes-medium-low": "0",
"queue-counters-red-bytes-rate": "0",
"queue-counters-red-bytes-rate-high": "0",
"queue-counters-red-bytes-rate-low": "0",
"queue-counters-red-bytes-rate-medium-high": "0",
"queue-counters-red-bytes-rate-medium-low": "0",
"queue-counters-red-packets": "0",
"queue-counters-red-packets-high": "0",
"queue-counters-red-packets-low": "0",
"queue-counters-red-packets-medium-high": "0",
"queue-counters-red-packets-medium-low": "0",
"queue-counters-red-packets-rate": "0",
"queue-counters-red-packets-rate-high": "0",
"queue-counters-red-packets-rate-low": "0",
"queue-counters-red-packets-rate-medium-high": "0",
"queue-counters-red-packets-rate-medium-low": "0",
"queue-counters-tail-drop-packets": "0",
"queue-counters-tail-drop-packets-rate": "0",
"queue-counters-trans-bytes": "0",
"queue-counters-trans-bytes-rate": "0",
"queue-counters-trans-packets": "0",
"queue-counters-trans-packets-rate": "0",
"queue-number": "3",
},
{
"forwarding-class-name": "Silver-FC",
"queue-counters-queued-bytes": "0",
"queue-counters-queued-bytes-rate": "0",
"queue-counters-queued-packets": "0",
"queue-counters-queued-packets-rate": "0",
"queue-counters-red-bytes": "0",
"queue-counters-red-bytes-high": "0",
"queue-counters-red-bytes-low": "0",
"queue-counters-red-bytes-medium-high": "0",
"queue-counters-red-bytes-medium-low": "0",
"queue-counters-red-bytes-rate": "0",
"queue-counters-red-bytes-rate-high": "0",
"queue-counters-red-bytes-rate-low": "0",
"queue-counters-red-bytes-rate-medium-high": "0",
"queue-counters-red-bytes-rate-medium-low": "0",
"queue-counters-red-packets": "0",
"queue-counters-red-packets-high": "0",
"queue-counters-red-packets-low": "0",
"queue-counters-red-packets-medium-high": "0",
"queue-counters-red-packets-medium-low": "0",
"queue-counters-red-packets-rate": "0",
"queue-counters-red-packets-rate-high": "0",
"queue-counters-red-packets-rate-low": "0",
"queue-counters-red-packets-rate-medium-high": "0",
"queue-counters-red-packets-rate-medium-low": "0",
"queue-counters-tail-drop-packets": "0",
"queue-counters-tail-drop-packets-rate": "0",
"queue-counters-trans-bytes": "0",
"queue-counters-trans-bytes-rate": "0",
"queue-counters-trans-packets": "0",
"queue-counters-trans-packets-rate": "0",
"queue-number": "4",
},
],
},
"snmp-index": "601",
}
}
}
|
9551e31b4075ef6a2ebdd31a7c212db3190f89f2
|
fa1ad2e2ac7e376fc7cb3b3a6e1bb88eed3e80be
|
/ai/modelscope/modelscope/pipelines/cv/image_salient_detection_pipeline.py
|
4a3eaa65a31cd8dba99636adedc686c7c8248938
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
alldatacenter/alldata
|
7bc7713c9f1d56ad6b8e59ea03206d1073b7e047
|
8d5f9a2d49ab8f9e85ccf058cb02c2fda287afc6
|
refs/heads/master
| 2023-08-05T07:32:25.442740
| 2023-08-03T13:17:24
| 2023-08-03T13:17:24
| 213,321,771
| 774
| 250
|
Apache-2.0
| 2023-09-06T17:35:32
| 2019-10-07T07:36:18
| null |
UTF-8
|
Python
| false
| false
| 1,424
|
py
|
image_salient_detection_pipeline.py
|
# Copyright (c) Alibaba, Inc. and its affiliates.
from typing import Any, Dict
from modelscope.metainfo import Pipelines
from modelscope.outputs import OutputKeys
from modelscope.pipelines.base import Input, Pipeline
from modelscope.pipelines.builder import PIPELINES
from modelscope.preprocessors import LoadImage
from modelscope.utils.constant import Tasks
@PIPELINES.register_module(
Tasks.semantic_segmentation, module_name=Pipelines.salient_detection)
class ImageSalientDetectionPipeline(Pipeline):
def __init__(self, model: str, **kwargs):
"""
model: model id on modelscope hub.
"""
super().__init__(model=model, auto_collate=False, **kwargs)
def preprocess(self, input: Input) -> Dict[str, Any]:
img = LoadImage.convert_to_ndarray(input)
img_h, img_w, _ = img.shape
img = self.model.preprocess(img)
result = {'img': img, 'img_w': img_w, 'img_h': img_h}
return result
def forward(self, input: Dict[str, Any]) -> Dict[str, Any]:
outputs = self.model.inference(input['img'])
result = {
'data': outputs,
'img_w': input['img_w'],
'img_h': input['img_h']
}
return result
def postprocess(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
data = self.model.postprocess(inputs)
outputs = {OutputKeys.MASKS: data}
return outputs
|
4db13306f96da38a04c3d13e9e67dedd454fd4f8
|
974d04d2ea27b1bba1c01015a98112d2afb78fe5
|
/test/legacy_test/test_conv2d_transpose_op.py
|
b7fe99f85c3c2a4183c286a41cfee73a67a01fbb
|
[
"Apache-2.0"
] |
permissive
|
PaddlePaddle/Paddle
|
b3d2583119082c8e4b74331dacc4d39ed4d7cff0
|
22a11a60e0e3d10a3cf610077a3d9942a6f964cb
|
refs/heads/develop
| 2023-08-17T21:27:30.568889
| 2023-08-17T12:38:22
| 2023-08-17T12:38:22
| 65,711,522
| 20,414
| 5,891
|
Apache-2.0
| 2023-09-14T19:20:51
| 2016-08-15T06:59:08
|
C++
|
UTF-8
|
Python
| false
| false
| 43,339
|
py
|
test_conv2d_transpose_op.py
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
import numpy as np
import paddle
from paddle import nn
paddle.enable_static()
from eager_op_test import OpTest, convert_float_to_uint16, get_numeric_gradient
from test_attribute_var import UnittestBase
from testsuite import create_op
from paddle import fluid
from paddle.fluid import Program, core, program_guard
def conv2dtranspose_forward_naive(input_, filter_, attrs):
padding_algorithm = attrs['padding_algorithm']
if padding_algorithm not in ["SAME", "VALID", "EXPLICIT"]:
raise ValueError(
"Unknown Attr(padding_algorithm): '%s'. "
"It can only be 'SAME' or 'VALID'." % str(padding_algorithm)
)
if attrs['data_format'] == 'NHWC':
input_ = np.transpose(input_, [0, 3, 1, 2])
in_n, in_c, in_h, in_w = input_.shape
f_c, f_out_c, f_h, f_w = filter_.shape
groups = attrs['groups']
assert in_c == f_c
out_c = f_out_c * groups
sub_in_c = in_c // groups
stride, pad, dilations = (
attrs['strides'],
attrs['paddings'],
attrs['dilations'],
)
# update pad and dilation
def _get_padding_with_SAME(input_shape, kernel_size, kernel_stride):
padding = []
for input_size, filter_size, stride_size in zip(
input_shape, kernel_size, kernel_stride
):
out_size = int((input_size + stride_size - 1) / stride_size)
pad_sum = np.max(
((out_size - 1) * stride_size + filter_size - input_size, 0)
)
pad_0 = int(pad_sum / 2)
pad_1 = int(pad_sum - pad_0)
padding.append(pad_0)
padding.append(pad_1)
return padding
ksize = filter_.shape[2:4]
if padding_algorithm == "VALID":
pad = [0, 0, 0, 0]
elif padding_algorithm == "SAME":
dilations = [1, 1]
input_data_shape = input_.shape[2:4]
pad = _get_padding_with_SAME(input_data_shape, ksize, stride)
pad_h_0, pad_h_1 = pad[0], pad[0]
pad_w_0, pad_w_1 = pad[1], pad[1]
if len(pad) == 4:
pad_h_0, pad_h_1 = pad[0], pad[1]
pad_w_0, pad_w_1 = pad[2], pad[3]
d_bolck_h = dilations[0] * (f_h - 1) + 1
d_bolck_w = dilations[1] * (f_w - 1) + 1
out_h = (in_h - 1) * stride[0] + d_bolck_h
out_w = (in_w - 1) * stride[1] + d_bolck_w
if 'output_size' in attrs:
output_size = attrs['output_size']
out_h = output_size[0] + pad_h_0 + pad_h_1
out_w = output_size[1] + pad_w_0 + pad_w_1
out_pad_h = 0
out_pad_w = 0
if 'output_padding' in attrs:
out_pad_h = attrs['output_padding'][0]
out_pad_w = attrs['output_padding'][1]
out = np.zeros(
(in_n, out_c, out_h + out_pad_h, out_w + out_pad_w), dtype=input_.dtype
)
for n in range(in_n):
for i in range(in_h):
for j in range(in_w):
for g in range(groups):
input_masked = input_[
n, g * sub_in_c : (g + 1) * sub_in_c, i, j
] # (c)
input_masked = np.reshape(input_masked, (sub_in_c, 1, 1))
input_masked = np.tile(input_masked, (1, f_h, f_w))
for k in range(f_out_c):
tmp_out = np.sum(
input_masked
* filter_[
g * sub_in_c : (g + 1) * sub_in_c, k, :, :
],
axis=0,
)
i1, i2 = i * stride[0], i * stride[0] + d_bolck_h
j1, j2 = j * stride[1], j * stride[1] + d_bolck_w
out[
n,
g * f_out_c + k,
i1 : i2 : dilations[0],
j1 : j2 : dilations[1],
] += tmp_out
out = out[
:,
:,
pad_h_0 : out_h - pad_h_1 + out_pad_h,
pad_w_0 : out_w - pad_w_1 + out_pad_w,
]
if attrs['data_format'] == 'NHWC':
out = np.transpose(out, [0, 2, 3, 1])
return out
def conv2dtranspose_wrapper(
x,
weight,
stride=1,
padding=0,
output_padding=[],
output_size=[],
padding_algorithm="EXPLICIT",
groups=1,
dilation=1,
data_format="NCDHW",
):
if data_format == "AnyLayout":
data_format = "NCDHW"
if padding_algorithm is None:
padding_algorithm = "EXPLICIT"
return paddle._C_ops.conv2d_transpose(
x,
weight,
stride,
padding,
output_padding,
output_size,
padding_algorithm,
groups,
dilation,
data_format,
)
class TestConv2DTransposeOp(OpTest):
def setUp(self):
# init as conv transpose
self.dtype = np.float32 if core.is_compiled_with_rocm() else np.float64
self.need_check_grad = True
self.is_test = False
self.use_cudnn = False
self.use_mkldnn = False
self.output_size = None
self.output_padding = []
self.data_format = "NCHW"
self.pad = [0, 0]
self.padding_algorithm = "EXPLICIT"
self.init_op_type()
self.init_test_case()
if self.is_bfloat16_op():
input_ = np.random.random(self.input_size).astype(np.float32)
filter_ = np.random.random(self.filter_size).astype(np.float32)
else:
input_ = np.random.random(self.input_size).astype(self.dtype)
filter_ = np.random.random(self.filter_size).astype(self.dtype)
self.attrs = {
'strides': self.stride,
'paddings': self.pad,
'padding_algorithm': self.padding_algorithm,
'groups': self.groups,
'dilations': self.dilations,
'use_cudnn': self.use_cudnn,
'is_test': self.is_test,
'use_mkldnn': self.use_mkldnn,
'data_format': self.data_format,
}
if self.output_size is not None:
self.attrs['output_size'] = self.output_size
if len(self.output_padding) > 0:
self.attrs['output_padding'] = self.output_padding
output = conv2dtranspose_forward_naive(input_, filter_, self.attrs)
if self.is_bfloat16_op():
output = output.astype(np.float32)
self.inputs = {
'Input': convert_float_to_uint16(input_),
'Filter': convert_float_to_uint16(filter_),
}
self.inputs_fp32 = {'Input': input_, 'Filter': filter_}
else:
output = output.astype(self.dtype)
self.inputs = {'Input': input_, 'Filter': filter_}
self.outputs = {'Output': output}
def test_check_output(self):
# TODO(wangzhongpu): support mkldnn op in dygraph mode
if self.use_cudnn:
place = core.CUDAPlace(0)
self.check_output_with_place(
place, atol=1e-5, check_dygraph=(not self.use_mkldnn)
)
else:
self.check_output(check_dygraph=(not self.use_mkldnn))
def test_check_grad_no_input(self):
if self.need_check_grad:
if self.use_cudnn:
place = core.CUDAPlace(0)
self.check_grad_with_place(
place,
['Filter'],
'Output',
max_relative_error=0.02,
no_grad_set={'Input'},
)
else:
self.check_grad(['Filter'], 'Output', no_grad_set={'Input'})
def test_check_grad_no_filter(self):
if self.need_check_grad:
if self.use_cudnn:
place = core.CUDAPlace(0)
self.check_grad_with_place(
place, ['Input'], 'Output', no_grad_set={'Filter'}
)
else:
self.check_grad(['Input'], 'Output', no_grad_set={'Filter'})
def test_check_grad(self):
if self.need_check_grad:
if self.use_cudnn:
place = core.CUDAPlace(0)
self.check_grad_with_place(
place,
{'Input', 'Filter'},
'Output',
max_relative_error=0.02,
)
else:
self.check_grad(
{'Input', 'Filter'}, 'Output', max_relative_error=0.02
)
def init_test_case(self):
self.pad = [0, 0]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
class TestWithSymmetricPad(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
class TestWithAsymmetricPad(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 0, 1, 2]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
class TestWithSAMEPad(TestConv2DTransposeOp):
def init_test_case(self):
self.stride = [2, 1]
self.dilations = [1, 2]
self.groups = 1
self.input_size = [2, 3, 6, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 4, 3]
self.padding_algorithm = 'SAME'
class TestWithVALIDPad(TestConv2DTransposeOp):
def init_test_case(self):
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
self.padding_algorithm = 'VALID'
class TestWithGroups(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 2
self.input_size = [2, 4, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 3, 3, 3]
class TestWithStride(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [2, 2]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
class TestWithDilation(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [2, 2]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
class TestWithEvenUpsample(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [2, 2]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.output_size = [14, 14]
self.input_size = [2, 3, 7, 7] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 5, 5]
class TestWithEvenUpsampleOutputPadding(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [2, 2]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.output_padding = [1, 1]
self.input_size = [2, 3, 7, 7] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 5, 5]
class Test_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [0, 0]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
class TestWithSymmetricPad_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
class TestWithAsymmetricPad_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 0, 1, 2]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
class TestWithGroups_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 2
self.input_size = [2, 5, 5, 4] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 3, 3, 3]
self.data_format = 'NHWC'
class TestWithStride_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [2, 2]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 5, 5, 3] # NCHW
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
class TestWithDilation_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [2, 2]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
class TestWithEvenUpsample_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [2, 2]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.output_size = [14, 14]
self.input_size = [2, 7, 7, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 5, 5]
self.data_format = 'NHWC'
class TestWithEvenUpsample_NHWC_output_padding(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [2, 2]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.output_padding = [1, 1]
self.input_size = [2, 7, 7, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 5, 5]
self.data_format = 'NHWC'
# ------------ test_cudnn ------------
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNN(TestConv2DTransposeOp):
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithSymmetricPad(TestWithSymmetricPad):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithAsymmetricPad(TestWithAsymmetricPad):
def init_test_case(self):
self.pad = [1, 0, 1, 2]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithSAMEPad(TestWithSAMEPad):
def init_test_case(self):
self.pad = [1, 0, 1, 2]
self.stride = [1, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithVALIDPad(TestWithVALIDPad):
def init_test_case(self):
self.pad = [1, 0, 1, 2]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithStride(TestWithStride):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithGroups(TestWithGroups):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 2
self.input_size = [2, 4, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 3, 3, 3]
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
# ------------ test_cudnn ------------
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithEvenUpsample(TestWithEvenUpsample):
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
# Please Don't remove the following code.
# Currently, CI use cudnn V5.0 which not support dilation conv.
# class TestCUDNNWithDilation(TestWithDilation):
# def init_test_case(self):
# self.pad = [1, 1]
# self.stride = [2, 2]
# self.dilations = [2, 2]
# self.input_size = [2, 3, 5, 5] # NCHW
# f_c = self.input_size[1]
# self.filter_size = [f_c, 6, 3, 3]
#
# def init_op_type(self):
# self.op_type = "conv2d_transpose"
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNN_NHWC(TestConv2DTransposeOp):
def init_test_case(self):
self.pad = [0, 0]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithSymmetricPad_NHWC(TestWithSymmetricPad):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithAsymmetricPad_NHWC(TestWithSymmetricPad):
def init_test_case(self):
self.pad = [1, 0, 2, 3]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithStride_NHWC(TestWithStride):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithGroups_NHWC(TestWithGroups):
def init_test_case(self):
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 2
self.input_size = [2, 5, 5, 4] # NCHW
f_c = self.input_size[-1]
self.filter_size = [f_c, 3, 3, 3]
self.data_format = 'NHWC'
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithEvenUpsample_NHWC(TestWithEvenUpsample):
def init_test_case(self):
self.pad = [2, 2]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.output_size = [14, 14]
self.input_size = [2, 7, 7, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 5, 5]
self.data_format = 'NHWC'
def init_op_type(self):
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNN_FP16(TestConv2DTransposeOp):
def init_test_case(self):
self.dtype = np.float16
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.need_check_grad = True
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
def test_check_output(self):
if self.use_cudnn:
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_output_with_place(
place, atol=0.02, check_dygraph=(not self.use_mkldnn)
)
else:
self.check_output(check_dygraph=(not self.use_mkldnn))
def test_check_grad_no_input(self):
if self.need_check_grad:
if self.use_cudnn:
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_grad_with_place(
place,
['Filter'],
'Output',
max_relative_error=0.02,
no_grad_set={'Input'},
)
else:
self.check_grad(['Filter'], 'Output', no_grad_set={'Input'})
def test_check_grad_no_filter(self):
if self.need_check_grad:
if self.use_cudnn:
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_grad_with_place(
place,
['Input'],
'Output',
max_relative_error=0.02,
no_grad_set={'Filter'},
)
else:
self.check_grad(['Input'], 'Output', no_grad_set={'Filter'})
def test_check_grad(self):
if self.need_check_grad:
if self.use_cudnn:
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
self.check_grad_with_place(
place,
{'Input', 'Filter'},
'Output',
max_relative_error=0.02,
)
else:
self.check_grad(
{'Input', 'Filter'}, 'Output', max_relative_error=0.02
)
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNN_NHWC_FP16(TestCUDNN_FP16):
def init_test_case(self):
self.dtype = np.float16
self.pad = [0, 0]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithSymmetricPad_NHWC_FP16(TestCUDNN_FP16):
def init_test_case(self):
self.dtype = np.float16
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithAsymmetricPad_NHWC_FP16(TestCUDNN_FP16):
def init_test_case(self):
self.dtype = np.float16
self.pad = [1, 0, 2, 3]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithStride_NHWC_FP16(TestCUDNN_FP16):
def init_test_case(self):
self.dtype = np.float16
self.pad = [1, 1]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithGroups_NHWC_FP16(TestCUDNN_FP16):
def init_test_case(self):
self.dtype = np.float16
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 2
self.input_size = [2, 5, 5, 4] # NCHW
f_c = self.input_size[-1]
self.filter_size = [f_c, 3, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestCUDNNWithEvenUpsample_NHWC_FP16(TestCUDNN_FP16):
def init_test_case(self):
self.dtype = np.float16
self.pad = [2, 2]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.output_size = [14, 14]
self.input_size = [2, 7, 7, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 5, 5]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda()
or not core.is_bfloat16_supported(core.CUDAPlace(0)),
"core is not compiled with CUDA and do not support bfloat16",
)
class TestCUDNN_BF16(TestConv2DTransposeOp):
def get_numeric_grad(self, place, check_name):
scope = core.Scope()
self._check_grad_helper()
op = create_op(
scope, self.op_type, self.inputs, self.outputs, self.attrs
)
return get_numeric_gradient(
place, scope, op, self.inputs_fp32, check_name, ['Output']
)
def init_test_case(self):
self.dtype = np.uint16
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 3, 5, 5] # NCHW
f_c = self.input_size[1]
self.filter_size = [f_c, 6, 3, 3]
def init_op_type(self):
self.need_check_grad = False
self.use_cudnn = True
self.op_type = "conv2d_transpose"
self.python_api = conv2dtranspose_wrapper
def test_check_output(self):
place = core.CUDAPlace(0)
self.check_output_with_place(
place, atol=0.02, check_dygraph=(not self.use_mkldnn)
)
def test_check_grad_no_input(self):
place = core.CUDAPlace(0)
numeric_grads = self.get_numeric_grad(place, 'Filter')
self.check_grad_with_place(
place,
['Filter'],
'Output',
max_relative_error=0.02,
no_grad_set={'Input'},
user_defined_grads=[numeric_grads],
)
def test_check_grad_no_filter(self):
place = core.CUDAPlace(0)
numeric_grads = self.get_numeric_grad(place, 'Input')
self.check_grad_with_place(
place,
['Input'],
'Output',
max_relative_error=0.02,
no_grad_set={'Filter'},
user_defined_grads=[numeric_grads],
)
@unittest.skipIf(
not core.is_compiled_with_cuda()
or not core.is_bfloat16_supported(core.CUDAPlace(0)),
"core is not compiled with CUDA and do not support bfloat16",
)
class TestCUDNN_NHWC_BF16(TestCUDNN_BF16):
def init_test_case(self):
self.dtype = np.uint16
self.pad = [0, 0]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 1
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda()
or not core.is_bfloat16_supported(core.CUDAPlace(0)),
"core is not compiled with CUDA and do not support bfloat16",
)
class TestCUDNNWithSymmetricPad_NHWC_BF16(TestCUDNN_BF16):
def init_test_case(self):
self.dtype = np.uint16
self.pad = [1, 1]
self.stride = [1, 1]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda()
or not core.is_bfloat16_supported(core.CUDAPlace(0)),
"core is not compiled with CUDA and do not support bfloat16",
)
class TestCUDNNWithAsymmetricPad_NHWC_BF16(TestCUDNN_BF16):
def init_test_case(self):
self.dtype = np.uint16
self.pad = [1, 0, 2, 3]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda()
or not core.is_bfloat16_supported(core.CUDAPlace(0)),
"core is not compiled with CUDA and do not support bfloat16",
)
class TestCUDNNWithStride_NHWC_BF16(TestCUDNN_BF16):
def init_test_case(self):
self.dtype = np.uint16
self.pad = [1, 1]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.input_size = [2, 5, 5, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda()
or not core.is_bfloat16_supported(core.CUDAPlace(0)),
"core is not compiled with CUDA and do not support bfloat16",
)
class TestCUDNNWithGroups_NHWC_BF16(TestCUDNN_BF16):
def init_test_case(self):
self.dtype = np.uint16
self.pad = [1, 1]
self.stride = [1, 1]
self.dilations = [1, 1]
self.groups = 2
self.input_size = [2, 5, 5, 4] # NCHW
f_c = self.input_size[-1]
self.filter_size = [f_c, 3, 3, 3]
self.data_format = 'NHWC'
@unittest.skipIf(
not core.is_compiled_with_cuda()
or not core.is_bfloat16_supported(core.CUDAPlace(0)),
"core is not compiled with CUDA and do not support bfloat16",
)
class TestCUDNNWithEvenUpsample_NHWC_BF16(TestCUDNN_BF16):
def init_test_case(self):
self.dtype = np.uint16
self.pad = [2, 2]
self.stride = [2, 2]
self.groups = 1
self.dilations = [1, 1]
self.output_size = [14, 14]
self.input_size = [2, 7, 7, 3] # NHWC
f_c = self.input_size[-1]
self.filter_size = [f_c, 6, 5, 5]
self.data_format = 'NHWC'
class TestConv2DTransposeAPI(unittest.TestCase):
def test_case1(self):
data1 = paddle.static.data(
name='data1', shape=[-1, 3, 5, 5], dtype='float32'
)
data2 = paddle.static.data(
name='data2', shape=[-1, 5, 5, 3], dtype='float32'
)
out1 = paddle.static.nn.conv2d_transpose(
input=data1,
groups=1,
num_filters=6,
filter_size=3,
data_format='NCHW',
)
out2 = paddle.static.nn.conv2d_transpose(
input=data2,
groups=1,
num_filters=6,
filter_size=3,
data_format='NHWC',
)
out3 = paddle.static.nn.conv2d_transpose(
input=data1,
groups=1,
num_filters=6,
filter_size=3,
padding=[[0, 0], [1, 1], [1, 1], [0, 0]],
data_format='NHWC',
)
out4 = paddle.static.nn.conv2d_transpose(
input=data1,
groups=3,
num_filters=6,
filter_size=3,
padding=[[0, 0], [0, 0], [2, 1], [0, 0]],
data_format='NCHW',
)
out5 = paddle.static.nn.conv2d_transpose(
input=data2,
groups=1,
num_filters=6,
filter_size=3,
padding='SAME',
data_format='NCHW',
)
out6 = paddle.static.nn.conv2d_transpose(
input=data1,
groups=1,
num_filters=6,
filter_size=3,
padding='VALID',
data_format='NHWC',
)
out7 = paddle.static.nn.conv2d_transpose(
input=data1,
groups=1,
num_filters=6,
output_size=[7, 7],
padding=[0, 0],
data_format='NHWC',
)
data1_np = np.random.random((2, 3, 5, 5)).astype("float32")
data2_np = np.random.random((2, 5, 5, 3)).astype("float32")
if core.is_compiled_with_cuda():
place = core.CUDAPlace(0)
else:
place = core.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
results = exe.run(
fluid.default_main_program(),
feed={"data1": data1_np, "data2": data2_np},
fetch_list=[out1, out2, out3, out4, out5, out6, out7],
return_numpy=True,
)
self.assertIsNotNone(results[0])
self.assertIsNotNone(results[1])
self.assertIsNotNone(results[2])
self.assertIsNotNone(results[3])
self.assertIsNotNone(results[4])
self.assertIsNotNone(results[5])
self.assertIsNotNone(results[6])
class TestConv2DTransposeOpException(unittest.TestCase):
def test_exception(self):
data = paddle.static.data(
name='data', shape=[-1, 3, 5, 5], dtype="float32"
)
def attr_data_format():
out = paddle.static.nn.conv2d_transpose(
input=data,
groups=1,
num_filters=6,
filter_size=3,
data_format="NCDHW",
)
self.assertRaises(ValueError, attr_data_format)
def attr_padding_str():
out = paddle.static.nn.conv2d_transpose(
input=data,
groups=1,
num_filters=6,
filter_size=3,
padding='Vald',
)
self.assertRaises(ValueError, attr_padding_str)
def attr_padding_list():
out = paddle.static.nn.conv2d_transpose(
input=data,
groups=1,
num_filters=6,
filter_size=3,
padding=[[1, 1], [1, 1], [0, 0], [0, 0]],
)
self.assertRaises(ValueError, attr_padding_list)
def attr_padding_with_data_format():
out = paddle.static.nn.conv2d_transpose(
input=data,
groups=1,
num_filters=6,
filter_size=3,
padding=[[1, 1], [0, 0], [0, 0], [1, 1]],
data_format='NHWC',
)
self.assertRaises(ValueError, attr_padding_with_data_format)
error_input = paddle.static.data(
name='error_data', shape=[-1, 1], dtype="float32"
)
def error_input_size():
out = paddle.static.nn.conv2d_transpose(
input=error_input, groups=1, num_filters=6, filter_size=3
)
self.assertRaises(ValueError, error_input_size)
def error_groups():
out = paddle.static.nn.conv2d_transpose(
input=data,
groups=0,
num_filters=6,
filter_size=3,
data_format='NHWC',
)
self.assertRaises(ValueError, error_groups)
def error_0_filter_number():
out = paddle.static.nn.conv2d_transpose(
input=data,
groups=1,
num_filters=0,
filter_size=3,
data_format='NCHW',
)
self.assertRaises(ValueError, error_0_filter_number)
class TestConv2DTransposeRepr(unittest.TestCase):
def test_case(self):
paddle.disable_static()
x_var = paddle.uniform((2, 4, 8, 8), dtype='float32', min=-1.0, max=1.0)
conv = nn.Conv2DTranspose(4, 6, (3, 3), output_padding=1, stride=2)
print(conv)
y_var = conv(x_var)
y_np = y_var.numpy()
self.assertIsNotNone(y_np)
paddle.enable_static()
class TestConv2dTranspose(unittest.TestCase):
def error_weight_input(self):
array = np.array([1], dtype=np.float32)
x = paddle.to_tensor(np.reshape(array, [1, 1, 1, 1]), dtype='float32')
weight = paddle.to_tensor(np.reshape(array, [1]), dtype='float32')
paddle.nn.functional.conv2d_transpose(x, weight, bias=0)
def test_type_error(self):
self.assertRaises(ValueError, self.error_weight_input)
class TestTensorOutputSize1(UnittestBase):
def init_info(self):
self.shapes = [[2, 3, 8, 8]]
self.save_path = os.path.join(self.temp_dir.name, self.path_prefix())
def path_prefix(self):
return 'conv2d_transpose_tensor_output_size1'
def var_prefix(self):
return "Vars["
def call_func(self, x):
w_var = paddle.randn((3, 6, 3, 3), dtype='float32')
output_size = paddle.assign([17])
out = paddle.paddle.nn.functional.conv2d_transpose(
x, w_var, stride=2, output_size=output_size
)
return out
def test_static(self):
main_prog = Program()
starup_prog = Program()
with program_guard(main_prog, starup_prog):
fc = paddle.nn.Linear(8, 8)
x = paddle.randn([2, 3, 8, 8])
x.stop_gradient = False
feat = fc(x)
out = self.call_func(feat)
sgd = paddle.optimizer.SGD()
sgd.minimize(paddle.mean(out))
self.assertTrue(self.var_prefix() in str(main_prog))
exe = paddle.static.Executor()
exe.run(starup_prog)
res = exe.run(fetch_list=[feat, out])
np.testing.assert_allclose(res[1].shape, (2, 6, 17, 17))
paddle.static.save_inference_model(
self.save_path, [x], [feat, out], exe
)
# Test for Inference Predictor
infer_outs = self.infer_prog()
np.testing.assert_allclose(infer_outs[1].shape, (2, 6, 17, 17))
class TestTensorOutputSize2(TestTensorOutputSize1):
def path_prefix(self):
return 'conv2d_transpose_tensor_output_size2'
def call_func(self, x):
w_var = paddle.randn((3, 6, 3, 3), dtype='float32')
output_size = [17, paddle.assign([17])]
out = paddle.paddle.nn.functional.conv2d_transpose(
x, w_var, stride=2, output_size=output_size
)
return out
class TestTensorOutputSize3(TestTensorOutputSize1):
def path_prefix(self):
return 'conv2d_transpose_tensor_output_size3'
def call_func(self, x):
w_var = paddle.randn((3, 6, 3, 3), dtype='float32')
output_size = paddle.assign([17])
out = paddle.static.nn.conv2d_transpose(
x, num_filters=6, output_size=output_size, filter_size=3, stride=2
)
return out
class TestTensorOutputSize4(TestTensorOutputSize1):
def path_prefix(self):
return 'conv2d_transpose_tensor_output_size4'
def call_func(self, x):
output_size = [17, paddle.assign([17])]
out = paddle.static.nn.conv2d_transpose(
x, num_filters=6, output_size=output_size, filter_size=3, stride=2
)
return out
if __name__ == '__main__':
unittest.main()
|
3dc0c7f40aa991943b01d562630fe0b43244de02
|
d60dcdd392e32cd6272f7f364e5b4d556d6b84fb
|
/dtaidistance/util_numpy.py
|
eec423370f94515146f545cd56ff64098cdb1920
|
[
"Apache-2.0"
] |
permissive
|
wannesm/dtaidistance
|
7e39ba5086129f330a297d82af1b25854c016f74
|
d914ab85021f67ff1c58d45727e0e4844ad26d8e
|
refs/heads/master
| 2023-08-31T06:21:45.836316
| 2023-08-01T12:51:50
| 2023-08-01T12:53:50
| 80,764,246
| 943
| 188
|
NOASSERTION
| 2023-05-23T14:44:06
| 2017-02-02T20:11:03
|
Python
|
UTF-8
|
Python
| false
| false
| 3,890
|
py
|
util_numpy.py
|
from contextlib import ContextDecorator
import os
import logging
from .exceptions import NumpyException, ScipyException
logger = logging.getLogger("be.kuleuven.dtai.distance")
try:
import numpy as np
except ImportError:
np = None
try:
import scipy
except ImportError:
scipy = None
def test_without_numpy():
if "DTAIDISTANCE_TESTWITHOUTNUMPY" in os.environ and os.environ["DTAIDISTANCE_TESTWITHOUTNUMPY"] == "1":
return True
return False
def test_without_scipy():
if test_without_numpy():
return True
if "DTAIDISTANCE_TESTWITHOUTSCIPY" in os.environ and os.environ["DTAIDISTANCE_TESTWITHOUTSCIPY"] == "1":
return True
return False
def verify_np_array(seq):
if np is not None:
if isinstance(seq, (np.ndarray, np.generic)):
if not seq.data.c_contiguous:
logger.debug("Warning: Sequence 1 passed to method distance is not C-contiguous. " +
"The sequence will be copied.")
seq = seq.copy(order='C')
return seq
class NumpyStub:
def __init__(self, testwithoutnp):
self.testwithoutnp = testwithoutnp
def __getattr__(self, name):
if self.testwithoutnp or np is None:
raise NumpyException("Numpy excepted to be available for test. "
"Set DTAIDISTANCE_TESTWITHOUTNUMPY=1 to test without Numpy.")
return getattr(np, name)
class ScipyStub:
def __init__(self, testwithoutscipy):
self.testwithoutscipy = testwithoutscipy
def __getattr__(self, name):
if self.testwithoutscipy or scipy is None:
raise ScipyException("Scipy excepted to be available for test. "
"Set DTAIDISTANCE_TESTWITHOUTSCIPY=1 to test without Scipy.")
return getattr(scipy, name)
def import_signal(self):
from scipy import signal
return signal
class test_uses_numpy(ContextDecorator):
def __init__(self, strict=True):
"""Context to construct tests that use the optional dependency Numpy.
:param strict: Throw error if Numpy is not used (to remove context where not necessary)
:return: Numpy stub
"""
self.strict = strict
self.testwithoutnp = test_without_numpy()
def __enter__(self):
return NumpyStub(self.testwithoutnp)
def __exit__(self, *exc):
if self.testwithoutnp:
if exc[0] is None:
if self.strict and self.testwithoutnp:
# If no NumpyException is thrown, this test did not use Numpy because no error was thrown
# and should not use this decorator
raise Exception("Test does not use Numpy, remove decorator!")
else:
return
if issubclass(exc[0], NumpyException):
return True
class test_uses_scipy(ContextDecorator):
def __init__(self, strict=True):
"""Context to construct tests that use the optional dependency Scipy.
:param strict: Throw error if Scipy is not used (to remove context where not necessary)
:return: Numpy stub
"""
self.strict = strict
self.testwithoutscipy = test_without_scipy()
def __enter__(self):
return ScipyStub(self.testwithoutscipy)
def __exit__(self, *exc):
if self.testwithoutscipy:
if exc[0] is None:
if self.strict and self.testwithoutscipy:
# If no ScipyException is thrown, this test did not use Scipy because no error was thrown
# and should not use this decorator
raise Exception("Test does not use Scipy, remove decorator!")
else:
return
if issubclass(exc[0], ScipyException):
return True
|
412c91ae1974eee045bfa4a324ef869e8c1034ac
|
828282d212d33667f05818d144ce1534db0d1584
|
/src/imitation/algorithms/base.py
|
fd33c5f40668079b3450a9210064242e3a7c7823
|
[
"MIT"
] |
permissive
|
HumanCompatibleAI/imitation
|
bdbb167d8e1abc0fb629d18ac88121a096881d43
|
5b0b531bdf6fdfcaab93ff18f454aa1b54bb4355
|
refs/heads/master
| 2023-08-17T08:29:01.192792
| 2023-08-11T07:57:29
| 2023-08-11T07:57:29
| 160,906,482
| 936
| 203
|
MIT
| 2023-09-14T12:04:40
| 2018-12-08T05:15:33
|
Python
|
UTF-8
|
Python
| false
| false
| 11,334
|
py
|
base.py
|
"""Module of base classes and helper methods for imitation learning algorithms."""
import abc
from typing import (
Any,
Generic,
Iterable,
Iterator,
Mapping,
Optional,
TypeVar,
Union,
cast,
)
import torch.utils.data as th_data
from stable_baselines3.common import policies
from imitation.data import rollout, types
from imitation.util import logger as imit_logger
from imitation.util import util
class BaseImitationAlgorithm(abc.ABC):
"""Base class for all imitation learning algorithms."""
_logger: imit_logger.HierarchicalLogger
"""Object to log statistics and natural language messages to."""
allow_variable_horizon: bool
"""If True, allow variable horizon trajectories; otherwise error if detected."""
_horizon: Optional[int]
"""Horizon of trajectories seen so far (None if no trajectories seen)."""
def __init__(
self,
*,
custom_logger: Optional[imit_logger.HierarchicalLogger] = None,
allow_variable_horizon: bool = False,
):
"""Creates an imitation learning algorithm.
Args:
custom_logger: Where to log to; if None (default), creates a new logger.
allow_variable_horizon: If False (default), algorithm will raise an
exception if it detects trajectories of different length during
training. If True, overrides this safety check. WARNING: variable
horizon episodes leak information about the reward via termination
condition, and can seriously confound evaluation. Read
https://imitation.readthedocs.io/en/latest/getting-started/variable-horizon.html
before overriding this.
"""
self._logger = custom_logger or imit_logger.configure()
self.allow_variable_horizon = allow_variable_horizon
if allow_variable_horizon:
self.logger.warn(
"Running with `allow_variable_horizon` set to True. "
"Some algorithms are biased towards shorter or longer "
"episodes, which may significantly confound results. "
"Additionally, even unbiased algorithms can exploit "
"the information leak from the termination condition, "
"producing spuriously high performance. See "
"https://imitation.readthedocs.io/en/latest/getting-started/"
"variable-horizon.html for more information.",
)
self._horizon = None
@property
def logger(self) -> imit_logger.HierarchicalLogger:
return self._logger
@logger.setter
def logger(self, value: imit_logger.HierarchicalLogger) -> None:
self._logger = value
def _check_fixed_horizon(self, horizons: Iterable[int]) -> None:
"""Checks that episode lengths in `horizons` are fixed and equal to prior calls.
If algorithm is safe to use with variable horizon episodes (e.g. behavioral
cloning), then just don't call this method.
Args:
horizons: An iterable sequence of episode lengths.
Raises:
ValueError: The length of trajectories in trajs differs from one
another, or from trajectory lengths in previous calls to this method.
"""
if self.allow_variable_horizon: # skip check -- YOLO
return
# horizons = all horizons seen so far (including trajs)
horizons = set(horizons)
if self._horizon is not None:
horizons.add(self._horizon)
if len(horizons) > 1:
raise ValueError(
f"Episodes of different length detected: {horizons}. "
"Variable horizon environments are discouraged -- "
"termination conditions leak information about reward. See "
"https://imitation.readthedocs.io/en/latest/getting-started/"
"variable-horizon.html for more information. "
"If you are SURE you want to run imitation on a "
"variable horizon task, then please pass in the flag: "
"`allow_variable_horizon=True`.",
)
elif len(horizons) == 1:
self._horizon = horizons.pop()
def __getstate__(self):
state = self.__dict__.copy()
# logger can't be pickled as it depends on open files
del state["_logger"]
return state
def __setstate__(self, state):
self.__dict__.update(state)
# callee should modify self.logger directly if they want to override this
self.logger = state.get("_logger") or imit_logger.configure()
TransitionKind = TypeVar("TransitionKind", bound=types.TransitionsMinimal)
AnyTransitions = Union[
Iterable[types.Trajectory],
Iterable[types.TransitionMapping],
types.TransitionsMinimal,
]
class DemonstrationAlgorithm(BaseImitationAlgorithm, Generic[TransitionKind]):
"""An algorithm that learns from demonstration: BC, IRL, etc."""
def __init__(
self,
*,
demonstrations: Optional[AnyTransitions],
custom_logger: Optional[imit_logger.HierarchicalLogger] = None,
allow_variable_horizon: bool = False,
):
"""Creates an algorithm that learns from demonstrations.
Args:
demonstrations: Demonstrations from an expert (optional). Transitions
expressed directly as a `types.TransitionsMinimal` object, a sequence
of trajectories, or an iterable of transition batches (mappings from
keywords to arrays containing observations, etc).
custom_logger: Where to log to; if None (default), creates a new logger.
allow_variable_horizon: If False (default), algorithm will raise an
exception if it detects trajectories of different length during
training. If True, overrides this safety check. WARNING: variable
horizon episodes leak information about the reward via termination
condition, and can seriously confound evaluation. Read
https://imitation.readthedocs.io/en/latest/getting-started/variable-horizon.html
before overriding this.
"""
super().__init__(
custom_logger=custom_logger,
allow_variable_horizon=allow_variable_horizon,
)
if demonstrations is not None:
self.set_demonstrations(demonstrations)
@abc.abstractmethod
def set_demonstrations(self, demonstrations: AnyTransitions) -> None:
"""Sets the demonstration data.
Changing the demonstration data on-demand can be useful for
interactive algorithms like DAgger.
Args:
demonstrations: Either a Torch `DataLoader`, any other iterator that
yields dictionaries containing "obs" and "acts" Tensors or NumPy arrays,
`TransitionKind` instance, or a Sequence of Trajectory objects.
"""
@property
@abc.abstractmethod
def policy(self) -> policies.BasePolicy:
"""Returns a policy imitating the demonstration data."""
class _WrappedDataLoader:
"""Wraps a data loader (batch iterable) and checks for specified batch size."""
def __init__(
self,
data_loader: Iterable[types.TransitionMapping],
expected_batch_size: int,
):
"""Builds _WrappedDataLoader.
Args:
data_loader: The data loader (batch iterable) to wrap.
expected_batch_size: The batch size to check for.
"""
self.data_loader = data_loader
self.expected_batch_size = expected_batch_size
def __iter__(self) -> Iterator[types.TransitionMapping]:
"""Yields data from `self.data_loader`, checking `self.expected_batch_size`.
Yields:
Identity -- yields same batches as from `self.data_loader`.
Raises:
ValueError: `self.data_loader` returns a batch of size not equal to
`self.expected_batch_size`.
"""
for batch in self.data_loader:
if len(batch["obs"]) != self.expected_batch_size:
raise ValueError(
f"Expected batch size {self.expected_batch_size} "
f"!= {len(batch['obs'])} = len(batch['obs'])",
)
if len(batch["acts"]) != self.expected_batch_size:
raise ValueError(
f"Expected batch size {self.expected_batch_size} "
f"!= {len(batch['acts'])} = len(batch['acts'])",
)
yield batch
def make_data_loader(
transitions: AnyTransitions,
batch_size: int,
data_loader_kwargs: Optional[Mapping[str, Any]] = None,
) -> Iterable[types.TransitionMapping]:
"""Converts demonstration data to Torch data loader.
Args:
transitions: Transitions expressed directly as a `types.TransitionsMinimal`
object, a sequence of trajectories, or an iterable of transition
batches (mappings from keywords to arrays containing observations, etc).
batch_size: The size of the batch to create. Does not change the batch size
if `transitions` is already an iterable of transition batches.
data_loader_kwargs: Arguments to pass to `th_data.DataLoader`.
Returns:
An iterable of transition batches.
Raises:
ValueError: if `transitions` is an iterable over transition batches with batch
size not equal to `batch_size`; or if `transitions` is transitions or a
sequence of trajectories with total timesteps less than `batch_size`.
TypeError: if `transitions` is an unsupported type.
"""
if batch_size <= 0:
raise ValueError(f"batch_size={batch_size} must be positive.")
if isinstance(transitions, Iterable):
# Inferring the correct type here is difficult with generics.
(
first_item,
transitions,
) = util.get_first_iter_element( # type: ignore[assignment]
transitions,
)
if isinstance(first_item, types.Trajectory):
transitions = cast(Iterable[types.Trajectory], transitions)
transitions = rollout.flatten_trajectories(list(transitions))
if isinstance(transitions, types.TransitionsMinimal):
if len(transitions) < batch_size:
raise ValueError(
f"Number of transitions in `demonstrations` {len(transitions)} "
f"is smaller than batch size {batch_size}.",
)
kwargs: Mapping[str, Any] = {
"shuffle": True,
"drop_last": True,
**(data_loader_kwargs or {}),
}
return th_data.DataLoader(
transitions,
batch_size=batch_size,
collate_fn=types.transitions_collate_fn,
**kwargs,
)
elif isinstance(transitions, Iterable):
# Safe to ignore this error since we've already converted Iterable[Trajectory]
# `transitions` into Iterable[TransitionMapping]
return _WrappedDataLoader(transitions, batch_size) # type: ignore[arg-type]
else:
raise TypeError(f"`demonstrations` unexpected type {type(transitions)}")
|
0e0280487ab087581bbab328481cfdd5398c93a6
|
6416b746ee71d897789eab1e450000831674dbd0
|
/src/otx/algorithms/classification/adapters/mmcls/models/classifiers/semisl_multilabel_classifier.py
|
1a7730543846475f1e59e026b950f61ad108c160
|
[
"Apache-2.0"
] |
permissive
|
openvinotoolkit/training_extensions
|
c921f83ad52311af96ff45ae0b88d0aecddd855b
|
80454808b38727e358e8b880043eeac0f18152fb
|
refs/heads/develop
| 2023-08-31T06:29:07.229339
| 2023-08-31T01:57:26
| 2023-08-31T01:57:26
| 154,843,614
| 397
| 230
|
Apache-2.0
| 2023-09-14T06:17:01
| 2018-10-26T14:02:29
|
Python
|
UTF-8
|
Python
| false
| false
| 1,747
|
py
|
semisl_multilabel_classifier.py
|
"""Module for defining a semi-supervised multi-label classifier using mmcls."""
# Copyright (C) 2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
from mmcls.models.builder import CLASSIFIERS
from otx.algorithms.common.utils.logger import get_logger
from .custom_image_classifier import CustomImageClassifier
logger = get_logger()
@CLASSIFIERS.register_module()
class SemiSLMultilabelClassifier(CustomImageClassifier):
"""Semi-SL Multilabel Classifier which supports unlabeled data by overriding forward_train."""
def forward_train(self, img, gt_label, **kwargs):
"""Data is transmitted as a classifier training function.
Args:
img (list[Tensor]): List of tensors of shape (1, C, H, W)
Typically these should be mean centered and std scaled.
gt_label (Tensor): Ground truth labels for the input labeled images
kwargs (keyword arguments): Specific to concrete implementation
"""
if "extra_0" not in kwargs:
raise ValueError("'extra_0' does not exist in the dataset")
if "img_strong" not in kwargs:
raise ValueError("'img_strong' does not exist in the dataset")
target = gt_label.squeeze()
unlabeled_data = kwargs["extra_0"]
x = {}
x["labeled_weak"] = self.extract_feat(img)
x["labeled_strong"] = self.extract_feat(kwargs["img_strong"])
img_uw = unlabeled_data["img"]
x["unlabeled_weak"] = self.extract_feat(img_uw)
img_us = unlabeled_data["img_strong"]
x["unlabeled_strong"] = self.extract_feat(img_us)
losses = dict()
loss = self.head.forward_train(x, target)
losses.update(loss)
return losses
|
1f788ff838f411b15c0a5058bb07c86eccf38462
|
028b29b2e476ee042c9e8fdc9a8cde4cdd4562f4
|
/tests/test_continuous_column.py
|
8939bdf1f7f64342d21f21465d5c86bc0e414e6f
|
[
"Apache-2.0"
] |
permissive
|
Rambatino/CHAID
|
fba71f001e5d0e623137160cc14723fc5d2bcea9
|
b7c13b28d73e88b0b11dde5d4155775a234063e7
|
refs/heads/master
| 2023-08-14T14:16:48.761784
| 2023-07-25T15:28:09
| 2023-07-25T15:28:09
| 56,257,335
| 166
| 59
|
Apache-2.0
| 2023-09-07T10:06:04
| 2016-04-14T17:34:11
|
Python
|
UTF-8
|
Python
| false
| false
| 1,073
|
py
|
test_continuous_column.py
|
"""
Testing module for the class ContinuousColumn
"""
from unittest import TestCase
import numpy as np
from numpy import nan
from setup_tests import list_ordered_equal, CHAID
import pytest
def test_chaid_vector_converts_strings():
"""
Check that error raised when string column supplied
"""
with pytest.raises(ValueError) as excinfo:
vector = CHAID.ContinuousColumn(np.array(['2', '4']))
assert excinfo.value.message == 'Must only pass numerical values to create continuous column'
def test_chaid_vector_with_dtype_object():
"""
Check that error raised when object column supplied
"""
with pytest.raises(ValueError) as excinfo:
vector = CHAID.ContinuousColumn(np.array(['2', '4'], dtype="object"))
assert excinfo.value.message == 'Must only pass numerical values to create continuous column'
def test_nans_filled_with_zero():
"""
Check that nans are filled with zero
"""
arr = np.array([np.nan, 1., 2., 3.])
assert (CHAID.ContinuousColumn(arr).arr == np.array([0., 1., 2., 3.])).all()
|
4758686da346ec965f03f1ab9641b4c673cc584b
|
8ed15d43652dbcab332c78923da416b91b139323
|
/python/fedml/cli/model_deployment/device_client_runner.py
|
22c7756cdc89322aba61aa94001383905a2f406f
|
[
"Apache-2.0"
] |
permissive
|
FedML-AI/FedML
|
74d144038c9de4a0621eb328d00987abac35e2d1
|
b436fbd95cbb62f6c58d2233d7affa0f62cb1817
|
refs/heads/master
| 2023-08-31T22:15:39.786371
| 2023-08-24T03:41:58
| 2023-08-24T03:41:58
| 281,519,510
| 3,197
| 807
|
Apache-2.0
| 2023-09-14T02:14:20
| 2020-07-21T22:41:25
|
Python
|
UTF-8
|
Python
| false
| false
| 47,484
|
py
|
device_client_runner.py
|
import json
import logging
import multiprocessing
import sys
from multiprocessing import Process
import os
import platform
import shutil
import subprocess
import threading
import time
import traceback
import urllib
import uuid
import zipfile
from urllib.parse import urlparse
import requests
from fedml import mlops
from fedml.cli.model_deployment.device_model_msg_object import FedMLModelMsgObject
from fedml.core.distributed.communication.s3.remote_storage import S3Storage
from ..comm_utils import sys_utils
from ...core.mlops.mlops_runtime_log import MLOpsRuntimeLog
from ...core.distributed.communication.mqtt.mqtt_manager import MqttManager
from ...cli.comm_utils.yaml_utils import load_yaml_config
from ...cli.model_deployment.device_client_constants import ClientConstants
from ...core.mlops.mlops_metrics import MLOpsMetrics
from ...core.mlops.mlops_configs import MLOpsConfigs
from ...core.mlops.mlops_runtime_log_daemon import MLOpsRuntimeLogDaemon
from ...core.mlops.mlops_status import MLOpsStatus
from ..comm_utils.sys_utils import get_sys_runner_info, get_python_program
from .device_model_deployment import start_deployment
from .device_client_data_interface import FedMLClientDataInterface
from ...serving.fedml_client import FedMLModelServingClient
from ...core.mlops.mlops_utils import MLOpsUtils
class RunnerError(Exception):
""" Runner failed. """
pass
class FedMLClientRunner:
FEDML_BOOTSTRAP_RUN_OK = "[FedML]Bootstrap Finished"
def __init__(self, args, edge_id=0, request_json=None, agent_config=None, run_id=0):
self.run_process_event = None
self.device_status = None
self.current_training_status = None
self.mqtt_mgr = None
self.client_mqtt_mgr = None
self.client_mqtt_is_connected = False
self.client_mqtt_lock = None
self.edge_id = edge_id
self.run_id = run_id
self.unique_device_id = None
self.process = None
self.args = args
self.request_json = request_json
self.version = args.version
self.device_id = args.device_id
self.cur_dir = os.path.split(os.path.realpath(__file__))[0]
if args.current_running_dir is not None:
self.cur_dir = args.current_running_dir
self.sudo_cmd = ""
self.is_mac = False
if platform.system() == "Darwin":
self.is_mac = True
self.agent_config = agent_config
self.fedml_data_base_package_dir = os.path.join("/", "fedml", "data")
self.fedml_data_local_package_dir = os.path.join("/", "fedml", "fedml-package", "fedml", "data")
self.fedml_data_dir = self.fedml_data_base_package_dir
self.fedml_config_dir = os.path.join("/", "fedml", "conf")
self.FEDML_DYNAMIC_CONSTRAIN_VARIABLES = {}
self.mlops_metrics = None
self.client_active_list = dict()
self.infer_host = "127.0.0.1"
self.model_is_from_open = False
self.model_runner_mapping = dict()
self.ntp_offset = MLOpsUtils.get_ntp_offset()
def unzip_file(self, zip_file, unzip_file_path) -> str:
unziped_file_name = ""
if zipfile.is_zipfile(zip_file):
with zipfile.ZipFile(zip_file, "r") as zipf:
zipf.extractall(unzip_file_path)
unziped_file_name = zipf.namelist()[0]
return unziped_file_name
def retrieve_and_unzip_package(self, package_name, package_url):
local_package_path = ClientConstants.get_model_package_dir()
if not os.path.exists(local_package_path):
os.makedirs(local_package_path, exist_ok=True)
local_package_file = "{}.zip".format(os.path.join(local_package_path, package_name))
if os.path.exists(local_package_file):
os.remove(local_package_file)
urllib.request.urlretrieve(package_url, filename=None, reporthook=self.package_download_progress) # Do NOT rename, use the filename from MLOps
unzip_package_path = ClientConstants.get_model_dir()
self.fedml_packages_base_dir = unzip_package_path
try:
shutil.rmtree(
os.path.join(unzip_package_path, package_name), ignore_errors=True
)
except Exception as e:
pass
logging.info("local_package_file {}, unzip_package_path {}".format(
local_package_file, unzip_package_path))
package_name = self.unzip_file(local_package_file, unzip_package_path) # Using unziped folder name
unzip_package_path = os.path.join(unzip_package_path, package_name)
model_bin_file = os.path.join(unzip_package_path, "fedml_model.bin")
return unzip_package_path, model_bin_file
def retrieve_binary_model_file(self, package_name, package_url):
local_package_path = ClientConstants.get_model_package_dir()
if not os.path.exists(local_package_path):
os.makedirs(local_package_path, exist_ok=True)
unzip_package_path = ClientConstants.get_model_dir()
local_package_file = "{}".format(os.path.join(local_package_path, package_name))
if os.path.exists(local_package_file):
os.remove(local_package_file)
urllib.request.urlretrieve(package_url, local_package_file, reporthook=self.package_download_progress)
unzip_package_path = os.path.join(unzip_package_path, package_name)
if not os.path.exists(unzip_package_path):
os.makedirs(unzip_package_path, exist_ok=True)
dst_model_file = os.path.join(unzip_package_path, package_name)
if os.path.exists(local_package_file):
shutil.copy(local_package_file, dst_model_file)
return unzip_package_path, dst_model_file
def package_download_progress(self, count, blksize, filesize):
self.check_runner_stop_event()
downloaded = count * blksize
downloaded = filesize if downloaded > filesize else downloaded
progress = (downloaded / filesize * 100) if filesize != 0 else 0
progress_int = int(progress)
downloaded_kb = format(downloaded / 1024, '.2f')
# since this hook funtion is stateless, we need a state to avoid printing progress repeatly
if count == 0:
self.prev_download_progress = 0
if progress_int != self.prev_download_progress and progress_int % 5 == 0:
self.prev_download_progress = progress_int
logging.info("package downloaded size {} KB, progress {}%".format(downloaded_kb, progress_int))
def build_dynamic_constrain_variables(self, run_id, run_config):
pass
def update_local_fedml_config(self, run_id, model_config, model_config_parameters):
model_name = model_config["model_name"]
model_storage_url = model_config["model_storage_url"]
scale_min = model_config["instance_scale_min"]
scale_max = model_config["instance_scale_max"]
inference_engine = model_config.get("inference_engine", 0)
inference_end_point_id = run_id
# Retrieve model package or model binary file.
if self.model_is_from_open:
unzip_package_path, model_bin_file = self.retrieve_binary_model_file(model_name, model_storage_url)
else:
unzip_package_path, model_bin_file = self.retrieve_and_unzip_package(model_name, model_storage_url)
# Load the config to memory
package_conf_object = {}
fedml_local_config_file = os.path.join(unzip_package_path, "fedml_model_config.yaml")
if model_config_parameters is not None:
package_conf_object = model_config_parameters
with open(fedml_local_config_file, 'w') as f:
json.dump(package_conf_object, f)
else:
if os.path.exists(fedml_local_config_file):
package_conf_object = load_yaml_config(fedml_local_config_file)
return unzip_package_path, model_bin_file, package_conf_object
def build_dynamic_args(self, run_config, package_conf_object, base_dir):
pass
def download_model_package(self, package_name, package_url):
# Copy config file from the client
unzip_package_path = self.retrieve_and_unzip_package(
package_name, package_url
)
return unzip_package_path
def run(self, process_event):
os.environ['PYTHONWARNINGS'] = 'ignore:semaphore_tracker:UserWarning'
os.environ.setdefault('PYTHONWARNINGS', 'ignore:semaphore_tracker:UserWarning')
self.run_process_event = process_event
try:
MLOpsUtils.set_ntp_offset(self.ntp_offset)
self.setup_client_mqtt_mgr()
self.run_impl()
except RunnerError:
logging.info("Runner stopped.")
self.reset_devices_status(self.edge_id, ClientConstants.MSG_MLOPS_CLIENT_STATUS_KILLED)
except Exception as e:
logging.error("Runner exits with exceptions. {}".format(traceback.format_exc()))
self.reset_devices_status(self.edge_id, ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED)
MLOpsRuntimeLogDaemon.get_instance(self.args).stop_log_processor(self.run_id, self.edge_id)
if self.mlops_metrics is not None:
self.mlops_metrics.stop_sys_perf()
time.sleep(3)
sys_utils.cleanup_all_fedml_client_login_processes(ClientConstants.CLIENT_LOGIN_PROGRAM,
clean_process_group=False)
sys.exit(1)
finally:
logging.info("Release resources.")
MLOpsRuntimeLogDaemon.get_instance(self.args).stop_log_processor(self.run_id, self.edge_id)
if self.mlops_metrics is not None:
self.mlops_metrics.stop_sys_perf()
time.sleep(3)
self.release_client_mqtt_mgr()
def check_runner_stop_event(self):
if self.run_process_event.is_set():
logging.info("Received stopping event.")
raise RunnerError("Runner stopped")
def inference_run(self):
run_id, end_point_name, token, user_id, user_name, device_ids, device_objs, model_config, model_name, \
model_id, model_storage_url, scale_min, scale_max, inference_engine, model_is_from_open, \
inference_end_point_id, use_gpu, memory_size, model_version = self.parse_model_run_params(self.request_json)
inference_client = FedMLModelServingClient(self.args,
end_point_name,
model_name,
model_version,
inference_request=self.request_json)
inference_client.run()
def run_impl(self):
run_id = self.request_json["end_point_id"]
end_point_name = self.request_json["end_point_name"]
token = self.request_json["token"]
user_id = self.request_json["user_id"]
user_name = self.request_json["user_name"]
device_ids = self.request_json["device_ids"]
device_objs = self.request_json["device_objs"]
model_config = self.request_json["model_config"]
model_name = model_config["model_name"]
model_id = model_config["model_id"]
model_version = model_config["model_version"]
model_storage_url = model_config["model_storage_url"]
scale_min = model_config["instance_scale_min"]
scale_max = model_config["instance_scale_max"]
inference_engine = model_config.get("inference_engine", ClientConstants.INFERENCE_ENGINE_TYPE_INT_TRITON)
self.model_is_from_open = True if model_config.get("is_from_open", 0) == 1 else False
if self.model_is_from_open:
model_net_url = model_config["model_net_url"]
model_config_parameters = self.request_json["parameters"]
inference_end_point_id = run_id
use_gpu = "gpu" # TODO: Get GPU from device infos
memory_size = "4096m" # TODO: Get Memory size for each instance
self.check_runner_stop_event()
logging.info("model deployment request: {}".format(self.request_json))
MLOpsRuntimeLog.get_instance(self.args).init_logs(show_stdout_log=True)
# Initiate an FedMLInferenceClient object
# client_runner = FedMLClientRunner(
# self.args, edge_id=self.edge_id, run_id=self.run_id, request_json=self.request_json,
# agent_config=self.agent_config
# )
# inference_process = Process(target=client_runner.inference_run)
# inference_process.start()
self.mlops_metrics.report_client_training_status(self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_INITIALIZING,
is_from_model=True,
running_json=json.dumps(self.request_json))
self.mlops_metrics.report_client_training_status(self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_RUNNING,
is_from_model=True)
self.check_runner_stop_event()
# update local config with real time parameters from server and dynamically replace variables value
logging.info("download and unzip model to local...")
unzip_package_path, model_bin_file, fedml_config_object = \
self.update_local_fedml_config(run_id, model_config, model_config_parameters)
if unzip_package_path is None or fedml_config_object is None:
logging.info("failed to update local fedml config.")
self.check_runner_stop_event()
self.cleanup_run_when_starting_failed()
self.mlops_metrics.client_send_exit_train_msg(run_id, self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED)
return
logging.info("check downloaded packages...")
if not os.path.exists(unzip_package_path):
logging.info("failed to unzip file.")
self.check_runner_stop_event()
self.cleanup_run_when_starting_failed()
self.mlops_metrics.client_send_exit_train_msg(run_id, self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED)
return
# download model net and load into the torch model
model_from_open = None
if self.model_is_from_open:
logging.info("process the model net from open...")
self.check_runner_stop_event()
s3_config = self.agent_config.get("s3_config", None)
if s3_config is not None and model_net_url is not None and model_net_url != "":
s3_client = S3Storage(s3_config)
url_parsed = urlparse(model_net_url)
path_list = url_parsed.path.split("/")
if len(path_list) > 0:
model_key = path_list[-1]
model_from_open = s3_client.read_model_net(model_key,
ClientConstants.get_model_cache_dir())
model_input_size, model_input_type = mlops.get_training_model_input_info(model_net_url, s3_config)
if model_input_size is not None and model_input_type is not None:
model_config_parameters["input_size"] = model_input_size
model_config_parameters["input_types"] = model_input_type
logging.info(
f"model input size {model_input_size}, input type {model_input_type} from the open platform.")
logging.info("start the model deployment...")
self.check_runner_stop_event()
running_model_name, inference_output_url, inference_model_version, model_metadata, model_config = \
start_deployment(
inference_end_point_id, end_point_name, model_id, model_version,
unzip_package_path, model_bin_file, model_name, inference_engine,
ClientConstants.INFERENCE_HTTP_PORT,
ClientConstants.INFERENCE_GRPC_PORT,
ClientConstants.INFERENCE_METRIC_PORT,
use_gpu, memory_size,
ClientConstants.INFERENCE_CONVERTOR_IMAGE,
ClientConstants.INFERENCE_SERVER_IMAGE,
self.infer_host,
self.model_is_from_open, model_config_parameters,
model_from_open,
token)
if inference_output_url == "":
logging.error("failed to deploy the model...")
self.send_deployment_status(end_point_name, self.edge_id,
model_id, model_name, model_version,
inference_output_url,
ClientConstants.MSG_MODELOPS_DEPLOYMENT_STATUS_FAILED)
self.send_deployment_results(end_point_name, self.edge_id,
ClientConstants.MSG_MODELOPS_DEPLOYMENT_STATUS_FAILED,
model_id, model_name, inference_output_url,
inference_model_version, ClientConstants.INFERENCE_HTTP_PORT,
inference_engine, model_metadata, model_config)
self.mlops_metrics.run_id = self.run_id
self.mlops_metrics.broadcast_client_training_status(self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED,
is_from_model=True)
self.mlops_metrics.client_send_exit_train_msg(run_id, self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED)
else:
logging.info("finished deployment, continue to send results to master...")
self.send_deployment_status(end_point_name, self.edge_id,
model_id, model_name, model_version,
inference_output_url,
ClientConstants.MSG_MODELOPS_DEPLOYMENT_STATUS_DEPLOYED)
self.send_deployment_results(end_point_name, self.edge_id,
ClientConstants.MSG_MODELOPS_DEPLOYMENT_STATUS_DEPLOYED,
model_id, model_name, inference_output_url,
model_version, ClientConstants.INFERENCE_HTTP_PORT,
inference_engine, model_metadata, model_config)
time.sleep(1)
self.mlops_metrics.run_id = self.run_id
self.mlops_metrics.broadcast_client_training_status(self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_FINISHED,
is_from_model=True)
def send_deployment_results(self, end_point_name, device_id, model_status,
model_id, model_name, model_inference_url,
model_version, inference_port, inference_engine,
model_metadata, model_config):
deployment_results_topic = "model_ops/model_device/return_deployment_result/{}".format(device_id)
deployment_results_payload = {"end_point_id": self.run_id, "end_point_name": end_point_name,
"model_id": model_id, "model_name": model_name,
"model_url": model_inference_url, "model_version": model_version,
"port": inference_port,
"inference_engine": inference_engine,
"model_metadata": model_metadata,
"model_config": model_config,
"model_status": model_status}
logging.info("send_deployment_results: topic {}, payload {}.".format(deployment_results_topic,
deployment_results_payload))
self.client_mqtt_mgr.send_message_json(deployment_results_topic, json.dumps(deployment_results_payload))
def send_deployment_status(self, end_point_name, device_id,
model_id, model_name, model_version,
model_inference_url, model_status):
deployment_status_topic = "model_ops/model_device/return_deployment_status/{}".format(device_id)
deployment_status_payload = {"end_point_id": self.run_id, "end_point_name": end_point_name,
"device_id": device_id,
"model_id": model_id, "model_name": model_name,
"model_version": model_version,
"model_url": model_inference_url, "model_status": model_status}
logging.info("send_deployment_status: topic {}, payload {}.".format(deployment_status_topic,
deployment_status_payload))
self.client_mqtt_mgr.send_message_json(deployment_status_topic, json.dumps(deployment_status_payload))
def reset_devices_status(self, edge_id, status):
self.mlops_metrics.run_id = self.run_id
self.mlops_metrics.edge_id = edge_id
self.mlops_metrics.broadcast_client_training_status(edge_id, status, is_from_model=True)
def cleanup_run_when_starting_failed(self):
logging.info("Cleanup run successfully when starting failed.")
self.reset_devices_status(self.edge_id, ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED)
time.sleep(2)
try:
self.mlops_metrics.stop_sys_perf()
except Exception as ex:
pass
time.sleep(1)
def cleanup_run_when_finished(self):
logging.info("Cleanup run successfully when finished.")
self.reset_devices_status(self.edge_id, ClientConstants.MSG_MLOPS_CLIENT_STATUS_FINISHED)
time.sleep(2)
try:
self.mlops_metrics.stop_sys_perf()
except Exception as ex:
pass
time.sleep(1)
def on_client_mqtt_disconnected(self, mqtt_client_object):
if self.client_mqtt_lock is None:
self.client_mqtt_lock = threading.Lock()
self.client_mqtt_lock.acquire()
self.client_mqtt_is_connected = False
self.client_mqtt_lock.release()
def on_client_mqtt_connected(self, mqtt_client_object):
if self.mlops_metrics is None:
self.mlops_metrics = MLOpsMetrics()
self.mlops_metrics.set_messenger(self.client_mqtt_mgr)
self.mlops_metrics.run_id = self.run_id
if self.client_mqtt_lock is None:
self.client_mqtt_lock = threading.Lock()
self.client_mqtt_lock.acquire()
self.client_mqtt_is_connected = True
self.client_mqtt_lock.release()
def setup_client_mqtt_mgr(self):
if self.client_mqtt_mgr is not None:
return
if self.client_mqtt_lock is None:
self.client_mqtt_lock = threading.Lock()
self.client_mqtt_mgr = MqttManager(
self.agent_config["mqtt_config"]["BROKER_HOST"],
self.agent_config["mqtt_config"]["BROKER_PORT"],
self.agent_config["mqtt_config"]["MQTT_USER"],
self.agent_config["mqtt_config"]["MQTT_PWD"],
self.agent_config["mqtt_config"]["MQTT_KEEPALIVE"],
"FedML_ModelClientAgent_Metrics_{}_{}_{}".format(self.args.current_device_id,
str(os.getpid()),
str(uuid.uuid4()))
)
self.client_mqtt_mgr.add_connected_listener(self.on_client_mqtt_connected)
self.client_mqtt_mgr.add_disconnected_listener(self.on_client_mqtt_disconnected)
self.client_mqtt_mgr.connect()
self.client_mqtt_mgr.loop_start()
if self.mlops_metrics is None:
self.mlops_metrics = MLOpsMetrics()
self.mlops_metrics.set_messenger(self.client_mqtt_mgr)
self.mlops_metrics.run_id = self.run_id
def release_client_mqtt_mgr(self):
try:
if self.client_mqtt_mgr is not None:
self.client_mqtt_mgr.loop_stop()
self.client_mqtt_mgr.disconnect()
self.client_mqtt_lock.acquire()
if self.client_mqtt_mgr is not None:
self.client_mqtt_is_connected = False
self.client_mqtt_mgr = None
self.client_mqtt_lock.release()
except Exception:
pass
def ota_upgrade(self, payload, request_json):
no_upgrade = False
upgrade_version = None
run_id = request_json["end_point_id"]
try:
parameters = request_json.get("parameters", None)
common_args = parameters.get("common_args", None)
no_upgrade = common_args.get("no_upgrade", False)
upgrade_version = common_args.get("upgrade_version", None)
except Exception as e:
pass
should_upgrade = True
if upgrade_version is None or upgrade_version == "latest":
try:
fedml_is_latest_version, local_ver, remote_ver = sys_utils. \
check_fedml_is_latest_version(self.version)
except Exception as e:
return
if fedml_is_latest_version:
should_upgrade = False
upgrade_version = remote_ver
if no_upgrade:
should_upgrade = False
if should_upgrade:
FedMLClientDataInterface.get_instance(). \
save_started_job(run_id, self.edge_id, time.time(),
ClientConstants.MSG_MLOPS_CLIENT_STATUS_UPGRADING,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_UPGRADING,
payload)
logging.info(f"Upgrade to version {upgrade_version} ...")
sys_utils.do_upgrade(self.version, upgrade_version)
raise Exception("Restarting after upgraded...")
def callback_start_deployment(self, topic, payload):
"""
topic: model_ops/model_device/start_deployment/model-agent-device-id
payload: {"model_name": "image-model", "model_storage_url":"s3-url", "instance_scale_min":1, "instance_scale_max":3, "inference_engine":"onnx (or tensorrt)"}
"""
# get deployment params
request_json = json.loads(payload)
run_id = request_json["end_point_id"]
token = request_json["token"]
user_id = request_json["user_id"]
user_name = request_json["user_name"]
device_ids = request_json["device_ids"]
device_objs = request_json["device_objs"]
model_config = request_json["model_config"]
model_name = model_config["model_name"]
model_storage_url = model_config["model_storage_url"]
scale_min = model_config["instance_scale_min"]
scale_max = model_config["instance_scale_max"]
inference_engine = model_config.get("inference_engine", 0)
inference_end_point_id = run_id
try:
_, _ = MLOpsConfigs.get_instance(self.args).fetch_configs()
except Exception as e:
pass
# Terminate previous process about starting or stopping run command
ClientConstants.cleanup_run_process(run_id)
ClientConstants.save_runner_infos(self.args.device_id + "." + self.args.os_name, self.edge_id, run_id=run_id)
# Start log processor for current run
run_id = inference_end_point_id
self.args.run_id = run_id
self.args.edge_id = self.edge_id
MLOpsRuntimeLog.get_instance(self.args).init_logs(show_stdout_log=True)
MLOpsRuntimeLogDaemon.get_instance(self.args).set_log_source(
ClientConstants.FEDML_LOG_SOURCE_TYPE_MODEL_END_POINT)
MLOpsRuntimeLogDaemon.get_instance(self.args).start_log_processor(run_id, self.edge_id)
self.ota_upgrade(payload, request_json)
# Start client with multiprocessing mode
request_json["run_id"] = run_id
self.request_json = request_json
client_runner = FedMLClientRunner(
self.args, edge_id=self.edge_id, request_json=request_json, agent_config=self.agent_config, run_id=run_id
)
client_runner.infer_host = self.infer_host
if self.run_process_event is None:
self.run_process_event = multiprocessing.Event()
self.run_process_event.clear()
client_runner.run_process_event = self.run_process_event
self.model_runner_mapping[run_id] = client_runner
self.run_id = run_id
self.process = Process(target=client_runner.run, args=(self.run_process_event,))
# client_runner.run()
self.process.start()
ClientConstants.save_run_process(run_id, self.process.pid)
def set_runner_stopped_event(self, run_id):
client_runner = self.model_runner_mapping.get(run_id, None)
if client_runner is not None:
if client_runner.run_process_event is not None:
client_runner.run_process_event.set()
self.model_runner_mapping.pop(run_id)
def callback_delete_deployment(self, topic, payload):
logging.info("callback_delete_deployment: topic = %s, payload = %s" % (topic, payload))
# Parse payload as the model message object.
model_msg_object = FedMLModelMsgObject(topic, payload)
ClientConstants.remove_deployment(model_msg_object.end_point_name,
model_msg_object.model_name, model_msg_object.model_version)
self.set_runner_stopped_event(model_msg_object.run_id)
def exit_run_with_exception_entry(self):
try:
self.setup_client_mqtt_mgr()
self.exit_run_with_exception()
except Exception as e:
self.release_client_mqtt_mgr()
sys_utils.cleanup_all_fedml_client_login_processes(
ClientConstants.CLIENT_LOGIN_PROGRAM, clean_process_group=False)
sys.exit(1)
finally:
self.release_client_mqtt_mgr()
def exit_run_with_exception(self):
logging.info("Exit run successfully.")
ClientConstants.cleanup_learning_process(self.run_id)
ClientConstants.cleanup_run_process(self.run_id)
self.mlops_metrics.report_client_id_status(self.run_id, self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED,
is_from_model=True)
time.sleep(1)
def callback_exit_train_with_exception(self, topic, payload):
request_json = json.loads(payload)
is_retain = request_json.get("is_retain", False)
if is_retain:
return
run_id = request_json.get("runId", None)
if run_id is None:
run_id = request_json.get("run_id", None)
if run_id is None:
run_id = request_json.get("id", None)
if run_id is None:
return
# Stop client with multiprocessing mode
self.request_json = request_json
client_runner = FedMLClientRunner(
self.args, edge_id=self.edge_id, request_json=request_json, agent_config=self.agent_config, run_id=run_id
)
try:
Process(target=client_runner.exit_run_with_exception_entry).start()
except Exception as e:
pass
def cleanup_client_with_status(self):
if self.device_status == ClientConstants.MSG_MLOPS_CLIENT_STATUS_FINISHED:
self.cleanup_run_when_finished()
elif self.device_status == ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED:
self.cleanup_run_when_starting_failed()
def callback_runner_id_status(self, topic, payload):
logging.info("callback_runner_id_status: topic = %s, payload = %s" % (topic, payload))
request_json = json.loads(payload)
run_id = request_json["run_id"]
edge_id = request_json["edge_id"]
status = request_json["status"]
self.save_training_status(edge_id, status)
if status == ClientConstants.MSG_MLOPS_CLIENT_STATUS_FINISHED or \
status == ClientConstants.MSG_MLOPS_CLIENT_STATUS_FAILED:
# Stop client with multiprocessing mode
self.request_json = request_json
client_runner = FedMLClientRunner(
self.args,
edge_id=self.edge_id,
request_json=request_json,
agent_config=self.agent_config,
run_id=run_id,
)
client_runner.device_status = status
status_process = Process(target=client_runner.cleanup_client_with_status)
status_process.start()
status_process.join(15)
# Stop log processor for current run
MLOpsRuntimeLogDaemon.get_instance(self.args).stop_log_processor(run_id, edge_id)
def callback_report_current_status(self, topic, payload):
self.send_agent_active_msg()
@staticmethod
def process_ota_upgrade_msg():
os.system("pip install -U fedml")
def callback_client_ota_msg(self, topic, payload):
request_json = json.loads(payload)
cmd = request_json["cmd"]
if cmd == ClientConstants.FEDML_OTA_CMD_UPGRADE:
FedMLClientRunner.process_ota_upgrade_msg()
# Process(target=FedMLClientRunner.process_ota_upgrade_msg).start()
raise Exception("After upgraded, restart runner...")
elif cmd == ClientConstants.FEDML_OTA_CMD_RESTART:
raise Exception("Restart runner...")
def save_training_status(self, edge_id, training_status):
self.current_training_status = training_status
ClientConstants.save_training_infos(edge_id, training_status)
@staticmethod
def get_device_id():
device_file_path = os.path.join(ClientConstants.get_data_dir(),
ClientConstants.LOCAL_RUNNER_INFO_DIR_NAME)
file_for_device_id = os.path.join(device_file_path, "devices.id")
if not os.path.exists(device_file_path):
os.makedirs(device_file_path)
elif os.path.exists(file_for_device_id):
with open(file_for_device_id, 'r', encoding='utf-8') as f:
device_id_from_file = f.readline()
if device_id_from_file is not None and device_id_from_file != "":
return device_id_from_file
if platform.system() == "Darwin":
cmd_get_serial_num = "system_profiler SPHardwareDataType | grep Serial | awk '{gsub(/ /,\"\")}{print}' " \
"|awk -F':' '{print $2}' "
device_id = os.popen(cmd_get_serial_num).read()
device_id = device_id.replace('\n', '').replace(' ', '')
if device_id is None or device_id == "":
device_id = hex(uuid.getnode())
else:
device_id = "0x" + device_id
else:
if "nt" in os.name:
def get_uuid():
guid = ""
try:
cmd = "wmic csproduct get uuid"
guid = str(subprocess.check_output(cmd))
pos1 = guid.find("\\n") + 2
guid = guid[pos1:-15]
except Exception as ex:
pass
return str(guid)
device_id = str(get_uuid())
logging.info(device_id)
elif "posix" in os.name:
device_id = sys_utils.get_device_id_in_docker()
if device_id is None:
device_id = hex(uuid.getnode())
else:
device_id = sys_utils.run_subprocess_open(
"hal-get-property --udi /org/freedesktop/Hal/devices/computer --key system.hardware.uuid".split()
)
device_id = hex(device_id)
if device_id is not None and device_id != "":
with open(file_for_device_id, 'w', encoding='utf-8') as f:
f.write(device_id)
else:
device_id = hex(uuid.uuid4())
with open(file_for_device_id, 'w', encoding='utf-8') as f:
f.write(device_id)
return device_id
def bind_account_and_device_id(self, url, account_id, device_id, os_name, role="md.on_premise_device"):
ip = requests.get('https://checkip.amazonaws.com').text.strip()
fedml_ver, exec_path, os_ver, cpu_info, python_ver, torch_ver, mpi_installed, \
cpu_usage, available_mem, total_mem, gpu_info, gpu_available_mem, gpu_total_mem = get_sys_runner_info()
json_params = {
"accountid": account_id,
"deviceid": device_id,
"type": os_name,
"processor": cpu_info,
"core_type": cpu_info,
"network": "",
"role": role,
"os_ver": os_ver,
"memory": total_mem,
"ip": ip,
"extra_infos": {"fedml_ver": fedml_ver, "exec_path": exec_path, "os_ver": os_ver,
"cpu_info": cpu_info, "python_ver": python_ver, "torch_ver": torch_ver,
"mpi_installed": mpi_installed, "cpu_sage": cpu_usage,
"available_mem": available_mem, "total_mem": total_mem}
}
if gpu_info is not None:
if gpu_total_mem is not None:
json_params["gpu"] = gpu_info + ", Total GPU Memory: " + gpu_total_mem
else:
json_params["gpu"] = gpu_info
json_params["extra_infos"]["gpu_info"] = gpu_info
if gpu_available_mem is not None:
json_params["extra_infos"]["gpu_available_mem"] = gpu_available_mem
if gpu_total_mem is not None:
json_params["extra_infos"]["gpu_total_mem"] = gpu_total_mem
else:
json_params["gpu"] = "None"
_, cert_path = MLOpsConfigs.get_instance(self.args).get_request_params()
if cert_path is not None:
try:
requests.session().verify = cert_path
response = requests.post(
url, json=json_params, verify=True,
headers={"content-type": "application/json", "Connection": "close"}
)
except requests.exceptions.SSLError as err:
MLOpsConfigs.install_root_ca_file()
response = requests.post(
url, json=json_params, verify=True,
headers={"content-type": "application/json", "Connection": "close"}
)
else:
response = requests.post(url, json=json_params, headers={"Connection": "close"})
status_code = response.json().get("code")
if status_code == "SUCCESS":
edge_id = response.json().get("data").get("id")
else:
return 0
return edge_id
def fetch_configs(self):
return MLOpsConfigs.get_instance(self.args).fetch_all_configs()
def send_agent_active_msg(self):
active_topic = "flclient_agent/active"
status = MLOpsStatus.get_instance().get_client_agent_status(self.edge_id)
if (
status is not None
and status != ClientConstants.MSG_MLOPS_CLIENT_STATUS_OFFLINE
and status != ClientConstants.MSG_MLOPS_CLIENT_STATUS_IDLE
):
return
try:
current_job = FedMLClientDataInterface.get_instance().get_job_by_id(self.run_id)
except Exception as e:
current_job = None
if current_job is None:
if status is not None and status == ClientConstants.MSG_MLOPS_CLIENT_STATUS_OFFLINE:
status = ClientConstants.MSG_MLOPS_CLIENT_STATUS_IDLE
else:
return
else:
status = ClientConstants.get_device_state_from_run_edge_state(current_job.status)
active_msg = {"ID": self.edge_id, "status": status}
MLOpsStatus.get_instance().set_client_agent_status(self.edge_id, status)
self.mqtt_mgr.send_message_json(active_topic, json.dumps(active_msg))
def recover_start_deployment_msg_after_upgrading(self):
try:
current_job = FedMLClientDataInterface.get_instance().get_current_job()
if current_job is not None and \
current_job.status == ClientConstants.MSG_MLOPS_CLIENT_STATUS_UPGRADING:
logging.info("start deployment after upgrading.")
topic_start_deployment = "model_ops/model_device/start_deployment/{}".format(str(self.edge_id))
self.callback_start_deployment(topic_start_deployment, current_job.running_json)
except Exception as e:
logging.info("recover starting deployment message after upgrading: {}".format(traceback.format_exc()))
def on_agent_mqtt_connected(self, mqtt_client_object):
# The MQTT message topic format is as follows: <sender>/<receiver>/<action>
# Setup MQTT message listener for starting deployment
topic_start_deployment = "model_ops/model_device/start_deployment/{}".format(str(self.edge_id))
self.mqtt_mgr.add_message_listener(topic_start_deployment, self.callback_start_deployment)
# Setup MQTT message listener for delete deployment
topic_delete_deployment = "model_ops/model_device/delete_deployment/{}".format(str(self.edge_id))
self.mqtt_mgr.add_message_listener(topic_delete_deployment, self.callback_delete_deployment)
# Setup MQTT message listener for running failed
topic_exit_train_with_exception = "flserver_agent/" + str(self.edge_id) + "/exit_train_with_exception"
self.mqtt_mgr.add_message_listener(topic_exit_train_with_exception, self.callback_exit_train_with_exception)
# Setup MQTT message listener for client status switching
topic_client_status = "fl_client/flclient_agent_" + str(self.edge_id) + "/status"
self.mqtt_mgr.add_message_listener(topic_client_status, self.callback_runner_id_status)
# Setup MQTT message listener to report current device status.
topic_report_status = "mlops/report_device_status"
self.mqtt_mgr.add_message_listener(topic_report_status, self.callback_report_current_status)
# Setup MQTT message listener to OTA messages from the MLOps.
topic_ota_msg = "mlops/flclient_agent_" + str(self.edge_id) + "/ota"
self.mqtt_mgr.add_message_listener(topic_ota_msg, self.callback_client_ota_msg)
# Subscribe topics for starting deployment, stopping deployment and fetching client status.
mqtt_client_object.subscribe(topic_start_deployment, qos=2)
mqtt_client_object.subscribe(topic_delete_deployment, qos=2)
mqtt_client_object.subscribe(topic_client_status, qos=2)
mqtt_client_object.subscribe(topic_report_status, qos=2)
mqtt_client_object.subscribe(topic_exit_train_with_exception, qos=2)
mqtt_client_object.subscribe(topic_ota_msg, qos=2)
# Broadcast the first active message.
self.send_agent_active_msg()
# Echo results
print("\n\nCongratulations, your device is connected to the FedML MLOps platform successfully!")
print(
"Your FedML Edge ID is " + str(self.edge_id) + ", unique device ID is "
+ str(self.unique_device_id)
+ "\n"
)
MLOpsRuntimeLog.get_instance(self.args).init_logs(show_stdout_log=True)
def on_agent_mqtt_disconnected(self, mqtt_client_object):
MLOpsStatus.get_instance().set_client_agent_status(
self.edge_id, ClientConstants.MSG_MLOPS_CLIENT_STATUS_OFFLINE
)
pass
def setup_agent_mqtt_connection(self, service_config):
# Setup MQTT connection
self.mqtt_mgr = MqttManager(
service_config["mqtt_config"]["BROKER_HOST"],
service_config["mqtt_config"]["BROKER_PORT"],
service_config["mqtt_config"]["MQTT_USER"],
service_config["mqtt_config"]["MQTT_PWD"],
service_config["mqtt_config"]["MQTT_KEEPALIVE"],
"FedML_ModelClientAgent_Daemon_" + self.args.current_device_id,
"flclient_agent/last_will_msg",
json.dumps({"ID": self.edge_id, "status": ClientConstants.MSG_MLOPS_CLIENT_STATUS_OFFLINE}),
)
self.agent_config = service_config
# Init local database
FedMLClientDataInterface.get_instance().create_job_table()
# Start local API services
python_program = get_python_program()
local_api_process = ClientConstants.exec_console_with_script(
"{} -m uvicorn fedml.cli.model_deployment.client_api:api --host 0.0.0.0 --port {} "
"--log-level critical".format(python_program,
ClientConstants.LOCAL_CLIENT_API_PORT),
should_capture_stdout=False,
should_capture_stderr=False
)
MLOpsRuntimeLogDaemon.get_instance(self.args).stop_all_log_processor()
# Setup MQTT connected listener
self.mqtt_mgr.add_connected_listener(self.on_agent_mqtt_connected)
self.mqtt_mgr.add_disconnected_listener(self.on_agent_mqtt_disconnected)
self.mqtt_mgr.connect()
self.setup_client_mqtt_mgr()
self.mlops_metrics.report_client_training_status(self.edge_id,
ClientConstants.MSG_MLOPS_CLIENT_STATUS_IDLE,
is_from_model=True)
MLOpsStatus.get_instance().set_client_agent_status(self.edge_id, ClientConstants.MSG_MLOPS_CLIENT_STATUS_IDLE)
self.mlops_metrics.stop_sys_perf()
setattr(self.args, "mqtt_config_path", service_config["mqtt_config"])
self.mlops_metrics.report_sys_perf(self.args)
self.recover_start_deployment_msg_after_upgrading()
def start_agent_mqtt_loop(self):
# Start MQTT message loop
try:
self.mqtt_mgr.loop_forever()
except Exception as e:
if str(e) == "Restarting after upgraded...":
logging.info("Restarting after upgraded...")
else:
logging.info("Client tracing: {}".format(traceback.format_exc()))
self.mqtt_mgr.loop_stop()
self.mqtt_mgr.disconnect()
self.release_client_mqtt_mgr()
time.sleep(5)
sys_utils.cleanup_all_fedml_client_login_processes(
ClientConstants.CLIENT_LOGIN_PROGRAM, clean_process_group=False)
sys.exit(1)
|
baee403bc20d3ba003ae5501c8dc144927f25d75
|
c2c212ba42ebfa35f3b6122344978bc94ec8fa67
|
/tests/test_sunset.py
|
fe1a76f13c6f5834a2d2c90360667bd59cd172ab
|
[
"MIT"
] |
permissive
|
hhursev/recipe-scrapers
|
0cd6b7db4ef23ca825f2354f5d1ba76076a14813
|
8ced0227b3b16c532fc5ebf3060c99ee0452adab
|
refs/heads/main
| 2023-09-03T07:33:29.684121
| 2023-09-01T21:15:50
| 2023-09-01T21:15:50
| 42,446,168
| 1,276
| 443
|
MIT
| 2023-09-14T16:34:09
| 2015-09-14T12:05:00
|
Python
|
UTF-8
|
Python
| false
| false
| 3,478
|
py
|
test_sunset.py
|
from recipe_scrapers.sunset import Sunset
from tests import ScraperTest
class TestSunsetScraper(ScraperTest):
scraper_class = Sunset
def test_host(self):
self.assertEqual("sunset.com", self.harvester_class.host())
def test_canonical_url(self):
self.assertEqual(
"https://www.sunset.com/recipe/crisp-top-sourdough-stuffing",
self.harvester_class.canonical_url(),
)
def test_title(self):
self.assertEqual(
self.harvester_class.title(),
"Crisp-Top Sourdough Stuffing",
)
def test_author(self):
self.assertEqual(self.harvester_class.author(), "Angela Brassinga")
def test_total_time(self):
self.assertEqual(90, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("12 servings", self.harvester_class.yields())
def test_image(self):
self.assertEqual(
"https://img.sunset02.com/sites/default/files/styles/4_3_horizontal_-_900x675/public/crisp-top-sourdough-stuffing-su.jpg",
self.harvester_class.image(),
)
def test_ingredients(self):
self.assertEqual(
[
"1 1-pound loaf sourdough, at least 1 day old",
"1/4 cup salted butter",
"2 cups chopped onion (1 large)",
"1 cup chopped celery (2 or 3 stalks)",
"1/4 cup chopped flat-leaf parsley",
"1 tablespoon finely chopped fresh sage",
"About 1/2 tsp. kosher salt",
"About 1/2 tsp. pepper",
"About 3 cups turkey broth, reduced-sodium chicken broth, or mushroom or other vegetable broth",
],
self.harvester_class.ingredients(),
)
def test_instructions(self):
return self.assertEqual(
"Step 1\n Slice bread into 1 1/2-in.-thick slices and tear into irregular 1- to 2-in. pieces. Spread on a rimmed baking sheet and leave to dry at room temperature until needed (up to 2 days). For the best stuffing, the bread should be very dry.\nStep 2\n Preheat oven to 350°. Melt butter in a large frying pan over medium heat. Pour out 2 tbsp. butter and set aside.\nStep 3\n Add onion, celery, herbs, and 1/2 tsp. each salt and pepper to hot pan. Cook until onions are translucent and celery is tender-crisp, about 15 minutes. Transfer to a large bowl.\nStep 4\n Add torn bread and broth to vegetables and mix in until bread is soaked. Add salt and pepper to taste.\nStep 5\n Generously coat a 9- by 13-in. glass baking pan with 1 tsp. reserved melted butter. Pour stuffing into pan and drizzle with remaining melted butter.\nStep 6\n Cover with foil; bake 25 minutes. Remove foil and bake until starting to brown on top, about 30 minutes more.\nStep 7\nMake ahead: Up to 2 days, chilled. Reheat at 350°, covered, until hot (about 30 minutes). Remove foil and cook 10 more minutes for a crunchy top layer.\nStep 8\nVARIATIONS\nStep 9\nCrisp-Top Sourdough Stuffing with Sausage and Greens: Add 8 oz. sautéed crumbled Italian \xadsausage and 1 lb. briefly sautéed fresh spinach leaves to stuffing before baking.\nStep 10\nScandinavian Stuffing: Replace sourdough with a 1-lb. loaf of crusty rye bread, then add 1 cup chopped fresh dill and 8 oz. diced smoked pork chops to stuffing before baking. Top with 2 tbsp. fresh dill sprigs before serving.",
self.harvester_class.instructions(),
)
|
a0ce72c37ae0e0a6b640662ac1cae6f8408f0fe3
|
13800b7827598e76428a335559b7bf11867ec2f0
|
/python/ccxt/async_support/oceanex.py
|
ff051ca4bd36564dc07f6dd5795207d3a3926e74
|
[
"MIT"
] |
permissive
|
ccxt/ccxt
|
b40a0466f5c430a3c0c6026552ae697aa80ba6c6
|
e4065f6a490e6fc4dd7a72b375428b2faa570668
|
refs/heads/master
| 2023-09-04T03:41:29.787733
| 2023-09-03T19:25:57
| 2023-09-03T19:25:57
| 91,253,698
| 30,798
| 8,190
|
MIT
| 2023-09-14T21:59:09
| 2017-05-14T15:41:56
|
Python
|
UTF-8
|
Python
| false
| false
| 38,743
|
py
|
oceanex.py
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.abstract.oceanex import ImplicitAPI
from ccxt.base.types import OrderSide
from ccxt.base.types import OrderType
from typing import Optional
from typing import List
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import AuthenticationError
from ccxt.base.decimal_to_precision import TICK_SIZE
class oceanex(Exchange, ImplicitAPI):
def describe(self):
return self.deep_extend(super(oceanex, self).describe(), {
'id': 'oceanex',
'name': 'OceanEx',
'countries': ['BS'], # Bahamas
'version': 'v1',
'rateLimit': 3000,
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/58385970-794e2d80-8001-11e9-889c-0567cd79b78e.jpg',
'api': {
'rest': 'https://api.oceanex.pro',
},
'www': 'https://www.oceanex.pro.com',
'doc': 'https://api.oceanex.pro/doc/v1',
'referral': 'https://oceanex.pro/signup?referral=VE24QX',
},
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': None, # has but unimplemented
'future': None,
'option': None,
'cancelAllOrders': True,
'cancelOrder': True,
'cancelOrders': True,
'createMarketOrder': True,
'createOrder': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchClosedOrders': True,
'fetchMarkets': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrderBooks': True,
'fetchOrders': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTradingFee': False,
'fetchTradingFees': True,
'fetchTransactionFees': None,
},
'timeframes': {
'1m': '1',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'2h': '120',
'4h': '240',
'6h': '360',
'12h': '720',
'1d': '1440',
'3d': '4320',
'1w': '10080',
},
'api': {
'public': {
'get': [
'markets',
'tickers/{pair}',
'tickers_multi',
'order_book',
'order_book/multi',
'fees/trading',
'trades',
'timestamp',
],
'post': [
'k',
],
},
'private': {
'get': [
'key',
'members/me',
'orders',
'orders/filter',
],
'post': [
'orders',
'orders/multi',
'order/delete',
'order/delete/multi',
'orders/clear',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': self.parse_number('0.001'),
'taker': self.parse_number('0.001'),
},
},
'commonCurrencies': {
'PLA': 'Plair',
},
'precisionMode': TICK_SIZE,
'exceptions': {
'codes': {
'-1': BadRequest,
'-2': BadRequest,
'1001': BadRequest,
'1004': ArgumentsRequired,
'1006': AuthenticationError,
'1008': AuthenticationError,
'1010': AuthenticationError,
'1011': PermissionDenied,
'2001': AuthenticationError,
'2002': InvalidOrder,
'2004': OrderNotFound,
'9003': PermissionDenied,
},
'exact': {
'market does not have a valid value': BadRequest,
'side does not have a valid value': BadRequest,
'Account::AccountError: Cannot lock funds': InsufficientFunds,
'The account does not exist': AuthenticationError,
},
},
})
async def fetch_markets(self, params={}):
"""
retrieves data on all markets for oceanex
see https://api.oceanex.pro/doc/v1/#markets-post
:param dict [params]: extra parameters specific to the exchange api endpoint
:returns dict[]: an array of objects representing market data
"""
request = {'show_details': True}
response = await self.publicGetMarkets(self.extend(request, params))
#
# {
# id: 'xtzusdt',
# name: 'XTZ/USDT',
# ask_precision: '8',
# bid_precision: '8',
# enabled: True,
# price_precision: '4',
# amount_precision: '3',
# usd_precision: '4',
# minimum_trading_amount: '1.0'
# },
#
result = []
markets = self.safe_value(response, 'data', [])
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_value(market, 'id')
name = self.safe_value(market, 'name')
baseId, quoteId = name.split('/')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
baseId = baseId.lower()
quoteId = quoteId.lower()
symbol = base + '/' + quote
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': None,
'contract': False,
'linear': None,
'inverse': None,
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.parse_number(self.parse_precision(self.safe_string(market, 'amount_precision'))),
'price': self.parse_number(self.parse_precision(self.safe_string(market, 'price_precision'))),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': None,
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_number(market, 'minimum_trading_amount'),
'max': None,
},
},
'info': market,
})
return result
async def fetch_ticker(self, symbol: str, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
see https://api.oceanex.pro/doc/v1/#ticker-post
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: a `ticker structure <https://github.com/ccxt/ccxt/wiki/Manual#ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.publicGetTickersPair(self.extend(request, params))
#
# {
# "code":0,
# "message":"Operation successful",
# "data": {
# "at":1559431729,
# "ticker": {
# "buy":"0.0065",
# "sell":"0.00677",
# "low":"0.00677",
# "high":"0.00677",
# "last":"0.00677",
# "vol":"2000.0"
# }
# }
# }
#
data = self.safe_value(response, 'data', {})
return self.parse_ticker(data, market)
async def fetch_tickers(self, symbols: Optional[List[str]] = None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
see https://api.oceanex.pro/doc/v1/#multiple-tickers-post
:param str[]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: a dictionary of `ticker structures <https://github.com/ccxt/ccxt/wiki/Manual#ticker-structure>`
"""
await self.load_markets()
symbols = self.market_symbols(symbols)
if symbols is None:
symbols = self.symbols
marketIds = self.market_ids(symbols)
request = {'markets': marketIds}
response = await self.publicGetTickersMulti(self.extend(request, params))
#
# {
# "code":0,
# "message":"Operation successful",
# "data": {
# "at":1559431729,
# "ticker": {
# "buy":"0.0065",
# "sell":"0.00677",
# "low":"0.00677",
# "high":"0.00677",
# "last":"0.00677",
# "vol":"2000.0"
# }
# }
# }
#
data = self.safe_value(response, 'data', [])
result = {}
for i in range(0, len(data)):
ticker = data[i]
marketId = self.safe_string(ticker, 'market')
market = self.safe_market(marketId)
symbol = market['symbol']
result[symbol] = self.parse_ticker(ticker, market)
return self.filter_by_array(result, 'symbol', symbols)
def parse_ticker(self, data, market=None):
#
# {
# "at":1559431729,
# "ticker": {
# "buy":"0.0065",
# "sell":"0.00677",
# "low":"0.00677",
# "high":"0.00677",
# "last":"0.00677",
# "vol":"2000.0"
# }
# }
#
ticker = self.safe_value(data, 'ticker', {})
timestamp = self.safe_timestamp(data, 'at')
symbol = self.safe_symbol(None, market)
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_string(ticker, 'high'),
'low': self.safe_string(ticker, 'low'),
'bid': self.safe_string(ticker, 'buy'),
'bidVolume': None,
'ask': self.safe_string(ticker, 'sell'),
'askVolume': None,
'vwap': None,
'open': None,
'close': self.safe_string(ticker, 'last'),
'last': self.safe_string(ticker, 'last'),
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_string(ticker, 'volume'),
'quoteVolume': None,
'info': ticker,
}, market)
async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
see https://api.oceanex.pro/doc/v1/#order-book-post
:param str symbol: unified symbol of the market to fetch the order book for
:param int [limit]: the maximum amount of order book entries to return
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: A dictionary of `order book structures <https://github.com/ccxt/ccxt/wiki/Manual#order-book-structure>` indexed by market symbols
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderBook(self.extend(request, params))
#
# {
# "code":0,
# "message":"Operation successful",
# "data": {
# "timestamp":1559433057,
# "asks": [
# ["100.0","20.0"],
# ["4.74","2000.0"],
# ["1.74","4000.0"],
# ],
# "bids":[
# ["0.0065","5482873.4"],
# ["0.00649","4781956.2"],
# ["0.00648","2876006.8"],
# ],
# }
# }
#
orderbook = self.safe_value(response, 'data', {})
timestamp = self.safe_timestamp(orderbook, 'timestamp')
return self.parse_order_book(orderbook, symbol, timestamp)
async def fetch_order_books(self, symbols: Optional[List[str]] = None, limit: Optional[int] = None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data for multiple markets
see https://api.oceanex.pro/doc/v1/#multiple-order-books-post
:param str[]|None symbols: list of unified market symbols, all symbols fetched if None, default is None
:param int [limit]: max number of entries per orderbook to return, default is None
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: a dictionary of `order book structures <https://github.com/ccxt/ccxt/wiki/Manual#order-book-structure>` indexed by market symbol
"""
await self.load_markets()
if symbols is None:
symbols = self.symbols
marketIds = self.market_ids(symbols)
request = {
'markets': marketIds,
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderBookMulti(self.extend(request, params))
#
# {
# "code":0,
# "message":"Operation successful",
# "data": [
# {
# "timestamp":1559433057,
# "market": "bagvet",
# "asks": [
# ["100.0","20.0"],
# ["4.74","2000.0"],
# ["1.74","4000.0"],
# ],
# "bids":[
# ["0.0065","5482873.4"],
# ["0.00649","4781956.2"],
# ["0.00648","2876006.8"],
# ],
# },
# ...,
# ],
# }
#
data = self.safe_value(response, 'data', [])
result = {}
for i in range(0, len(data)):
orderbook = data[i]
marketId = self.safe_string(orderbook, 'market')
symbol = self.safe_symbol(marketId)
timestamp = self.safe_timestamp(orderbook, 'timestamp')
result[symbol] = self.parse_order_book(orderbook, symbol, timestamp)
return result
async def fetch_trades(self, symbol: str, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
get the list of most recent trades for a particular symbol
see https://api.oceanex.pro/doc/v1/#trades-post
:param str symbol: unified symbol of the market to fetch trades for
:param int [since]: timestamp in ms of the earliest trade to fetch
:param int [limit]: the maximum amount of trades to fetch
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns Trade[]: a list of `trade structures <https://github.com/ccxt/ccxt/wiki/Manual#public-trades>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetTrades(self.extend(request, params))
#
# {
# "code":0,
# "message":"Operation successful",
# "data": [
# {
# "id":220247666,
# "price":"3098.62",
# "volume":"0.00196",
# "funds":"6.0732952",
# "market":"ethusdt",
# "created_at":"2022-04-19T19:03:15Z",
# "created_on":1650394995,
# "side":"bid"
# },
# ]
# }
#
data = self.safe_value(response, 'data')
return self.parse_trades(data, market, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "id":220247666,
# "price":"3098.62",
# "volume":"0.00196",
# "funds":"6.0732952",
# "market":"ethusdt",
# "created_at":"2022-04-19T19:03:15Z",
# "created_on":1650394995,
# "side":"bid"
# }
#
side = self.safe_value(trade, 'side')
if side == 'bid':
side = 'buy'
elif side == 'ask':
side = 'sell'
marketId = self.safe_value(trade, 'market')
symbol = self.safe_symbol(marketId, market)
timestamp = self.safe_timestamp(trade, 'created_on')
if timestamp is None:
timestamp = self.parse8601(self.safe_string(trade, 'created_at'))
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'volume')
return self.safe_trade({
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': self.safe_string(trade, 'id'),
'order': None,
'type': 'limit',
'takerOrMaker': None,
'side': side,
'price': priceString,
'amount': amountString,
'cost': None,
'fee': None,
}, market)
async def fetch_time(self, params={}):
"""
fetches the current integer timestamp in milliseconds from the exchange server
see https://api.oceanex.pro/doc/v1/#api-server-time-post
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns int: the current integer timestamp in milliseconds from the exchange server
"""
response = await self.publicGetTimestamp(params)
#
# {"code":0,"message":"Operation successful","data":1559433420}
#
return self.safe_timestamp(response, 'data')
async def fetch_trading_fees(self, params={}):
"""
fetch the trading fees for multiple markets
see https://api.oceanex.pro/doc/v1/#trading-fees-post
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: a dictionary of `fee structures <https://github.com/ccxt/ccxt/wiki/Manual#fee-structure>` indexed by market symbols
"""
response = await self.publicGetFeesTrading(params)
data = self.safe_value(response, 'data', [])
result = {}
for i in range(0, len(data)):
group = data[i]
maker = self.safe_value(group, 'ask_fee', {})
taker = self.safe_value(group, 'bid_fee', {})
marketId = self.safe_string(group, 'market')
symbol = self.safe_symbol(marketId)
result[symbol] = {
'info': group,
'symbol': symbol,
'maker': self.safe_number(maker, 'value'),
'taker': self.safe_number(taker, 'value'),
'percentage': True,
}
return result
async def fetch_key(self, params={}):
response = await self.privateGetKey(params)
return self.safe_value(response, 'data')
def parse_balance(self, response):
data = self.safe_value(response, 'data')
balances = self.safe_value(data, 'accounts', [])
result = {'info': response}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_value(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'balance')
account['used'] = self.safe_string(balance, 'locked')
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
see https://api.oceanex.pro/doc/v1/#account-info-post
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: a `balance structure <https://github.com/ccxt/ccxt/wiki/Manual#balance-structure>`
"""
await self.load_markets()
response = await self.privateGetMembersMe(params)
return self.parse_balance(response)
async def create_order(self, symbol: str, type: OrderType, side: OrderSide, amount, price=None, params={}):
"""
create a trade order
see https://api.oceanex.pro/doc/v1/#new-order-post
:param str symbol: unified symbol of the market to create an order in
:param str type: 'market' or 'limit'
:param str side: 'buy' or 'sell'
:param float amount: how much of currency you want to trade in units of base currency
:param float [price]: the price at which the order is to be fullfilled, in units of the quote currency, ignored in market orders
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: an `order structure <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'side': side,
'ord_type': type,
'volume': self.amount_to_precision(symbol, amount),
}
if type == 'limit':
request['price'] = self.price_to_precision(symbol, price)
response = await self.privatePostOrders(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_order(data, market)
async def fetch_order(self, id: str, symbol: Optional[str] = None, params={}):
"""
fetches information on an order made by the user
see https://api.oceanex.pro/doc/v1/#order-status-get
:param str symbol: unified symbol of the market the order was made in
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: An `order structure <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
ids = [id]
request = {'ids': ids}
response = await self.privateGetOrders(self.extend(request, params))
data = self.safe_value(response, 'data')
dataLength = len(data)
if data is None:
raise OrderNotFound(self.id + ' could not found matching order')
if isinstance(id, list):
orders = self.parse_orders(data, market)
return orders[0]
if dataLength == 0:
raise OrderNotFound(self.id + ' could not found matching order')
return self.parse_order(data[0], market)
async def fetch_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch all unfilled currently open orders
see https://api.oceanex.pro/doc/v1/#order-status-get
:param str symbol: unified market symbol
:param int [since]: the earliest time in ms to fetch open orders for
:param int [limit]: the maximum number of open orders structures to retrieve
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns Order[]: a list of `order structures <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
request = {
'states': ['wait'],
}
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_closed_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches information on multiple closed orders made by the user
see https://api.oceanex.pro/doc/v1/#order-status-get
:param str symbol: unified market symbol of the market orders were made in
:param int [since]: the earliest time in ms to fetch orders for
:param int [limit]: the maximum number of orde structures to retrieve
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns Order[]: a list of `order structures <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
request = {
'states': ['done', 'cancel'],
}
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches information on multiple orders made by the user
see https://api.oceanex.pro/doc/v1/#order-status-with-filters-post
:param str symbol: unified market symbol of the market orders were made in
:param int [since]: the earliest time in ms to fetch orders for
:param int [limit]: the maximum number of orde structures to retrieve
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns Order[]: a list of `order structures <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires a `symbol` argument')
await self.load_markets()
market = self.market(symbol)
states = self.safe_value(params, 'states', ['wait', 'done', 'cancel'])
query = self.omit(params, 'states')
request = {
'market': market['id'],
'states': states,
'need_price': 'True',
}
if limit is not None:
request['limit'] = limit
response = await self.privateGetOrdersFilter(self.extend(request, query))
data = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(data)):
orders = self.safe_value(data[i], 'orders', [])
status = self.parse_order_status(self.safe_value(data[i], 'state'))
parsedOrders = self.parse_orders(orders, market, since, limit, {'status': status})
result = self.array_concat(result, parsedOrders)
return result
def parse_ohlcv(self, ohlcv, market=None):
# [
# 1559232000,
# 8889.22,
# 9028.52,
# 8889.22,
# 9028.52
# 0.3121
# ]
return [
self.safe_timestamp(ohlcv, 0),
self.safe_number(ohlcv, 1),
self.safe_number(ohlcv, 2),
self.safe_number(ohlcv, 3),
self.safe_number(ohlcv, 4),
self.safe_number(ohlcv, 5),
]
async def fetch_ohlcv(self, symbol: str, timeframe='1m', since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
see https://api.oceanex.pro/doc/v1/#k-line-post
:param str symbol: unified symbol of the market to fetch OHLCV data for
:param str timeframe: the length of time each candle represents
:param int [since]: timestamp in ms of the earliest candle to fetch
:param int [limit]: the maximum amount of candles to fetch
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns int[][]: A list of candles ordered, open, high, low, close, volume
"""
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'period': self.safe_string(self.timeframes, timeframe, timeframe),
}
if since is not None:
request['timestamp'] = since
if limit is not None:
request['limit'] = limit
response = await self.publicPostK(self.extend(request, params))
ohlcvs = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(ohlcvs, market, timeframe, since, limit)
def parse_order(self, order, market=None):
#
# {
# "created_at": "2019-01-18T00:38:18Z",
# "trades_count": 0,
# "remaining_volume": "0.2",
# "price": "1001.0",
# "created_on": "1547771898",
# "side": "buy",
# "volume": "0.2",
# "state": "wait",
# "ord_type": "limit",
# "avg_price": "0.0",
# "executed_volume": "0.0",
# "id": 473797,
# "market": "veteth"
# }
#
status = self.parse_order_status(self.safe_value(order, 'state'))
marketId = self.safe_string_2(order, 'market', 'market_id')
symbol = self.safe_symbol(marketId, market)
timestamp = self.safe_timestamp(order, 'created_on')
if timestamp is None:
timestamp = self.parse8601(self.safe_string(order, 'created_at'))
price = self.safe_string(order, 'price')
average = self.safe_string(order, 'avg_price')
amount = self.safe_string(order, 'volume')
remaining = self.safe_string(order, 'remaining_volume')
filled = self.safe_string(order, 'executed_volume')
return self.safe_order({
'info': order,
'id': self.safe_string(order, 'id'),
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': self.safe_value(order, 'ord_type'),
'timeInForce': None,
'postOnly': None,
'side': self.safe_value(order, 'side'),
'price': price,
'stopPrice': None,
'triggerPrice': None,
'average': average,
'amount': amount,
'remaining': remaining,
'filled': filled,
'status': status,
'cost': None,
'trades': None,
'fee': None,
}, market)
def parse_order_status(self, status):
statuses = {
'wait': 'open',
'done': 'closed',
'cancel': 'canceled',
}
return self.safe_string(statuses, status, status)
async def create_orders(self, symbol: str, orders, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
'orders': orders,
}
# orders: [{"side":"buy", "volume":.2, "price":1001}, {"side":"sell", "volume":0.2, "price":1002}]
response = await self.privatePostOrdersMulti(self.extend(request, params))
data = response['data']
return self.parse_orders(data)
async def cancel_order(self, id: str, symbol: Optional[str] = None, params={}):
"""
cancels an open order
see https://api.oceanex.pro/doc/v1/#cancel-order-post
:param str id: order id
:param str symbol: not used by oceanex cancelOrder()
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: An `order structure <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
await self.load_markets()
response = await self.privatePostOrderDelete(self.extend({'id': id}, params))
data = self.safe_value(response, 'data')
return self.parse_order(data)
async def cancel_orders(self, ids, symbol: Optional[str] = None, params={}):
"""
cancel multiple orders
see https://api.oceanex.pro/doc/v1/#cancel-multiple-orders-post
:param str[] ids: order ids
:param str symbol: not used by oceanex cancelOrders()
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict: an list of `order structures <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
await self.load_markets()
response = await self.privatePostOrderDeleteMulti(self.extend({'ids': ids}, params))
data = self.safe_value(response, 'data')
return self.parse_orders(data)
async def cancel_all_orders(self, symbol: Optional[str] = None, params={}):
"""
cancel all open orders
see https://api.oceanex.pro/doc/v1/#cancel-all-orders-post
:param str symbol: unified market symbol, only orders in the market of self symbol are cancelled when symbol is not None
:param dict [params]: extra parameters specific to the oceanex api endpoint
:returns dict[]: a list of `order structures <https://github.com/ccxt/ccxt/wiki/Manual#order-structure>`
"""
await self.load_markets()
response = await self.privatePostOrdersClear(params)
data = self.safe_value(response, 'data')
return self.parse_orders(data)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api']['rest'] + '/' + self.version + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if path == 'tickers_multi' or path == 'order_book/multi':
request = '?'
markets = self.safe_value(params, 'markets')
for i in range(0, len(markets)):
request += 'markets[]=' + markets[i] + '&'
limit = self.safe_value(params, 'limit')
if limit is not None:
request += 'limit=' + limit
url += request
elif query:
url += '?' + self.urlencode(query)
elif api == 'private':
self.check_required_credentials()
request = {
'uid': self.apiKey,
'data': query,
}
# to set the private key:
# fs = require('fs')
# exchange.secret = fs.readFileSync('oceanex.pem', 'utf8')
jwt_token = self.jwt(request, self.encode(self.secret), 'sha256', True)
url += '?user_jwt=' + jwt_token
headers = {'Content-Type': 'application/json'}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
#
# {"code":1011,"message":"This IP 'x.x.x.x' is not allowed","data":{}}
#
if response is None:
return None
errorCode = self.safe_string(response, 'code')
message = self.safe_string(response, 'message')
if (errorCode is not None) and (errorCode != '0'):
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['codes'], errorCode, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
raise ExchangeError(feedback)
return None
|
a185dc1da0e3dfa0d0fcdeac153bbf683550d7e0
|
39b021eabbb8e3be1734cf92fd641965a796b0eb
|
/deepchem/data/tests/test_fastq_loader.py
|
09ffb9705e66f49c3ce6d0251dadf1a2255dacca
|
[
"MIT"
] |
permissive
|
deepchem/deepchem
|
066cbf42316b2f6bec0166727e0264a485d5266f
|
ee6e67ebcf7bf04259cf13aff6388e2b791fea3d
|
refs/heads/master
| 2023-09-02T01:32:17.860111
| 2023-08-31T18:49:00
| 2023-08-31T18:49:00
| 43,098,215
| 4,876
| 1,905
|
MIT
| 2023-09-14T19:10:44
| 2015-09-24T23:20:28
|
Python
|
UTF-8
|
Python
| false
| false
| 760
|
py
|
test_fastq_loader.py
|
import os
import unittest
from deepchem.data.data_loader import FASTQLoader
class TestFASTQLoader(unittest.TestCase):
"""
Test FASTQLoader
"""
def setUp(self):
super(TestFASTQLoader, self).setUp()
self.current_dir = os.path.dirname(os.path.abspath(__file__))
def test_fastq_one_hot(self):
input_file = os.path.join(self.current_dir, "sample1.fastq")
loader = FASTQLoader()
sequences = loader.create_dataset(input_file)
# Default file contains 4 sequences each of length 192 (excluding the end of line character '\n').
# The one-hot encoding turns base-pairs into vectors of length 5 (ATCGN).
# Expected shape is now (4, 192, 5)
assert sequences.X.shape == (4, 192, 5)
|
ea02349995af321aa6f6d89bddd0b0a80bf6cb9e
|
a61bf859ceeb1ba98de3863225e07b29e1d7ce8a
|
/thonny/plugins/circuitpython/api_stubs/displayio/__init__.pyi
|
052eae95d73016c35b67235603b47bd9d02640f2
|
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
thonny/thonny
|
3974b1860703e8450b837863682117f525a886c6
|
8fc9f5c7cbbe1d1c82aa5503ec4b684e28aa608c
|
refs/heads/master
| 2023-08-31T03:04:34.685140
| 2023-08-24T11:38:36
| 2023-08-24T11:38:36
| 163,728,962
| 2,788
| 1,048
|
MIT
| 2023-08-10T18:59:37
| 2019-01-01T10:29:50
|
Python
|
UTF-8
|
Python
| false
| false
| 39,554
|
pyi
|
__init__.pyi
|
"""Native helpers for driving displays
The `displayio` module contains classes to manage display output
including synchronizing with refresh rates and partial updating.
For more a more thorough explanation and guide for using `displayio`, please
refer to `this Learn guide
<https://learn.adafruit.com/circuitpython-display-support-using-displayio>`_.
"""
from __future__ import annotations
import typing
from typing import Optional, Tuple, Union
import busio
import circuitpython_typing
import microcontroller
import paralleldisplay
import vectorio
from circuitpython_typing import ReadableBuffer, WriteableBuffer
def release_displays() -> None:
"""Releases any actively used displays so their buses and pins can be used again. This will also
release the builtin display on boards that have one. You will need to reinitialize it yourself
afterwards. This may take seconds to complete if an active EPaperDisplay is refreshing.
Use this once in your code.py if you initialize a display. Place it right before the
initialization so the display is active as long as possible."""
...
class Colorspace:
"""The colorspace for a `ColorConverter` to operate in"""
RGB888: Colorspace
"""The standard 24-bit colorspace. Bits 0-7 are blue, 8-15 are green, and 16-24 are red. (0xRRGGBB)"""
RGB565: Colorspace
"""The standard 16-bit colorspace. Bits 0-4 are blue, bits 5-10 are green, and 11-15 are red (0bRRRRRGGGGGGBBBBB)"""
RGB565_SWAPPED: Colorspace
"""The swapped 16-bit colorspace. First, the high and low 8 bits of the number are swapped, then they are interpreted as for RGB565"""
RGB555: Colorspace
"""The standard 15-bit colorspace. Bits 0-4 are blue, bits 5-9 are green, and 11-14 are red. The top bit is ignored. (0bxRRRRRGGGGGBBBBB)"""
RGB555_SWAPPED: Colorspace
"""The swapped 15-bit colorspace. First, the high and low 8 bits of the number are swapped, then they are interpreted as for RGB555"""
class Bitmap:
"""Stores values of a certain size in a 2D array
Bitmaps can be treated as read-only buffers. If the number of bits in a pixel is 8, 16, or 32; and the number of bytes
per row is a multiple of 4, then the resulting memoryview will correspond directly with the bitmap's contents. Otherwise,
the bitmap data is packed into the memoryview with unspecified padding.
A Bitmap can be treated as a buffer, allowing its content to be
viewed and modified using e.g., with ``ulab.numpy.frombuffer``,
but the `displayio.Bitmap.dirty` method must be used to inform
displayio when a bitmap was modified through the buffer interface.
`bitmaptools.arrayblit` can also be useful to move data efficiently
into a Bitmap."""
def __init__(self, width: int, height: int, value_count: int) -> None:
"""Create a Bitmap object with the given fixed size. Each pixel stores a value that is used to
index into a corresponding palette. This enables differently colored sprites to share the
underlying Bitmap. value_count is used to minimize the memory used to store the Bitmap.
:param int width: The number of values wide
:param int height: The number of values high
:param int value_count: The number of possible pixel values."""
...
width: int
"""Width of the bitmap. (read only)"""
height: int
"""Height of the bitmap. (read only)"""
def __getitem__(self, index: Union[Tuple[int, int], int]) -> int:
"""Returns the value at the given index. The index can either be an x,y tuple or an int equal
to ``y * width + x``.
This allows you to::
print(bitmap[0,1])"""
...
def __setitem__(self, index: Union[Tuple[int, int], int], value: int) -> None:
"""Sets the value at the given index. The index can either be an x,y tuple or an int equal
to ``y * width + x``.
This allows you to::
bitmap[0,1] = 3"""
...
def blit(
self,
x: int,
y: int,
source_bitmap: Bitmap,
*,
x1: int,
y1: int,
x2: int,
y2: int,
skip_index: int,
) -> None:
"""Inserts the source_bitmap region defined by rectangular boundaries
(x1,y1) and (x2,y2) into the bitmap at the specified (x,y) location.
:param int x: Horizontal pixel location in bitmap where source_bitmap upper-left
corner will be placed
:param int y: Vertical pixel location in bitmap where source_bitmap upper-left
corner will be placed
:param bitmap source_bitmap: Source bitmap that contains the graphical region to be copied
:param int x1: Minimum x-value for rectangular bounding box to be copied from the source bitmap
:param int y1: Minimum y-value for rectangular bounding box to be copied from the source bitmap
:param int x2: Maximum x-value (exclusive) for rectangular bounding box to be copied from the source bitmap
:param int y2: Maximum y-value (exclusive) for rectangular bounding box to be copied from the source bitmap
:param int skip_index: bitmap palette index in the source that will not be copied,
set to None to copy all pixels"""
...
def fill(self, value: int) -> None:
"""Fills the bitmap with the supplied palette index value."""
...
def dirty(self, x1: int = 0, y1: int = 0, x2: int = -1, y2: int = -1) -> None:
"""Inform displayio of bitmap updates done via the buffer
protocol.
:param int x1: Minimum x-value for rectangular bounding box to be considered as modified
:param int y1: Minimum y-value for rectangular bounding box to be considered as modified
:param int x2: Maximum x-value (exclusive) for rectangular bounding box to be considered as modified
:param int y2: Maximum y-value (exclusive) for rectangular bounding box to be considered as modified
If x1 or y1 are not specified, they are taken as 0. If x2 or y2
are not specified, or are given as -1, they are taken as the width
and height of the image. Thus, calling dirty() with the
default arguments treats the whole bitmap as modified.
When a bitmap is modified through the buffer protocol, the
display will not be properly updated unless the bitmap is
notified of the "dirty rectangle" that encloses all modified
pixels."""
...
def deinit(self) -> None:
"""Release resources allocated by Bitmap."""
...
class ColorConverter:
"""Converts one color format to another."""
def __init__(
self, *, input_colorspace: Colorspace = Colorspace.RGB888, dither: bool = False
) -> None:
"""Create a ColorConverter object to convert color formats.
:param Colorspace colorspace: The source colorspace, one of the Colorspace constants
:param bool dither: Adds random noise to dither the output image"""
...
def convert(self, color: int) -> int:
"""Converts the given color to RGB565 according to the Colorspace"""
...
dither: bool
"""When `True` the ColorConverter dithers the output by adding random noise when
truncating to display bitdepth"""
def make_transparent(self, color: int) -> None:
"""Set the transparent color or index for the ColorConverter. This will
raise an Exception if there is already a selected transparent index.
:param int color: The color to be transparent"""
def make_opaque(self, color: int) -> None:
"""Make the ColorConverter be opaque and have no transparent pixels.
:param int color: [IGNORED] Use any value"""
_DisplayBus = Union["FourWire", "paralleldisplay.ParallelBus", "I2CDisplay"]
""":py:class:`FourWire`, :py:class:`paralleldisplay.ParallelBus` or :py:class:`I2CDisplay`"""
class Display:
"""Manage updating a display over a display bus
This initializes a display and connects it into CircuitPython. Unlike other
objects in CircuitPython, Display objects live until `displayio.release_displays()`
is called. This is done so that CircuitPython can use the display itself.
Most people should not use this class directly. Use a specific display driver instead that will
contain the initialization sequence at minimum."""
def __init__(
self,
display_bus: _DisplayBus,
init_sequence: ReadableBuffer,
*,
width: int,
height: int,
colstart: int = 0,
rowstart: int = 0,
rotation: int = 0,
color_depth: int = 16,
grayscale: bool = False,
pixels_in_byte_share_row: bool = True,
bytes_per_cell: int = 1,
reverse_pixels_in_byte: bool = False,
set_column_command: int = 0x2A,
set_row_command: int = 0x2B,
write_ram_command: int = 0x2C,
backlight_pin: Optional[microcontroller.Pin] = None,
brightness_command: Optional[int] = None,
brightness: float = 1.0,
single_byte_bounds: bool = False,
data_as_commands: bool = False,
auto_refresh: bool = True,
native_frames_per_second: int = 60,
backlight_on_high: bool = True,
SH1107_addressing: bool = False,
) -> None:
r"""Create a Display object on the given display bus (`FourWire`, `ParallelBus` or `I2CDisplay`).
The ``init_sequence`` is bitpacked to minimize the ram impact. Every command begins with a
command byte followed by a byte to determine the parameter count and delay. When the top bit
of the second byte is 1 (0x80), a delay will occur after the command parameters are sent.
The remaining 7 bits are the parameter count excluding any delay byte. The bytes following
are the parameters. When the delay bit is set, a single byte after the parameters specifies
the delay duration in milliseconds. The value 0xff will lead to an extra long 500 ms delay
instead of 255 ms. The next byte will begin a new command definition.
Here is an example:
.. code-block:: python
init_sequence = (b"\xe1\x0f\x00\x0E\x14\x03\x11\x07\x31\xC1\x48\x08\x0F\x0C\x31\x36\x0F" # Set Gamma
b"\x11\x80\x78"# Exit Sleep then delay 0x78 (120ms)
b"\x29\x81\xaa\x78"# Display on then delay 0x78 (120ms)
)
display = displayio.Display(display_bus, init_sequence, width=320, height=240)
The first command is 0xe1 with 15 (0xf) parameters following. The second is 0x11 with 0
parameters and a 120ms (0x78) delay. The third command is 0x29 with one parameter 0xaa and a
120ms delay (0x78). Multiple byte literals (b"") are merged together on load. The parens
are needed to allow byte literals on subsequent lines.
The initialization sequence should always leave the display memory access inline with the scan
of the display to minimize tearing artifacts.
:param display_bus: The bus that the display is connected to
:type _DisplayBus: FourWire, ParallelBus or I2CDisplay
:param ~circuitpython_typing.ReadableBuffer init_sequence: Byte-packed initialization sequence.
:param int width: Width in pixels
:param int height: Height in pixels
:param int colstart: The index if the first visible column
:param int rowstart: The index if the first visible row
:param int rotation: The rotation of the display in degrees clockwise. Must be in 90 degree increments (0, 90, 180, 270)
:param int color_depth: The number of bits of color per pixel transmitted. (Some displays
support 18 bit but 16 is easier to transmit. The last bit is extrapolated.)
:param bool grayscale: True if the display only shows a single color.
:param bool pixels_in_byte_share_row: True when pixels are less than a byte and a byte includes pixels from the same row of the display. When False, pixels share a column.
:param int bytes_per_cell: Number of bytes per addressable memory location when color_depth < 8. When greater than one, bytes share a row or column according to pixels_in_byte_share_row.
:param bool reverse_pixels_in_byte: Reverses the pixel order within each byte when color_depth < 8. Does not apply across multiple bytes even if there is more than one byte per cell (bytes_per_cell.)
:param bool reverse_bytes_in_word: Reverses the order of bytes within a word when color_depth == 16
:param int set_column_command: Command used to set the start and end columns to update
:param int set_row_command: Command used so set the start and end rows to update
:param int write_ram_command: Command used to write pixels values into the update region. Ignored if data_as_commands is set.
:param microcontroller.Pin backlight_pin: Pin connected to the display's backlight
:param int brightness_command: Command to set display brightness. Usually available in OLED controllers.
:param float brightness: Initial display brightness.
:param bool single_byte_bounds: Display column and row commands use single bytes
:param bool data_as_commands: Treat all init and boundary data as SPI commands. Certain displays require this.
:param bool auto_refresh: Automatically refresh the screen
:param int native_frames_per_second: Number of display refreshes per second that occur with the given init_sequence.
:param bool backlight_on_high: If True, pulling the backlight pin high turns the backlight on.
:param bool SH1107_addressing: Special quirk for SH1107, use upper/lower column set and page set
:param int set_vertical_scroll: This parameter is accepted but ignored for backwards compatibility. It will be removed in a future release.
:param int backlight_pwm_frequency: The frequency to use to drive the PWM for backlight brightness control. Default is 50000.
"""
...
def show(self, group: Group) -> None:
"""
.. note:: `show()` is deprecated and will be removed in CircuitPython 9.0.0.
Use ``.root_group = group`` instead.
Switches to displaying the given group of layers. When group is None, the default
CircuitPython terminal will be shown.
:param Group group: The group to show.
"""
...
def refresh(
self,
*,
target_frames_per_second: Optional[int] = None,
minimum_frames_per_second: int = 0,
) -> bool:
"""When auto_refresh is off, and :py:attr:`target_frames_per_second` is not `None` this waits
for the target frame rate and then refreshes the display,
returning `True`. If the call has taken too long since the last refresh call for the given
target frame rate, then the refresh returns `False` immediately without updating the screen to
hopefully help getting caught up.
If the time since the last successful refresh is below the minimum frame rate, then an
exception will be raised. The default :py:attr:`minimum_frames_per_second` of 0 disables this behavior.
When auto_refresh is off, and :py:attr:`target_frames_per_second` is `None` this
will update the display immediately.
When auto_refresh is on, updates the display immediately. (The display will also update
without calls to this.)
:param Optional[int] target_frames_per_second: The target frame rate that :py:func:`refresh` should try to
achieve. Set to `None` for immediate refresh.
:param int minimum_frames_per_second: The minimum number of times the screen should be updated per second.
"""
...
auto_refresh: bool
"""True when the display is refreshed automatically."""
brightness: float
"""The brightness of the display as a float. 0.0 is off and 1.0 is full brightness."""
width: int
"""Gets the width of the board"""
height: int
"""Gets the height of the board"""
rotation: int
"""The rotation of the display as an int in degrees."""
bus: _DisplayBus
"""The bus being used by the display"""
root_group: Group
"""The root group on the display.
If the root group is set to ``None``, the default CircuitPython terminal will be shown.
"""
def fill_row(self, y: int, buffer: WriteableBuffer) -> WriteableBuffer:
"""Extract the pixels from a single row
:param int y: The top edge of the area
:param ~circuitpython_typing.WriteableBuffer buffer: The buffer in which to place the pixel data
"""
...
class EPaperDisplay:
"""Manage updating an epaper display over a display bus
This initializes an epaper display and connects it into CircuitPython. Unlike other
objects in CircuitPython, EPaperDisplay objects live until `displayio.release_displays()`
is called. This is done so that CircuitPython can use the display itself.
Most people should not use this class directly. Use a specific display driver instead that will
contain the startup and shutdown sequences at minimum."""
def __init__(
self,
display_bus: _DisplayBus,
start_sequence: ReadableBuffer,
stop_sequence: ReadableBuffer,
*,
width: int,
height: int,
ram_width: int,
ram_height: int,
colstart: int = 0,
rowstart: int = 0,
rotation: int = 0,
set_column_window_command: Optional[int] = None,
set_row_window_command: Optional[int] = None,
set_current_column_command: Optional[int] = None,
set_current_row_command: Optional[int] = None,
write_black_ram_command: int,
black_bits_inverted: bool = False,
write_color_ram_command: Optional[int] = None,
color_bits_inverted: bool = False,
highlight_color: int = 0x000000,
refresh_display_command: Union[int, circuitpython_typing.ReadableBuffer],
refresh_time: float = 40,
busy_pin: Optional[microcontroller.Pin] = None,
busy_state: bool = True,
seconds_per_frame: float = 180,
always_toggle_chip_select: bool = False,
grayscale: bool = False,
advanced_color_epaper: bool = False,
two_byte_sequence_length: bool = False,
start_up_time: float = 0,
) -> None:
"""Create a EPaperDisplay object on the given display bus (`displayio.FourWire` or `paralleldisplay.ParallelBus`).
The ``start_sequence`` and ``stop_sequence`` are bitpacked to minimize the ram impact. Every
command begins with a command byte followed by a byte to determine the parameter count and
delay. When the top bit of the second byte is 1 (0x80), a delay will occur after the command
parameters are sent. The remaining 7 bits are the parameter count excluding any delay
byte. The bytes following are the parameters. When the delay bit is set, a single byte after
the parameters specifies the delay duration in milliseconds. The value 0xff will lead to an
extra long 500 ms delay instead of 255 ms. The next byte will begin a new command definition.
:param display_bus: The bus that the display is connected to
:type _DisplayBus: displayio.FourWire or paralleldisplay.ParallelBus
:param ~circuitpython_typing.ReadableBuffer start_sequence: Byte-packed command sequence.
:param ~circuitpython_typing.ReadableBuffer stop_sequence: Byte-packed command sequence.
:param int width: Width in pixels
:param int height: Height in pixels
:param int ram_width: RAM width in pixels
:param int ram_height: RAM height in pixels
:param int colstart: The index if the first visible column
:param int rowstart: The index if the first visible row
:param int rotation: The rotation of the display in degrees clockwise. Must be in 90 degree increments (0, 90, 180, 270)
:param int set_column_window_command: Command used to set the start and end columns to update
:param int set_row_window_command: Command used so set the start and end rows to update
:param int set_current_column_command: Command used to set the current column location
:param int set_current_row_command: Command used to set the current row location
:param int write_black_ram_command: Command used to write pixels values into the update region
:param bool black_bits_inverted: True if 0 bits are used to show black pixels. Otherwise, 1 means to show black.
:param int write_color_ram_command: Command used to write pixels values into the update region
:param bool color_bits_inverted: True if 0 bits are used to show the color. Otherwise, 1 means to show color.
:param int highlight_color: RGB888 of source color to highlight with third ePaper color.
:param int refresh_display_command: Command used to start a display refresh. Single int or byte-packed command sequence
:param float refresh_time: Time it takes to refresh the display before the stop_sequence should be sent. Ignored when busy_pin is provided.
:param microcontroller.Pin busy_pin: Pin used to signify the display is busy
:param bool busy_state: State of the busy pin when the display is busy
:param float seconds_per_frame: Minimum number of seconds between screen refreshes
:param bool always_toggle_chip_select: When True, chip select is toggled every byte
:param bool grayscale: When true, the color ram is the low bit of 2-bit grayscale
:param bool advanced_color_epaper: When true, the display is a 7-color advanced color epaper (ACeP)
:param bool two_byte_sequence_length: When true, use two bytes to define sequence length
:param float start_up_time: Time to wait after reset before sending commands
"""
...
def show(self, group: Group) -> None:
"""
.. note:: `show()` is deprecated and will be removed in CircuitPython 9.0.0.
Use ``.root_group = group`` instead.
Switches to displaying the given group of layers. When group is None, the default
CircuitPython terminal will be shown.
:param Group group: The group to show."""
...
def update_refresh_mode(
self, start_sequence: ReadableBuffer, seconds_per_frame: float = 180
) -> None:
"""Updates the ``start_sequence`` and ``seconds_per_frame`` parameters to enable
varying the refresh mode of the display."""
def refresh(self) -> None:
"""Refreshes the display immediately or raises an exception if too soon. Use
``time.sleep(display.time_to_refresh)`` to sleep until a refresh can occur."""
...
time_to_refresh: float
"""Time, in fractional seconds, until the ePaper display can be refreshed."""
busy: bool
"""True when the display is refreshing. This uses the ``busy_pin`` when available or the
``refresh_time`` otherwise."""
width: int
"""Gets the width of the display in pixels"""
height: int
"""Gets the height of the display in pixels"""
rotation: int
"""The rotation of the display as an int in degrees."""
bus: _DisplayBus
"""The bus being used by the display"""
root_group: Group
"""The root group on the epaper display.
If the root group is set to ``None``, the default CircuitPython terminal will be shown.
"""
class FourWire:
"""Manage updating a display over SPI four wire protocol in the background while Python code runs.
It doesn't handle display initialization."""
def __init__(
self,
spi_bus: busio.SPI,
*,
command: Optional[microcontroller.Pin],
chip_select: microcontroller.Pin,
reset: Optional[microcontroller.Pin] = None,
baudrate: int = 24000000,
polarity: int = 0,
phase: int = 0,
) -> None:
"""Create a FourWire object associated with the given pins.
The SPI bus and pins are then in use by the display until `displayio.release_displays()` is
called even after a reload. (It does this so CircuitPython can use the display after your code
is done.) So, the first time you initialize a display bus in code.py you should call
:py:func:`displayio.release_displays` first, otherwise it will error after the first code.py run.
If the ``command`` pin is not specified, a 9-bit SPI mode will be simulated by adding a
data/command bit to every bit being transmitted, and splitting the resulting data back
into 8-bit bytes for transmission. The extra bits that this creates at the end are ignored
by the receiving device.
:param busio.SPI spi_bus: The SPI bus that make up the clock and data lines
:param microcontroller.Pin command: Data or command pin. When None, 9-bit SPI is simulated.
:param microcontroller.Pin chip_select: Chip select pin
:param microcontroller.Pin reset: Reset pin. When None only software reset can be used
:param int baudrate: Maximum baudrate in Hz for the display on the bus
:param int polarity: the base state of the clock line (0 or 1)
:param int phase: the edge of the clock that data is captured. First (0)
or second (1). Rising or falling depends on clock polarity."""
...
def reset(self) -> None:
"""Performs a hardware reset via the reset pin. Raises an exception if called when no reset pin
is available."""
...
def send(
self, command: int, data: ReadableBuffer, *, toggle_every_byte: bool = False
) -> None:
"""Sends the given command value followed by the full set of data. Display state, such as
vertical scroll, set via ``send`` may or may not be reset once the code is done.
"""
...
class Group:
"""Manage a group of sprites and groups and how they are inter-related."""
def __init__(self, *, scale: int = 1, x: int = 0, y: int = 0) -> None:
"""Create a Group of a given size and scale. Scale is in one dimension. For example, scale=2
leads to a layer's pixel being 2x2 pixels when in the group.
:param int scale: Scale of layer pixels in one dimension.
:param int x: Initial x position within the parent.
:param int y: Initial y position within the parent."""
...
hidden: bool
"""True when the Group and all of it's layers are not visible. When False, the Group's layers
are visible if they haven't been hidden."""
scale: int
"""Scales each pixel within the Group in both directions. For example, when scale=2 each pixel
will be represented by 2x2 pixels."""
x: int
"""X position of the Group in the parent."""
y: int
"""Y position of the Group in the parent."""
def append(
self,
layer: Union[
vectorio.Circle, vectorio.Rectangle, vectorio.Polygon, Group, TileGrid
],
) -> None:
"""Append a layer to the group. It will be drawn above other layers."""
...
def insert(
self,
index: int,
layer: Union[
vectorio.Circle, vectorio.Rectangle, vectorio.Polygon, Group, TileGrid
],
) -> None:
"""Insert a layer into the group."""
...
def index(
self,
layer: Union[
vectorio.Circle, vectorio.Rectangle, vectorio.Polygon, Group, TileGrid
],
) -> int:
"""Returns the index of the first copy of layer. Raises ValueError if not found."""
...
def pop(
self, i: int = -1
) -> Union[vectorio.Circle, vectorio.Rectangle, vectorio.Polygon, Group, TileGrid]:
"""Remove the ith item and return it."""
...
def remove(
self,
layer: Union[
vectorio.Circle, vectorio.Rectangle, vectorio.Polygon, Group, TileGrid
],
) -> None:
"""Remove the first copy of layer. Raises ValueError if it is not present."""
...
def __bool__(self) -> bool: ...
def __len__(self) -> int:
"""Returns the number of layers in a Group"""
...
def __getitem__(
self, index: int
) -> Union[vectorio.Circle, vectorio.Rectangle, vectorio.Polygon, Group, TileGrid]:
"""Returns the value at the given index.
This allows you to::
print(group[0])"""
...
def __setitem__(
self,
index: int,
value: Union[
vectorio.Circle, vectorio.Rectangle, vectorio.Polygon, Group, TileGrid
],
) -> None:
"""Sets the value at the given index.
This allows you to::
group[0] = sprite"""
...
def __delitem__(self, index: int) -> None:
"""Deletes the value at the given index.
This allows you to::
del group[0]"""
...
def sort(self, key: function, reverse: bool) -> None:
"""Sort the members of the group."""
...
class I2CDisplay:
"""Manage updating a display over I2C in the background while Python code runs.
It doesn't handle display initialization."""
def __init__(
self,
i2c_bus: busio.I2C,
*,
device_address: int,
reset: Optional[microcontroller.Pin] = None,
) -> None:
"""Create a I2CDisplay object associated with the given I2C bus and reset pin.
The I2C bus and pins are then in use by the display until `displayio.release_displays()` is
called even after a reload. (It does this so CircuitPython can use the display after your code
is done.) So, the first time you initialize a display bus in code.py you should call
:py:func:`displayio.release_displays` first, otherwise it will error after the first code.py run.
:param busio.I2C i2c_bus: The I2C bus that make up the clock and data lines
:param int device_address: The I2C address of the device
:param microcontroller.Pin reset: Reset pin. When None only software reset can be used
"""
...
def reset(self) -> None:
"""Performs a hardware reset via the reset pin. Raises an exception if called when no reset pin
is available."""
...
def send(self, command: int, data: ReadableBuffer) -> None:
"""Sends the given command value followed by the full set of data. Display state, such as
vertical scroll, set via ``send`` may or may not be reset once the code is done.
"""
...
class OnDiskBitmap:
"""Loads values straight from disk. This minimizes memory use but can lead to
much slower pixel load times. These load times may result in frame tearing where only part of
the image is visible.
It's easiest to use on a board with a built in display such as the `Hallowing M0 Express
<https://www.adafruit.com/product/3900>`_.
.. code-block:: Python
import board
import displayio
import time
import pulseio
board.DISPLAY.brightness = 0
splash = displayio.Group()
board.DISPLAY.show(splash)
odb = displayio.OnDiskBitmap('/sample.bmp')
face = displayio.TileGrid(odb, pixel_shader=odb.pixel_shader)
splash.append(face)
# Wait for the image to load.
board.DISPLAY.refresh(target_frames_per_second=60)
# Fade up the backlight
for i in range(100):
board.DISPLAY.brightness = 0.01 * i
time.sleep(0.05)
# Wait forever
while True:
pass"""
def __init__(self, file: Union[str, typing.BinaryIO]) -> None:
"""Create an OnDiskBitmap object with the given file.
:param file file: The name of the bitmap file. For backwards compatibility, a file opened in binary mode may also be passed.
Older versions of CircuitPython required a file opened in binary
mode. CircuitPython 7.0 modified OnDiskBitmap so that it takes a
filename instead, and opens the file internally. A future version
of CircuitPython will remove the ability to pass in an opened file.
"""
...
width: int
"""Width of the bitmap. (read only)"""
height: int
"""Height of the bitmap. (read only)"""
pixel_shader: Union[ColorConverter, Palette]
"""The image's pixel_shader. The type depends on the underlying
bitmap's structure. The pixel shader can be modified (e.g., to set the
transparent pixel or, for palette shaded images, to update the palette.)"""
class Palette:
"""Map a pixel palette_index to a full color. Colors are transformed to the display's format internally to
save memory."""
def __init__(self, color_count: int, *, dither: bool = False) -> None:
"""Create a Palette object to store a set number of colors.
:param int color_count: The number of colors in the Palette
:param bool dither: When true, dither the RGB color before converting to the display's color space
"""
...
dither: bool
"""When `True` the Palette dithers the output color by adding random
noise when truncating to display bitdepth"""
def __bool__(self) -> bool: ...
def __len__(self) -> int:
"""Returns the number of colors in a Palette"""
...
def __getitem__(self, index: int) -> Optional[int]:
r"""Return the pixel color at the given index as an integer."""
...
def __setitem__(
self, index: int, value: Union[int, ReadableBuffer, Tuple[int, int, int]]
) -> None:
r"""Sets the pixel color at the given index. The index should be an integer in the range 0 to color_count-1.
The value argument represents a color, and can be from 0x000000 to 0xFFFFFF (to represent an RGB value).
Value can be an int, bytes (3 bytes (RGB) or 4 bytes (RGB + pad byte)), bytearray,
or a tuple or list of 3 integers.
This allows you to::
palette[0] = 0xFFFFFF # set using an integer
palette[1] = b'\xff\xff\x00' # set using 3 bytes
palette[2] = b'\xff\xff\x00\x00' # set using 4 bytes
palette[3] = bytearray(b'\x00\x00\xFF') # set using a bytearay of 3 or 4 bytes
palette[4] = (10, 20, 30) # set using a tuple of 3 integers"""
...
def make_transparent(self, palette_index: int) -> None: ...
def make_opaque(self, palette_index: int) -> None: ...
def is_transparent(self, palette_index: int) -> bool:
"""Returns `True` if the palette index is transparent. Returns `False` if opaque."""
...
class Shape:
"""Represents a shape made by defining boundaries that may be mirrored."""
def __init__(
self, width: int, height: int, *, mirror_x: bool = False, mirror_y: bool = False
) -> None:
"""Create a Shape object with the given fixed size. Each pixel is one bit and is stored by the
column boundaries of the shape on each row. Each row's boundary defaults to the full row.
:param int width: The number of pixels wide
:param int height: The number of pixels high
:param bool mirror_x: When true the left boundary is mirrored to the right.
:param bool mirror_y: When true the top boundary is mirrored to the bottom."""
...
def set_boundary(self, y: int, start_x: int, end_x: int) -> None:
"""Loads pre-packed data into the given row."""
...
class TileGrid:
"""A grid of tiles sourced out of one bitmap
Position a grid of tiles sourced from a bitmap and pixel_shader combination. Multiple grids
can share bitmaps and pixel shaders.
A single tile grid is also known as a Sprite."""
def __init__(
self,
bitmap: Union[Bitmap, OnDiskBitmap, Shape],
*,
pixel_shader: Union[ColorConverter, Palette],
width: int = 1,
height: int = 1,
tile_width: Optional[int] = None,
tile_height: Optional[int] = None,
default_tile: int = 0,
x: int = 0,
y: int = 0,
) -> None:
"""Create a TileGrid object. The bitmap is source for 2d pixels. The pixel_shader is used to
convert the value and its location to a display native pixel color. This may be a simple color
palette lookup, a gradient, a pattern or a color transformer.
To save RAM usage, tile values are only allowed in the range from 0 to 255 inclusive (single byte values).
tile_width and tile_height match the height of the bitmap by default.
:param Bitmap,OnDiskBitmap,Shape bitmap: The bitmap storing one or more tiles.
:param ColorConverter,Palette pixel_shader: The pixel shader that produces colors from values
:param int width: Width of the grid in tiles.
:param int height: Height of the grid in tiles.
:param int tile_width: Width of a single tile in pixels. Defaults to the full Bitmap and must evenly divide into the Bitmap's dimensions.
:param int tile_height: Height of a single tile in pixels. Defaults to the full Bitmap and must evenly divide into the Bitmap's dimensions.
:param int default_tile: Default tile index to show.
:param int x: Initial x position of the left edge within the parent.
:param int y: Initial y position of the top edge within the parent."""
hidden: bool
"""True when the TileGrid is hidden. This may be False even when a part of a hidden Group."""
x: int
"""X position of the left edge in the parent."""
y: int
"""Y position of the top edge in the parent."""
width: int
"""Width of the tilegrid in tiles."""
height: int
"""Height of the tilegrid in tiles."""
tile_width: int
"""Width of a single tile in pixels."""
tile_height: int
"""Height of a single tile in pixels."""
flip_x: bool
"""If true, the left edge rendered will be the right edge of the right-most tile."""
flip_y: bool
"""If true, the top edge rendered will be the bottom edge of the bottom-most tile."""
transpose_xy: bool
"""If true, the TileGrid's axis will be swapped. When combined with mirroring, any 90 degree
rotation can be achieved along with the corresponding mirrored version."""
def contains(self, touch_tuple: tuple) -> bool:
"""Returns True if the first two values in ``touch_tuple`` represent an x,y coordinate
inside the tilegrid rectangle bounds."""
pixel_shader: Union[ColorConverter, Palette]
"""The pixel shader of the tilegrid."""
bitmap: Union[Bitmap, OnDiskBitmap, Shape]
"""The bitmap of the tilegrid."""
def __getitem__(self, index: Union[Tuple[int, int], int]) -> int:
"""Returns the tile index at the given index. The index can either be an x,y tuple or an int equal
to ``y * width + x``.
This allows you to::
print(grid[0])"""
...
def __setitem__(self, index: Union[Tuple[int, int], int], value: int) -> None:
"""Sets the tile index at the given index. The index can either be an x,y tuple or an int equal
to ``y * width + x``.
This allows you to::
grid[0] = 10
or::
grid[0,0] = 10"""
...
|
36033c1a6815837630be4a3484ffe2d0f82cf547
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/PhysicsTools/PatAlgos/python/cleaningLayer1/electronCleaner_cfi.py
|
138f775a6b93260eaef5713c47572435fe08aa64
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 955
|
py
|
electronCleaner_cfi.py
|
import FWCore.ParameterSet.Config as cms
cleanPatElectrons = cms.EDProducer("PATElectronCleaner",
## pat electron input source
src = cms.InputTag("selectedPatElectrons"),
# preselection (any string-based cut for pat::Electron)
preselection = cms.string(''),
# overlap checking configurables
checkOverlaps = cms.PSet(
muons = cms.PSet(
src = cms.InputTag("cleanPatMuons"),
algorithm = cms.string("byDeltaR"),
preselection = cms.string(""), # don't preselect the muons
deltaR = cms.double(0.3),
checkRecoComponents = cms.bool(False), # don't check if they share some AOD object ref
pairCut = cms.string(""),
requireNoOverlaps = cms.bool(False), # overlaps don't cause the electron to be discared
)
),
# finalCut (any string-based cut for pat::Electron)
finalCut = cms.string(''),
)
|
5612206980eaea27bf3874dacdfd802fef10e5a8
|
6b27c39edc10b1353104043b7a523f4981c99ef2
|
/pytype/tools/merge_pyi/test_data/stars.pyi
|
6352a7a782d685a664953c07ee2a940a1985de04
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
google/pytype
|
ad0ff0b6c1083b4f0a1af1747869d422f2b5f4d8
|
bda0b9547af9a084bb2bd1427f58dcde968e48b5
|
refs/heads/main
| 2023-08-26T17:52:23.546035
| 2023-08-24T22:48:00
| 2023-08-24T22:48:00
| 32,483,713
| 4,595
| 367
|
NOASSERTION
| 2023-09-13T04:40:45
| 2015-03-18T20:52:08
|
Python
|
UTF-8
|
Python
| false
| false
| 24
|
pyi
|
stars.pyi
|
def f1(x:e1) -> r1: ...
|
fb92dca8f5259e58467724c4b043b6195e075df5
|
eb9f655206c43c12b497c667ba56a0d358b6bc3a
|
/python/testData/refactoring/move/withImportedTypeComments/before/src/src.py
|
bf4fb6b3ffddc0c64bad4f9665b171b75e8f390f
|
[
"Apache-2.0"
] |
permissive
|
JetBrains/intellij-community
|
2ed226e200ecc17c037dcddd4a006de56cd43941
|
05dbd4575d01a213f3f4d69aa4968473f2536142
|
refs/heads/master
| 2023-09-03T17:06:37.560889
| 2023-09-03T11:51:00
| 2023-09-03T12:12:27
| 2,489,216
| 16,288
| 6,635
|
Apache-2.0
| 2023-09-12T07:41:58
| 2011-09-30T13:33:05
| null |
UTF-8
|
Python
| false
| false
| 291
|
py
|
src.py
|
import asyncio as aio
import datetime
from typing import Text
from collections import OrderedDict as ODict
def test():
a = "a" # type: Text
b = aio.Condition() # type: aio.Condition
c = datetime.timedelta(0) # type: datetime.timedelta
d = ODict() # type: ODict
|
1c43b5b06cb2259b0fbe09a2696db78e68008f5e
|
7ee640f301894773eec3609feee2241a1bd5e88f
|
/scripts/plot_validations.py
|
5e81707cb9e9d71ed1155062038a9d23a110a89f
|
[
"Apache-2.0"
] |
permissive
|
joeynmt/joeynmt
|
01f121e82dba8766af0690067bba470353d68909
|
0968187ac0968007cabebed5e5cb6587c08dff78
|
refs/heads/main
| 2023-08-25T15:15:46.776608
| 2023-06-12T11:11:28
| 2023-06-12T11:11:28
| 153,133,227
| 668
| 204
|
Apache-2.0
| 2023-09-04T01:21:50
| 2018-10-15T15:00:57
|
Python
|
UTF-8
|
Python
| false
| false
| 3,359
|
py
|
plot_validations.py
|
#!/usr/bin/env python
# coding: utf-8
import argparse
from pathlib import Path
from typing import Dict, List
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.backends.backend_pdf import PdfPages
matplotlib.use("Agg")
def read_vfiles(vfiles: List[Path]) -> Dict:
"""
Parse validation report files
:param vfiles: list of files
:return:
"""
models = {}
for vfile in vfiles:
assert vfile.is_file(), f"{vfile} not found."
model_name = vfile.parent.stem
steps = {}
for line in vfile.read_text(encoding="utf-8").splitlines():
entries = line.strip().split()
key = int(entries[1])
steps[key] = {}
for i in range(2, len(entries) - 1, 2):
name = entries[i].strip(":")
value = float(entries[i + 1])
steps[key][name] = value
models[model_name] = steps
return models
def plot_models(models: Dict, plot_values: List, output_path: str) -> None:
"""
Plot the learning curves for several models
:param models:
:param plot_values:
:param output_path:
:return:
"""
# models is a dict: name -> ckpt values
f, axes = plt.subplots(
len(plot_values),
len(models),
sharex="col",
sharey="row",
figsize=(3 * len(models), 3 * len(plot_values)),
)
axes = np.array(axes).reshape((len(plot_values), len(models)))
for col, model_name in enumerate(models):
values = {}
# get arrays for plotting
for step in sorted(models[model_name]):
logged_values = models[model_name][step]
for plot_value in plot_values:
if plot_value not in logged_values: # pylint: disable=no-else-continue
continue
elif plot_value not in values:
values[plot_value] = [[], []]
values[plot_value][1].append(logged_values[plot_value])
values[plot_value][0].append(step)
for row, plot_value in enumerate(plot_values):
axes[row][col].plot(values[plot_value][0], values[plot_value][1])
axes[row][0].set_ylabel(plot_value)
axes[0][col].set_title(model_name)
axes[-1][col].set_xlabel("steps")
plt.tight_layout()
if output_path.endswith(".pdf"):
pp = PdfPages(output_path)
pp.savefig(f)
pp.close()
else:
if not output_path.endswith(".png"):
output_path += ".png"
plt.savefig(output_path)
plt.close()
def main(args): # pylint: disable=redefined-outer-name
models = read_vfiles([Path(m) / "validations.txt" for m in args.model_dirs])
plot_models(models, args.plot_values, args.output_path)
if __name__ == "__main__":
parser = argparse.ArgumentParser("JoeyNMT Validation plotting.")
parser.add_argument("model_dirs", type=str, nargs="+", help="Model directories.")
parser.add_argument(
"--plot_values",
type=str,
nargs="+",
default=["bleu"],
help="Value(s) to plot. Default: bleu",
)
parser.add_argument(
"--output_path",
type=str,
default="plot.pdf",
help="Plot will be stored in this location.",
)
args = parser.parse_args()
main(args)
|
456e6c702fd15eaa389690f4a2a6b974d7530ac4
|
6fdb4eaf5b0e6dbd7db4bf947547541e9aebf110
|
/hardware-testing/hardware_testing/production_qc/ninety_six_assembly_qc_ot3/test_plunger.py
|
94e12df49ce0380b227f6176cb37c8c84ccd3d5e
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
Opentrons/opentrons
|
874321e01149184960eeaeaa31b1d21719a1ceda
|
026b523c8c9e5d45910c490efb89194d72595be9
|
refs/heads/edge
| 2023-09-02T02:51:49.579906
| 2023-08-31T16:02:45
| 2023-08-31T16:02:45
| 38,644,841
| 326
| 174
|
Apache-2.0
| 2023-09-14T21:47:20
| 2015-07-06T20:41:01
|
Python
|
UTF-8
|
Python
| false
| false
| 4,817
|
py
|
test_plunger.py
|
"""Test Plunger."""
from typing import List, Union, Tuple, Dict
from opentrons.hardware_control.ot3api import OT3API
from hardware_testing.data import ui
from hardware_testing.data.csv_report import (
CSVReport,
CSVResult,
CSVLine,
CSVLineRepeating,
)
from hardware_testing.opentrons_api import helpers_ot3
from hardware_testing.opentrons_api.types import Axis, OT3Mount
PLUNGER_MAX_SKIP_MM = 0.1
SPEEDS_TO_TEST: List[float] = [5, 8, 12, 16, 20]
CURRENTS_SPEEDS: Dict[float, List[float]] = {
2.2: SPEEDS_TO_TEST,
}
def _get_test_tag(
current: float, speed: float, direction: str, start_or_end: str
) -> str:
return f"current-{current}-speed-{speed}-{direction}-{start_or_end}"
def build_csv_lines() -> List[Union[CSVLine, CSVLineRepeating]]:
"""Build CSV Lines."""
lines: List[Union[CSVLine, CSVLineRepeating]] = list()
currents = list(CURRENTS_SPEEDS.keys())
for current in sorted(currents):
speeds = CURRENTS_SPEEDS[current]
for speed in sorted(speeds):
for dir in ["down", "up"]:
for step in ["start", "end"]:
tag = _get_test_tag(current, speed, dir, step)
lines.append(CSVLine(tag, [float, float, CSVResult]))
return lines
async def _is_plunger_still_aligned_with_encoder(
api: OT3API,
) -> Tuple[float, float, bool]:
enc_pos = await api.encoder_current_position_ot3(OT3Mount.LEFT)
motor_pos = await api.current_position_ot3(OT3Mount.LEFT)
p_enc = enc_pos[Axis.P_L]
p_est = motor_pos[Axis.P_L]
is_aligned = abs(p_est - p_enc) < PLUNGER_MAX_SKIP_MM
return p_enc, p_est, is_aligned
async def run(api: OT3API, report: CSVReport, section: str) -> None:
"""Run."""
ax = Axis.P_L
mount = OT3Mount.LEFT
settings = helpers_ot3.get_gantry_load_per_axis_motion_settings_ot3(api, ax)
default_current = settings.run_current
default_speed = settings.max_speed
_, _, blow_out, _ = helpers_ot3.get_plunger_positions_ot3(api, mount)
async def _save_result(tag: str) -> bool:
est, enc, aligned = await _is_plunger_still_aligned_with_encoder(api)
print(f"Estimate: {est}")
print(f"Encoder: {enc}")
result = CSVResult.from_bool(aligned)
report(section, tag, [est, enc, result])
return aligned
await api.home_z(OT3Mount.LEFT)
slot_5 = helpers_ot3.get_slot_calibration_square_position_ot3(5)
home_pos = await api.gantry_position(OT3Mount.LEFT)
await api.move_to(OT3Mount.LEFT, slot_5._replace(z=home_pos.z))
# LOOP THROUGH CURRENTS + SPEEDS
currents = list(CURRENTS_SPEEDS.keys())
for current in sorted(currents, reverse=True):
speeds = CURRENTS_SPEEDS[current]
for speed in sorted(speeds, reverse=False):
ui.print_header(f"CURRENT: {current}, SPEED: {speed}")
# HOME
print("homing...")
await api.home([ax])
print(f"lowering run-current to {current} amps")
await helpers_ot3.set_gantry_load_per_axis_current_settings_ot3(
api,
ax,
run_current=current,
)
await helpers_ot3.set_gantry_load_per_axis_motion_settings_ot3(
api, ax, default_max_speed=speed
)
# MOVE DOWN
print(f"moving down {blow_out} mm at {speed} mm/sec")
await _save_result(_get_test_tag(current, speed, "down", "start"))
await helpers_ot3.move_plunger_absolute_ot3(
api, mount, blow_out, speed=speed, motor_current=current
)
down_passed = await _save_result(
_get_test_tag(current, speed, "down", "end")
)
# MOVE UP
print(f"moving up {blow_out} mm at {speed} mm/sec")
await _save_result(_get_test_tag(current, speed, "up", "start"))
await helpers_ot3.move_plunger_absolute_ot3(
api, mount, 0, speed=speed, motor_current=current
)
up_passed = await _save_result(_get_test_tag(current, speed, "up", "end"))
# RESET CURRENTS AND HOME
print("homing...")
await helpers_ot3.set_gantry_load_per_axis_current_settings_ot3(
api, ax, run_current=default_current
)
await helpers_ot3.set_gantry_load_per_axis_motion_settings_ot3(
api, ax, default_max_speed=default_speed
)
await api._backend.set_active_current({Axis.P_L: default_current})
await api.home([ax])
if not down_passed or not up_passed and not api.is_simulator:
print(f"current {current} failed")
print("skipping any remaining speeds at this current")
break
|
5c93e606d342bb9a2bf0f3de130106126fe333d0
|
407d194b52fe9cf75cca9d6f3c162a565549a1ae
|
/OmsAgent/extension-test/verify_e2e.py
|
7ef7225709224e41f63ae4cd4d5ff4cc2d38bf00
|
[
"Apache-2.0"
] |
permissive
|
Azure/azure-linux-extensions
|
808761f927045f00548aa68e38d4bec8651c0eba
|
3cea1567fc4f4eb5beea9884153e92d70610394d
|
refs/heads/master
| 2023-08-27T14:06:05.775617
| 2023-08-23T01:56:05
| 2023-08-23T01:56:05
| 19,841,123
| 300
| 314
|
Apache-2.0
| 2023-09-14T04:21:26
| 2014-05-16T01:38:49
|
Python
|
UTF-8
|
Python
| false
| false
| 3,325
|
py
|
verify_e2e.py
|
'''Verify end-to-end data transmission.'''
import json
import os
import re
import sys
import subprocess
import adal
import requests
ENDPOINT = ('https://management.azure.com/subscriptions/{}/resourcegroups/'
'{}/providers/Microsoft.OperationalInsights/workspaces/{}/api/'
'query?api-version=2017-01-01-preview')
def check_e2e(hostname, timespan = 'PT30M'):
'''
Verify data from computer with provided hostname is
present in the Log Analytics workspace specified in
parameters.json, append results to e2eresults.json
'''
global success_count
global success_sources
global failed_sources
success_count = 0
failed_sources = []
success_sources = []
with open('{0}/parameters.json'.format(os.getcwd()), 'r') as f:
parameters = f.read()
if re.search(r'"<.*>"', parameters):
print('Please replace placeholders in parameters.json')
exit()
parameters = json.loads(parameters)
key_vault = parameters['key vault']
tenant_id = str(json.loads(subprocess.check_output('az keyvault secret show --name tenant-id --vault-name {0}'.format(key_vault), shell=True))["value"])
app_id = str(json.loads(subprocess.check_output('az keyvault secret show --name app-id --vault-name {0}'.format(key_vault), shell=True))["value"])
app_secret = str(json.loads(subprocess.check_output('az keyvault secret show --name app-secret --vault-name {0}'.format(key_vault), shell=True))["value"])
authority_url = parameters['authority host url'] + '/' + tenant_id
context = adal.AuthenticationContext(authority_url)
token = context.acquire_token_with_client_credentials(
parameters['resource'],
app_id,
app_secret)
head = {'Authorization': 'Bearer ' + token['accessToken']}
subscription = str(json.loads(subprocess.check_output('az keyvault secret show --name subscription-id --vault-name {0}'.format(key_vault), shell=True))["value"])
resource_group = parameters['resource group']
workspace = parameters['workspace']
url = ENDPOINT.format(subscription, resource_group, workspace)
sources = ['Heartbeat', 'Syslog', 'Perf', 'ApacheAccess_CL', 'MySQL_CL', 'Custom_Log_CL']
distro = hostname.split('-')[0]
results = {}
results[distro] = {}
print('Verifying data from computer {}'.format(hostname))
for s in sources:
query = '%s | where Computer == \'%s\' | take 1' % (s, hostname)
r = requests.post(url, headers=head, json={'query':query, 'timespan':timespan})
if r.status_code == requests.codes.ok:
r = (json.loads(r.text)['Tables'])[0]
if len(r['Rows']) < 1:
results[distro][s] = 'Failure: no logs'
failed_sources.append(s)
else:
results[distro][s] = 'Success'
success_count += 1
success_sources.append(s)
else:
results[distro][s] = 'Failure: {} {}'.format(r.status_code, r.text)
results[distro] = [results[distro]]
print(results)
return results
def main():
'''Check for data with given hostname.'''
if len(sys.argv) == 2:
check_e2e(sys.argv[1])
else:
print('Hostname not provided')
exit()
if __name__ == '__main__':
main()
|
874305c0cc2746b7739fee9116d40b5bcd9bdc0b
|
aaf572d39319e4400ae0c2655ea2cfa52845a429
|
/trio/_core/_multierror.py
|
6e4cb8b92385531c7020c6700e8513c3b6944955
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
python-trio/trio
|
04cbde76313c26abdea15317cea3980dd5bc475b
|
e97bcb61b8b02523c82435b5408ff46efca5dfc3
|
refs/heads/master
| 2023-08-30T23:22:05.599646
| 2023-08-30T06:11:28
| 2023-08-30T06:11:28
| 79,083,614
| 5,651
| 369
|
NOASSERTION
| 2023-09-13T14:14:21
| 2017-01-16T04:45:25
|
Python
|
UTF-8
|
Python
| false
| false
| 20,407
|
py
|
_multierror.py
|
from __future__ import annotations
import sys
import warnings
from collections.abc import Callable, Sequence
from types import TracebackType
from typing import TYPE_CHECKING, Any, cast, overload
import attr
from trio._deprecate import warn_deprecated
if sys.version_info < (3, 11):
from exceptiongroup import BaseExceptionGroup, ExceptionGroup, print_exception
else:
from traceback import print_exception
if TYPE_CHECKING:
from typing_extensions import Self
################################################################
# MultiError
################################################################
def _filter_impl(
handler: Callable[[BaseException], BaseException | None], root_exc: BaseException
) -> BaseException | None:
# We have a tree of MultiError's, like:
#
# MultiError([
# ValueError,
# MultiError([
# KeyError,
# ValueError,
# ]),
# ])
#
# or similar.
#
# We want to
# 1) apply the filter to each of the leaf exceptions -- each leaf
# might stay the same, be replaced (with the original exception
# potentially sticking around as __context__ or __cause__), or
# disappear altogether.
# 2) simplify the resulting tree -- remove empty nodes, and replace
# singleton MultiError's with their contents, e.g.:
# MultiError([KeyError]) -> KeyError
# (This can happen recursively, e.g. if the two ValueErrors above
# get caught then we'll just be left with a bare KeyError.)
# 3) preserve sensible tracebacks
#
# It's the tracebacks that are most confusing. As a MultiError
# propagates through the stack, it accumulates traceback frames, but
# the exceptions inside it don't. Semantically, the traceback for a
# leaf exception is the concatenation the tracebacks of all the
# exceptions you see when traversing the exception tree from the root
# to that leaf. Our correctness invariant is that this concatenated
# traceback should be the same before and after.
#
# The easy way to do that would be to, at the beginning of this
# function, "push" all tracebacks down to the leafs, so all the
# MultiErrors have __traceback__=None, and all the leafs have complete
# tracebacks. But whenever possible, we'd actually prefer to keep
# tracebacks as high up in the tree as possible, because this lets us
# keep only a single copy of the common parts of these exception's
# tracebacks. This is cheaper (in memory + time -- tracebacks are
# unpleasantly quadratic-ish to work with, and this might matter if
# you have thousands of exceptions, which can happen e.g. after
# cancelling a large task pool, and no-one will ever look at their
# tracebacks!), and more importantly, factoring out redundant parts of
# the tracebacks makes them more readable if/when users do see them.
#
# So instead our strategy is:
# - first go through and construct the new tree, preserving any
# unchanged subtrees
# - then go through the original tree (!) and push tracebacks down
# until either we hit a leaf, or we hit a subtree which was
# preserved in the new tree.
# This used to also support async handler functions. But that runs into:
# https://bugs.python.org/issue29600
# which is difficult to fix on our end.
# Filters a subtree, ignoring tracebacks, while keeping a record of
# which MultiErrors were preserved unchanged
def filter_tree(
exc: MultiError | BaseException, preserved: set[int]
) -> MultiError | BaseException | None:
if isinstance(exc, MultiError):
new_exceptions = []
changed = False
for child_exc in exc.exceptions:
new_child_exc = filter_tree(child_exc, preserved)
if new_child_exc is not child_exc:
changed = True
if new_child_exc is not None:
new_exceptions.append(new_child_exc)
if not new_exceptions:
return None
elif changed:
return MultiError(new_exceptions)
else:
preserved.add(id(exc))
return exc
else:
new_exc = handler(exc)
# Our version of implicit exception chaining
if new_exc is not None and new_exc is not exc:
new_exc.__context__ = exc
return new_exc
def push_tb_down(
tb: TracebackType | None, exc: BaseException, preserved: set[int]
) -> None:
if id(exc) in preserved:
return
new_tb = concat_tb(tb, exc.__traceback__)
if isinstance(exc, MultiError):
for child_exc in exc.exceptions:
push_tb_down(new_tb, child_exc, preserved)
exc.__traceback__ = None
else:
exc.__traceback__ = new_tb
preserved: set[int] = set()
new_root_exc = filter_tree(root_exc, preserved)
push_tb_down(None, root_exc, preserved)
# Delete the local functions to avoid a reference cycle (see
# test_simple_cancel_scope_usage_doesnt_create_cyclic_garbage)
del filter_tree, push_tb_down
return new_root_exc
# Normally I'm a big fan of (a)contextmanager, but in this case I found it
# easier to use the raw context manager protocol, because it makes it a lot
# easier to reason about how we're mutating the traceback as we go. (End
# result: if the exception gets modified, then the 'raise' here makes this
# frame show up in the traceback; otherwise, we leave no trace.)
@attr.s(frozen=True)
class MultiErrorCatcher:
_handler: Callable[[BaseException], BaseException | None] = attr.ib()
def __enter__(self) -> None:
pass
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> bool | None:
if exc_value is not None:
filtered_exc = _filter_impl(self._handler, exc_value)
if filtered_exc is exc_value:
# Let the interpreter re-raise it
return False
if filtered_exc is None:
# Swallow the exception
return True
# When we raise filtered_exc, Python will unconditionally blow
# away its __context__ attribute and replace it with the original
# exc we caught. So after we raise it, we have to pause it while
# it's in flight to put the correct __context__ back.
old_context = filtered_exc.__context__
try:
raise filtered_exc
finally:
_, value, _ = sys.exc_info()
assert value is filtered_exc
value.__context__ = old_context
# delete references from locals to avoid creating cycles
# see test_MultiError_catch_doesnt_create_cyclic_garbage
del _, filtered_exc, value
return False
if TYPE_CHECKING:
_BaseExceptionGroup = BaseExceptionGroup[BaseException]
else:
_BaseExceptionGroup = BaseExceptionGroup
class MultiError(_BaseExceptionGroup):
"""An exception that contains other exceptions; also known as an
"inception".
It's main use is to represent the situation when multiple child tasks all
raise errors "in parallel".
Args:
exceptions (list): The exceptions
Returns:
If ``len(exceptions) == 1``, returns that exception. This means that a
call to ``MultiError(...)`` is not guaranteed to return a
:exc:`MultiError` object!
Otherwise, returns a new :exc:`MultiError` object.
Raises:
TypeError: if any of the passed in objects are not instances of
:exc:`BaseException`.
"""
def __init__(
self, exceptions: Sequence[BaseException], *, _collapse: bool = True
) -> None:
self.collapse = _collapse
# Avoid double initialization when _collapse is True and exceptions[0] returned
# by __new__() happens to be a MultiError and subsequently __init__() is called.
if _collapse and getattr(self, "exceptions", None) is not None:
# This exception was already initialized.
return
super().__init__("multiple tasks failed", exceptions)
def __new__( # type: ignore[misc] # mypy says __new__ must return a class instance
cls, exceptions: Sequence[BaseException], *, _collapse: bool = True
) -> NonBaseMultiError | Self | BaseException:
exceptions = list(exceptions)
for exc in exceptions:
if not isinstance(exc, BaseException):
raise TypeError(f"Expected an exception object, not {exc!r}")
if _collapse and len(exceptions) == 1:
# If this lone object happens to itself be a MultiError, then
# Python will implicitly call our __init__ on it again. See
# special handling in __init__.
return exceptions[0]
else:
# The base class __new__() implicitly invokes our __init__, which
# is what we want.
#
# In an earlier version of the code, we didn't define __init__ and
# simply set the `exceptions` attribute directly on the new object.
# However, linters expect attributes to be initialized in __init__.
from_class: type[Self] | type[NonBaseMultiError] = cls
if all(isinstance(exc, Exception) for exc in exceptions):
from_class = NonBaseMultiError
# Ignoring arg-type: 'Argument 3 to "__new__" of "BaseExceptionGroup" has incompatible type "list[BaseException]"; expected "Sequence[_BaseExceptionT_co]"'
# We have checked that exceptions is indeed a list of BaseException objects, this is fine.
new_obj = super().__new__(from_class, "multiple tasks failed", exceptions) # type: ignore[arg-type]
assert isinstance(new_obj, (cls, NonBaseMultiError))
return new_obj
def __reduce__(
self,
) -> tuple[object, tuple[type[Self], list[BaseException]], dict[str, bool]]:
return (
self.__new__,
(self.__class__, list(self.exceptions)),
{"collapse": self.collapse},
)
def __str__(self) -> str:
return ", ".join(repr(exc) for exc in self.exceptions)
def __repr__(self) -> str:
return f"<MultiError: {self}>"
@overload
def derive(self, excs: Sequence[Exception], /) -> NonBaseMultiError:
...
@overload
def derive(self, excs: Sequence[BaseException], /) -> MultiError:
...
def derive(
self, excs: Sequence[Exception | BaseException], /
) -> NonBaseMultiError | MultiError:
# We use _collapse=False here to get ExceptionGroup semantics, since derive()
# is part of the PEP 654 API
exc = MultiError(excs, _collapse=False)
exc.collapse = self.collapse
return exc
@classmethod
def filter(
cls,
handler: Callable[[BaseException], BaseException | None],
root_exc: BaseException,
) -> BaseException | None:
"""Apply the given ``handler`` to all the exceptions in ``root_exc``.
Args:
handler: A callable that takes an atomic (non-MultiError) exception
as input, and returns either a new exception object or None.
root_exc: An exception, often (though not necessarily) a
:exc:`MultiError`.
Returns:
A new exception object in which each component exception ``exc`` has
been replaced by the result of running ``handler(exc)`` – or, if
``handler`` returned None for all the inputs, returns None.
"""
warn_deprecated(
"MultiError.filter()",
"0.22.0",
instead="BaseExceptionGroup.split()",
issue=2211,
)
return _filter_impl(handler, root_exc)
@classmethod
def catch(
cls, handler: Callable[[BaseException], BaseException | None]
) -> MultiErrorCatcher:
"""Return a context manager that catches and re-throws exceptions
after running :meth:`filter` on them.
Args:
handler: as for :meth:`filter`
"""
warn_deprecated(
"MultiError.catch",
"0.22.0",
instead="except* or exceptiongroup.catch()",
issue=2211,
)
return MultiErrorCatcher(handler)
if TYPE_CHECKING:
_ExceptionGroup = ExceptionGroup[Exception]
else:
_ExceptionGroup = ExceptionGroup
class NonBaseMultiError(MultiError, _ExceptionGroup):
__slots__ = ()
# Clean up exception printing:
MultiError.__module__ = "trio"
NonBaseMultiError.__module__ = "trio"
################################################################
# concat_tb
################################################################
# We need to compute a new traceback that is the concatenation of two existing
# tracebacks. This requires copying the entries in 'head' and then pointing
# the final tb_next to 'tail'.
#
# NB: 'tail' might be None, which requires some special handling in the ctypes
# version.
#
# The complication here is that Python doesn't actually support copying or
# modifying traceback objects, so we have to get creative...
#
# On CPython, we use ctypes. On PyPy, we use "transparent proxies".
#
# Jinja2 is a useful source of inspiration:
# https://github.com/pallets/jinja/blob/master/jinja2/debug.py
try:
import tputil
except ImportError:
# ctypes it is
import ctypes
# How to handle refcounting? I don't want to use ctypes.py_object because
# I don't understand or trust it, and I don't want to use
# ctypes.pythonapi.Py_{Inc,Dec}Ref because we might clash with user code
# that also tries to use them but with different types. So private _ctypes
# APIs it is!
import _ctypes
class CTraceback(ctypes.Structure):
_fields_ = [
("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
("tb_next", ctypes.c_void_p),
("tb_frame", ctypes.c_void_p),
("tb_lasti", ctypes.c_int),
("tb_lineno", ctypes.c_int),
]
def copy_tb(base_tb: TracebackType, tb_next: TracebackType | None) -> TracebackType:
# TracebackType has no public constructor, so allocate one the hard way
try:
raise ValueError
except ValueError as exc:
new_tb = exc.__traceback__
assert new_tb is not None
c_new_tb = CTraceback.from_address(id(new_tb))
# At the C level, tb_next either pointer to the next traceback or is
# NULL. c_void_p and the .tb_next accessor both convert NULL to None,
# but we shouldn't DECREF None just because we assigned to a NULL
# pointer! Here we know that our new traceback has only 1 frame in it,
# so we can assume the tb_next field is NULL.
assert c_new_tb.tb_next is None
# If tb_next is None, then we want to set c_new_tb.tb_next to NULL,
# which it already is, so we're done. Otherwise, we have to actually
# do some work:
if tb_next is not None:
_ctypes.Py_INCREF(tb_next) # type: ignore[attr-defined]
c_new_tb.tb_next = id(tb_next)
assert c_new_tb.tb_frame is not None
_ctypes.Py_INCREF(base_tb.tb_frame) # type: ignore[attr-defined]
old_tb_frame = new_tb.tb_frame
c_new_tb.tb_frame = id(base_tb.tb_frame)
_ctypes.Py_DECREF(old_tb_frame) # type: ignore[attr-defined]
c_new_tb.tb_lasti = base_tb.tb_lasti
c_new_tb.tb_lineno = base_tb.tb_lineno
try:
return new_tb
finally:
# delete references from locals to avoid creating cycles
# see test_MultiError_catch_doesnt_create_cyclic_garbage
del new_tb, old_tb_frame
else:
# http://doc.pypy.org/en/latest/objspace-proxies.html
def copy_tb(base_tb: TracebackType, tb_next: TracebackType | None) -> TracebackType:
# Mypy refuses to believe that ProxyOperation can be imported properly
# TODO: will need no-any-unimported if/when that's toggled on
def controller(operation: tputil.ProxyOperation) -> Any | None:
# Rationale for pragma: I looked fairly carefully and tried a few
# things, and AFAICT it's not actually possible to get any
# 'opname' that isn't __getattr__ or __getattribute__. So there's
# no missing test we could add, and no value in coverage nagging
# us about adding one.
if operation.opname in [
"__getattribute__",
"__getattr__",
]: # pragma: no cover
if operation.args[0] == "tb_next":
return tb_next
return operation.delegate() # Deligate is reverting to original behaviour
return cast(
TracebackType, tputil.make_proxy(controller, type(base_tb), base_tb)
) # Returns proxy to traceback
def concat_tb(
head: TracebackType | None, tail: TracebackType | None
) -> TracebackType | None:
# We have to use an iterative algorithm here, because in the worst case
# this might be a RecursionError stack that is by definition too deep to
# process by recursion!
head_tbs = []
pointer = head
while pointer is not None:
head_tbs.append(pointer)
pointer = pointer.tb_next
current_head = tail
for head_tb in reversed(head_tbs):
current_head = copy_tb(head_tb, tb_next=current_head)
return current_head
# Remove when IPython gains support for exception groups
# (https://github.com/ipython/ipython/issues/13753)
if "IPython" in sys.modules:
import IPython
ip = IPython.get_ipython()
if ip is not None:
if ip.custom_exceptions != ():
warnings.warn(
"IPython detected, but you already have a custom exception "
"handler installed. I'll skip installing Trio's custom "
"handler, but this means exception groups will not show full "
"tracebacks.",
category=RuntimeWarning,
)
else:
def trio_show_traceback(
self: IPython.core.interactiveshell.InteractiveShell,
etype: type[BaseException],
value: BaseException,
tb: TracebackType,
tb_offset: int | None = None,
) -> None:
# XX it would be better to integrate with IPython's fancy
# exception formatting stuff (and not ignore tb_offset)
print_exception(value)
ip.set_custom_exc((BaseExceptionGroup,), trio_show_traceback)
# Ubuntu's system Python has a sitecustomize.py file that import
# apport_python_hook and replaces sys.excepthook.
#
# The custom hook captures the error for crash reporting, and then calls
# sys.__excepthook__ to actually print the error.
#
# We don't mind it capturing the error for crash reporting, but we want to
# take over printing the error. So we monkeypatch the apport_python_hook
# module so that instead of calling sys.__excepthook__, it calls our custom
# hook.
#
# More details: https://github.com/python-trio/trio/issues/1065
if (
sys.version_info < (3, 11)
and getattr(sys.excepthook, "__name__", None) == "apport_excepthook"
):
from types import ModuleType
import apport_python_hook
from exceptiongroup import format_exception
assert sys.excepthook is apport_python_hook.apport_excepthook
def replacement_excepthook(
etype: type[BaseException], value: BaseException, tb: TracebackType | None
) -> None:
# This does work, it's an overloaded function
sys.stderr.write("".join(format_exception(etype, value, tb))) # type: ignore[arg-type]
fake_sys = ModuleType("trio_fake_sys")
fake_sys.__dict__.update(sys.__dict__)
# Fake does have __excepthook__ after __dict__ update, but type checkers don't recognize this
fake_sys.__excepthook__ = replacement_excepthook # type: ignore[attr-defined]
apport_python_hook.sys = fake_sys
|
da143d9a973a3136e30840a861cdce849efba804
|
e8cf6493fee2383f31e77d30c78e47e694dd298f
|
/tools/docs/epytext.py
|
7116cd606d9245f17da1fe8d47ddba2b4a1cad67
|
[
"BSD-3-Clause"
] |
permissive
|
idapython/src
|
30b5af8e819e2d86736cd63527dcda0e4696c680
|
e1c108a7df4b5d80d14d8b0c14ae73b924bff6f4
|
refs/heads/master
| 2023-09-04T08:27:09.228901
| 2023-07-31T14:26:58
| 2023-07-31T14:26:58
| 32,229,857
| 1,371
| 298
|
NOASSERTION
| 2023-07-28T12:34:06
| 2015-03-14T20:09:27
|
Python
|
UTF-8
|
Python
| false
| false
| 9,503
|
py
|
epytext.py
|
import re
class HR_Epytext:
def __init__(self, text):
self.html_translation = {
"&" : "&",
"<" : "<",
">" : ">",
"\"": """,
"'" : "'"
}
identifier = r"[a-zA-Z_][a-zA-Z_0-9]*"
parameter = r"{}(?:=[^,)]*)?".format(identifier)
macro_tail = r"(?:[^,}]+,)*(?:[^,}]+})?"
self.re_non_blank = re.compile(r"[^ ]")
self.re_identifier = re.compile(r" *({})" \
.format(identifier))
self.re_signature = re.compile(r"^ *{} *\( *({} *(, *{} *)*)?\) *(->.*)?$" \
.format(identifier, parameter, parameter))
self.re_ident_list = re.compile(r" *({}(?: *, *{})*)" \
.format(identifier, identifier))
self.re_macro = re.compile(r"\\ *({}) *{{({})" \
.format(identifier, macro_tail))
self.re_macro_more = re.compile(macro_tail)
self.blocks = self._blocks(text)
def html(self):
text = ""
for block in self.blocks:
text += block.format()
return text
def _blocks(self, text):
state = state_t()
blocks = []
fields = False
flen = 0
stack = []
in_macro = False
def end():
# close the current paragraph (if it has text at all)
if state.text:
while stack and state.indent <= stack[-1][0]: # nest blocks
stack.pop()
parent = stack[-1][1] if stack else None
block = parblock_t(parent, fields, flen,
state.indent, state.text, state.bullet)
stack.append( (state.indent, block) )
blocks.append(block)
state.reset()
lines = text.split("\n")
for line in lines:
m = self.re_non_blank.search(line)
start = m.start() if m else 0
line = line[start:]
if in_macro:
m = self.re_macro_more.match(line)
if m:
macro_tail = self._more_macro_tail(macro_tail, m.group(0))
if not self._is_macro_end(macro_tail):
continue
else:
# broken macro end; finish it here
self._verb("broken macro \"{}\" {{ \"{}\", end: \"{}\"".format(
macro_name, macro_tail, line))
in_macro = False
state.text += self._expand_macro(macro_name, macro_tail)
line = line[m.end():]
else:
m = self.re_macro.search(line)
if m:
self._process_line(start, line[:m.start()], state, end)
macro_name = m.group(1)
macro_tail = m.group(2)
in_macro = not self._is_macro_end(macro_tail)
if in_macro:
continue
state.text += self._expand_macro(macro_name, macro_tail)
line = line[m.end():]
self._process_line(start, line, state, end)
end()
blocks = self._collect_bullet_lists(blocks)
# set up nested blocks
top_blocks = []
for block in blocks:
if block.parent is None:
top_blocks.append(block)
else:
block.parent.add_child(block)
return top_blocks
def _process_line(self, start, line, state, end):
m = self.re_signature.match(line)
if m:
end()
state.text = "<strong class=\"epy_sig\">{}</strong>".format(
self._escape(line))
end()
return
if not line:
end()
return
if line[0] == "@":
end()
# as per the EpyText spec, all lines from now on are fields
fields = True
flen = 1
m = self.re_identifier.match(line, 1)
if m:
mb = self.re_non_blank.search(line, m.end())
flen = mb.start() if mb else m.end()
if m.group(1) == "param":
cls = "epy_parameter"
mp = self.re_ident_list.match(line, m.end())
if mp:
m = mp
else:
cls = "epy_tag"
line = "<strong class=\"{}\">{}</strong>" \
.format(cls, m.group(1)) \
+ self._escape(line[m.end():])
# Require at least one blank after the "-";
# f.i. "-1 (C++: int)" is not a bullet item.
# Allow also "* " as bullet marker.
elif line[:2] in ("- ", "* "):
end()
m = self.re_non_blank.search(line, 1)
line = self._escape(line[m.start():]) if m else ""
state.bullet = True
else:
line = self._escape(line)
if start != state.indent:
end()
state.indent = start
if state.text:
state.text += "\n"
state.text += line
def _more_macro_tail(self, tail, more_tail):
# hack around textwrap
if tail[-1:] == "-":
return tail + more_tail
return tail + " " + more_tail
def _is_macro_end(self, tail):
return tail[-1:] == "}"
def _expand_macro(self, name, tail):
params = self._parse_macro_tail(tail)
if name == "sq":
self._adjust_macro_params(params, 4, 4)
params = [p.strip() for p in params]
table = """
<table border="1">
<tr><td>{}</td><td>{}</td></tr>
<tr><td>{}</td><td>{}</td></tr>
</table>
"""
return table.format(*params)
if name == "link":
self._adjust_macro_params(params, 1, 2)
if len(params) == 1:
params += params
return "<a href=\"{}\">{}</a>".format(*params)
self._verb("Unimplemented macro {}, params {}".format(repr(name), repr(params)))
return "\\{}{{{}}}".format(name, ",".join(params))
def _parse_macro_tail(self, tail):
params = tail.split(",")
if params[-1][-1:] == "}":
params[-1] = params[-1][:-1]
elif params[-1] == "":
params.pop()
return params
def _adjust_macro_params(self, params, n_min, n_max):
if len(params) < n_min:
params.extend( (4 - len(params)) * [''] )
while len(params) > n_max:
params.pop()
def _collect_bullet_lists(self, blocks):
out = []
lists = []
for block in blocks:
if block.bullet:
while lists and lists[-1].start > block.start:
lists.pop()
if not lists or lists[-1].start != block.start:
lists.append(listblock_t(block.parent))
out.append(lists[-1])
lists[-1].add_child(block)
else:
if lists:
lists = []
out.append(block)
return out
def _escape(self, text):
# avoid the multiple versions of html.escape
trans = ""
for c in text:
if c in self.html_translation:
trans += self.html_translation[c]
else:
trans += c
return trans
def _verb(self, message):
print("VERBOSE:", message)
class state_t:
def __init__(self, indent=None, text=None, bullet=None):
if indent is None:
self.reset()
else:
self.indent = indent
self.text = text
self.bullet = bullet
def reset(self):
self.indent = -1
self.text = ""
self.bullet = False
class block_t:
def __init__(self, parent):
self.parent = parent
self.children = []
def add_child(self, block):
self.children.append(block)
class parblock_t(block_t):
def __init__(self, parent, field, flen, start, text, bullet):
super().__init__(parent)
self.field = field
self.flen = flen
self.start = start
self.text = text
self.bullet = bullet
def format(self):
start = self.start
if self.field:
start = max(0, start - self.flen)
if self.parent:
cls = "epy_nested"
start = max(0, start - self.parent.start)
elif self.field:
cls = "epy_field"
else:
cls ="epy_par"
tag = "span" if self.bullet else "div"
text = ""
if self.text:
text += "<{} class=\"{}\">\n".format(tag, cls)
text += self.text
for child in self.children:
text += child.format()
if self.text:
text += "</{}>\n".format(tag)
return text
class listblock_t(block_t):
def __init__(self, parent):
super().__init__(parent)
@property
def start(self):
return self.children[0].start
def format(self):
text = "<ul class=\"epy_ul\">\n"
for child in self.children:
text += "<li class=\"epy_li\">" + child.format() + "</li>\n"
text += "</ul>\n"
return text
|
fcfc541a2ac5ba386dd7352e1da8abb1c900b25b
|
1634f33c5021e8465a695fb5244504e2eeeecff5
|
/kitsune/tidings/events.py
|
8943df2b181c844a18d209cf3fe17a095f17e872
|
[] |
permissive
|
mozilla/kitsune
|
fee4b8598eb01f5b4add00f2f010b45e2a6ca901
|
67ec527bfc32c715bf9f29d5e01362c4903aebd2
|
refs/heads/main
| 2023-09-01T21:41:59.076570
| 2023-08-31T22:34:05
| 2023-08-31T22:34:05
| 489,645
| 1,218
| 697
|
BSD-3-Clause
| 2023-09-14T08:43:19
| 2010-01-26T18:53:57
|
Python
|
UTF-8
|
Python
| false
| false
| 25,965
|
py
|
events.py
|
import random
from smtplib import SMTPException
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.db import models
from django.db.models import Q
from kitsune.tidings.models import EmailUser, Watch, WatchFilter, multi_raw
from kitsune.tidings.tasks import send_emails
from kitsune.tidings.utils import collate, hash_to_unsigned
class ActivationRequestFailed(Exception):
"""Raised when activation request fails, e.g. if email could not be sent"""
def __init__(self, msgs):
self.msgs = msgs
def _unique_by_email(users_and_watches):
"""Given a sequence of (User/EmailUser, [Watch, ...]) pairs
clustered by email address (which is never ''), yield from each
cluster a single pair like this::
(User/EmailUser, [Watch, Watch, ...]).
The User/Email is that of...
(1) the first incoming pair where the User has an email and is not
anonymous, or, if there isn't such a user...
(2) the first pair.
The list of Watches consists of all those found in the cluster.
Compares email addresses case-insensitively.
"""
def ensure_user_has_email(user, cluster_email):
"""Make sure the user in the user-watch pair has an email address.
The caller guarantees us an email from either the user or the watch. If
the passed-in user has no email, we return an EmailUser instead having
the email address from the watch.
"""
# Some of these cases shouldn't happen, but we're tolerant.
if not getattr(user, "email", ""):
user = EmailUser(cluster_email)
return user
# TODO: Do this instead with clever SQL that somehow returns just the
# best row for each email.
cluster_email = "" # email of current cluster
favorite_user = None # best user in cluster so far
watches = [] # all watches in cluster
for u, w in users_and_watches:
# w always has at least 1 Watch. All the emails are the same.
row_email = u.email or w[0].email
if cluster_email.lower() != row_email.lower():
# Starting a new cluster.
if cluster_email != "":
# Ship the favorites from the previous cluster:
yield (ensure_user_has_email(favorite_user, cluster_email), watches)
favorite_user, watches = u, []
cluster_email = row_email
elif (
(not favorite_user.email or not u.is_authenticated) and u.email and u.is_authenticated
):
favorite_user = u
watches.extend(w)
if favorite_user is not None:
yield ensure_user_has_email(favorite_user, cluster_email), watches
class Event(object):
"""Abstract base class for events
An :class:`Event` represents, simply, something that occurs. A
:class:`~tidings.models.Watch` is a record of someone's interest in a
certain type of :class:`Event`, distinguished by ``Event.event_type``.
Fire an Event (``SomeEvent.fire()``) from the code that causes the
interesting event to occur. Fire it any time the event *might* have
occurred. The Event will determine whether conditions are right to actually
send notifications; don't succumb to the temptation to do these tests
outside the Event, because you'll end up repeating yourself if the event is
ever fired from more than one place.
:class:`Event` subclasses can optionally represent a more limited scope of
interest by populating the ``Watch.content_type`` field and/or adding
related :class:`~tidings.models.WatchFilter` rows holding name/value pairs,
the meaning of which is up to each individual subclass. NULL values are
considered wildcards.
"""
# event_type = 'hamster modified' # key for the event_type column
content_type: models.Model | None = None # or, for example, Hamster
#: Possible filter keys, for validation only. For example:
#: ``set(['color', 'flavor'])``
filters: set[str] = set()
def fire(self, exclude=None, delay=True):
"""
Notify everyone watching the event, either synchronously or asynchronously,
excluding the users provided by "exclude", which must be a sequence of user
objects if provided.
"""
if delay:
event_info = self.serialize()
if exclude:
exclude_user_ids = [user.id for user in exclude]
else:
exclude_user_ids = None
send_emails.delay(event_info, exclude_user_ids=exclude_user_ids)
else:
self.send_emails(exclude=exclude)
def send_emails(self, exclude=None):
"""
Notify everyone watching the event (build and send emails).
We are explicit about sending notifications; we don't just key off
creation signals, because the receiver of a ``post_save`` signal has no
idea what just changed, so it doesn't know which notifications to send.
Also, we could easily send mail accidentally: for instance, during
tests. If we want implicit event firing, we can always register a
signal handler that calls :meth:`fire()`.
:arg exclude: A sequence of users or None. If a sequence of users is
passed in, each of those users will not be notified, though anonymous
notifications having the same email address may still be sent.
"""
connection = mail.get_connection(fail_silently=True)
# Warning: fail_silently swallows errors thrown by the generators, too.
connection.open()
for m in self._mails(self._users_watching(exclude=exclude)):
connection.send_messages([m])
def serialize(self):
"""
Serialize this event into a JSON-friendly dictionary. Subclasses must
implement this method if they want to fire events asynchronously via
the "send_emails" Celery task. Here's an example:
def serialize(self):
return {
"event": {
"module": "kitsune.wiki.events"
"class": "ReadyRevisionEvent"
},
"instance": {
"module": "kitsune.wiki.models",
"class": "Revision",
"id": self.revision.id
}
}
where the "event" is always required, but the "instance" only if it's
needed to construct the event.
"""
raise NotImplementedError
@classmethod
def _validate_filters(cls, filters):
"""Raise a TypeError if ``filters`` contains any keys inappropriate to
this event class."""
for k in iter(filters.keys()):
if k not in cls.filters:
# Mirror "unexpected keyword argument" message:
raise TypeError("%s got an unsupported filter type '%s'" % (cls.__name__, k))
def _users_watching_by_filter(self, object_id=None, exclude=None, **filters):
"""Return an iterable of (``User``/:class:`~tidings.models.EmailUser`,
[:class:`~tidings.models.Watch` objects]) tuples watching the event.
Of multiple Users/EmailUsers having the same email address, only one is
returned. Users are favored over EmailUsers so we are sure to be able
to, for example, include a link to a user profile in the mail.
The list of :class:`~tidings.models.Watch` objects includes both
those tied to the given User (if there is a registered user)
and to any anonymous Watch having the same email address. This
allows you to include all relevant unsubscribe URLs in a mail,
for example. It also lets you make decisions in the
:meth:`~tidings.events.EventUnion._mails()` method of
:class:`~tidings.events.EventUnion` based on the kinds of
watches found.
"Watching the event" means having a Watch whose ``event_type`` is
``self.event_type``, whose ``content_type`` is ``self.content_type`` or
``NULL``, whose ``object_id`` is ``object_id`` or ``NULL``, and whose
WatchFilter rows match as follows: each name/value pair given in
``filters`` must be matched by a related WatchFilter, or there must be
no related WatchFilter having that name. If you find yourself wanting
the lack of a particularly named WatchFilter to scuttle the match, use
a different event_type instead.
:arg exclude: A sequence of users or None. If a sequence of users is
passed in, each of those users will not be notified, though anonymous
notifications having the same email address may still be sent.
"""
# I don't think we can use the ORM here, as there's no way to get a
# second condition (name=whatever) into a left join. However, if we
# were willing to have 2 subqueries run for every watch row--select
# {are there any filters with name=x?} and select {is there a filter
# with name=x and value=y?}--we could do it with extra(). Then we could
# have EventUnion simply | the QuerySets together, which would avoid
# having to merge in Python.
def filter_conditions():
"""Return joins, WHERE conditions, and params to bind to them in
order to check a notification against all the given filters."""
# Not a one-liner. You're welcome. :-)
self._validate_filters(filters)
joins, wheres, join_params, where_params = [], [], [], []
for n, (k, v) in enumerate(iter(filters.items())):
joins.append(
"LEFT JOIN tidings_watchfilter f{n} "
"ON f{n}.watch_id=w.id "
"AND f{n}.name=%s".format(n=n)
)
join_params.append(k)
wheres.append("(f{n}.value=%s " "OR f{n}.value IS NULL)".format(n=n))
where_params.append(hash_to_unsigned(v))
return joins, wheres, join_params + where_params
# Apply watchfilter constraints:
joins, wheres, params = filter_conditions()
# Start off with event_type, which is always a constraint. These go in
# the `wheres` list to guarantee that the AND after the {wheres}
# substitution in the query is okay.
wheres.append("w.event_type=%s")
params.append(self.event_type)
# Constrain on other 1-to-1 attributes:
if self.content_type:
wheres.append("(w.content_type_id IS NULL " "OR w.content_type_id=%s)")
params.append(ContentType.objects.get_for_model(self.content_type).id)
if object_id:
wheres.append("(w.object_id IS NULL OR w.object_id=%s)")
params.append(object_id)
if exclude:
# Don't try excluding unsaved Users:1
if not all(e.id for e in exclude):
raise ValueError("Can't exclude an unsaved User.")
wheres.append("(u.id IS NULL OR u.id NOT IN (%s))" % ", ".join("%s" for e in exclude))
params.extend(e.id for e in exclude)
def get_fields(model):
if hasattr(model._meta, "_fields"):
# For django versions < 1.6
return model._meta._fields()
else:
# For django versions >= 1.6
return model._meta.fields
User = get_user_model()
model_to_fields = dict(
(m, [f.get_attname() for f in get_fields(m)]) for m in [User, Watch]
)
query_fields = ["u.{0}".format(field) for field in model_to_fields[User]]
query_fields.extend(["w.{0}".format(field) for field in model_to_fields[Watch]])
query = (
"SELECT {fields} "
"FROM tidings_watch w "
"LEFT JOIN {user_table} u ON u.id=w.user_id {joins} "
"WHERE {wheres} "
"AND (length(w.email)>0 OR length(u.email)>0) "
"AND w.is_active "
"ORDER BY u.email DESC, w.email DESC"
).format(
fields=", ".join(query_fields),
joins=" ".join(joins),
wheres=" AND ".join(wheres),
user_table=User._meta.db_table,
)
# IIRC, the DESC ordering was something to do with the placement of
# NULLs. Track this down and explain it.
# Put watch in a list just for consistency. Once the pairs go through
# _unique_by_email, watches will be in a list, and EventUnion uses the
# same function to union already-list-enclosed pairs from individual
# events.
return _unique_by_email(
(u, [w]) for u, w in multi_raw(query, params, [User, Watch], model_to_fields)
)
@classmethod
def _watches_belonging_to_user(cls, user_or_email, object_id=None, **filters):
"""Return a QuerySet of watches having the given user or email, having
(only) the given filters, and having the event_type and content_type
attrs of the class.
Matched Watches may be either confirmed and unconfirmed. They may
include duplicates if the get-then-create race condition in
:meth:`notify()` allowed them to be created.
If you pass an email, it will be matched against only the email
addresses of anonymous watches. At the moment, the only integration
point planned between anonymous and registered watches is the claiming
of anonymous watches of the same email address on user registration
confirmation.
If you pass the AnonymousUser, this will return an empty QuerySet.
"""
# If we have trouble distinguishing subsets and such, we could store a
# number_of_filters on the Watch.
cls._validate_filters(filters)
if isinstance(user_or_email, str):
user_condition = Q(email=user_or_email)
elif user_or_email.is_authenticated:
user_condition = Q(user=user_or_email)
else:
return Watch.objects.none()
# Filter by stuff in the Watch row:
watches = (
getattr(Watch, "uncached", Watch.objects)
.filter(
user_condition,
Q(content_type=ContentType.objects.get_for_model(cls.content_type))
if cls.content_type
else Q(),
Q(object_id=object_id) if object_id else Q(),
event_type=cls.event_type,
)
.extra(
where=[
"(SELECT count(*) FROM tidings_watchfilter WHERE "
"tidings_watchfilter.watch_id="
"tidings_watch.id)=%s"
],
params=[len(filters)],
)
)
# Optimization: If the subselect ends up being slow, store the number
# of filters in each Watch row or try a GROUP BY.
# Apply 1-to-many filters:
for k, v in iter(filters.items()):
watches = watches.filter(filters__name=k, filters__value=hash_to_unsigned(v))
return watches
@classmethod
# Funny arg name to reserve use of nice ones for filters
def is_notifying(cls, user_or_email_, object_id=None, **filters):
"""Return whether the user/email is watching this event (either
active or inactive watches), conditional on meeting the criteria in
``filters``.
Count only watches that match the given filters exactly--not ones which
match merely a superset of them. This lets callers distinguish between
watches which overlap in scope. Equivalently, this lets callers check
whether :meth:`notify()` has been called with these arguments.
Implementations in subclasses may take different arguments--for
example, to assume certain filters--though most will probably just use
this. However, subclasses should clearly document what filters they
supports and the meaning of each.
Passing this an ``AnonymousUser`` always returns ``False``. This means
you can always pass it ``request.user`` in a view and get a sensible
response.
"""
return cls._watches_belonging_to_user(
user_or_email_, object_id=object_id, **filters
).exists()
@classmethod
def notify(cls, user_or_email_, object_id=None, **filters):
"""Start notifying the given user or email address when this event
occurs and meets the criteria given in ``filters``.
Return the created (or the existing matching) Watch so you can call
:meth:`~tidings.models.Watch.activate()` on it if you're so inclined.
Implementations in subclasses may take different arguments; see the
docstring of :meth:`is_notifying()`.
Send an activation email if an anonymous watch is created and
:data:`~django.conf.settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES` is
``True``. If the activation request fails, raise a
ActivationRequestFailed exception.
Calling :meth:`notify()` twice for an anonymous user will send the
email each time.
"""
# A test-for-existence-then-create race condition exists here, but it
# doesn't matter: de-duplication on fire() and deletion of all matches
# on stop_notifying() nullify its effects.
try:
# Pick 1 if >1 are returned:
watch = cls._watches_belonging_to_user(user_or_email_, object_id=object_id, **filters)[
0:1
].get()
except Watch.DoesNotExist:
create_kwargs = {}
if cls.content_type:
create_kwargs["content_type"] = ContentType.objects.get_for_model(cls.content_type)
create_kwargs["email" if isinstance(user_or_email_, str) else "user"] = user_or_email_
# Letters that can't be mistaken for other letters or numbers in
# most fonts, in case people try to type these:
distinguishable_letters = "abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRTUVWXYZ"
secret = "".join(random.choice(distinguishable_letters) for x in range(10))
# Registered users don't need to confirm, but anonymous users do.
is_active = "user" in create_kwargs or not settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES
if object_id:
create_kwargs["object_id"] = object_id
watch = Watch.objects.create(
secret=secret, is_active=is_active, event_type=cls.event_type, **create_kwargs
)
for k, v in iter(filters.items()):
WatchFilter.objects.create(watch=watch, name=k, value=hash_to_unsigned(v))
# Send email for inactive watches.
if not watch.is_active:
email = watch.user.email if watch.user else watch.email
message = cls._activation_email(watch, email)
try:
message.send()
except SMTPException as e:
watch.delete()
raise ActivationRequestFailed(e.recipients)
return watch
@classmethod
def stop_notifying(cls, user_or_email_, **filters):
"""Delete all watches matching the exact user/email and filters.
Delete both active and inactive watches. If duplicate watches
exist due to the get-then-create race condition, delete them all.
Implementations in subclasses may take different arguments; see the
docstring of :meth:`is_notifying()`.
"""
cls._watches_belonging_to_user(user_or_email_, **filters).delete()
# TODO: If GenericForeignKeys don't give us cascading deletes, make a
# stop_notifying_all(**filters) or something. It should delete any watch of
# the class's event_type and content_type and having filters matching each
# of **filters. Even if there are additional filters on a watch, that watch
# should still be deleted so we can delete, for example, any watch that
# references a certain Question instance. To do that, factor such that you
# can effectively call _watches_belonging_to_user() without it calling
# extra().
# Subclasses should implement the following:
def _mails(self, users_and_watches):
"""Return an iterable yielding an EmailMessage to send to each user.
:arg users_and_watches: an iterable of (User or EmailUser, [Watches])
pairs where the first element is the user to send to and the second
is a list of watches (usually just one) that indicated the
user's interest in this event
:meth:`~tidings.utils.emails_with_users_and_watches()` can come in
handy for generating mails from Django templates.
"""
# Did this instead of mail() because a common case might be sending the
# same mail to many users. mail() would make it difficult to avoid
# redoing the templating every time.
raise NotImplementedError
def _users_watching(self, **kwargs):
"""Return an iterable of Users and EmailUsers watching this event
and the Watches that map them to it.
Each yielded item is a tuple: (User or EmailUser, [list of Watches]).
Default implementation returns users watching this object's event_type
and, if defined, content_type.
"""
return self._users_watching_by_filter(**kwargs)
@classmethod
def _activation_email(cls, watch, email):
"""Return an EmailMessage to send to anonymous watchers.
They are expected to follow the activation URL sent in the email to
activate their watch, so you should include at least that.
"""
# TODO: basic implementation.
return mail.EmailMessage("TODO", "Activate!", settings.TIDINGS_FROM_ADDRESS, [email])
@classmethod
def _activation_url(cls, watch):
"""Return a URL pointing to a view which :meth:`activates
<tidings.models.Watch.activate()>` a watch.
TODO: provide generic implementation of this before liberating.
Generic implementation could involve a setting to the default
``reverse()`` path, e.g. ``'tidings.activate_watch'``.
"""
raise NotImplementedError
@classmethod
def description_of_watch(cls, watch):
"""Return a description of the Watch which can be used in emails.
For example, "changes to English articles"
"""
raise NotImplementedError
class EventUnion(Event):
"""Fireable conglomeration of multiple events
Use this when you want to send a single mail to each person watching any of
several events. For example, this sends only 1 mail to a given user, even
if he was being notified of all 3 events::
EventUnion(SomeEvent(), OtherEvent(), ThirdEvent()).fire()
"""
# Calls some private methods on events, but this and Event are good
# friends.
def __init__(self, *events):
""":arg events: the events of which to take the union"""
super(EventUnion, self).__init__()
self.events = events
def _mails(self, users_and_watches):
"""Default implementation calls the
:meth:`~tidings.events.Event._mails()` of my first event but may
pass it any of my events as ``self``.
Use this default implementation when the content of each event's mail
template is essentially the same, e.g. "This new post was made.
Enjoy.". When the receipt of a second mail from the second event would
add no value, this is a fine choice. If the second event's email would
add value, you should probably fire both events independently and let
both mails be delivered. Or, if you would like to send a single mail
with a custom template for a batch of events, just subclass
:class:`EventUnion` and override this method.
"""
return self.events[0]._mails(users_and_watches)
def _users_watching(self, **kwargs):
# Get a sorted iterable of user-watches pairs:
def email_key(pair):
user, watch = pair
return user.email.lower()
users_and_watches = collate(
*[e._users_watching(**kwargs) for e in self.events], key=email_key, reverse=True
)
# Pick the best User out of each cluster of identical email addresses:
return _unique_by_email(users_and_watches)
class InstanceEvent(Event):
"""Abstract superclass for watching a specific instance of a Model.
Subclasses must specify an ``event_type`` and should specify a
``content_type``.
"""
def __init__(self, instance, *args, **kwargs):
"""Initialize an InstanceEvent
:arg instance: the instance someone would have to be watching in
order to be notified when this event is fired.
"""
super(InstanceEvent, self).__init__(*args, **kwargs)
self.instance = instance
@classmethod
def notify(cls, user_or_email, instance):
"""Create, save, and return a watch which fires when something
happens to ``instance``."""
return super(InstanceEvent, cls).notify(user_or_email, object_id=instance.pk)
@classmethod
def stop_notifying(cls, user_or_email, instance):
"""Delete the watch created by notify."""
super(InstanceEvent, cls).stop_notifying(user_or_email, object_id=instance.pk)
@classmethod
def is_notifying(cls, user_or_email, instance):
"""Check if the watch created by notify exists."""
return super(InstanceEvent, cls).is_notifying(user_or_email, object_id=instance.pk)
def _users_watching(self, **kwargs):
"""Return users watching this instance."""
return self._users_watching_by_filter(object_id=self.instance.pk, **kwargs)
|
f75d697e84d4702554d14b1d9e95bdfb5df376b9
|
a3d6556180e74af7b555f8d47d3fea55b94bcbda
|
/chromecast/renderer/DEPS
|
1c707f58626c3873ac7cfb75d208d6016b55f4e6
|
[
"BSD-3-Clause"
] |
permissive
|
chromium/chromium
|
aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
|
a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
|
refs/heads/main
| 2023-08-24T00:35:12.585945
| 2023-08-23T22:01:11
| 2023-08-23T22:01:11
| 120,360,765
| 17,408
| 7,102
|
BSD-3-Clause
| 2023-09-10T23:44:27
| 2018-02-05T20:55:32
| null |
UTF-8
|
Python
| false
| false
| 1,011
|
DEPS
|
include_rules = [
"+chromecast/browser/mojom",
"+chromecast/common",
"+chromecast/crash",
"+chromecast/media",
"+components/cast_receiver/renderer",
"+components/cdm/renderer",
"+components/guest_view/renderer",
"+components/media_control/renderer",
"+components/media_control/mojom",
"+components/network_hints/renderer",
"+components/on_load_script_injector/renderer",
"+components/url_rewrite/common",
"+components/url_rewrite/renderer",
"+components/version_info",
"+content/public/common",
"+content/public/renderer",
"+gin",
"+media/base",
"+media/media_buildflags.h",
"+media/remoting/receiver_controller.h",
"+media/remoting/stream_provider.h",
"+media/remoting/remoting_constants.h",
"+media/renderers",
"+mojo/public",
"+base/strings/escape.h",
"+net/http/http_request_headers.h",
"+net/base/net_errors.h",
"+net/base/url_util.h",
"+services/network/public/cpp",
"+services/service_manager/public",
"+third_party/blink/public",
"+v8",
]
|
|
15f2427608a1adf0ae9350f7223b73f7ad820135
|
70e9a7da3d4e2a41b30544516e166dab2495253c
|
/l10n_br_automated_payment/__manifest__.py
|
c9a85a9e8e7795b5706531858e19451c1eb8506c
|
[
"MIT"
] |
permissive
|
Trust-Code/odoo-brasil
|
bf06ea58a4e0376cb5c297c18bf48eaf97104e54
|
d456a10e32f56e259061afbd989942ea1aae2c2d
|
refs/heads/16.0
| 2023-08-31T16:06:21.038792
| 2023-01-26T19:31:31
| 2023-01-26T19:31:31
| 72,882,959
| 206
| 253
|
MIT
| 2023-08-18T17:05:49
| 2016-11-04T20:28:03
|
Python
|
UTF-8
|
Python
| false
| false
| 1,137
|
py
|
__manifest__.py
|
# © 2018 Danimar Ribeiro, Trustcode
# Part of Trustcode. See LICENSE file for full copyright and licensing details.
{
'name': 'Integração Iugu',
'version': '13.0.1.0.1',
'category': 'Finance',
'sequence': 5,
'author': 'Trustcode',
'license': 'OPL-1',
'summary': """Realiza a integração com IUGU -
Created by Trustcode""",
'website': 'https://www.trustcode.com.br',
'support': 'comercial@trustcode.com.br',
'contributors': [
'Danimar Ribeiro <danimaribeiro@gmail.com>'
],
'depends': [
'account',
'l10n_br_base_address',
],
'external_dependencies': {
'python': [
'iugu',
],
},
'data': [
'data/verify_transaction_cron.xml',
'data/mail_template_data.xml',
'data/bank_slip_cron.xml',
'security/ir.model.access.csv',
'views/res_company.xml',
'views/account_move.xml',
'views/account_journal.xml',
'views/portal_templates.xml',
'views/res_config_settings.xml',
'wizard/wizard_iugu.xml',
'wizard/wizard_new_payment.xml',
],
}
|
6aa4fe703192f4a4ba8fea62d75d16eadaa7ee31
|
234c46d1249c9209f268417a19018afc12e378b4
|
/tests/data/token_indexers/pretrained_transformer_indexer_test.py
|
84021394cf3abd8eea16b2bf464f320b34b2a345
|
[
"Apache-2.0"
] |
permissive
|
allenai/allennlp
|
1f4bcddcb6f5ce60c7ef03a9a3cd6a38bdb987cf
|
80fb6061e568cb9d6ab5d45b661e86eb61b92c82
|
refs/heads/main
| 2023-07-07T11:43:33.781690
| 2022-11-22T00:42:46
| 2022-11-22T00:42:46
| 91,356,408
| 12,257
| 2,712
|
Apache-2.0
| 2022-11-22T00:42:47
| 2017-05-15T15:52:41
|
Python
|
UTF-8
|
Python
| false
| false
| 11,042
|
py
|
pretrained_transformer_indexer_test.py
|
import pytest
from allennlp.common import cached_transformers
from allennlp.common.testing import AllenNlpTestCase
from allennlp.data import Vocabulary
from allennlp.data.token_indexers import PretrainedTransformerIndexer
from allennlp.data.tokenizers import PretrainedTransformerTokenizer
class TestPretrainedTransformerIndexer(AllenNlpTestCase):
def test_as_array_produces_token_sequence_bert_uncased(self):
tokenizer = cached_transformers.get_tokenizer("bert-base-uncased")
allennlp_tokenizer = PretrainedTransformerTokenizer("bert-base-uncased")
indexer = PretrainedTransformerIndexer(model_name="bert-base-uncased")
string_specials = "[CLS] AllenNLP is great [SEP]"
string_no_specials = "AllenNLP is great"
tokens = tokenizer.tokenize(string_specials)
expected_ids = tokenizer.convert_tokens_to_ids(tokens)
# tokens tokenized with our pretrained tokenizer have indices in them
allennlp_tokens = allennlp_tokenizer.tokenize(string_no_specials)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
assert indexed["token_ids"] == expected_ids
def test_as_array_produces_token_sequence_bert_cased(self):
tokenizer = cached_transformers.get_tokenizer("bert-base-cased")
allennlp_tokenizer = PretrainedTransformerTokenizer("bert-base-cased")
indexer = PretrainedTransformerIndexer(model_name="bert-base-cased")
string_specials = "[CLS] AllenNLP is great [SEP]"
string_no_specials = "AllenNLP is great"
tokens = tokenizer.tokenize(string_specials)
expected_ids = tokenizer.convert_tokens_to_ids(tokens)
# tokens tokenized with our pretrained tokenizer have indices in them
allennlp_tokens = allennlp_tokenizer.tokenize(string_no_specials)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
assert indexed["token_ids"] == expected_ids
def test_as_array_produces_token_sequence_bert_cased_sentence_pair(self):
tokenizer = cached_transformers.get_tokenizer("bert-base-cased")
allennlp_tokenizer = PretrainedTransformerTokenizer(
"bert-base-cased", add_special_tokens=False
)
indexer = PretrainedTransformerIndexer(model_name="bert-base-cased")
default_format = "[CLS] AllenNLP is great! [SEP] Really it is! [SEP]"
tokens = tokenizer.tokenize(default_format)
expected_ids = tokenizer.convert_tokens_to_ids(tokens)
allennlp_tokens = allennlp_tokenizer.add_special_tokens(
allennlp_tokenizer.tokenize("AllenNLP is great!"),
allennlp_tokenizer.tokenize("Really it is!"),
)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
assert indexed["token_ids"] == expected_ids
def test_as_array_produces_token_sequence_roberta(self):
tokenizer = cached_transformers.get_tokenizer("roberta-base")
allennlp_tokenizer = PretrainedTransformerTokenizer("roberta-base")
indexer = PretrainedTransformerIndexer(model_name="roberta-base")
string_specials = "<s>AllenNLP is great</s>"
string_no_specials = "AllenNLP is great"
tokens = tokenizer.tokenize(string_specials)
expected_ids = tokenizer.convert_tokens_to_ids(tokens)
# tokens tokenized with our pretrained tokenizer have indices in them
allennlp_tokens = allennlp_tokenizer.tokenize(string_no_specials)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
assert indexed["token_ids"] == expected_ids
def test_as_array_produces_token_sequence_roberta_sentence_pair(self):
tokenizer = cached_transformers.get_tokenizer("roberta-base")
allennlp_tokenizer = PretrainedTransformerTokenizer(
"roberta-base", add_special_tokens=False
)
indexer = PretrainedTransformerIndexer(model_name="roberta-base")
default_format = "<s>AllenNLP is great!</s></s>Really it is!</s>"
tokens = tokenizer.tokenize(default_format)
expected_ids = tokenizer.convert_tokens_to_ids(tokens)
allennlp_tokens = allennlp_tokenizer.add_special_tokens(
allennlp_tokenizer.tokenize("AllenNLP is great!"),
allennlp_tokenizer.tokenize("Really it is!"),
)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
assert indexed["token_ids"] == expected_ids, f"{allennlp_tokens}\n{tokens}"
@pytest.mark.parametrize("model_name", ["roberta-base", "bert-base-cased", "xlm-mlm-ende-1024"])
def test_transformers_vocab_sizes(self, model_name):
namespace = "tags"
tokenizer = cached_transformers.get_tokenizer(model_name)
allennlp_tokenizer = PretrainedTransformerTokenizer(model_name)
indexer = PretrainedTransformerIndexer(model_name=model_name, namespace=namespace)
allennlp_tokens = allennlp_tokenizer.tokenize("AllenNLP is great!")
vocab = Vocabulary()
# here we copy entire transformers vocab
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
del indexed
assert vocab.get_vocab_size(namespace=namespace) == tokenizer.vocab_size
def test_transformers_vocabs_added_correctly(self):
namespace, model_name = "tags", "roberta-base"
tokenizer = cached_transformers.get_tokenizer(model_name, use_fast=False)
allennlp_tokenizer = PretrainedTransformerTokenizer(model_name)
indexer = PretrainedTransformerIndexer(model_name=model_name, namespace=namespace)
allennlp_tokens = allennlp_tokenizer.tokenize("AllenNLP is great!")
vocab = Vocabulary()
# here we copy entire transformers vocab
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
del indexed
assert vocab.get_token_to_index_vocabulary(namespace=namespace) == tokenizer.encoder
def test_mask(self):
# We try these models, because
# - BERT pads tokens with 0
# - RoBERTa pads tokens with 1
# - GPT2 has no padding token, so we choose 0
for model in ["bert-base-uncased", "roberta-base", "gpt2"]:
allennlp_tokenizer = PretrainedTransformerTokenizer(model)
indexer = PretrainedTransformerIndexer(model_name=model)
string_no_specials = "AllenNLP is great"
allennlp_tokens = allennlp_tokenizer.tokenize(string_no_specials)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
expected_masks = [True] * len(indexed["token_ids"])
assert indexed["mask"] == expected_masks
max_length = 10
padding_lengths = {key: max_length for key in indexed.keys()}
padded_tokens = indexer.as_padded_tensor_dict(indexed, padding_lengths)
padding_length = max_length - len(indexed["mask"])
expected_masks = expected_masks + ([False] * padding_length)
assert len(padded_tokens["mask"]) == max_length
assert padded_tokens["mask"].tolist() == expected_masks
assert len(padded_tokens["token_ids"]) == max_length
pad_token_id = allennlp_tokenizer.tokenizer.pad_token_id
if pad_token_id is None:
pad_token_id = 0
padding_suffix = [pad_token_id] * padding_length
assert padded_tokens["token_ids"][-padding_length:].tolist() == padding_suffix
def test_long_sequence_splitting(self):
tokenizer = cached_transformers.get_tokenizer("bert-base-uncased")
allennlp_tokenizer = PretrainedTransformerTokenizer("bert-base-uncased")
indexer = PretrainedTransformerIndexer(model_name="bert-base-uncased", max_length=4)
string_specials = "[CLS] AllenNLP is great [SEP]"
string_no_specials = "AllenNLP is great"
tokens = tokenizer.tokenize(string_specials)
expected_ids = tokenizer.convert_tokens_to_ids(tokens)
assert len(expected_ids) == 7 # just to make sure it's what we're expecting
cls_id, sep_id = expected_ids[0], expected_ids[-1]
expected_ids = (
expected_ids[:3]
+ [sep_id, cls_id]
+ expected_ids[3:5]
+ [sep_id, cls_id]
+ expected_ids[5:]
)
allennlp_tokens = allennlp_tokenizer.tokenize(string_no_specials)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(allennlp_tokens, vocab)
assert indexed["token_ids"] == expected_ids
assert indexed["segment_concat_mask"] == [True] * len(expected_ids)
assert indexed["mask"] == [True] * 7 # original length
def test_type_ids_when_folding(self):
allennlp_tokenizer = PretrainedTransformerTokenizer(
"bert-base-uncased", add_special_tokens=False
)
indexer = PretrainedTransformerIndexer(model_name="bert-base-uncased", max_length=6)
first_string = "How do trees get online?"
second_string = "They log in!"
tokens = allennlp_tokenizer.add_special_tokens(
allennlp_tokenizer.tokenize(first_string), allennlp_tokenizer.tokenize(second_string)
)
vocab = Vocabulary()
indexed = indexer.tokens_to_indices(tokens, vocab)
assert min(indexed["type_ids"]) == 0
assert max(indexed["type_ids"]) == 1
@staticmethod
def _assert_tokens_equal(expected_tokens, actual_tokens):
for expected, actual in zip(expected_tokens, actual_tokens):
assert expected.text == actual.text
assert expected.text_id == actual.text_id
assert expected.type_id == actual.type_id
def test_indices_to_tokens(self):
allennlp_tokenizer = PretrainedTransformerTokenizer("bert-base-uncased")
indexer_max_length = PretrainedTransformerIndexer(
model_name="bert-base-uncased", max_length=4
)
indexer_no_max_length = PretrainedTransformerIndexer(model_name="bert-base-uncased")
string_no_specials = "AllenNLP is great"
allennlp_tokens = allennlp_tokenizer.tokenize(string_no_specials)
vocab = Vocabulary()
indexed = indexer_no_max_length.tokens_to_indices(allennlp_tokens, vocab)
tokens_from_indices = indexer_no_max_length.indices_to_tokens(indexed, vocab)
self._assert_tokens_equal(allennlp_tokens, tokens_from_indices)
indexed = indexer_max_length.tokens_to_indices(allennlp_tokens, vocab)
tokens_from_indices = indexer_max_length.indices_to_tokens(indexed, vocab)
# For now we are not removing special tokens introduced from max_length
sep_cls = [allennlp_tokens[-1], allennlp_tokens[0]]
expected = (
allennlp_tokens[:3] + sep_cls + allennlp_tokens[3:5] + sep_cls + allennlp_tokens[5:]
)
self._assert_tokens_equal(expected, tokens_from_indices)
|
7ea7592cbeef76cf58dbb0d429bfcb63c48d3e15
|
1180c0bfe29959d95f3c131e6e839950e528d4ee
|
/17/bbelderbos/podify/__init__.py
|
5dc72193daf0ddb3697dd364b5ecd938f3afb991
|
[] |
no_license
|
pybites/challenges
|
e3e461accd8e7f890aee8007ba5070086ef983fc
|
02b77652d0901e6e06cb9b1e7cb3e59c675445c2
|
refs/heads/community
| 2023-08-20T18:19:02.982214
| 2022-11-17T09:23:31
| 2022-11-17T09:23:31
| 78,264,928
| 764
| 3,115
| null | 2023-07-21T05:58:19
| 2017-01-07T07:17:50
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 511
|
py
|
__init__.py
|
__title__ = 'podify'
__author__ = 'Bob Belderbos'
import logging
import os
import ssl
# some feeds get 'bozo_exception': URLError(SSLError(1,
# '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed
ssl._create_default_https_context = ssl._create_unverified_context
FORMAT = '%(asctime)-15s :: %(message)s'
logdir = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '..'))
logging.basicConfig(
filename=os.path.join(logdir, 'podcast.log'),
level=logging.DEBUG,
format=FORMAT)
|
bee9a6799f8daf933cd3c8b6924acc56704ae078
|
6415c13547e6943f7b65337cbd2790c4e18723c8
|
/netbox/circuits/api/views.py
|
bd94318877054c97d7c709753b810ed5f97f2c04
|
[
"Apache-2.0"
] |
permissive
|
netbox-community/netbox
|
287254a9698270d51f57b1297118e9f01536da5a
|
506884bc4dc70299db3e2a7ad577dd7fd808065e
|
refs/heads/develop
| 2023-08-24T09:11:46.685121
| 2023-08-23T18:44:14
| 2023-08-23T18:44:14
| 52,796,596
| 8,122
| 1,817
|
Apache-2.0
| 2023-09-14T18:16:01
| 2016-02-29T14:15:46
|
Python
|
UTF-8
|
Python
| false
| false
| 2,300
|
py
|
views.py
|
from rest_framework.routers import APIRootView
from circuits import filtersets
from circuits.models import *
from dcim.api.views import PassThroughPortMixin
from netbox.api.viewsets import NetBoxModelViewSet
from utilities.utils import count_related
from . import serializers
class CircuitsRootView(APIRootView):
"""
Circuits API root view
"""
def get_view_name(self):
return 'Circuits'
#
# Providers
#
class ProviderViewSet(NetBoxModelViewSet):
queryset = Provider.objects.prefetch_related('asns', 'tags').annotate(
circuit_count=count_related(Circuit, 'provider')
)
serializer_class = serializers.ProviderSerializer
filterset_class = filtersets.ProviderFilterSet
#
# Circuit Types
#
class CircuitTypeViewSet(NetBoxModelViewSet):
queryset = CircuitType.objects.prefetch_related('tags').annotate(
circuit_count=count_related(Circuit, 'type')
)
serializer_class = serializers.CircuitTypeSerializer
filterset_class = filtersets.CircuitTypeFilterSet
#
# Circuits
#
class CircuitViewSet(NetBoxModelViewSet):
queryset = Circuit.objects.prefetch_related(
'type', 'tenant', 'provider', 'provider_account', 'termination_a', 'termination_z'
).prefetch_related('tags')
serializer_class = serializers.CircuitSerializer
filterset_class = filtersets.CircuitFilterSet
#
# Circuit Terminations
#
class CircuitTerminationViewSet(PassThroughPortMixin, NetBoxModelViewSet):
queryset = CircuitTermination.objects.prefetch_related(
'circuit', 'site', 'provider_network', 'cable__terminations'
)
serializer_class = serializers.CircuitTerminationSerializer
filterset_class = filtersets.CircuitTerminationFilterSet
brief_prefetch_fields = ['circuit']
#
# Provider accounts
#
class ProviderAccountViewSet(NetBoxModelViewSet):
queryset = ProviderAccount.objects.prefetch_related('provider', 'tags')
serializer_class = serializers.ProviderAccountSerializer
filterset_class = filtersets.ProviderAccountFilterSet
#
# Provider networks
#
class ProviderNetworkViewSet(NetBoxModelViewSet):
queryset = ProviderNetwork.objects.prefetch_related('tags')
serializer_class = serializers.ProviderNetworkSerializer
filterset_class = filtersets.ProviderNetworkFilterSet
|
aacb22e1cf755f7e8faad03eb70938fd6233942e
|
48fdd1305ea3b6aad7537909b2ee0764c991d4e8
|
/resources/auxiliary_workflows/benchmark/workflow/scripts/performance_measures_global.py
|
0205b0002af8c422e513308ca53289dd0cd216f2
|
[
"Apache-2.0"
] |
permissive
|
cbg-ethz/V-pipe
|
e62418cbe4f45f040bb6c071a08144dedf98281d
|
8c4599509955b33f761ded2889d68777628a4f12
|
refs/heads/master
| 2023-08-22T05:34:52.941749
| 2023-03-17T21:37:28
| 2023-03-17T22:11:55
| 87,789,709
| 115
| 38
|
Apache-2.0
| 2023-05-11T18:34:46
| 2017-04-10T09:01:20
|
HTML
|
UTF-8
|
Python
| false
| false
| 18,166
|
py
|
performance_measures_global.py
|
import datetime
import functools
import subprocess
from pathlib import Path
import pandas as pd
import numpy as np
from sklearn import manifold
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter
import editdistance
from Bio import SeqIO
import natsort as ns
from tqdm import tqdm
from pqdm.processes import pqdm
def read_fasta_files(fasta_files, with_method=True):
tmp = []
for fname in tqdm(fasta_files, desc="Read FASTA files"):
parts = str(fname).split("/")
if with_method:
params = parts[-5]
method = parts[-4]
else:
params = parts[-4]
method = None
replicate = parts[-2]
for record in SeqIO.parse(fname, "fasta"):
# description actually starts with id
description = record.description[len(record.id) + 1 :]
props = dict(pair.split(":") for pair in description.split("|"))
# extract properties
freq = props.get("freq")
if freq is None:
freq = props.get("Freq")
# finalize
tmp.append(
{
"method": method,
"params": params,
"replicate": replicate,
"sequence": str(record.seq),
"frequency": float(freq),
}
)
return pd.DataFrame(tmp)
def read_haplostats(haplostats_list):
df_list = []
for fname in tqdm(haplostats_list, desc="Read haplostat files"):
parts = str(fname).split("/")
params = parts[-4]
replicate = parts[-2]
tmp = pd.read_csv(fname)
tmp["params"] = params
tmp["replicate"] = replicate
df_list.append(tmp)
return pd.concat(df_list)
def read_runstats(runstatus_list):
tmp = []
for fname in tqdm(runstatus_list, desc="Read runstatus files"):
parts = str(fname).split("/")
params = parts[-5]
method = parts[-4]
replicate = parts[-2]
status = fname.read_text()
tmp.append(
{
"params": params,
"method": method,
"replicate": replicate,
"status": status if len(status) > 0 else "success",
}
)
return pd.DataFrame(tmp)
def read_benchmarks(benchmark_list):
df_list = []
for fname in tqdm(benchmark_list, desc="Read benchmark files"):
parts = str(fname).split("/")
params = parts[-5]
method = parts[-4]
replicate = parts[-2]
tmp = pd.read_csv(fname, sep="\t")
tmp["params"] = params
tmp["method"] = method
tmp["replicate"] = replicate
df_list.append(tmp)
return pd.concat(df_list)
def format_params(df):
# detect which parameters vary
varying_keys = set()
last_params = None
for row in df.itertuples():
params = dict(pair.split("~") for pair in row.params.split("__"))
if last_params is not None:
assert params.keys() == last_params.keys()
for key in params:
if params[key] != last_params[key]:
varying_keys.add(key)
last_params = params
varying_keys = ns.natsorted(varying_keys, alg=ns.REAL)
# retain only varying parameters
def retainer(param_str):
params = dict(pair.split("~") for pair in param_str.split("__"))
return "__".join(f"{key}~{params[key]}" for key in varying_keys)
df = df.assign(params=lambda x: x["params"].apply(retainer))
# sort parameters
df = df.sort_values(by="params", key=ns.natsort_keygen(alg=ns.REAL))
# make remaining parameters readable
df = df.assign(params=lambda x: x["params"].str.replace("__", "\n"))
return df
def overview_plots(df_haplo, dname_out):
if df_haplo.empty:
print("Warning: df_haplo is empty")
return
df_haplo["seq_len"] = df_haplo["sequence"].str.len()
df_long = format_params(
pd.melt(df_haplo, id_vars=["method", "params", "replicate"])
)
df_long = df_long[df_long["variable"] != "sequence"]
g = sns.catplot(
data=df_long,
x="params",
y="value",
hue="method",
col="variable",
kind="box",
sharey=False,
height=10,
)
g.map_dataframe(
sns.stripplot, x="params", y="value", hue="method", color="k", dodge=True
)
for ax in g.axes.flat:
ax.set_xticklabels(
ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor"
)
g.savefig(dname_out / "overview.pdf")
def benchmark_plots(df_bench, dname_out):
@FuncFormatter
def fmt_yaxis(x, pos):
return str(datetime.timedelta(seconds=x))
# prepare data
df_bench = format_params(df_bench)
# plot
fig, ax = plt.subplots()
sns.boxplot(data=df_bench, x="params", y="s", hue="method", ax=ax)
sns.swarmplot(
data=df_bench,
x="params",
y="s",
hue="method",
dodge=True,
clip_on=False,
linewidth=1,
edgecolor="gray",
ax=ax,
)
ax.set_xticklabels(
ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor"
)
ax.set_ylabel("Runtime [hh:mm:ss]")
ax.yaxis.set_major_formatter(fmt_yaxis)
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles[: len(handles) // 2], labels[: len(handles) // 2])
fig.tight_layout()
fig.savefig(dname_out / "benchmark_plot.pdf")
def run_metaquast(predicted_haplos_list, true_haplos_list, workdir):
df_list = []
for fname_contigs in predicted_haplos_list:
cwd = workdir / fname_contigs.parent
# skip if empty
if fname_contigs.stat().st_size == 0:
print(f"Skipping empty file '{fname_contigs}'")
continue
# find matching ground truth
parts = str(fname_contigs).split("/")
params = parts[-5]
method = parts[-4]
replicate = parts[-2]
for fname_truth in true_haplos_list:
truth_parts = str(fname_truth).split("/")
truth_params = truth_parts[-4]
truth_replicate = truth_parts[-2]
if params == truth_params and replicate == truth_replicate:
break
else:
raise RuntimeError(f"No ground truth found for '{fname_contigs}'")
# split reference fasta into individual files
ref_dir = cwd / "haplotype_references"
ref_dir.mkdir(parents=True, exist_ok=True)
reference_fname_list = []
for record in SeqIO.parse(fname_truth, "fasta"):
fname = ref_dir / f"{record.id}.fasta"
SeqIO.write(record, fname, "fasta")
reference_fname_list.append(fname)
# run quast
subprocess.run(
[
"metaquast",
"-o",
cwd,
"-r",
",".join(str(p) for p in reference_fname_list),
"--min-contig",
"0",
"--silent",
"--unique-mapping",
fname_contigs,
],
check=True,
)
# parse output
for res_dir in (cwd / "runs_per_reference").iterdir():
if res_dir.name.startswith("."):
continue
# gather report
quast_report = pd.read_csv(
res_dir / "report.tsv",
sep="\t",
header=None,
names=["variable", "value"],
).set_index("variable")
tmp = pd.DataFrame(
{
"contig_count": quast_report.loc["# contigs", "value"],
"contig_total_length": quast_report.loc["Total length", "value"],
"contig_max_length": quast_report.loc["Largest contig", "value"],
"N50": quast_report.loc["N50", "value"],
"N75": quast_report.loc["N75", "value"],
"L50": quast_report.loc["L50", "value"],
"L75": quast_report.loc["L75", "value"],
},
index=[0],
).astype(int)
tmp["reference"] = res_dir.name
# finalize
tmp["params"] = params
tmp["method"] = method
tmp["replicate"] = replicate
df_list.append(tmp)
# set column dtypes
df_quast = pd.concat(df_list, ignore_index=True)
df_quast["method"] = pd.Categorical(
df_quast["method"], categories=sorted(snakemake.params.method_list_global)
)
return df_quast
def plot_quast(df_quast, dname_out):
dname_out.mkdir(parents=True, exist_ok=True)
df_quast = format_params(df_quast)
for col in df_quast.select_dtypes(include="number"):
fig, ax = plt.subplots(figsize=(8, 6))
sns.boxplot(data=df_quast, x="params", y=col, hue="method", ax=ax)
sns.stripplot(
data=df_quast,
x="params",
y=col,
hue="method",
dodge=True,
clip_on=False,
linewidth=1,
edgecolor="gray",
ax=ax,
)
ax.set_xticklabels(
ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor"
)
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles[: len(handles) // 2], labels[: len(handles) // 2])
fig.tight_layout()
fig.savefig(dname_out / f"{col}.pdf")
def mds_worker(index, df_pred_grpd, df_true, mds_dir):
params, replicate = index
df_true_grpd = df_true[
(df_true["params"] == params) & (df_true["replicate"] == replicate)
]
# subsample large results
max_num = 50
df_pred_grpd = df_pred_grpd.copy()
df_pred_grpd = (
df_pred_grpd.groupby("method")
.apply(lambda x: x.sample(n=min(len(x), max_num)))
.reset_index(drop=True)
)
# compute dissimilarities
sequence_list = (
df_pred_grpd["sequence"].tolist() + df_true_grpd["sequence"].tolist()
)
mat = np.zeros(shape=(len(sequence_list), len(sequence_list)))
for i, seq1 in enumerate(tqdm(sequence_list, leave=False)):
for j, seq2 in enumerate(tqdm(sequence_list, leave=False)):
if i >= j:
continue
mat[i, j] = editdistance.eval(seq1, seq2)
mat = np.triu(mat) + np.tril(mat.T, 1) # mirror to make symmetric
# do MDS
embedding = manifold.MDS(n_components=2, dissimilarity="precomputed")
mat_trans = embedding.fit_transform(mat)
df = pd.concat(
[
pd.DataFrame(mat_trans, columns=["MDS0", "MDS1"]),
pd.concat([df_pred_grpd, df_true_grpd], axis=0, ignore_index=True),
],
axis=1,
)
df["method"] = df["method"].apply(lambda x: "ground_truth" if x is None else x)
df["params"] = params
df["replicate"] = replicate
# plot result
fig, ax = plt.subplots(figsize=(8, 6))
sns.scatterplot(data=df, x="MDS0", y="MDS1", hue="method", ax=ax)
fig.savefig(mds_dir / f"sequence_mds_{params}_{replicate}.pdf")
return df
def sequence_embedding(df_pred, df_true, dname_out):
mds_dir = dname_out / "mds_plots"
mds_dir.mkdir(parents=True)
# compute
df_list = pqdm(
(
(index, df_group, df_true, mds_dir)
for index, df_group in df_pred.groupby(["params", "replicate"])
),
mds_worker,
n_jobs=snakemake.threads,
argument_type="args",
desc="Compute MDS",
)
# finalize
return pd.concat(df_list, ignore_index=True)
@functools.lru_cache(None)
def relative_edit_distance(seq1, seq2):
dist = editdistance.eval(seq1, seq2)
rel = dist / max(len(seq1), len(seq2))
return rel
def pr_worker(index, df_group, df_true, thres):
(method, params, replicate) = index
tp = 0
fp = 0
fn = 0
df_true_grpd = df_true[
(df_true["params"] == params) & (df_true["replicate"] == replicate)
]
# subsample large results
max_num = 500
df_group = df_group.sample(n=min(df_group.shape[0], max_num))
# true positive: predicted seq appears in ground truth
# false positive: predicted seq does not appear in ground truth
df_cur = df_true_grpd.copy()
for row in tqdm(df_group.itertuples(), total=df_group.shape[0], leave=False):
ser_dist = df_cur["sequence"].apply(
lambda x: relative_edit_distance(x, row.sequence)
)
passed_thres = (ser_dist <= thres).any()
if passed_thres:
tp += 1
# remove ground truth because it was predicted
df_cur = df_cur.drop(ser_dist.idxmin())
else:
fp += 1
# false negative: ground truth sequence was not predicted
# single prediction should not map to multiple ground truth seqs
df_cur = df_group.copy()
for row in tqdm(
df_true_grpd.itertuples(), total=df_true_grpd.shape[0], leave=False
):
ser_dist = df_cur["sequence"].apply(
lambda x: relative_edit_distance(x, row.sequence)
)
passed_thres = (ser_dist <= thres).any()
if not passed_thres:
fn += 1
else:
# remove current prediction
df_cur = df_cur.drop(ser_dist.idxmin())
# finalize
return {
"method": method,
"params": params,
"replicate": replicate,
"tp": tp,
"fp": fp,
"fn": fn,
"precision": tp / (tp + fp),
"recall": tp / (tp + fn),
}
def compute_pr(df_pred, df_true, thres=0.01):
# compute
tmp = pqdm(
(
(index, df_group, df_true, thres)
for index, df_group in df_pred.groupby(["method", "params", "replicate"])
),
pr_worker,
n_jobs=snakemake.threads,
argument_type="args",
desc="Compute PR",
)
# set column dtypes
df_pr = pd.DataFrame(tmp)
df_pr["method"] = pd.Categorical(
df_pr["method"], categories=sorted(snakemake.params.method_list_global)
)
return df_pr
def plot_pr(df_pr, df_stats, dname_out):
# prepare data
diversity_column_list = ["population_nucleotide_diversity", "mean_position_shannon"]
df_m = format_params(df_pr.merge(df_stats, on=["params", "replicate"]))
# helper functions
def do_plot(df, x, y, fname):
fig, ax = plt.subplots()
sns.boxplot(data=df, x=x, y=y, hue="method", ax=ax)
sns.swarmplot(
data=df,
x=x,
y=y,
hue="method",
dodge=True,
clip_on=False,
linewidth=1,
edgecolor="gray",
ax=ax,
)
ax.set_ylim(0, 1)
ax.set_xticklabels(
ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor"
)
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles[: len(handles) // 2], labels[: len(handles) // 2])
fig.tight_layout()
fig.savefig(fname)
# plots
do_plot(df_m, "params", "precision", dname_out / "overview_precision.pdf")
do_plot(df_m, "params", "recall", dname_out / "overview_recall.pdf")
for diversity_column in diversity_column_list:
if diversity_column not in df_m.columns:
print(f"Skipping {diversity_column} PR plot")
continue
do_plot(
df_m,
diversity_column,
"precision",
dname_out / f"overview_precision_{diversity_column}.pdf",
)
do_plot(
df_m,
diversity_column,
"recall",
dname_out / f"overview_recall_{diversity_column}.pdf",
)
def main(
predicted_haplos_list,
true_haplos_list,
haplostats_list,
runstatus_list,
benchmark_list,
dname_out,
):
dname_out.mkdir(parents=True)
csv_dir = dname_out / "csv_files"
csv_dir.mkdir(parents=True, exist_ok=True)
# read data
df_pred = read_fasta_files(predicted_haplos_list)
df_true = read_fasta_files(true_haplos_list, with_method=False)
df_true["method"] = "ground_truth"
df_stats = read_haplostats(haplostats_list)
df_runstats = read_runstats(runstatus_list)
df_bench = read_benchmarks(benchmark_list)
df_pred.to_csv(csv_dir / "predictions.csv.gz")
df_true.to_csv(csv_dir / "ground_truth.csv.gz")
df_stats.to_csv(csv_dir / "data_stats.csv")
df_runstats.to_csv(csv_dir / "run_stats.csv")
# quick stats
print("Run status")
print(df_runstats.groupby("method")["status"].value_counts())
print("Haplotype counts per method")
print(df_pred["method"].value_counts())
# create plots
overview_plots(df_pred, dname_out)
# benchmark plots
benchmark_plots(df_bench, dname_out)
# precision/recall
df_pr = compute_pr(df_pred, df_true)
plot_pr(df_pr, df_stats, dname_out)
df_pr.to_csv(csv_dir / "pr_results.csv")
# quast stuff
df_quast = run_metaquast(
predicted_haplos_list, true_haplos_list, dname_out / "quast" / "run"
)
plot_quast(df_quast, dname_out / "quast" / "images")
df_quast.to_csv(csv_dir / "quast_results.csv")
# MDS
df_mds = sequence_embedding(df_pred, df_true, dname_out)
df_mds.to_csv(csv_dir / "mds_results.csv.gz")
# subset MDS plot to show well-performing methods
sequence_embedding(
df_pred[
(df_pred["method"] != "haploclique") & (df_pred["method"] != "haploconduct")
],
df_true,
dname_out / "subset",
)
if __name__ == "__main__":
main(
[Path(e) for e in snakemake.input.predicted_haplos_list],
[Path(e) for e in snakemake.input.true_haplos_list],
[Path(e) for e in snakemake.input.haplostats_list],
[Path(e) for e in snakemake.input.runstatus_list],
[Path(e) for e in snakemake.input.benchmark_list],
Path(snakemake.output.dname_out),
)
|
6d53fc77082394bfe7af2881f1349988a13bdd23
|
6c9ce6ca26990b9ca2f9967768e3607fa0be3d23
|
/cloudsplaining/command/create_multi_account_config_file.py
|
f78b0ac3d333841050a8b0a97d423176d4671609
|
[] |
permissive
|
salesforce/cloudsplaining
|
100e062ee4b0c558ab408a25f05ace24d8d3b029
|
1c2255ca9280be2a6035346151570c596d6f3dfa
|
refs/heads/master
| 2023-09-04T08:08:49.544990
| 2023-08-29T21:20:47
| 2023-08-29T21:20:47
| 256,772,852
| 1,769
| 171
|
BSD-3-Clause
| 2023-09-05T21:50:55
| 2020-04-18T14:23:56
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 2,126
|
py
|
create_multi_account_config_file.py
|
"""
Create YML Template files for the exclusions template command.
This way, users don't have to remember exactly how to phrase the yaml files, since this command creates it for them.
"""
# Copyright (c) 2020, salesforce.com, inc.
# All rights reserved.
# Licensed under the BSD 3-Clause license.
# For full license text, see the LICENSE file in the repo root
# or https://opensource.org/licenses/BSD-3-Clause
import os
import logging
import click
from cloudsplaining.shared.constants import MULTI_ACCOUNT_CONFIG_TEMPLATE
from cloudsplaining import set_log_level
from cloudsplaining.shared import utils
logger = logging.getLogger(__name__)
OK_GREEN = "\033[92m"
END = "\033[0m"
@click.command(
context_settings=dict(max_content_width=160),
short_help="Creates a YML file to be used for multi-account scanning",
)
@click.option("-o", "--output-file", "output_file", type=click.Path(exists=False), default=os.path.join(os.getcwd(), "multi-account-config.yml"), required=True, help="Relative path to output file where we want to store the multi account config template.")
@click.option("-v", "--verbose", "verbosity", help="Log verbosity level.", count=True)
def create_multi_account_config_file(output_file: str, verbosity: int) -> None:
"""
Creates a YML file to be used as a multi-account config template, so users can scan many different accounts.
"""
set_log_level(verbosity)
if os.path.exists(output_file):
logger.debug(
"%s exists. Removing the file and replacing its contents.", output_file
)
os.remove(output_file)
with open(output_file, "a") as file_obj:
for line in MULTI_ACCOUNT_CONFIG_TEMPLATE:
file_obj.write(line)
utils.print_green(
f"Success! Multi-account config file written to: {os.path.relpath(output_file)}"
)
print(
f"\nMake sure you edit the {os.path.relpath(output_file)} file and then run the scan-multi-account command, as shown below."
)
print(
f"\n\tcloudsplaining scan-multi-account --exclusions-file exclusions.yml -c {os.path.relpath(output_file)} -o ./"
)
|
a6cd26e1a11124b3ed824518c58a13faca2675a1
|
6189f34eff2831e3e727cd7c5e43bc5b591adffc
|
/WebMirror/management/rss_parser_funcs/feed_parse_extractWwwKuronotranslationsCom.py
|
b825e0e34e1a460e390e49c0aa34ec27b903bfd1
|
[
"BSD-3-Clause"
] |
permissive
|
fake-name/ReadableWebProxy
|
24603660b204a9e7965cfdd4a942ff62d7711e27
|
ca2e086818433abc08c014dd06bfd22d4985ea2a
|
refs/heads/master
| 2023-09-04T03:54:50.043051
| 2023-08-26T16:08:46
| 2023-08-26T16:08:46
| 39,611,770
| 207
| 20
|
BSD-3-Clause
| 2023-09-11T15:48:15
| 2015-07-24T04:30:43
|
Python
|
UTF-8
|
Python
| false
| false
| 611
|
py
|
feed_parse_extractWwwKuronotranslationsCom.py
|
def extractWwwKuronotranslationsCom(item):
'''
Parser for 'www.kuronotranslations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
if "Level Up Just By Eating" in item['tags']:
return buildReleaseMessageWithType(item, "Level Up Just By Eating", vol, chp, frag=frag, postfix=postfix)
if "Kou 1 Desu ga Isekai de Joushu Hajimemashita" in item['tags']:
return buildReleaseMessageWithType(item, "Kou 1 Desu ga Isekai de Joushu Hajimemashita", vol, chp, frag=frag, postfix=postfix)
return False
|
9e606b330f14c8714da145c618e3c31ca8f7c369
|
9ed4d46aedd4d4acadb48d610e940594b5b7b3fd
|
/matrix/pascal_triangle.py
|
7f6555f9c8b9f2b61199a73af7a5c947fd6f96ae
|
[
"MIT"
] |
permissive
|
TheAlgorithms/Python
|
7596a0e236ed12a61f9db19a7ea68309779cc85b
|
421ace81edb0d9af3a173f4ca7e66cc900078c1d
|
refs/heads/master
| 2023-09-01T17:32:20.190949
| 2023-08-29T13:18:10
| 2023-08-29T13:18:10
| 63,476,337
| 184,217
| 48,615
|
MIT
| 2023-09-14T02:05:29
| 2016-07-16T09:44:01
|
Python
|
UTF-8
|
Python
| false
| false
| 6,183
|
py
|
pascal_triangle.py
|
"""
This implementation demonstrates how to generate the elements of a Pascal's triangle.
The element havingva row index of r and column index of c can be derivedvas follows:
triangle[r][c] = triangle[r-1][c-1]+triangle[r-1][c]
A Pascal's triangle is a triangular array containing binomial coefficients.
https://en.wikipedia.org/wiki/Pascal%27s_triangle
"""
def print_pascal_triangle(num_rows: int) -> None:
"""
Print Pascal's triangle for different number of rows
>>> print_pascal_triangle(5)
1
1 1
1 2 1
1 3 3 1
1 4 6 4 1
"""
triangle = generate_pascal_triangle(num_rows)
for row_idx in range(num_rows):
# Print left spaces
for _ in range(num_rows - row_idx - 1):
print(end=" ")
# Print row values
for col_idx in range(row_idx + 1):
if col_idx != row_idx:
print(triangle[row_idx][col_idx], end=" ")
else:
print(triangle[row_idx][col_idx], end="")
print()
def generate_pascal_triangle(num_rows: int) -> list[list[int]]:
"""
Create Pascal's triangle for different number of rows
>>> generate_pascal_triangle(0)
[]
>>> generate_pascal_triangle(1)
[[1]]
>>> generate_pascal_triangle(2)
[[1], [1, 1]]
>>> generate_pascal_triangle(3)
[[1], [1, 1], [1, 2, 1]]
>>> generate_pascal_triangle(4)
[[1], [1, 1], [1, 2, 1], [1, 3, 3, 1]]
>>> generate_pascal_triangle(5)
[[1], [1, 1], [1, 2, 1], [1, 3, 3, 1], [1, 4, 6, 4, 1]]
>>> generate_pascal_triangle(-5)
Traceback (most recent call last):
...
ValueError: The input value of 'num_rows' should be greater than or equal to 0
>>> generate_pascal_triangle(7.89)
Traceback (most recent call last):
...
TypeError: The input value of 'num_rows' should be 'int'
"""
if not isinstance(num_rows, int):
raise TypeError("The input value of 'num_rows' should be 'int'")
if num_rows == 0:
return []
elif num_rows < 0:
raise ValueError(
"The input value of 'num_rows' should be greater than or equal to 0"
)
triangle: list[list[int]] = []
for current_row_idx in range(num_rows):
current_row = populate_current_row(triangle, current_row_idx)
triangle.append(current_row)
return triangle
def populate_current_row(triangle: list[list[int]], current_row_idx: int) -> list[int]:
"""
>>> triangle = [[1]]
>>> populate_current_row(triangle, 1)
[1, 1]
"""
current_row = [-1] * (current_row_idx + 1)
# first and last elements of current row are equal to 1
current_row[0], current_row[-1] = 1, 1
for current_col_idx in range(1, current_row_idx):
calculate_current_element(
triangle, current_row, current_row_idx, current_col_idx
)
return current_row
def calculate_current_element(
triangle: list[list[int]],
current_row: list[int],
current_row_idx: int,
current_col_idx: int,
) -> None:
"""
>>> triangle = [[1], [1, 1]]
>>> current_row = [1, -1, 1]
>>> calculate_current_element(triangle, current_row, 2, 1)
>>> current_row
[1, 2, 1]
"""
above_to_left_elt = triangle[current_row_idx - 1][current_col_idx - 1]
above_to_right_elt = triangle[current_row_idx - 1][current_col_idx]
current_row[current_col_idx] = above_to_left_elt + above_to_right_elt
def generate_pascal_triangle_optimized(num_rows: int) -> list[list[int]]:
"""
This function returns a matrix representing the corresponding pascal's triangle
according to the given input of number of rows of Pascal's triangle to be generated.
It reduces the operations done to generate a row by half
by eliminating redundant calculations.
:param num_rows: Integer specifying the number of rows in the Pascal's triangle
:return: 2-D List (matrix) representing the Pascal's triangle
Return the Pascal's triangle of given rows
>>> generate_pascal_triangle_optimized(3)
[[1], [1, 1], [1, 2, 1]]
>>> generate_pascal_triangle_optimized(1)
[[1]]
>>> generate_pascal_triangle_optimized(0)
[]
>>> generate_pascal_triangle_optimized(-5)
Traceback (most recent call last):
...
ValueError: The input value of 'num_rows' should be greater than or equal to 0
>>> generate_pascal_triangle_optimized(7.89)
Traceback (most recent call last):
...
TypeError: The input value of 'num_rows' should be 'int'
"""
if not isinstance(num_rows, int):
raise TypeError("The input value of 'num_rows' should be 'int'")
if num_rows == 0:
return []
elif num_rows < 0:
raise ValueError(
"The input value of 'num_rows' should be greater than or equal to 0"
)
result: list[list[int]] = [[1]]
for row_index in range(1, num_rows):
temp_row = [0] + result[-1] + [0]
row_length = row_index + 1
# Calculate the number of distinct elements in a row
distinct_elements = sum(divmod(row_length, 2))
row_first_half = [
temp_row[i - 1] + temp_row[i] for i in range(1, distinct_elements + 1)
]
row_second_half = row_first_half[: (row_index + 1) // 2]
row_second_half.reverse()
row = row_first_half + row_second_half
result.append(row)
return result
def benchmark() -> None:
"""
Benchmark multiple functions, with three different length int values.
"""
from collections.abc import Callable
from timeit import timeit
def benchmark_a_function(func: Callable, value: int) -> None:
call = f"{func.__name__}({value})"
timing = timeit(f"__main__.{call}", setup="import __main__")
# print(f"{call:38} = {func(value)} -- {timing:.4f} seconds")
print(f"{call:38} -- {timing:.4f} seconds")
for value in range(15): # (1, 7, 14):
for func in (generate_pascal_triangle, generate_pascal_triangle_optimized):
benchmark_a_function(func, value)
print()
if __name__ == "__main__":
import doctest
doctest.testmod()
benchmark()
|
986eff2b17fbebaeb3940654e9c11b41606a7022
|
3c41443364da8b44c74dce08ef94a1acd1b66b3e
|
/osf/utils/identifiers.py
|
6505ee3ada7be92559e57117dda91a812ded866d
|
[
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-warranty-disclaimer",
"AGPL-3.0-only",
"LGPL-2.0-or-later",
"LicenseRef-scancode-proprietary-license",
"MPL-1.1",
"CPAL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause",
"Apache-2.0"
] |
permissive
|
CenterForOpenScience/osf.io
|
71d9540be7989f7118a33e15bc4a6ce2d2492ac1
|
a3e0a0b9ddda5dd75fc8248d58f3bcdeece0323e
|
refs/heads/develop
| 2023-09-04T03:21:14.970917
| 2023-08-31T14:49:20
| 2023-08-31T14:49:20
| 10,199,599
| 683
| 390
|
Apache-2.0
| 2023-09-14T17:07:52
| 2013-05-21T15:53:37
|
Python
|
UTF-8
|
Python
| false
| false
| 2,838
|
py
|
identifiers.py
|
import abc
import re
from urllib.parse import urljoin
import requests
from framework import sentry
from osf.exceptions import (
InvalidPIDError,
InvalidPIDFormatError,
NoSuchPIDError,
NoSuchPIDValidatorError
)
from website.settings import (
PID_VALIDATION_ENABLED,
PID_VALIDATION_ENDPOINTS,
)
class PIDValidator(abc.ABC):
@classmethod
def for_identifier_category(cls, category):
for subclass in cls.__subclasses__():
if subclass.IDENTIFIER_CATEGORY == category:
return subclass()
sentry.log_message(
f'Attempted to validate Identifier with unsupported category {category}.'
)
raise NoSuchPIDValidatorError(
f'PID validation not currently supported for PIDs of type {category}'
)
def __init__(self):
self._validation_endpoint = None
@property
def validation_endpoint(self):
if not PID_VALIDATION_ENABLED:
return None
return PID_VALIDATION_ENDPOINTS.get(self.IDENTIFIER_CATEGORY)
@abc.abstractmethod
def validate(self, pid_value):
pass
class DOIValidator(PIDValidator):
IDENTIFIER_CATEGORY = 'doi'
def validate(self, doi_value):
# Either validation is turned off or we don't know how to validate
# Either way, just let the people do what they want
if not self.validation_endpoint:
return True
# An Invalid DOI will raise an exception error. Let the caller handle what to do there.
return self.get_registration_agency(doi_value) is not None
def get_registration_agency(self, doi_value):
with requests.get(urljoin(self.validation_endpoint, doi_value)) as response:
response_data = response.json()[0]
registration_agency = response_data.get('RA')
if registration_agency:
return registration_agency
# These error messages were copied from actual responses;
# If they change, still raise an error, just not the most descriptive one
error_status = response_data.get('status')
if error_status == 'DOI does not exist':
pid_exception = NoSuchPIDError
elif error_status == 'Invalid DOI':
pid_exception = InvalidPIDFormatError
else:
sentry.log_message(
f'Unexpected response when checking Registration Agency for DOI {doi_value}: '
f'{response_data}'
)
pid_exception = InvalidPIDError
raise pid_exception(pid_value=doi_value, pid_category='DOI')
def normalize_identifier(pid_value):
'''Extract just the PID Value from a possible full URI.'''
pid_value_expression = '(.*://)?(doi.org/)?(?P<pid_value>.*)'
return re.match(pid_value_expression, pid_value).group('pid_value')
|
020f4b236355aed8d6bf8c294d6ce1cf96a5cf33
|
9fcb5164ff77e8cf48d860485fed262d36ed63e7
|
/packages/dcos-integration-test/extra/test_metrics.py
|
c550330ac95f9d6ed32e6cf90dddb701d635d81b
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-oracle-bcl-javase-javafx-2012",
"ErlPL-1.1",
"MPL-2.0",
"ISC",
"BSL-1.0",
"Python-2.0",
"BSD-2-Clause"
] |
permissive
|
dcos/dcos
|
2415d298979c6d6e3183aeb64c906a0959863576
|
79b9a39b4e639dc2c9435a869918399b50bfaf24
|
refs/heads/master
| 2023-08-09T04:16:19.696621
| 2021-07-05T06:42:39
| 2021-07-05T06:42:39
| 56,184,050
| 2,613
| 641
|
Apache-2.0
| 2023-07-27T04:13:50
| 2016-04-13T20:31:58
|
Python
|
UTF-8
|
Python
| false
| false
| 68,590
|
py
|
test_metrics.py
|
import contextlib
import copy
import logging
import re
import sys
import uuid
from typing import Any, Generator
import pytest
import retrying
from dcos_test_utils.dcos_api import DcosApiSession
from prometheus_client.parser import text_string_to_metric_families
from test_helpers import get_expanded_config
__maintainer__ = 'philipnrmn'
__contact__ = 'dcos-cluster-ops@mesosphere.io'
DEPLOY_TIMEOUT = 2 * 60
METRICS_WAITTIME = 4 * 60 * 1000
METRICS_INTERVAL = 2 * 1000
STD_WAITTIME = 15 * 60 * 1000
STD_INTERVAL = 5 * 1000
# tags added if a fault domain is present
FAULT_DOMAIN_TAGS = {'fault_domain_zone', 'fault_domain_region'}
def check_tags(tags: dict, required_tag_names: set, optional_tag_names: set = set()) -> None:
"""Assert that tags contains only expected keys with nonempty values."""
keys = set(tags.keys())
assert keys & required_tag_names == required_tag_names, 'Not all required tags were set'
assert keys - required_tag_names - optional_tag_names == set(), 'Encountered unexpected tags'
for tag_name, tag_val in tags.items():
assert tag_val != '', 'Value for tag "{}" must not be empty'.format(tag_name)
def test_metrics_ping(dcos_api_session: DcosApiSession) -> None:
""" Test that the dcos-metrics service is up on master and agents."""
nodes = get_master_and_agents(dcos_api_session)
for node in nodes:
response = dcos_api_session.metrics.get('/ping', node=node)
assert response.status_code == 200, 'Status code: {}, Content {}'.format(
response.status_code, response.content)
assert response.json()['ok'], 'Status code: {}, Content {}'.format(response.status_code, response.content)
def test_metrics_agents_prom(dcos_api_session: DcosApiSession) -> None:
"""Telegraf Prometheus endpoint is reachable on master and agents."""
nodes = get_master_and_agents(dcos_api_session)
for node in nodes:
response = dcos_api_session.session.request('GET', 'http://' + node + ':61091/metrics')
assert response.status_code == 200, 'Status code: {}'.format(response.status_code)
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_metrics_prom(dcos_api_session: DcosApiSession, node: str) -> Any:
"""Gets metrics from prometheus port on node and returns the response.
Retries on non-200 status for up to 300 seconds.
"""
response = dcos_api_session.session.request(
'GET', 'http://{}:61091/metrics'.format(node))
assert response.status_code == 200, 'Status code: {}'.format(response.status_code)
return response
def test_metrics_procstat(dcos_api_session: DcosApiSession) -> None:
"""Assert that procstat metrics are present on master and agent nodes."""
nodes = get_master_and_agents(dcos_api_session)
for node in nodes:
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_procstat_metrics() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'procstat_lookup_pid_count':
return
raise Exception('Expected Procstat procstat_lookup_pid_count metric not found')
check_procstat_metrics()
def test_metrics_agents_mesos(dcos_api_session: DcosApiSession) -> None:
"""Assert that mesos metrics on agents are present."""
nodes = get_agents(dcos_api_session)
for node in nodes:
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_mesos_metrics() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'mesos_slave_uptime_secs':
return
raise Exception('Expected Mesos mesos_slave_uptime_secs metric not found')
check_mesos_metrics()
def test_metrics_master_mesos(dcos_api_session: DcosApiSession) -> None:
"""Assert that mesos metrics on master are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_mesos_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'mesos_master_uptime_secs':
return
raise Exception('Expected Mesos mesos_master_uptime_secs metric not found')
check_mesos_metrics()
def test_metrics_agents_mesos_overlay(dcos_api_session: DcosApiSession) -> None:
"""Assert that mesos agent overlay module metrics on master and agents are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def _check_mesos_overlay_metrics() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'mesos_overlay_slave_registering':
return
raise Exception('Expected Mesos mesos_overlay_slave_registering metric not found')
nodes = get_master_and_agents(dcos_api_session)
for node in nodes:
_check_mesos_overlay_metrics()
def test_metrics_master_mesos_overlay(dcos_api_session: DcosApiSession) -> None:
"""Assert that mesos overlay module metrics on master are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def _check_mesos_overlay_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'mesos_overlay_master_process_restarts':
return
raise Exception('Expected Mesos mesos_overlay_master_process_restarts metric not found')
_check_mesos_overlay_metrics()
def test_metrics_master_zookeeper(dcos_api_session: DcosApiSession) -> None:
"""Assert that ZooKeeper metrics on master are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_zookeeper_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'zookeeper_avg_latency':
assert sample[1]['dcos_component_name'] == 'ZooKeeper'
return
raise Exception('Expected ZooKeeper zookeeper_avg_latency metric not found')
check_zookeeper_metrics()
def test_metrics_master_cockroachdb(dcos_api_session: DcosApiSession) -> None:
"""Assert that CockroachDB metrics on master are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_cockroachdb_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'ranges_underreplicated':
assert sample[1]['dcos_component_name'] == 'CockroachDB'
return
raise Exception('Expected CockroachDB ranges_underreplicated metric not found')
check_cockroachdb_metrics()
def test_metrics_master_etcd(dcos_api_session: DcosApiSession) -> None:
"""Assert that DC/OS etcd metrics on master are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def _check_etcd_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0].startswith('etcd_') and sample[1].get('dcos_component_name') == 'etcd':
return
raise Exception('Expected DC/OS etcd etcd_* metric on master nodes not found')
_check_etcd_metrics()
def test_metrics_master_calico(dcos_api_session: DcosApiSession) -> None:
"""Assert that DC/OS Calico metrics on master are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def _check_calico_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0].startswith('felix') and sample[1].get('dcos_component_name') == 'DC/OS Calico':
return
raise Exception('Expected DC/OS Calico felix* metric on master nodes not found')
_check_calico_metrics()
def test_metrics_agents_calico(dcos_api_session: DcosApiSession) -> None:
"""Assert that DC/OS Calico metrics on agents are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def _check_calico_metrics() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0].startswith('felix') and sample[1].get('dcos_component_name') == 'DC/OS Calico':
return
raise Exception('Expected DC/OS Calico felix* metric on agent nodes not found')
nodes = get_agents(dcos_api_session)
for node in nodes:
_check_calico_metrics()
def test_metrics_master_adminrouter_nginx_vts(dcos_api_session: DcosApiSession) -> None:
"""Assert that Admin Router Nginx VTS metrics on master are present."""
@retrying.retry(
wait_fixed=STD_INTERVAL,
stop_max_delay=METRICS_WAITTIME,
retry_on_exception=lambda e: isinstance(e, AssertionError)
)
def check_adminrouter_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0].startswith('nginx_vts_') and sample[1].get('dcos_component_name') == 'Admin Router':
return
raise AssertionError('Expected Admin Router nginx_vts_* metrics not found')
check_adminrouter_metrics()
def test_metrics_master_exhibitor_status(dcos_api_session: DcosApiSession) -> None:
"""Assert that Exhibitor status metrics on master are present."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_exhibitor_metrics() -> None:
response = get_metrics_prom(dcos_api_session, dcos_api_session.masters[0])
expected_metrics = {'exhibitor_status_code', 'exhibitor_status_isleader'}
samples = []
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] in expected_metrics:
samples.append(sample)
reported_metrics = {sample[0] for sample in samples}
assert reported_metrics == expected_metrics, (
'Expected Exhibitor status metrics not found. '
'Expected: {} Reported: {}'.format(
expected_metrics, reported_metrics,
)
)
for sample in samples:
assert sample[1]['dcos_component_name'] == 'Exhibitor'
assert 'url' not in sample[1]
assert 'exhibitor_address' in sample[1]
check_exhibitor_metrics()
def _nginx_vts_measurement_basename(name: str) -> str:
"""
Extracts the base name of the metric reported by nginx vts filter module
and removes the metric suffix.
E.g.: nginx_server_status_request_bytes -> nginx_server_status
"""
return '_'.join(name.split('_')[:3])
def test_metrics_master_adminrouter_nginx_drop_requests_seconds(dcos_api_session: DcosApiSession) -> None:
"""
nginx_vts_*_request_seconds* metrics are not present.
"""
node = dcos_api_session.masters[0]
# Make request to a fine-grained metrics annotated upstream of
# Admin Router (IAM in this case).
dcos_api_session.get('/acs/api/v1/auth/jwks', host=node)
@retrying.retry(
wait_fixed=STD_INTERVAL,
stop_max_delay=METRICS_WAITTIME,
retry_on_exception=lambda e: isinstance(e, AssertionError)
)
def check_adminrouter_metrics() -> None:
vts_metrics_count = 0
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
match = re.match(r'^nginx_vts_.+_request_seconds.*$', sample[0])
assert match is None
# We assert the validity of the test here by confirming that
# VTS reported metrics have been scraped by telegraf.
if sample[0].startswith('nginx_vts_'):
vts_metrics_count += 1
assert vts_metrics_count > 0
check_adminrouter_metrics()
def test_metrics_agent_adminrouter_nginx_drop_requests_seconds(dcos_api_session: DcosApiSession) -> None:
"""
nginx_vts_*_request_seconds* metrics are not present.
"""
# Make request to Admin Router on every agent to ensure metrics.
state_response = dcos_api_session.get('/state', host=dcos_api_session.masters[0], port=5050)
assert state_response.status_code == 200
state = state_response.json()
for agent in state['slaves']:
agent_url = '/system/v1/agent/{}/dcos-metadata/dcos-version.json'.format(agent['id'])
response = dcos_api_session.get(agent_url)
assert response.status_code == 200
nodes = get_agents(dcos_api_session)
for node in nodes:
@retrying.retry(
wait_fixed=STD_INTERVAL,
stop_max_delay=METRICS_WAITTIME,
retry_on_exception=lambda e: isinstance(e, AssertionError)
)
def check_adminrouter_metrics() -> None:
vts_metrics_count = 0
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
match = re.match(r'^nginx_vts_.+_request_seconds.*$', sample[0])
assert match is None
# We assert the validity of the test here by confirming that
# VTS reported metrics have been scraped by telegraf.
if sample[0].startswith('nginx_vts_'):
vts_metrics_count += 1
assert vts_metrics_count > 0
check_adminrouter_metrics()
def test_metrics_master_adminrouter_nginx_vts_processor(dcos_api_session: DcosApiSession) -> None:
"""Assert that processed Admin Router metrics on master are present."""
node = dcos_api_session.masters[0]
# Make request to a fine-grained metrics annotated upstream of
# Admin Router (IAM in this case).
r = dcos_api_session.get('/acs/api/v1/auth/jwks', host=node)
assert r.status_code == 200
# Accessing /service/marathon/v2/queue via Admin Router will cause
# Telegraf to emit nginx_service_backend and nginx_service_status metrics.
r = dcos_api_session.get('/service/marathon/v2/queue', host=node)
assert r.status_code == 200
@retrying.retry(
wait_fixed=STD_INTERVAL,
stop_max_delay=METRICS_WAITTIME,
retry_on_exception=lambda e: isinstance(e, AssertionError)
)
def check_adminrouter_metrics() -> None:
measurements = set()
expect_dropped = set([
'nginx_vts_filter',
'nginx_vts_upstream',
'nginx_vts_server',
])
unexpected_samples = []
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0].startswith('nginx_') and sample[1].get('dcos_component_name') == 'Admin Router':
basename = _nginx_vts_measurement_basename(sample[0])
measurements.add(basename)
if basename in expect_dropped:
unexpected_samples.append(sample)
assert unexpected_samples == []
expected = set([
'nginx_server_status',
'nginx_upstream_status',
'nginx_upstream_backend',
'nginx_service_backend',
'nginx_service_status',
])
difference = expected - measurements
assert not difference
remainders = expect_dropped & measurements
assert not remainders
check_adminrouter_metrics()
def test_metrics_agents_adminrouter_nginx_vts(dcos_api_session: DcosApiSession) -> None:
"""Assert that Admin Router Nginx VTS metrics on agents are present."""
nodes = get_agents(dcos_api_session)
for node in nodes:
@retrying.retry(
wait_fixed=STD_INTERVAL,
stop_max_delay=METRICS_WAITTIME,
retry_on_exception=lambda e: isinstance(e, AssertionError)
)
def check_adminrouter_metrics() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if (
sample[0].startswith('nginx_vts_') and
sample[1].get('dcos_component_name') == 'Admin Router Agent'
):
return
raise AssertionError('Expected Admin Router nginx_vts_* metrics not found')
check_adminrouter_metrics()
def test_metrics_agent_adminrouter_nginx_vts_processor(dcos_api_session: DcosApiSession) -> None:
"""Assert that processed Admin Router metrics on agent are present."""
# Make request to Admin Router on every agent to ensure metrics.
state_response = dcos_api_session.get('/state', host=dcos_api_session.masters[0], port=5050)
assert state_response.status_code == 200
state = state_response.json()
for agent in state['slaves']:
agent_url = '/system/v1/agent/{}/dcos-metadata/dcos-version.json'.format(agent['id'])
response = dcos_api_session.get(agent_url)
assert response.status_code == 200
nodes = get_agents(dcos_api_session)
for node in nodes:
@retrying.retry(
wait_fixed=STD_INTERVAL,
stop_max_delay=METRICS_WAITTIME,
retry_on_exception=lambda e: isinstance(e, AssertionError)
)
def check_adminrouter_metrics() -> None:
measurements = set()
expect_dropped = set([
'nginx_vts_filter',
'nginx_vts_upstream',
'nginx_vts_server',
])
unexpected_samples = []
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0].startswith('nginx_') and sample[1].get('dcos_component_name') == 'Admin Router Agent':
basename = _nginx_vts_measurement_basename(sample[0])
measurements.add(basename)
if basename in expect_dropped:
unexpected_samples.append(sample)
assert unexpected_samples == []
expected = set([
'nginx_server_status',
])
difference = expected - measurements
assert not difference
remainders = expect_dropped & measurements
assert not remainders
check_adminrouter_metrics()
def test_metrics_diagnostics(dcos_api_session: DcosApiSession) -> None:
"""Assert that DC/OS Diagnostics metrics on master are present."""
nodes = get_master_and_agents(dcos_api_session)
for node in nodes:
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_diagnostics_metrics() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[1]['dcos_component_name'] == 'DC/OS Diagnostics':
return
raise Exception('Expected DC/OS Diagnostics metrics not found')
check_diagnostics_metrics()
def test_metrics_fluentbit(dcos_api_session: DcosApiSession) -> None:
"""Ensure that fluent bit metrics are present on masters and agents"""
nodes = get_master_and_agents(dcos_api_session)
for node in nodes:
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_fluentbit_metrics() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0].startswith('fluentbit_output_errors_total'):
assert sample[1]['dcos_component_name'] == 'DC/OS Fluent Bit'
return
raise Exception('Expected DC/OS Fluent Bit metrics not found')
check_fluentbit_metrics()
def check_statsd_app_metrics(dcos_api_session: DcosApiSession, marathon_app: Any, node: str, expected_metrics: Any
) -> None:
with dcos_api_session.marathon.deploy_and_cleanup(marathon_app, check_health=False, timeout=DEPLOY_TIMEOUT):
endpoints = dcos_api_session.marathon.get_app_service_endpoints(marathon_app['id'])
assert len(endpoints) == 1, 'The marathon app should have been deployed exactly once.'
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_statsd_metrics() -> None:
expected_copy = copy.deepcopy(expected_metrics)
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] in expected_copy:
val = expected_copy.pop(sample[0])
assert sample[2] == val
if len(expected_copy) == 0:
return
sys.stderr.write(
"%r\n%r\n" % (
expected_metrics,
expected_copy,
)
)
raise Exception('Expected statsd metrics not found')
check_statsd_metrics()
def test_metrics_agent_statsd(dcos_api_session: DcosApiSession) -> None:
"""Assert that statsd metrics on private agent are present."""
task_name = 'test-metrics-statsd-app'
metric_name_pfx = 'test_metrics_statsd_app'
marathon_app = {
'id': '/' + task_name,
'instances': 1,
'cpus': 0.1,
'mem': 128,
'env': {
'STATIC_STATSD_UDP_PORT': '61825',
'STATIC_STATSD_UDP_HOST': 'localhost'
},
'cmd': '\n'.join([
'echo "Sending metrics to $STATIC_STATSD_UDP_HOST:$STATIC_STATSD_UDP_PORT"',
'echo "Sending gauge"',
'echo "{}.gauge:100|g" | nc -w 1 -u $STATIC_STATSD_UDP_HOST $STATIC_STATSD_UDP_PORT'.format(
metric_name_pfx),
'echo "Sending counts"',
'echo "{}.count:1|c" | nc -w 1 -u $STATIC_STATSD_UDP_HOST $STATIC_STATSD_UDP_PORT'.format(
metric_name_pfx),
'echo "Sending timings"',
'echo "{}.timing:1|ms" | nc -w 1 -u $STATIC_STATSD_UDP_HOST $STATIC_STATSD_UDP_PORT'.format(
metric_name_pfx),
'echo "Sending histograms"',
'echo "{}.histogram:1|h" | nc -w 1 -u $STATIC_STATSD_UDP_HOST $STATIC_STATSD_UDP_PORT'.format(
metric_name_pfx),
'echo "Done. Sleeping forever."',
'while true; do',
' sleep 1000',
'done',
]),
'container': {
'type': 'MESOS',
# pin image to working version - https://jira.mesosphere.com/browse/DCOS-62478
'docker': {'image': 'library/alpine:3.10.3'}
},
'networks': [{'mode': 'host'}],
}
expected_metrics = {
metric_name_pfx + '_gauge': 100.0,
# NOTE: prometheus_client appends _total to counter-type metrics if they don't already have the suffix
# ref: https://github.com/prometheus/client_python/blob/master/prometheus_client/parser.py#L169
# (the raw prometheus output here omits _total)
metric_name_pfx + '_count_total': 1.0,
metric_name_pfx + '_timing_count': 1.0,
metric_name_pfx + '_histogram_count': 1.0,
}
if dcos_api_session.slaves:
marathon_app['constraints'] = [['hostname', 'LIKE', dcos_api_session.slaves[0]]]
check_statsd_app_metrics(dcos_api_session, marathon_app, dcos_api_session.slaves[0], expected_metrics)
if dcos_api_session.public_slaves:
marathon_app['acceptedResourceRoles'] = ["slave_public"]
marathon_app['constraints'] = [['hostname', 'LIKE', dcos_api_session.public_slaves[0]]]
check_statsd_app_metrics(dcos_api_session, marathon_app, dcos_api_session.public_slaves[0], expected_metrics)
@contextlib.contextmanager
def deploy_and_cleanup_dcos_package(dcos_api_session: DcosApiSession, package_name: str, package_version: str,
framework_name: str) -> Generator:
"""Deploys dcos package and waits for package teardown once the context is left"""
app_id = dcos_api_session.cosmos.install_package(package_name, package_version=package_version).json()['appId']
dcos_api_session.marathon.wait_for_deployments_complete()
try:
yield
finally:
dcos_api_session.cosmos.uninstall_package(package_name, app_id=app_id)
# Retry for 15 minutes for teardown completion
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=STD_WAITTIME)
def wait_for_package_teardown() -> None:
state_response = dcos_api_session.get('/state', host=dcos_api_session.masters[0], port=5050)
assert state_response.status_code == 200
state = state_response.json()
# Rarely, the framework will continue to show up in 'frameworks' instead of
# 'completed_frameworks', even after teardown. To avoid this causing a test
# failure, if the framework continues to show up in 'frameworks', we instead
# check if there are any running tasks.
frameworks = {f['name']: f for f in state['frameworks']}
assert framework_name not in frameworks or len(
frameworks[framework_name]['tasks']) == 0, 'Framework {} still running'.format(framework_name)
wait_for_package_teardown()
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_task_hostname(dcos_api_session: DcosApiSession, framework_name: str, task_name: str) -> Any:
# helper func that gets a framework's task's hostname
mesos_id = node = ''
state_response = dcos_api_session.get('/state', host=dcos_api_session.masters[0], port=5050)
assert state_response.status_code == 200
state = state_response.json()
for framework in state['frameworks']:
if framework['name'] == framework_name:
for task in framework['tasks']:
if task['name'] == task_name:
mesos_id = task['slave_id']
break
break
assert mesos_id is not None
for agent in state['slaves']:
if agent['id'] == mesos_id:
node = agent['hostname']
break
return node
def test_task_metrics_metadata(dcos_api_session: DcosApiSession) -> None:
"""Test that task metrics have expected metadata/labels"""
expanded_config = get_expanded_config()
if expanded_config.get('security') == 'strict':
pytest.skip('MoM disabled for strict mode')
with deploy_and_cleanup_dcos_package(dcos_api_session, 'marathon', '1.6.535', 'marathon-user'):
node = get_task_hostname(dcos_api_session, 'marathon', 'marathon-user')
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_metrics_metadata() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[1].get('task_name') == 'marathon-user':
assert sample[1]['service_name'] == 'marathon'
# check for whitelisted label
assert sample[1]['DCOS_SERVICE_NAME'] == 'marathon-user'
return
raise Exception('Expected marathon task metrics not found')
check_metrics_metadata()
def test_executor_metrics_metadata(dcos_api_session: DcosApiSession) -> None:
"""Test that executor metrics have expected metadata/labels"""
expanded_config = get_expanded_config()
if expanded_config.get('security') == 'strict':
pytest.skip('Framework disabled for strict mode')
with deploy_and_cleanup_dcos_package(dcos_api_session, 'hello-world', '2.2.0-0.42.2', 'hello-world'):
node = get_task_hostname(dcos_api_session, 'marathon', 'hello-world')
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def check_executor_metrics_metadata() -> None:
response = get_metrics_prom(dcos_api_session, node)
for family in text_string_to_metric_families(response.text):
for sample in family.samples:
if sample[0] == 'cpus_nr_periods' and sample[1].get('service_name') == 'hello-world':
assert sample[1]['task_name'] == ''
# hello-world executors can be named "hello" or "world"
assert (sample[1]['executor_name'] == 'hello' or sample[1]['executor_name'] == 'world')
return
raise Exception('Expected hello-world executor metrics not found')
check_executor_metrics_metadata()
def test_metrics_node(dcos_api_session: DcosApiSession) -> None:
"""Test that the '/system/v1/metrics/v0/node' endpoint returns the expected
metrics and metric metadata.
"""
def expected_datapoint_response(response: dict) -> bool:
"""Enure that the "node" endpoint returns a "datapoints" dict.
"""
assert 'datapoints' in response, '"datapoints" dictionary not found'
'in response, got {}'.format(response)
for dp in response['datapoints']:
assert 'name' in dp, '"name" parameter should not be empty, got {}'.format(dp)
if 'filesystem' in dp['name']:
assert 'tags' in dp, '"tags" key not found, got {}'.format(dp)
assert 'path' in dp['tags'], ('"path" tag not found for filesystem metric, '
'got {}'.format(dp))
assert len(dp['tags']['path']) > 0, ('"path" tag should not be empty for '
'filesystem metrics, got {}'.format(dp))
return True
def expected_dimension_response(response: dict) -> bool:
"""Ensure that the "node" endpoint returns a dimensions dict that
contains a non-empty string for cluster_id.
"""
assert 'dimensions' in response, '"dimensions" object not found in'
'response, got {}'.format(response)
assert 'cluster_id' in response['dimensions'], '"cluster_id" key not'
'found in dimensions, got {}'.format(response)
assert response['dimensions']['cluster_id'] != "", 'expected cluster to contain a value'
assert response['dimensions']['mesos_id'] == '', 'expected dimensions to include empty "mesos_id"'
return True
# Retry for 5 minutes for for the node metrics content to appear.
@retrying.retry(stop_max_delay=METRICS_WAITTIME)
def wait_for_node_response(node: Any) -> Any:
response = dcos_api_session.metrics.get('/node', node=node)
assert response.status_code == 200
return response
nodes = get_master_and_agents(dcos_api_session)
for node in nodes:
response = wait_for_node_response(node)
assert response.status_code == 200, 'Status code: {}, Content {}'.format(
response.status_code, response.content)
assert expected_datapoint_response(response.json())
assert expected_dimension_response(response.json())
def get_master_and_agents(dcos_api_session: DcosApiSession) -> list:
nodes = [dcos_api_session.masters[0]]
nodes.extend(get_agents(dcos_api_session))
return nodes
def get_agents(dcos_api_session: DcosApiSession) -> list:
nodes = []
if dcos_api_session.slaves:
nodes.append(dcos_api_session.slaves[0])
if dcos_api_session.public_slaves:
nodes.append(dcos_api_session.public_slaves[0])
return nodes
def test_metrics_containers(dcos_api_session: DcosApiSession) -> None:
"""Assert that a Marathon app's container and app metrics can be retrieved."""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def test_containers(app_endpoints: list) -> None:
for agent in app_endpoints:
container_metrics, app_metrics = get_metrics_for_task(dcos_api_session, agent.host, 'statsd-emitter')
# Check container metrics.
# Check tags on each datapoint.
cid_registry = set()
for dp in container_metrics['datapoints']:
# Verify expected tags are present.
assert 'tags' in dp, 'got {}'.format(dp)
expected_tag_names = {
'container_id',
}
if 'executor_name' in dp['tags']:
# if present we want to make sure it has a valid value.
expected_tag_names.add('executor_name')
if dp['name'].startswith('blkio.'):
# blkio stats have 'blkio_device' tags.
expected_tag_names.add('blkio_device')
check_tags(dp['tags'], expected_tag_names, FAULT_DOMAIN_TAGS)
# Ensure all container ID's in the container/<id> endpoint are
# the same.
cid_registry.add(dp['tags']['container_id'])
assert len(cid_registry) == 1, 'Not all container IDs in the metrics response are equal'
# Check app metrics.
# We expect three datapoints, could be in any order
uptime_dp = None
for dp in app_metrics['datapoints']:
if dp['name'] == 'statsd_tester.time.uptime':
uptime_dp = dp
break
# If this metric is missing, statsd-emitter's metrics were not received
assert uptime_dp is not None, 'got {}'.format(app_metrics)
datapoint_keys = ['name', 'value', 'unit', 'timestamp', 'tags']
for k in datapoint_keys:
assert k in uptime_dp, 'got {}'.format(uptime_dp)
expected_tag_names = {
'dcos_cluster_id',
'test_tag_key',
'dcos_cluster_name',
'host'
}
# If fault domain is enabled, ensure that fault domain tags are present
expanded_config = get_expanded_config()
if expanded_config.get('fault_domain_enabled') == 'true':
expected_tag_names |= FAULT_DOMAIN_TAGS
check_tags(uptime_dp['tags'], expected_tag_names)
assert uptime_dp['tags']['test_tag_key'] == 'test_tag_value', 'got {}'.format(uptime_dp)
assert uptime_dp['value'] > 0
marathon_config = {
"id": "/statsd-emitter",
"cmd": "./statsd-emitter -debug",
"fetch": [
{
"uri": "https://downloads.mesosphere.com/dcos-metrics/1.11.0/statsd-emitter",
"executable": True
}
],
"cpus": 0.5,
"mem": 128.0,
"instances": 1
}
with dcos_api_session.marathon.deploy_and_cleanup(marathon_config, check_health=False, timeout=DEPLOY_TIMEOUT):
endpoints = dcos_api_session.marathon.get_app_service_endpoints(marathon_config['id'])
assert len(endpoints) == 1, 'The marathon app should have been deployed exactly once.'
test_containers(endpoints)
def test_statsd_metrics_containers_app(dcos_api_session: DcosApiSession) -> None:
"""Assert that statsd app metrics appear in the v0 metrics API."""
task_name = 'test-statsd-metrics-containers-app'
metric_name_pfx = 'test_statsd_metrics_containers_app'
marathon_app = {
'id': '/' + task_name,
'instances': 1,
'cpus': 0.1,
'mem': 128,
'cmd': '\n'.join([
'echo "Sending metrics to $STATSD_UDP_HOST:$STATSD_UDP_PORT"',
'echo "Sending gauge"',
'echo "{}.gauge:100|g" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "Sending counts"',
'echo "{}.count:1|c" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "{}.count:1|c" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "Sending timings"',
'echo "{}.timing:1|ms" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "{}.timing:2|ms" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "{}.timing:3|ms" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "Sending histograms"',
'echo "{}.histogram:1|h" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "{}.histogram:2|h" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "{}.histogram:3|h" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "{}.histogram:4|h" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name_pfx),
'echo "Done. Sleeping forever."',
'while true; do',
' sleep 1000',
'done',
]),
'container': {
'type': 'MESOS',
'docker': {'image': 'library/alpine'}
},
'networks': [{'mode': 'host'}],
}
expected_metrics = [
# metric_name, metric_value
('.'.join([metric_name_pfx, 'gauge']), 100),
('.'.join([metric_name_pfx, 'count']), 2),
('.'.join([metric_name_pfx, 'timing', 'count']), 3),
('.'.join([metric_name_pfx, 'histogram', 'count']), 4),
]
deploy_marathon_app_and_check_metrics(dcos_api_session, expected_metrics, marathon_app, task_name)
def deploy_marathon_app_and_check_metrics(dcos_api_session: DcosApiSession, expected_metrics: list, marathon_app: Any,
task_name: str) -> None:
with dcos_api_session.marathon.deploy_and_cleanup(marathon_app, check_health=False, timeout=DEPLOY_TIMEOUT):
endpoints = dcos_api_session.marathon.get_app_service_endpoints(marathon_app['id'])
assert len(endpoints) == 1, 'The marathon app should have been deployed exactly once.'
node = endpoints[0].host
for metric_name, metric_value in expected_metrics:
assert_app_metric_value_for_task(dcos_api_session, node, task_name, metric_name, metric_value)
def test_prom_metrics_containers_app_host(dcos_api_session: DcosApiSession) -> None:
"""Assert that prometheus app metrics appear in the v0 metrics API."""
task_name = 'test-prom-metrics-containers-app-host'
metric_name_pfx = 'test_prom_metrics_containers_app_host'
marathon_app = {
'id': '/' + task_name,
'instances': 1,
'cpus': 0.1,
'mem': 128,
'cmd': '\n'.join([
'echo "Creating metrics file..."',
'touch metrics',
'echo "# TYPE {}_gauge gauge" >> metrics'.format(metric_name_pfx),
'echo "{}_gauge 100" >> metrics'.format(metric_name_pfx),
'echo "# TYPE {}_count counter" >> metrics'.format(metric_name_pfx),
'echo "{}_count 2" >> metrics'.format(metric_name_pfx),
'echo "# TYPE {}_histogram histogram" >> metrics'.format(metric_name_pfx),
'echo "{}_histogram_bucket{{le=\\"+Inf\\"}} 4" >> metrics'.format(metric_name_pfx),
'echo "{}_histogram_sum 4" >> metrics'.format(metric_name_pfx),
'echo "{}_histogram_seconds_count 4" >> metrics'.format(metric_name_pfx),
'echo "Serving prometheus metrics on http://localhost:$PORT0"',
'python3 -m http.server $PORT0',
]),
'container': {
'type': 'DOCKER',
'docker': {'image': 'library/python:3'}
},
'portDefinitions': [{
'protocol': 'tcp',
'port': 0,
'labels': {'DCOS_METRICS_FORMAT': 'prometheus'},
}],
}
logging.debug('Starting marathon app with config: %s', marathon_app)
expected_metrics = [
# metric_name, metric_value
('_'.join([metric_name_pfx, 'gauge.gauge']), 100),
('_'.join([metric_name_pfx, 'count.counter']), 2),
('_'.join([metric_name_pfx, 'histogram_seconds', 'count']), 4),
]
deploy_marathon_app_and_check_metrics(dcos_api_session, expected_metrics, marathon_app, task_name)
def test_prom_metrics_containers_app_bridge(dcos_api_session: DcosApiSession) -> None:
"""Assert that prometheus app metrics appear in the v0 metrics API."""
task_name = 'test-prom-metrics-containers-app-bridge'
metric_name_pfx = 'test_prom_metrics_containers_app_bridge'
marathon_app = {
'id': '/' + task_name,
'instances': 1,
'cpus': 0.1,
'mem': 128,
'cmd': '\n'.join([
'echo "Creating metrics file..."',
'touch metrics',
'echo "# TYPE {}_gauge gauge" >> metrics'.format(metric_name_pfx),
'echo "{}_gauge 100" >> metrics'.format(metric_name_pfx),
'echo "# TYPE {}_count counter" >> metrics'.format(metric_name_pfx),
'echo "{}_count 2" >> metrics'.format(metric_name_pfx),
'echo "# TYPE {}_histogram histogram" >> metrics'.format(metric_name_pfx),
'echo "{}_histogram_bucket{{le=\\"+Inf\\"}} 4" >> metrics'.format(metric_name_pfx),
'echo "{}_histogram_sum 4" >> metrics'.format(metric_name_pfx),
'echo "{}_histogram_seconds_count 4" >> metrics'.format(metric_name_pfx),
'echo "Serving prometheus metrics on http://localhost:8000"',
'python3 -m http.server 8000',
]),
'networks': [{'mode': 'container/bridge'}],
'container': {
'type': 'MESOS',
'docker': {'image': 'library/python:3'},
'portMappings': [
{
'containerPort': 8000,
'hostPort': 0,
'protocol': 'tcp',
'labels': {'DCOS_METRICS_FORMAT': 'prometheus'},
}
]
},
}
logging.debug('Starting marathon app with config: %s', marathon_app)
expected_metrics = [
# metric_name, metric_value
('_'.join([metric_name_pfx, 'gauge.gauge']), 100),
('_'.join([metric_name_pfx, 'count.counter']), 2),
('_'.join([metric_name_pfx, 'histogram_seconds', 'count']), 4),
]
deploy_marathon_app_and_check_metrics(dcos_api_session, expected_metrics, marathon_app, task_name)
def test_task_prom_metrics_not_filtered(dcos_api_session: DcosApiSession) -> None:
"""Assert that prometheus app metrics aren't filtered according to adminrouter config.
This is a regression test protecting a fix for a bug that mistakenly applied filter criteria intended for
adminrouter metrics to Prometheus-formatted metrics gathered from tasks.
"""
task_name = 'test-task-prom-metrics-not-filtered'
metric_name_pfx = 'test_task_prom_metrics_not_filtered'
marathon_app = {
'id': '/' + task_name,
'instances': 1,
'cpus': 0.1,
'mem': 128,
'cmd': '\n'.join([
# Serve metrics that would be dropped by Telegraf were they collected from the adminrouter. These are task
# metrics, so we expect Telegraf to gather and output them.
'echo "Creating metrics file..."',
# Adminrouter metrics with direction="[1-5]xx" tags get dropped.
'echo "# TYPE {}_gauge gauge" >> metrics'.format(metric_name_pfx),
'echo "{}_gauge{{direction=\\"1xx\\"}} 100" >> metrics'.format(metric_name_pfx),
# Adminrouter metrics with these names get dropped.
'echo "# TYPE nginx_vts_filter_cache_foo gauge" >> metrics',
'echo "nginx_vts_filter_cache_foo 100" >> metrics',
'echo "# TYPE nginx_vts_server_foo gauge" >> metrics',
'echo "nginx_vts_server_foo 100" >> metrics',
'echo "# TYPE nginx_vts_upstream_foo gauge" >> metrics',
'echo "nginx_vts_upstream_foo 100" >> metrics',
'echo "# TYPE nginx_vts_foo_request_seconds gauge" >> metrics',
'echo "nginx_vts_foo_request_seconds 100" >> metrics',
'echo "Serving prometheus metrics on http://localhost:8000"',
'python3 -m http.server 8000',
]),
'networks': [{'mode': 'container/bridge'}],
'container': {
'type': 'MESOS',
'docker': {'image': 'library/python:3'},
'portMappings': [
{
'containerPort': 8000,
'hostPort': 0,
'protocol': 'tcp',
'labels': {'DCOS_METRICS_FORMAT': 'prometheus'},
}
]
},
}
logging.debug('Starting marathon app with config: %s', marathon_app)
expected_metrics = [
# metric_name, metric_value
('_'.join([metric_name_pfx, 'gauge.gauge']), 100),
('nginx_vts_filter_cache_foo.gauge', 100),
('nginx_vts_server_foo.gauge', 100),
('nginx_vts_upstream_foo.gauge', 100),
('nginx_vts_foo_request_seconds.gauge', 100),
]
deploy_marathon_app_and_check_metrics(dcos_api_session, expected_metrics, marathon_app, task_name)
def test_metrics_containers_nan(dcos_api_session: DcosApiSession) -> None:
"""Assert that the metrics API can handle app metric gauges with NaN values."""
task_name = 'test-metrics-containers-nan'
metric_name = 'test_metrics_containers_nan'
marathon_app = {
'id': '/' + task_name,
'instances': 1,
'cpus': 0.1,
'mem': 128,
'cmd': '\n'.join([
'echo "Sending gauge with NaN value to $STATSD_UDP_HOST:$STATSD_UDP_PORT"',
'echo "{}:NaN|g" | nc -w 1 -u $STATSD_UDP_HOST $STATSD_UDP_PORT'.format(metric_name),
'echo "Done. Sleeping forever."',
'while true; do',
' sleep 1000',
'done',
]),
'container': {
'type': 'MESOS',
'docker': {'image': 'library/alpine'}
},
'networks': [{'mode': 'host'}],
}
with dcos_api_session.marathon.deploy_and_cleanup(marathon_app, check_health=False, timeout=DEPLOY_TIMEOUT):
endpoints = dcos_api_session.marathon.get_app_service_endpoints(marathon_app['id'])
assert len(endpoints) == 1, 'The marathon app should have been deployed exactly once.'
node = endpoints[0].host
# NaN should be converted to empty string.
metric_value = get_app_metric_for_task(dcos_api_session, node, task_name, metric_name)['value']
assert metric_value == '', 'unexpected metric value: {}'.format(metric_value)
@retrying.retry(wait_fixed=METRICS_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def assert_app_metric_value_for_task(dcos_api_session: DcosApiSession, node: str, task_name: str, metric_name: str,
metric_value: Any) -> None:
"""Assert the value of app metric metric_name for container task_name is metric_value.
Retries on error, non-200 status, missing container metrics, missing app
metric, or unexpected app metric value for up to 5 minutes.
"""
assert get_app_metric_for_task(dcos_api_session, node, task_name, metric_name)['value'] == metric_value
@retrying.retry(wait_fixed=METRICS_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_app_metric_for_task(dcos_api_session: DcosApiSession, node: str, task_name: str, metric_name: str) -> Any:
"""Return the app metric metric_name for container task_name.
Retries on error, non-200 status, or missing container metrics, or missing
app metric for up to 5 minutes.
"""
_, app_metrics = get_metrics_for_task(dcos_api_session, node, task_name)
assert app_metrics is not None, "missing metrics for task {}".format(task_name)
dps = [dp for dp in app_metrics['datapoints'] if dp['name'] == metric_name]
assert len(dps) == 1, 'expected 1 datapoint for metric {}, got {}'.format(metric_name, len(dps))
return dps[0]
# Retry for 5 minutes since the collector collects state
# every 2 minutes to propogate containers to the API
@retrying.retry(wait_fixed=METRICS_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_container_ids(dcos_api_session: DcosApiSession, node: str) -> Any:
"""Return container IDs reported by the metrics API on node.
Retries on error, non-200 status, or empty response for up to 5 minutes.
"""
response = dcos_api_session.metrics.get('/containers', node=node)
assert response.status_code == 200
container_ids = response.json()
assert len(container_ids) > 0, 'must have at least 1 container'
return container_ids
@retrying.retry(wait_fixed=METRICS_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_container_metrics(dcos_api_session: DcosApiSession, node: str, container_id: str) -> Any:
"""Return container_id's metrics from the metrics API on node.
Returns None on 204.
Retries on error, non-200 status, or missing response fields for up
to 5 minutes.
"""
response = dcos_api_session.metrics.get('/containers/' + container_id, node=node)
if response.status_code == 204:
return None
assert response.status_code == 200
container_metrics = response.json()
assert 'datapoints' in container_metrics, (
'container metrics must include datapoints. Got: {}'.format(container_metrics)
)
assert 'dimensions' in container_metrics, (
'container metrics must include dimensions. Got: {}'.format(container_metrics)
)
return container_metrics
@retrying.retry(wait_fixed=METRICS_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_app_metrics(dcos_api_session: DcosApiSession, node: str, container_id: str) -> Any:
"""Return app metrics for container_id from the metrics API on node.
Returns None on 204.
Retries on error or non-200 status for up to 5 minutes.
"""
resp = dcos_api_session.metrics.get('/containers/' + container_id + '/app', node=node)
if resp.status_code == 204:
return None
assert resp.status_code == 200, 'got {}'.format(resp.status_code)
app_metrics = resp.json()
assert 'datapoints' in app_metrics, 'got {}'.format(app_metrics)
assert 'dimensions' in app_metrics, 'got {}'.format(app_metrics)
return app_metrics
@retrying.retry(wait_fixed=METRICS_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_metrics_for_task(dcos_api_session: DcosApiSession, node: str, task_name: str) -> Any:
"""Return (container_metrics, app_metrics) for task_name on node.
Retries on error, non-200 responses, or missing metrics for task_name for
up to 5 minutes.
"""
task_names_seen = [] # Used for exception message if task_name can't be found.
for cid in get_container_ids(dcos_api_session, node):
container_metrics = get_container_metrics(dcos_api_session, node, cid)
if container_metrics is None:
task_names_seen.append((cid, None))
continue
if container_metrics['dimensions'].get('task_name') != task_name:
task_names_seen.append((cid, container_metrics['dimensions'].get('task_name')))
continue
app_metrics = get_app_metrics(dcos_api_session, node, cid)
return container_metrics, app_metrics
raise Exception(
'No metrics found for task {} on host {}. Task names seen: {}'.format(task_name, node, task_names_seen)
)
def test_standalone_container_metrics(dcos_api_session: DcosApiSession) -> None:
"""
An operator should be able to launch a standalone container using the
LAUNCH_CONTAINER call of the agent operator API. Additionally, if the
process running within the standalone container emits statsd metrics, they
should be accessible via the DC/OS metrics API.
"""
expanded_config = get_expanded_config()
if expanded_config.get('security') == 'strict':
reason = (
'Only resource providers are authorized to launch standalone '
'containers in strict mode. See DCOS-42325.'
)
pytest.skip(reason)
# Fetch the mesos master state to get an agent ID
master_ip = dcos_api_session.masters[0]
r = dcos_api_session.get('/state', host=master_ip, port=5050)
assert r.status_code == 200
state = r.json()
# Find hostname and ID of an agent
assert len(state['slaves']) > 0, 'No agents found in master state'
agent_hostname = state['slaves'][0]['hostname']
agent_id = state['slaves'][0]['id']
logging.debug('Selected agent %s at %s', agent_id, agent_hostname)
def _post_agent(json: dict) -> Any:
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
r = dcos_api_session.post(
'/api/v1',
host=agent_hostname,
port=5051,
headers=headers,
json=json,
data=None,
stream=False)
return r
# Prepare container ID data
container_id = {'value': 'test-standalone-%s' % str(uuid.uuid4())}
# Launch standalone container. The command for this container executes a
# binary installed with DC/OS which will emit statsd metrics.
launch_data = {
'type': 'LAUNCH_CONTAINER',
'launch_container': {
'command': {
'value': './statsd-emitter',
'uris': [{
'value': 'https://downloads.mesosphere.com/dcos-metrics/1.11.0/statsd-emitter',
'executable': True
}]
},
'container_id': container_id,
'resources': [
{
'name': 'cpus',
'scalar': {'value': 0.2},
'type': 'SCALAR'
},
{
'name': 'mem',
'scalar': {'value': 64.0},
'type': 'SCALAR'
},
{
'name': 'disk',
'scalar': {'value': 1024.0},
'type': 'SCALAR'
}
],
'container': {
'type': 'MESOS'
}
}
}
# There is a short delay between the container starting and metrics becoming
# available via the metrics service. Because of this, we wait up to 5
# minutes for these metrics to appear before throwing an exception.
def _should_retry_metrics_fetch(response: Any) -> Any:
return response.status_code == 204
@retrying.retry(wait_fixed=METRICS_INTERVAL,
stop_max_delay=METRICS_WAITTIME,
retry_on_result=_should_retry_metrics_fetch,
retry_on_exception=lambda x: False)
def _get_metrics() -> Any:
master_response = dcos_api_session.get(
'/system/v1/agent/%s/metrics/v0/containers/%s/app' % (agent_id, container_id['value']),
host=master_ip)
return master_response
r = _post_agent(launch_data)
assert r.status_code == 200, 'Received unexpected status code when launching standalone container'
try:
logging.debug('Successfully created standalone container with container ID %s', container_id['value'])
# Verify that the standalone container's metrics are being collected
r = _get_metrics()
assert r.status_code == 200, 'Received unexpected status code when fetching standalone container metrics'
metrics_response = r.json()
assert 'datapoints' in metrics_response, 'got {}'.format(metrics_response)
uptime_dp = None
for dp in metrics_response['datapoints']:
if dp['name'] == 'statsd_tester.time.uptime':
uptime_dp = dp
break
# If this metric is missing, statsd-emitter's metrics were not received
assert uptime_dp is not None, 'got {}'.format(metrics_response)
datapoint_keys = ['name', 'value', 'unit', 'timestamp', 'tags']
for k in datapoint_keys:
assert k in uptime_dp, 'got {}'.format(uptime_dp)
expected_tag_names = {
'dcos_cluster_id',
'test_tag_key',
'dcos_cluster_name',
'host'
}
check_tags(uptime_dp['tags'], expected_tag_names, FAULT_DOMAIN_TAGS)
assert uptime_dp['tags']['test_tag_key'] == 'test_tag_value', 'got {}'.format(uptime_dp)
assert uptime_dp['value'] > 0
assert 'dimensions' in metrics_response, 'got {}'.format(metrics_response)
assert metrics_response['dimensions']['container_id'] == container_id['value']
finally:
# Clean up the standalone container
kill_data = {
'type': 'KILL_CONTAINER',
'kill_container': {
'container_id': container_id
}
}
_post_agent(kill_data)
def test_pod_application_metrics(dcos_api_session: DcosApiSession) -> None:
"""Launch a pod, wait for its containers to be added to the metrics service,
and then verify that:
1) Container statistics metrics are provided for the executor container
2) Application metrics are exposed for the task container
"""
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def test_application_metrics(agent_ip: str, agent_id: str, task_name: str, num_containers: int) -> Any:
# Get expected 2 container ids from mesos state endpoint
# (one container + its parent container)
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def get_container_ids_from_state(dcos_api_session: DcosApiSession, num_containers: int) -> set:
state_response = dcos_api_session.get('/state', host=dcos_api_session.masters[0], port=5050)
assert state_response.status_code == 200
state = state_response.json()
cids = set()
for framework in state['frameworks']:
if framework['name'] == 'marathon':
for task in framework['tasks']:
if task['name'] == 'statsd-emitter-task':
container = task['statuses'][0]['container_status']['container_id']
cids.add(container['value'])
if 'parent' in container:
cids.add(container['parent']['value'])
break
break
assert len(cids) == num_containers, 'Test should create {} containers'.format(num_containers)
return cids
container_ids = get_container_ids_from_state(dcos_api_session, num_containers)
# Retry for two and a half minutes since the collector collects
# state every 2 minutes to propagate containers to the API
@retrying.retry(wait_fixed=STD_INTERVAL, stop_max_delay=METRICS_WAITTIME)
def wait_for_container_metrics_propagation(container_ids: set) -> None:
response = dcos_api_session.metrics.get('/containers', node=agent_ip)
assert response.status_code == 200
assert container_ids.issubset(
response.json()), "Containers {} should have been propagated".format(container_ids)
wait_for_container_metrics_propagation(container_ids)
get_containers = {
"type": "GET_CONTAINERS",
"get_containers": {
"show_nested": True,
"show_standalone": True
}
}
r = dcos_api_session.post('/agent/{}/api/v1'.format(agent_id), json=get_containers)
r.raise_for_status()
mesos_agent_containers = r.json()['get_containers']['containers']
mesos_agent_cids = [container['container_id']['value'] for container in mesos_agent_containers]
assert container_ids.issubset(mesos_agent_cids), "Missing expected containers {}".format(container_ids)
def is_nested_container(container: dict) -> Any:
"""Helper to check whether or not a container returned in the
GET_CONTAINERS response is a nested container.
"""
return 'parent' in container['container_status']['container_id']
for container in mesos_agent_containers:
container_id = container['container_id']['value']
# Test that /containers/<id> responds with expected data.
container_id_path = '/containers/{}'.format(container_id)
if (is_nested_container(container)):
# Retry for 5 minutes for each nested container to appear.
# Since nested containers do not report resource statistics, we
# expect the response code to be 204.
@retrying.retry(stop_max_delay=METRICS_WAITTIME)
def wait_for_container_response() -> Any:
response = dcos_api_session.metrics.get(container_id_path, node=agent_ip)
assert response.status_code == 204
return response
# For the nested container, we do not expect any container-level
# resource statistics, so this response should be empty.
assert not wait_for_container_response().json()
# Test that expected application metrics are present.
app_response = dcos_api_session.metrics.get('/containers/{}/app'.format(container_id), node=agent_ip)
assert app_response.status_code == 200, 'got {}'.format(app_response.status_code)
# Ensure all /container/<id>/app data is correct
assert 'datapoints' in app_response.json(), 'got {}'.format(app_response.json())
# We expect three datapoints, could be in any order
uptime_dp = None
for dp in app_response.json()['datapoints']:
if dp['name'] == 'statsd_tester.time.uptime':
uptime_dp = dp
break
# If this metric is missing, statsd-emitter's metrics were not received
assert uptime_dp is not None, 'got {}'.format(app_response.json())
datapoint_keys = ['name', 'value', 'unit', 'timestamp', 'tags']
for k in datapoint_keys:
assert k in uptime_dp, 'got {}'.format(uptime_dp)
expected_tag_names = {
'dcos_cluster_id',
'test_tag_key',
'dcos_cluster_name',
'host'
}
check_tags(uptime_dp['tags'], expected_tag_names, FAULT_DOMAIN_TAGS)
assert uptime_dp['tags']['test_tag_key'] == 'test_tag_value', 'got {}'.format(uptime_dp)
assert uptime_dp['value'] > 0
assert 'dimensions' in app_response.json(), 'got {}'.format(app_response.json())
assert 'task_name' in app_response.json()['dimensions'], 'got {}'.format(
app_response.json()['dimensions'])
# Look for the specified task name.
assert task_name.strip('/') == app_response.json()['dimensions']['task_name'],\
'Nested container was not tagged with the correct task name'
else:
# Retry for 5 minutes for each parent container to present its
# content.
@retrying.retry(stop_max_delay=METRICS_WAITTIME)
def wait_for_container_response() -> Any:
response = dcos_api_session.metrics.get(container_id_path, node=agent_ip)
assert response.status_code == 200
return response
container_response = wait_for_container_response()
assert 'datapoints' in container_response.json(), 'got {}'.format(container_response.json())
cid_registry = set()
for dp in container_response.json()['datapoints']:
# Verify expected tags are present.
assert 'tags' in dp, 'got {}'.format(dp)
expected_tag_names = {
'container_id',
}
if dp['name'].startswith('blkio.'):
# blkio stats have 'blkio_device' tags.
expected_tag_names.add('blkio_device')
check_tags(dp['tags'], expected_tag_names, FAULT_DOMAIN_TAGS)
# Ensure all container IDs in the response from the
# containers/<id> endpoint are the same.
cid_registry.add(dp['tags']['container_id'])
assert len(cid_registry) == 1, 'Not all container IDs in the metrics response are equal'
assert 'dimensions' in container_response.json(), 'got {}'.format(container_response.json())
# The executor container shouldn't expose application metrics.
app_response = dcos_api_session.metrics.get('/containers/{}/app'.format(container_id), node=agent_ip)
assert app_response.status_code == 204, 'got {}'.format(app_response.status_code)
return True
marathon_pod_config = {
"id": "/statsd-emitter-task-group",
"containers": [{
"name": "statsd-emitter-task",
"resources": {
"cpus": 0.5,
"mem": 128.0,
"disk": 1024.0
},
"image": {
"kind": "DOCKER",
"id": "alpine"
},
"exec": {
"command": {
"shell": "./statsd-emitter"
}
},
"artifacts": [{
"uri": "https://downloads.mesosphere.com/dcos-metrics/1.11.0/statsd-emitter",
"executable": True
}],
}],
"scheduling": {
"instances": 1
}
}
with dcos_api_session.marathon.deploy_pod_and_cleanup(marathon_pod_config):
r = dcos_api_session.marathon.get('/v2/pods/{}::status'.format(marathon_pod_config['id']))
r.raise_for_status()
data = r.json()
assert len(data['instances']) == 1, 'The marathon pod should have been deployed exactly once.'
test_application_metrics(
data['instances'][0]['agentHostname'],
data['instances'][0]['agentId'],
marathon_pod_config['containers'][0]['name'], 2) # type: ignore
|
e6621c2bee69f88960a4a3f33a4d0ed2ce398d5a
|
2d3a0b9caaff0ec56de90ca9285648848395c18b
|
/imageio/plugins/pillow_info.py
|
59b971ce792cca172764da7f2faf8f0654547643
|
[
"BSD-2-Clause"
] |
permissive
|
imageio/imageio
|
f96a03fd0c0c704f76ec4ed2f3f5137b5d1d3d2b
|
a0091371dd42442ca3fae0fc0e8a4f0925757ac7
|
refs/heads/master
| 2023-09-04T11:09:46.646163
| 2023-09-04T02:23:41
| 2023-09-04T02:23:41
| 9,861,437
| 1,332
| 346
|
BSD-2-Clause
| 2023-09-04T13:54:07
| 2013-05-04T22:56:45
|
Python
|
UTF-8
|
Python
| false
| false
| 36,624
|
py
|
pillow_info.py
|
# -*- coding: utf-8 -*-
# styletest: ignore E122 E123 E501
"""
Module that contain info about the Pillow formats. The first part of
this module generates this info and writes it to its own bottom half
if run as a script.
"""
import warnings
warnings.warn(
"The `PillowFormat` plugin is deprecated and will be removed in ImageIO v3."
" Use the new `PillowPlugin` instead.",
DeprecationWarning,
)
def generate_info(): # pragma: no cover
from urllib.request import urlopen
import PIL
from PIL import Image
Image.init()
ids = []
formats = []
docs = {}
# Collect formats and their summary from plugin modules
for mod_name in dir(PIL):
if "ImagePlugin" in mod_name:
mod = getattr(PIL, mod_name)
for ob_name in dir(mod):
ob = getattr(mod, ob_name)
if isinstance(ob, type) and issubclass(ob, Image.Image):
if ob.format in ids:
print("Found duplicate for", ob.format)
else:
ids.append(ob.format)
formats.append((ob.format, ob.format_description))
# Add extension info
for i in range(len(formats)):
id, summary = formats[i]
ext = " ".join([e for e in Image.EXTENSION if Image.EXTENSION[e] == id])
formats[i] = id, summary, ext
# Get documentation of formats
url = "https://raw.githubusercontent.com/python-pillow/Pillow/master/docs/handbook/image-file-formats.rst" # noqa
lines = urlopen(url).read().decode().splitlines()
lines.append("End")
lines.append("---") # for the end
# Parse documentation
cur_name = ""
cur_part = []
for i in range(len(lines)):
line = lines[i]
if line.startswith(("^^^", "---", "===")):
if cur_name and cur_name in ids:
text = "\n".join(cur_part[:-1])
text = text.replace("versionadded::", "versionadded:: Pillow ")
text = text.replace("Image.open`", "Image.write`")
docs[cur_name] = text
cur_part = []
cur_name = lines[i - 1].strip().replace(" ", "").upper()
else:
cur_part.append(" " + line)
# Fill in the blancs
for id in ids:
if id in docs:
docs[id] = "*From the Pillow docs:*\n\n" + docs[id]
else:
docs[id] = "No docs for %s." % id
print("no docs for", id)
# Sort before writing
formats.sort(key=lambda x: x[0])
ids.sort()
# Read file ...
code = open(__file__, "rb").read().decode()
code, divider, _ = code.partition("## BELOW IS " + "AUTOGENERATED")
code += divider + "\n\n"
# Write formats
code += "pillow_formats = [\n"
for i in range(len(formats)):
print(formats[i])
code += " (%r, %r, %r),\n" % formats[i]
code += " ]\n\n\n"
# Write docs
code += "pillow_docs = {\n"
for id in ids:
code += '%r:\nu"""%s""",\n' % (id, docs[id])
code += "}\n"
# Write back
with open(__file__, "wb") as f:
f.write(code.encode())
if __name__ == "__main__":
generate_info()
# BELOW IS AUTOGENERATED
pillow_formats = [
("BMP", "Windows Bitmap", ".bmp"),
("BUFR", "BUFR", ".bufr"),
("CUR", "Windows Cursor", ".cur"),
("DCX", "Intel DCX", ".dcx"),
("DDS", "DirectDraw Surface", ".dds"),
("DIB", "Windows Bitmap", ""),
("EPS", "Encapsulated Postscript", ".ps .eps"),
("FITS", "FITS", ".fit .fits"),
("FLI", "Autodesk FLI/FLC Animation", ".fli .flc"),
("FPX", "FlashPix", ".fpx"),
("FTEX", "Texture File Format (IW2:EOC)", ".ftc .ftu"),
("GBR", "GIMP brush file", ".gbr"),
("GIF", "Compuserve GIF", ".gif"),
("GRIB", "GRIB", ".grib"),
("HDF5", "HDF5", ".h5 .hdf"),
("ICNS", "Mac OS icns resource", ".icns"),
("ICO", "Windows Icon", ".ico"),
("IM", "IFUNC Image Memory", ".im"),
("IMT", "IM Tools", ""),
("IPTC", "IPTC/NAA", ".iim"),
("JPEG", "JPEG (ISO 10918)", ".jfif .jpe .jpg .jpeg"),
("JPEG2000", "JPEG 2000 (ISO 15444)", ".jp2 .j2k .jpc .jpf .jpx .j2c"),
("MCIDAS", "McIdas area file", ""),
("MIC", "Microsoft Image Composer", ".mic"),
("MPEG", "MPEG", ".mpg .mpeg"),
("MPO", "MPO (CIPA DC-007)", ".mpo"),
("MSP", "Windows Paint", ".msp"),
("PCD", "Kodak PhotoCD", ".pcd"),
("PCX", "Paintbrush", ".pcx"),
("PIXAR", "PIXAR raster image", ".pxr"),
("PNG", "Portable network graphics", ".png"),
("PPM", "Pbmplus image", ".pbm .pgm .ppm"),
("PSD", "Adobe Photoshop", ".psd"),
("SGI", "SGI Image File Format", ".bw .rgb .rgba .sgi"),
("SPIDER", "Spider 2D image", ""),
("SUN", "Sun Raster File", ".ras"),
("TGA", "Targa", ".tga"),
("TIFF", "Adobe TIFF", ".tif .tiff"),
("WMF", "Windows Metafile", ".wmf .emf"),
("XBM", "X11 Bitmap", ".xbm"),
("XPM", "X11 Pixel Map", ".xpm"),
("XVThumb", "XV thumbnail image", ""),
]
pillow_docs = {
"BMP": """*From the Pillow docs:*
PIL reads and writes Windows and OS/2 BMP files containing ``1``, ``L``, ``P``,
or ``RGB`` data. 16-colour images are read as ``P`` images. Run-length encoding
is not supported.
The :py:meth:`~PIL.Image.Image.write` method sets the following
:py:attr:`~PIL.Image.Image.info` properties:
**compression**
Set to ``bmp_rle`` if the file is run-length encoded.
""",
"BUFR": """*From the Pillow docs:*
.. versionadded:: Pillow 1.1.3
PIL provides a stub driver for BUFR files.
To add read or write support to your application, use
:py:func:`PIL.BufrStubImagePlugin.register_handler`.
""",
"CUR": """*From the Pillow docs:*
CUR is used to store cursors on Windows. The CUR decoder reads the largest
available cursor. Animated cursors are not supported.
""",
"DCX": """*From the Pillow docs:*
DCX is a container file format for PCX files, defined by Intel. The DCX format
is commonly used in fax applications. The DCX decoder can read files containing
``1``, ``L``, ``P``, or ``RGB`` data.
When the file is opened, only the first image is read. You can use
:py:meth:`~file.seek` or :py:mod:`~PIL.ImageSequence` to read other images.
""",
"DDS": """*From the Pillow docs:*
DDS is a popular container texture format used in video games and natively
supported by DirectX.
Currently, DXT1, DXT3, and DXT5 pixel formats are supported and only in ``RGBA``
mode.
.. versionadded:: Pillow 3.4.0 DXT3
""",
"DIB": """No docs for DIB.""",
"EPS": """*From the Pillow docs:*
PIL identifies EPS files containing image data, and can read files that contain
embedded raster images (ImageData descriptors). If Ghostscript is available,
other EPS files can be read as well. The EPS driver can also write EPS
images. The EPS driver can read EPS images in ``L``, ``LAB``, ``RGB`` and
``CMYK`` mode, but Ghostscript may convert the images to ``RGB`` mode rather
than leaving them in the original color space. The EPS driver can write images
in ``L``, ``RGB`` and ``CMYK`` modes.
If Ghostscript is available, you can call the :py:meth:`~PIL.Image.Image.load`
method with the following parameter to affect how Ghostscript renders the EPS
**scale**
Affects the scale of the resultant rasterized image. If the EPS suggests
that the image be rendered at 100px x 100px, setting this parameter to
2 will make the Ghostscript render a 200px x 200px image instead. The
relative position of the bounding box is maintained::
im = Image.open(...)
im.size #(100,100)
im.load(scale=2)
im.size #(200,200)
""",
"FITS": """*From the Pillow docs:*
.. versionadded:: Pillow 1.1.5
PIL provides a stub driver for FITS files.
To add read or write support to your application, use
:py:func:`PIL.FitsStubImagePlugin.register_handler`.
""",
"FLI": """No docs for FLI.""",
"FPX": """*From the Pillow docs:*
PIL reads Kodak FlashPix files. In the current version, only the highest
resolution image is read from the file, and the viewing transform is not taken
into account.
.. note::
To enable full FlashPix support, you need to build and install the IJG JPEG
library before building the Python Imaging Library. See the distribution
README for details.
""",
"FTEX": """*From the Pillow docs:*
.. versionadded:: Pillow 3.2.0
The FTEX decoder reads textures used for 3D objects in
Independence War 2: Edge Of Chaos. The plugin reads a single texture
per file, in the compressed and uncompressed formats.
""",
"GBR": """*From the Pillow docs:*
The GBR decoder reads GIMP brush files, version 1 and 2.
The :py:meth:`~PIL.Image.Image.write` method sets the following
:py:attr:`~PIL.Image.Image.info` properties:
**comment**
The brush name.
**spacing**
The spacing between the brushes, in pixels. Version 2 only.
GD
^^
PIL reads uncompressed GD files. Note that this file format cannot be
automatically identified, so you must use :py:func:`PIL.GdImageFile.open` to
read such a file.
The :py:meth:`~PIL.Image.Image.write` method sets the following
:py:attr:`~PIL.Image.Image.info` properties:
**transparency**
Transparency color index. This key is omitted if the image is not
transparent.
""",
"GIF": """*From the Pillow docs:*
PIL reads GIF87a and GIF89a versions of the GIF file format. The library writes
run-length encoded files in GIF87a by default, unless GIF89a features
are used or GIF89a is already in use.
Note that GIF files are always read as grayscale (``L``)
or palette mode (``P``) images.
The :py:meth:`~PIL.Image.Image.write` method sets the following
:py:attr:`~PIL.Image.Image.info` properties:
**background**
Default background color (a palette color index).
**transparency**
Transparency color index. This key is omitted if the image is not
transparent.
**version**
Version (either ``GIF87a`` or ``GIF89a``).
**duration**
May not be present. The time to display the current frame
of the GIF, in milliseconds.
**loop**
May not be present. The number of times the GIF should loop.
Reading sequences
~~~~~~~~~~~~~~~~~
The GIF loader supports the :py:meth:`~file.seek` and :py:meth:`~file.tell`
methods. You can seek to the next frame (``im.seek(im.tell() + 1)``), or rewind
the file by seeking to the first frame. Random access is not supported.
``im.seek()`` raises an ``EOFError`` if you try to seek after the last frame.
Saving
~~~~~~
When calling :py:meth:`~PIL.Image.Image.save`, the following options
are available::
im.save(out, save_all=True, append_images=[im1, im2, ...])
**save_all**
If present and true, all frames of the image will be saved. If
not, then only the first frame of a multiframe image will be saved.
**append_images**
A list of images to append as additional frames. Each of the
images in the list can be single or multiframe images.
This is currently only supported for GIF, PDF, TIFF, and WebP.
**duration**
The display duration of each frame of the multiframe gif, in
milliseconds. Pass a single integer for a constant duration, or a
list or tuple to set the duration for each frame separately.
**loop**
Integer number of times the GIF should loop.
**optimize**
If present and true, attempt to compress the palette by
eliminating unused colors. This is only useful if the palette can
be compressed to the next smaller power of 2 elements.
**palette**
Use the specified palette for the saved image. The palette should
be a bytes or bytearray object containing the palette entries in
RGBRGB... form. It should be no more than 768 bytes. Alternately,
the palette can be passed in as an
:py:class:`PIL.ImagePalette.ImagePalette` object.
**disposal**
Indicates the way in which the graphic is to be treated after being displayed.
* 0 - No disposal specified.
* 1 - Do not dispose.
* 2 - Restore to background color.
* 3 - Restore to previous content.
Pass a single integer for a constant disposal, or a list or tuple
to set the disposal for each frame separately.
Reading local images
~~~~~~~~~~~~~~~~~~~~
The GIF loader creates an image memory the same size as the GIF file’s *logical
screen size*, and pastes the actual pixel data (the *local image*) into this
image. If you only want the actual pixel rectangle, you can manipulate the
:py:attr:`~PIL.Image.Image.size` and :py:attr:`~PIL.Image.Image.tile`
attributes before loading the file::
im = Image.open(...)
if im.tile[0][0] == "gif":
# only read the first "local image" from this GIF file
tag, (x0, y0, x1, y1), offset, extra = im.tile[0]
im.size = (x1 - x0, y1 - y0)
im.tile = [(tag, (0, 0) + im.size, offset, extra)]
""",
"GRIB": """*From the Pillow docs:*
.. versionadded:: Pillow 1.1.5
PIL provides a stub driver for GRIB files.
The driver requires the file to start with a GRIB header. If you have files
with embedded GRIB data, or files with multiple GRIB fields, your application
has to seek to the header before passing the file handle to PIL.
To add read or write support to your application, use
:py:func:`PIL.GribStubImagePlugin.register_handler`.
""",
"HDF5": """*From the Pillow docs:*
.. versionadded:: Pillow 1.1.5
PIL provides a stub driver for HDF5 files.
To add read or write support to your application, use
:py:func:`PIL.Hdf5StubImagePlugin.register_handler`.
""",
"ICNS": """*From the Pillow docs:*
PIL reads and (macOS only) writes macOS ``.icns`` files. By default, the
largest available icon is read, though you can override this by setting the
:py:attr:`~PIL.Image.Image.size` property before calling
:py:meth:`~PIL.Image.Image.load`. The :py:meth:`~PIL.Image.Image.write` method
sets the following :py:attr:`~PIL.Image.Image.info` property:
**sizes**
A list of supported sizes found in this icon file; these are a
3-tuple, ``(width, height, scale)``, where ``scale`` is 2 for a retina
icon and 1 for a standard icon. You *are* permitted to use this 3-tuple
format for the :py:attr:`~PIL.Image.Image.size` property if you set it
before calling :py:meth:`~PIL.Image.Image.load`; after loading, the size
will be reset to a 2-tuple containing pixel dimensions (so, e.g. if you
ask for ``(512, 512, 2)``, the final value of
:py:attr:`~PIL.Image.Image.size` will be ``(1024, 1024)``).
""",
"ICO": """*From the Pillow docs:*
ICO is used to store icons on Windows. The largest available icon is read.
The :py:meth:`~PIL.Image.Image.save` method supports the following options:
**sizes**
A list of sizes including in this ico file; these are a 2-tuple,
``(width, height)``; Default to ``[(16, 16), (24, 24), (32, 32), (48, 48),
(64, 64), (128, 128), (256, 256)]``. Any sizes bigger than the original
size or 256 will be ignored.
IM
^^
IM is a format used by LabEye and other applications based on the IFUNC image
processing library. The library reads and writes most uncompressed interchange
versions of this format.
IM is the only format that can store all internal PIL formats.
""",
"IM": """No docs for IM.""",
"IMT": """*From the Pillow docs:*
PIL reads Image Tools images containing ``L`` data.
""",
"IPTC": """No docs for IPTC.""",
"JPEG": """*From the Pillow docs:*
PIL reads JPEG, JFIF, and Adobe JPEG files containing ``L``, ``RGB``, or
``CMYK`` data. It writes standard and progressive JFIF files.
Using the :py:meth:`~PIL.Image.Image.draft` method, you can speed things up by
converting ``RGB`` images to ``L``, and resize images to 1/2, 1/4 or 1/8 of
their original size while loading them.
The :py:meth:`~PIL.Image.Image.write` method may set the following
:py:attr:`~PIL.Image.Image.info` properties if available:
**jfif**
JFIF application marker found. If the file is not a JFIF file, this key is
not present.
**jfif_version**
A tuple representing the jfif version, (major version, minor version).
**jfif_density**
A tuple representing the pixel density of the image, in units specified
by jfif_unit.
**jfif_unit**
Units for the jfif_density:
* 0 - No Units
* 1 - Pixels per Inch
* 2 - Pixels per Centimeter
**dpi**
A tuple representing the reported pixel density in pixels per inch, if
the file is a jfif file and the units are in inches.
**adobe**
Adobe application marker found. If the file is not an Adobe JPEG file, this
key is not present.
**adobe_transform**
Vendor Specific Tag.
**progression**
Indicates that this is a progressive JPEG file.
**icc_profile**
The ICC color profile for the image.
**exif**
Raw EXIF data from the image.
The :py:meth:`~PIL.Image.Image.save` method supports the following options:
**quality**
The image quality, on a scale from 1 (worst) to 95 (best). The default is
75. Values above 95 should be avoided; 100 disables portions of the JPEG
compression algorithm, and results in large files with hardly any gain in
image quality.
**optimize**
If present and true, indicates that the encoder should make an extra pass
over the image in order to select optimal encoder settings.
**progressive**
If present and true, indicates that this image should be stored as a
progressive JPEG file.
**dpi**
A tuple of integers representing the pixel density, ``(x,y)``.
**icc_profile**
If present and true, the image is stored with the provided ICC profile.
If this parameter is not provided, the image will be saved with no profile
attached. To preserve the existing profile::
im.save(filename, 'jpeg', icc_profile=im.info.get('icc_profile'))
**exif**
If present, the image will be stored with the provided raw EXIF data.
**subsampling**
If present, sets the subsampling for the encoder.
* ``keep``: Only valid for JPEG files, will retain the original image setting.
* ``4:4:4``, ``4:2:2``, ``4:2:0``: Specific sampling values
* ``-1``: equivalent to ``keep``
* ``0``: equivalent to ``4:4:4``
* ``1``: equivalent to ``4:2:2``
* ``2``: equivalent to ``4:2:0``
**qtables**
If present, sets the qtables for the encoder. This is listed as an
advanced option for wizards in the JPEG documentation. Use with
caution. ``qtables`` can be one of several types of values:
* a string, naming a preset, e.g. ``keep``, ``web_low``, or ``web_high``
* a list, tuple, or dictionary (with integer keys =
range(len(keys))) of lists of 64 integers. There must be
between 2 and 4 tables.
.. versionadded:: Pillow 2.5.0
.. note::
To enable JPEG support, you need to build and install the IJG JPEG library
before building the Python Imaging Library. See the distribution README for
details.
""",
"JPEG2000": """*From the Pillow docs:*
.. versionadded:: Pillow 2.4.0
PIL reads and writes JPEG 2000 files containing ``L``, ``LA``, ``RGB`` or
``RGBA`` data. It can also read files containing ``YCbCr`` data, which it
converts on read into ``RGB`` or ``RGBA`` depending on whether or not there is
an alpha channel. PIL supports JPEG 2000 raw codestreams (``.j2k`` files), as
well as boxed JPEG 2000 files (``.j2p`` or ``.jpx`` files). PIL does *not*
support files whose components have different sampling frequencies.
When loading, if you set the ``mode`` on the image prior to the
:py:meth:`~PIL.Image.Image.load` method being invoked, you can ask PIL to
convert the image to either ``RGB`` or ``RGBA`` rather than choosing for
itself. It is also possible to set ``reduce`` to the number of resolutions to
discard (each one reduces the size of the resulting image by a factor of 2),
and ``layers`` to specify the number of quality layers to load.
The :py:meth:`~PIL.Image.Image.save` method supports the following options:
**offset**
The image offset, as a tuple of integers, e.g. (16, 16)
**tile_offset**
The tile offset, again as a 2-tuple of integers.
**tile_size**
The tile size as a 2-tuple. If not specified, or if set to None, the
image will be saved without tiling.
**quality_mode**
Either `"rates"` or `"dB"` depending on the units you want to use to
specify image quality.
**quality_layers**
A sequence of numbers, each of which represents either an approximate size
reduction (if quality mode is `"rates"`) or a signal to noise ratio value
in decibels. If not specified, defaults to a single layer of full quality.
**num_resolutions**
The number of different image resolutions to be stored (which corresponds
to the number of Discrete Wavelet Transform decompositions plus one).
**codeblock_size**
The code-block size as a 2-tuple. Minimum size is 4 x 4, maximum is 1024 x
1024, with the additional restriction that no code-block may have more
than 4096 coefficients (i.e. the product of the two numbers must be no
greater than 4096).
**precinct_size**
The precinct size as a 2-tuple. Must be a power of two along both axes,
and must be greater than the code-block size.
**irreversible**
If ``True``, use the lossy Irreversible Color Transformation
followed by DWT 9-7. Defaults to ``False``, which means to use the
Reversible Color Transformation with DWT 5-3.
**progression**
Controls the progression order; must be one of ``"LRCP"``, ``"RLCP"``,
``"RPCL"``, ``"PCRL"``, ``"CPRL"``. The letters stand for Component,
Position, Resolution and Layer respectively and control the order of
encoding, the idea being that e.g. an image encoded using LRCP mode can
have its quality layers decoded as they arrive at the decoder, while one
encoded using RLCP mode will have increasing resolutions decoded as they
arrive, and so on.
**cinema_mode**
Set the encoder to produce output compliant with the digital cinema
specifications. The options here are ``"no"`` (the default),
``"cinema2k-24"`` for 24fps 2K, ``"cinema2k-48"`` for 48fps 2K, and
``"cinema4k-24"`` for 24fps 4K. Note that for compliant 2K files,
*at least one* of your image dimensions must match 2048 x 1080, while
for compliant 4K files, *at least one* of the dimensions must match
4096 x 2160.
.. note::
To enable JPEG 2000 support, you need to build and install the OpenJPEG
library, version 2.0.0 or higher, before building the Python Imaging
Library.
Windows users can install the OpenJPEG binaries available on the
OpenJPEG website, but must add them to their PATH in order to use PIL (if
you fail to do this, you will get errors about not being able to load the
``_imaging`` DLL).
""",
"MCIDAS": """*From the Pillow docs:*
PIL identifies and reads 8-bit McIdas area files.
""",
"MIC": """*From the Pillow docs:*
PIL identifies and reads Microsoft Image Composer (MIC) files. When opened, the
first sprite in the file is loaded. You can use :py:meth:`~file.seek` and
:py:meth:`~file.tell` to read other sprites from the file.
Note that there may be an embedded gamma of 2.2 in MIC files.
""",
"MPEG": """*From the Pillow docs:*
PIL identifies MPEG files.
""",
"MPO": """*From the Pillow docs:*
Pillow identifies and reads Multi Picture Object (MPO) files, loading the primary
image when first opened. The :py:meth:`~file.seek` and :py:meth:`~file.tell`
methods may be used to read other pictures from the file. The pictures are
zero-indexed and random access is supported.
""",
"MSP": """*From the Pillow docs:*
PIL identifies and reads MSP files from Windows 1 and 2. The library writes
uncompressed (Windows 1) versions of this format.
""",
"PCD": """*From the Pillow docs:*
PIL reads PhotoCD files containing ``RGB`` data. This only reads the 768x512
resolution image from the file. Higher resolutions are encoded in a proprietary
encoding.
""",
"PCX": """*From the Pillow docs:*
PIL reads and writes PCX files containing ``1``, ``L``, ``P``, or ``RGB`` data.
""",
"PIXAR": """*From the Pillow docs:*
PIL provides limited support for PIXAR raster files. The library can identify
and read “dumped” RGB files.
The format code is ``PIXAR``.
""",
"PNG": """*From the Pillow docs:*
PIL identifies, reads, and writes PNG files containing ``1``, ``L``, ``P``,
``RGB``, or ``RGBA`` data. Interlaced files are supported as of v1.1.7.
The :py:meth:`~PIL.Image.Image.write` method sets the following
:py:attr:`~PIL.Image.Image.info` properties, when appropriate:
**chromaticity**
The chromaticity points, as an 8 tuple of floats. (``White Point
X``, ``White Point Y``, ``Red X``, ``Red Y``, ``Green X``, ``Green
Y``, ``Blue X``, ``Blue Y``)
**gamma**
Gamma, given as a floating point number.
**srgb**
The sRGB rendering intent as an integer.
* 0 Perceptual
* 1 Relative Colorimetric
* 2 Saturation
* 3 Absolute Colorimetric
**transparency**
For ``P`` images: Either the palette index for full transparent pixels,
or a byte string with alpha values for each palette entry.
For ``L`` and ``RGB`` images, the color that represents full transparent
pixels in this image.
This key is omitted if the image is not a transparent palette image.
``Open`` also sets ``Image.text`` to a list of the values of the
``tEXt``, ``zTXt``, and ``iTXt`` chunks of the PNG image. Individual
compressed chunks are limited to a decompressed size of
``PngImagePlugin.MAX_TEXT_CHUNK``, by default 1MB, to prevent
decompression bombs. Additionally, the total size of all of the text
chunks is limited to ``PngImagePlugin.MAX_TEXT_MEMORY``, defaulting to
64MB.
The :py:meth:`~PIL.Image.Image.save` method supports the following options:
**optimize**
If present and true, instructs the PNG writer to make the output file as
small as possible. This includes extra processing in order to find optimal
encoder settings.
**transparency**
For ``P``, ``L``, and ``RGB`` images, this option controls what
color image to mark as transparent.
For ``P`` images, this can be a either the palette index,
or a byte string with alpha values for each palette entry.
**dpi**
A tuple of two numbers corresponding to the desired dpi in each direction.
**pnginfo**
A :py:class:`PIL.PngImagePlugin.PngInfo` instance containing text tags.
**compress_level**
ZLIB compression level, a number between 0 and 9: 1 gives best speed,
9 gives best compression, 0 gives no compression at all. Default is 6.
When ``optimize`` option is True ``compress_level`` has no effect
(it is set to 9 regardless of a value passed).
**icc_profile**
The ICC Profile to include in the saved file.
**bits (experimental)**
For ``P`` images, this option controls how many bits to store. If omitted,
the PNG writer uses 8 bits (256 colors).
**dictionary (experimental)**
Set the ZLIB encoder dictionary.
.. note::
To enable PNG support, you need to build and install the ZLIB compression
library before building the Python Imaging Library. See the installation
documentation for details.
""",
"PPM": """*From the Pillow docs:*
PIL reads and writes PBM, PGM and PPM files containing ``1``, ``L`` or ``RGB``
data.
""",
"PSD": """*From the Pillow docs:*
PIL identifies and reads PSD files written by Adobe Photoshop 2.5 and 3.0.
""",
"SGI": """*From the Pillow docs:*
Pillow reads and writes uncompressed ``L``, ``RGB``, and ``RGBA`` files.
""",
"SPIDER": """*From the Pillow docs:*
PIL reads and writes SPIDER image files of 32-bit floating point data
("F;32F").
PIL also reads SPIDER stack files containing sequences of SPIDER images. The
:py:meth:`~file.seek` and :py:meth:`~file.tell` methods are supported, and
random access is allowed.
The :py:meth:`~PIL.Image.Image.write` method sets the following attributes:
**format**
Set to ``SPIDER``
**istack**
Set to 1 if the file is an image stack, else 0.
**nimages**
Set to the number of images in the stack.
A convenience method, :py:meth:`~PIL.Image.Image.convert2byte`, is provided for
converting floating point data to byte data (mode ``L``)::
im = Image.open('image001.spi').convert2byte()
Writing files in SPIDER format
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The extension of SPIDER files may be any 3 alphanumeric characters. Therefore
the output format must be specified explicitly::
im.save('newimage.spi', format='SPIDER')
For more information about the SPIDER image processing package, see the
`SPIDER homepage`_ at `Wadsworth Center`_.
.. _SPIDER homepage: https://spider.wadsworth.org/spider_doc/spider/docs/spider.html
.. _Wadsworth Center: https://www.wadsworth.org/
""",
"SUN": """No docs for SUN.""",
"TGA": """*From the Pillow docs:*
PIL reads 24- and 32-bit uncompressed and run-length encoded TGA files.
""",
"TIFF": """*From the Pillow docs:*
Pillow reads and writes TIFF files. It can read both striped and tiled
images, pixel and plane interleaved multi-band images. If you have
libtiff and its headers installed, PIL can read and write many kinds
of compressed TIFF files. If not, PIL will only read and write
uncompressed files.
.. note::
Beginning in version 5.0.0, Pillow requires libtiff to read or
write compressed files. Prior to that release, Pillow had buggy
support for reading Packbits, LZW and JPEG compressed TIFFs
without using libtiff.
The :py:meth:`~PIL.Image.Image.write` method sets the following
:py:attr:`~PIL.Image.Image.info` properties:
**compression**
Compression mode.
.. versionadded:: Pillow 2.0.0
**dpi**
Image resolution as an ``(xdpi, ydpi)`` tuple, where applicable. You can use
the :py:attr:`~PIL.Image.Image.tag` attribute to get more detailed
information about the image resolution.
.. versionadded:: Pillow 1.1.5
**resolution**
Image resolution as an ``(xres, yres)`` tuple, where applicable. This is a
measurement in whichever unit is specified by the file.
.. versionadded:: Pillow 1.1.5
The :py:attr:`~PIL.Image.Image.tag_v2` attribute contains a dictionary
of TIFF metadata. The keys are numerical indexes from
:py:attr:`~PIL.TiffTags.TAGS_V2`. Values are strings or numbers for single
items, multiple values are returned in a tuple of values. Rational
numbers are returned as a :py:class:`~PIL.TiffImagePlugin.IFDRational`
object.
.. versionadded:: Pillow 3.0.0
For compatibility with legacy code, the
:py:attr:`~PIL.Image.Image.tag` attribute contains a dictionary of
decoded TIFF fields as returned prior to version 3.0.0. Values are
returned as either strings or tuples of numeric values. Rational
numbers are returned as a tuple of ``(numerator, denominator)``.
.. deprecated:: 3.0.0
Saving Tiff Images
~~~~~~~~~~~~~~~~~~
The :py:meth:`~PIL.Image.Image.save` method can take the following keyword arguments:
**save_all**
If true, Pillow will save all frames of the image to a multiframe tiff document.
.. versionadded:: Pillow 3.4.0
**tiffinfo**
A :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` object or dict
object containing tiff tags and values. The TIFF field type is
autodetected for Numeric and string values, any other types
require using an :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
object and setting the type in
:py:attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype` with
the appropriate numerical value from
``TiffTags.TYPES``.
.. versionadded:: Pillow 2.3.0
Metadata values that are of the rational type should be passed in
using a :py:class:`~PIL.TiffImagePlugin.IFDRational` object.
.. versionadded:: Pillow 3.1.0
For compatibility with legacy code, a
:py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` object may
be passed in this field. However, this is deprecated.
.. versionadded:: Pillow 3.0.0
.. note::
Only some tags are currently supported when writing using
libtiff. The supported list is found in
:py:attr:`~PIL:TiffTags.LIBTIFF_CORE`.
**compression**
A string containing the desired compression method for the
file. (valid only with libtiff installed) Valid compression
methods are: ``None``, ``"tiff_ccitt"``, ``"group3"``,
``"group4"``, ``"tiff_jpeg"``, ``"tiff_adobe_deflate"``,
``"tiff_thunderscan"``, ``"tiff_deflate"``, ``"tiff_sgilog"``,
``"tiff_sgilog24"``, ``"tiff_raw_16"``
These arguments to set the tiff header fields are an alternative to
using the general tags available through tiffinfo.
**description**
**software**
**date_time**
**artist**
**copyright**
Strings
**resolution_unit**
A string of "inch", "centimeter" or "cm"
**resolution**
**x_resolution**
**y_resolution**
**dpi**
Either a Float, 2 tuple of (numerator, denominator) or a
:py:class:`~PIL.TiffImagePlugin.IFDRational`. Resolution implies
an equal x and y resolution, dpi also implies a unit of inches.
""",
"WMF": """*From the Pillow docs:*
PIL can identify playable WMF files.
In PIL 1.1.4 and earlier, the WMF driver provides some limited rendering
support, but not enough to be useful for any real application.
In PIL 1.1.5 and later, the WMF driver is a stub driver. To add WMF read or
write support to your application, use
:py:func:`PIL.WmfImagePlugin.register_handler` to register a WMF handler.
::
from PIL import Image
from PIL import WmfImagePlugin
class WmfHandler:
def open(self, im):
...
def load(self, im):
...
return image
def save(self, im, fp, filename):
...
wmf_handler = WmfHandler()
WmfImagePlugin.register_handler(wmf_handler)
im = Image.open("sample.wmf")""",
"XBM": """*From the Pillow docs:*
PIL reads and writes X bitmap files (mode ``1``).
""",
"XPM": """*From the Pillow docs:*
PIL reads X pixmap files (mode ``P``) with 256 colors or less.
The :py:meth:`~PIL.Image.Image.write` method sets the following
:py:attr:`~PIL.Image.Image.info` properties:
**transparency**
Transparency color index. This key is omitted if the image is not
transparent.
""",
"XVThumb": """No docs for XVThumb.""",
}
|
af41399a45ae6e8cb712cc81c145fcce6e519f9b
|
88dda5e76cef286c7db3ae7e5d1a32d28f7815a3
|
/reviewboard/reviews/tests/test_entries.py
|
6609e1cd472f7f2f797e6b508a5c250696195476
|
[
"MIT"
] |
permissive
|
reviewboard/reviewboard
|
f4d3bada08ba9d6ef53add2d1fdb82bd6cc63a1e
|
c3a991f1e9d7682239a1ab0e8661cee6da01d537
|
refs/heads/master
| 2023-08-31T09:03:14.170335
| 2023-08-30T08:22:43
| 2023-08-30T08:22:43
| 285,304
| 1,141
| 353
|
MIT
| 2023-06-07T16:51:02
| 2009-08-22T21:39:49
|
Python
|
UTF-8
|
Python
| false
| false
| 72,137
|
py
|
test_entries.py
|
"""Unit tests for review request page entries."""
from datetime import datetime, timedelta
from django.contrib.auth.models import AnonymousUser, User
from django.template import RequestContext
from django.test.client import RequestFactory
from django.utils import timezone
from django.utils.timezone import utc
from djblets.testing.decorators import add_fixtures
from kgb import SpyAgency
from reviewboard.changedescs.models import ChangeDescription
from reviewboard.reviews.detail import (BaseReviewRequestPageEntry,
ChangeEntry,
InitialStatusUpdatesEntry,
ReviewEntry,
ReviewRequestPageData,
StatusUpdatesEntryMixin)
from reviewboard.reviews.models import (BaseComment, GeneralComment,
StatusUpdate)
from reviewboard.testing import TestCase
class BaseReviewRequestPageEntryTests(SpyAgency, TestCase):
"""Unit tests for BaseReviewRequestPageEntry."""
fixtures = ['test_users']
def setUp(self):
super(BaseReviewRequestPageEntryTests, self).setUp()
self.review_request = self.create_review_request()
self.request = RequestFactory().request()
self.request.user = AnonymousUser()
self.data = ReviewRequestPageData(review_request=self.review_request,
request=self.request)
def test_init_with_no_updated_timestamp(self):
"""Testing BaseReviewRequestPageEntry.__init__ without an
updated_timestamp specified
"""
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
self.assertEqual(entry.updated_timestamp,
datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
def test_render_to_string(self):
"""Testing BaseReviewRequestPageEntry.render_to_string"""
entry = BaseReviewRequestPageEntry(data=self.data,
entry_id='test',
added_timestamp=None)
entry.template_name = 'reviews/entries/base.html'
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': timezone.now(),
}))
self.assertNotEqual(html, '')
def test_render_to_string_with_entry_pos_main(self):
"""Testing BaseReviewRequestPageEntry.render_to_string with
entry_pos=ENTRY_POS_MAIN
"""
entry = BaseReviewRequestPageEntry(data=self.data,
entry_id='test',
added_timestamp=None)
entry.template_name = 'reviews/entries/base.html'
entry.entry_pos = BaseReviewRequestPageEntry.ENTRY_POS_MAIN
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': timezone.now(),
}))
self.assertIn('<div class="box-statuses">', html)
def test_render_to_string_with_entry_pos_initial(self):
"""Testing BaseReviewRequestPageEntry.render_to_string with
entry_pos=ENTRY_POS_INITIAL
"""
entry = BaseReviewRequestPageEntry(data=self.data,
entry_id='test',
added_timestamp=None)
entry.template_name = 'reviews/entries/base.html'
entry.entry_pos = BaseReviewRequestPageEntry.ENTRY_POS_INITIAL
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': timezone.now(),
}))
self.assertNotIn('<div class="box-statuses">', html)
def test_render_to_string_with_new_entry(self):
"""Testing BaseReviewRequestPageEntry.render_to_string with
entry_is_new=True
"""
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
entry.template_name = 'reviews/entries/base.html'
self.request.user = User.objects.create_user(username='test-user',
email='user@example.com')
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': datetime(2017, 9, 7, 10, 0, 0, tzinfo=utc),
}))
self.assertIn(
'class="review-request-page-entry new-review-request-page-entry',
html)
def test_render_to_string_without_new_entry(self):
"""Testing BaseReviewRequestPageEntry.render_to_string with
entry_is_new=False
"""
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
entry.template_name = 'reviews/entries/base.html'
self.request.user = User.objects.create_user(username='test-user',
email='user@example.com')
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': datetime(2017, 9, 7, 18, 0, 0, tzinfo=utc),
}))
self.assertNotEqual(html, '')
self.assertNotIn(
'class="review-request-page-entry new-review-request-page-entry"',
html)
def test_render_to_string_with_no_template(self):
"""Testing BaseReviewRequestPageEntry.render_to_string with
template_name=None
"""
entry = BaseReviewRequestPageEntry(data=self.data,
entry_id='test',
added_timestamp=None)
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': timezone.now(),
}))
self.assertEqual(html, '')
def test_render_to_string_with_has_content_false(self):
"""Testing BaseReviewRequestPageEntry.render_to_string with
has_content=False
"""
entry = BaseReviewRequestPageEntry(data=self.data,
entry_id='test',
added_timestamp=None)
entry.template_name = 'reviews/entries/base.html'
entry.has_content = False
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': timezone.now(),
}))
self.assertEqual(html, '')
def test_render_to_string_with_exception(self):
"""Testing BaseReviewRequestPageEntry.render_to_string with
exception
"""
entry = BaseReviewRequestPageEntry(data=self.data,
entry_id='test',
added_timestamp=None)
entry.template_name = 'reviews/entries/NOT_FOUND.html'
from reviewboard.reviews.detail import logger
self.spy_on(logger.exception)
html = entry.render_to_string(
self.request,
RequestContext(self.request, {
'last_visited': timezone.now(),
}))
self.assertEqual(html, '')
self.assertTrue(logger.exception.spy.called)
self.assertEqual(logger.exception.spy.calls[0].args[0],
'Error rendering template for %s (ID=%s): %s')
def test_is_entry_new_with_timestamp(self):
"""Testing BaseReviewRequestPageEntry.is_entry_new with timestamp"""
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=datetime(2017, 9, 7, 15, 36, 0, tzinfo=utc))
user = User.objects.create_user(username='test-user',
email='user@example.com')
self.assertTrue(entry.is_entry_new(
last_visited=datetime(2017, 9, 7, 10, 0, 0, tzinfo=utc),
user=user))
self.assertFalse(entry.is_entry_new(
last_visited=datetime(2017, 9, 7, 16, 0, 0, tzinfo=utc),
user=user))
self.assertFalse(entry.is_entry_new(
last_visited=datetime(2017, 9, 7, 15, 36, 0, tzinfo=utc),
user=user))
def test_is_entry_new_without_timestamp(self):
"""Testing BaseReviewRequestPageEntry.is_entry_new without timestamp
"""
entry = BaseReviewRequestPageEntry(data=self.data,
entry_id='test',
added_timestamp=None)
self.assertFalse(entry.is_entry_new(
last_visited=datetime(2017, 9, 7, 10, 0, 0, tzinfo=utc),
user=User.objects.create_user(username='test-user',
email='user@example.com')))
def test_collapsed_with_older_than_last_visited(self):
"""Testing BaseReviewRequestPageEntry.collapsed with entry older than
last visited
"""
self.data.latest_changedesc_timestamp = \
self.review_request.time_added + timedelta(days=5)
self.data.last_visited = datetime(2017, 9, 7, 10, 0, 0, tzinfo=utc)
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=self.data.last_visited - timedelta(days=2),
updated_timestamp=self.data.last_visited - timedelta(days=1))
self.assertTrue(entry.collapsed)
def test_collapsed_with_newer_than_last_visited(self):
"""Testing BaseReviewRequestPageEntry.collapsed with entry newer than
last visited
"""
self.data.last_visited = datetime(2017, 9, 7, 10, 0, 0, tzinfo=utc)
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=self.data.last_visited,
updated_timestamp=self.data.last_visited + timedelta(days=1))
self.assertFalse(entry.collapsed)
def test_collapsed_without_last_visited(self):
"""Testing BaseReviewRequestPageEntry.collapsed without last visited
timestamp
"""
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=datetime(2017, 9, 6, 10, 0, 0, tzinfo=utc),
updated_timestamp=datetime(2017, 9, 7, 10, 0, 0, tzinfo=utc))
self.assertFalse(entry.collapsed)
def test_collapsed_with_older_than_changedesc(self):
"""Testing BaseReviewRequestPageEntry.collapsed with older than latest
Change Description
"""
self.data.latest_changedesc_timestamp = \
self.review_request.time_added + timedelta(days=5)
self.data.last_visited = \
self.review_request.time_added + timedelta(days=10)
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=(self.data.latest_changedesc_timestamp -
timedelta(days=2)),
updated_timestamp=(self.data.latest_changedesc_timestamp -
timedelta(days=1)))
self.assertTrue(entry.collapsed)
def test_collapsed_with_newer_than_changedesc(self):
"""Testing BaseReviewRequestPageEntry.collapsed with newer than latest
Change Description
"""
self.data.latest_changedesc_timestamp = self.review_request.time_added
self.data.last_visited = \
self.review_request.time_added + timedelta(days=10)
entry = BaseReviewRequestPageEntry(
data=self.data,
entry_id='test',
added_timestamp=self.data.latest_changedesc_timestamp,
updated_timestamp=(self.data.latest_changedesc_timestamp +
timedelta(days=1)))
self.assertFalse(entry.collapsed)
class StatusUpdatesEntryMixinTests(TestCase):
"""Unit tests for StatusUpdatesEntryMixin."""
def test_add_update_with_done_failure(self):
"""Testing StatusUpdatesEntryMixin.add_update with DONE_FAILURE"""
status_update = StatusUpdate(state=StatusUpdate.DONE_FAILURE)
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertEqual(entry.status_updates, [status_update])
self.assertEqual(status_update.header_class,
'status-update-state-failure')
def test_add_update_with_error(self):
"""Testing StatusUpdatesEntryMixin.add_update with ERROR"""
status_update = StatusUpdate(state=StatusUpdate.ERROR)
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertEqual(entry.status_updates, [status_update])
self.assertEqual(status_update.header_class,
'status-update-state-failure')
def test_add_update_with_timeout(self):
"""Testing StatusUpdatesEntryMixin.add_update with TIMEOUT"""
status_update = StatusUpdate(state=StatusUpdate.TIMEOUT)
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertEqual(entry.status_updates, [status_update])
self.assertEqual(status_update.header_class,
'status-update-state-failure')
def test_add_update_with_pending(self):
"""Testing StatusUpdatesEntryMixin.add_update with PENDING"""
status_update = StatusUpdate(state=StatusUpdate.PENDING)
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertEqual(entry.status_updates, [status_update])
self.assertEqual(status_update.header_class,
'status-update-state-pending')
def test_add_update_with_not_yet_run(self):
"""Testing StatusUpdatesEntryMixin.add_update with NOT_YET_RUN"""
status_update = StatusUpdate(state=StatusUpdate.NOT_YET_RUN)
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertEqual(entry.status_updates, [status_update])
self.assertEqual(status_update.header_class,
'status-update-state-not-yet-run')
def test_add_update_with_done_success(self):
"""Testing StatusUpdatesEntryMixin.add_update with DONE_SUCCESS"""
status_update = StatusUpdate(state=StatusUpdate.DONE_SUCCESS)
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertEqual(entry.status_updates, [status_update])
self.assertEqual(status_update.header_class,
'status-update-state-success')
def test_add_update_html_rendering(self):
"""Testing StatusUpdatesEntryMixin.add_update HTML rendering"""
status_update = StatusUpdate(state=StatusUpdate.DONE_SUCCESS,
description='My description.',
summary='My summary.')
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertHTMLEqual(
status_update.summary_html,
('<div class="status-update-summary-entry'
' status-update-state-success">\n'
' <span class="summary">My summary.</span>\n'
' My description.\n'
'</div>'))
def test_add_update_html_rendering_with_url(self):
"""Testing StatusUpdatesEntryMixin.add_update HTML rendering with URL
"""
status_update = StatusUpdate(state=StatusUpdate.DONE_SUCCESS,
description='My description.',
summary='My summary.',
url='https://example.com/')
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertHTMLEqual(
status_update.summary_html,
('<div class="status-update-summary-entry'
' status-update-state-success">\n'
' <span class="summary">My summary.</span>\n'
' My description.\n'
' <a href="https://example.com/">https://example.com/</a>'
'</div>'))
def test_add_update_html_rendering_with_url_and_text(self):
"""Testing StatusUpdatesEntryMixin.add_update HTML rendering with URL
and URL text
"""
status_update = StatusUpdate(state=StatusUpdate.DONE_SUCCESS,
description='My description.',
summary='My summary.',
url='https://example.com/',
url_text='My URL')
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertHTMLEqual(
status_update.summary_html,
('<div class="status-update-summary-entry'
' status-update-state-success">\n'
' <span class="summary">My summary.</span>\n'
' My description.\n'
' <a href="https://example.com/">My URL</a>'
'</div>'))
def test_add_update_html_rendering_with_timeout(self):
"""Testing StatusUpdatesEntryMixin.add_update HTML rendering with
timeout
"""
status_update = StatusUpdate(state=StatusUpdate.TIMEOUT,
description='My description.',
summary='My summary.')
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertHTMLEqual(
status_update.summary_html,
('<div class="status-update-summary-entry'
' status-update-state-failure">\n'
' <span class="summary">My summary.</span>\n'
' timed out.\n'
'</div>'))
@add_fixtures(['test_users'])
def test_add_update_html_rendering_with_timeout_can_retry(self):
"""Testing StatusUpdatesEntryMixin.add_update HTML rendering with
timeout and retry
"""
review_request = self.create_review_request()
status_update = StatusUpdate(state=StatusUpdate.TIMEOUT,
description='My description.',
summary='My summary.',
review_request=review_request)
status_update.extra_data['can_retry'] = True
status_update.save()
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertHTMLEqual(
status_update.summary_html,
('<div class="status-update-summary-entry'
' status-update-state-failure">\n'
' <span class="summary">My summary.</span>\n'
' timed out.\n'
' <input class="status-update-request-run"'
' data-status-update-id="1"'
' type="button" value="Retry" />'
'</div>'))
@add_fixtures(['test_users'])
def test_add_update_html_rendering_with_not_yet_run(self):
"""Testing StatusUpdatesEntryMixin.add_update HTML rendering with not
yet run
"""
review_request = self.create_review_request()
status_update = StatusUpdate(state=StatusUpdate.NOT_YET_RUN,
description='My description.',
summary='My summary.',
review_request=review_request)
status_update.save()
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
self.assertHTMLEqual(
status_update.summary_html,
('<div class="status-update-summary-entry'
' status-update-state-not-yet-run">\n'
' <span class="summary">My summary.</span>\n'
' not yet run.\n'
' <input class="status-update-request-run"'
' data-status-update-id="1"'
' type="button" value="Run" />'
'</div>'))
@add_fixtures(['test_users'])
def test_add_comment(self):
"""Testing StatusUpdatesEntryMixin.add_comment"""
review_request = self.create_review_request()
review = self.create_review(review_request)
comment = self.create_general_comment(review)
# This is needed by the entry's add_comment(). It's normally built when
# creating the entries and their data.
comment.review_obj = review
status_update = self.create_status_update(
review_request=review_request,
review=review)
entry = StatusUpdatesEntryMixin()
entry.add_update(status_update)
entry.add_comment('general_comments', comment)
self.assertEqual(status_update.comments['general_comments'], [comment])
def test_finalize_with_all_states(self):
"""Testing StatusUpdatesEntryMixin.finalize with all states"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.DONE_FAILURE))
for i in range(2):
entry.add_update(StatusUpdate(state=StatusUpdate.DONE_SUCCESS))
for i in range(3):
entry.add_update(StatusUpdate(state=StatusUpdate.PENDING))
for i in range(4):
entry.add_update(StatusUpdate(state=StatusUpdate.NOT_YET_RUN))
for i in range(5):
entry.add_update(StatusUpdate(state=StatusUpdate.ERROR))
for i in range(6):
entry.add_update(StatusUpdate(state=StatusUpdate.TIMEOUT))
entry.finalize()
self.assertEqual(
entry.state_summary,
'1 failed, 2 succeeded, 3 pending, 4 not yet run, '
'5 failed with error, 6 timed out')
def test_finalize_with_done_failure(self):
"""Testing StatusUpdatesEntryMixin.finalize with DONE_FAILURE"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.DONE_FAILURE))
entry.finalize()
self.assertEqual(entry.state_summary, '1 failed')
self.assertEqual(entry.state_summary_class,
'status-update-state-failure')
def test_finalize_with_error(self):
"""Testing StatusUpdatesEntryMixin.finalize with ERROR"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.ERROR))
entry.finalize()
self.assertEqual(entry.state_summary, '1 failed with error')
self.assertEqual(entry.state_summary_class,
'status-update-state-failure')
def test_finalize_with_timeout(self):
"""Testing StatusUpdatesEntryMixin.finalize with TIMEOUT"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.TIMEOUT))
entry.finalize()
self.assertEqual(entry.state_summary, '1 timed out')
self.assertEqual(entry.state_summary_class,
'status-update-state-failure')
def test_finalize_with_pending(self):
"""Testing StatusUpdatesEntryMixin.finalize with PENDING"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.PENDING))
entry.finalize()
self.assertEqual(entry.state_summary, '1 pending')
self.assertEqual(entry.state_summary_class,
'status-update-state-pending')
def test_finalize_with_not_yet_run(self):
"""Testing StatusUpdatesEntryMixin.finalize with NOT_YET_RUN"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.NOT_YET_RUN))
entry.finalize()
self.assertEqual(entry.state_summary, '1 not yet run')
self.assertEqual(entry.state_summary_class,
'status-update-state-pending')
def test_finalize_with_done_success(self):
"""Testing StatusUpdatesEntryMixin.finalize with DONE_SUCCESS"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.DONE_SUCCESS))
entry.finalize()
self.assertEqual(entry.state_summary, '1 succeeded')
self.assertEqual(entry.state_summary_class,
'status-update-state-success')
def test_finalize_with_failures_take_precedence(self):
"""Testing StatusUpdatesEntryMixin.finalize with failures taking
precedence over PENDING and DONE_SUCCESS
"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.DONE_FAILURE))
entry.add_update(StatusUpdate(state=StatusUpdate.PENDING))
entry.add_update(StatusUpdate(state=StatusUpdate.DONE_SUCCESS))
entry.add_update(StatusUpdate(state=StatusUpdate.NOT_YET_RUN))
entry.finalize()
self.assertEqual(entry.state_summary,
'1 failed, 1 succeeded, 1 pending, 1 not yet run')
self.assertEqual(entry.state_summary_class,
'status-update-state-failure')
def test_finalize_with_pending_take_precedence(self):
"""Testing StatusUpdatesEntryMixin.finalize with PENDING taking
precedence SUCCESS
"""
entry = StatusUpdatesEntryMixin()
entry.add_update(StatusUpdate(state=StatusUpdate.PENDING))
entry.add_update(StatusUpdate(state=StatusUpdate.DONE_SUCCESS))
entry.finalize()
self.assertEqual(entry.state_summary, '1 succeeded, 1 pending')
self.assertEqual(entry.state_summary_class,
'status-update-state-pending')
@add_fixtures(['test_users'])
def test_populate_status_updates(self):
"""Testing StatusUpdatesEntryMixin.populate_status_updates"""
review_request = self.create_review_request()
review = self.create_review(review_request, public=True)
comment = self.create_general_comment(review)
# This state is normally set in ReviewRequestPageData.
comment._type = 'general_comments'
comment.review_obj = review
status_updates = [
StatusUpdate(state=StatusUpdate.PENDING),
StatusUpdate(state=StatusUpdate.DONE_FAILURE,
review=review)
]
request = RequestFactory().get('/r/1/')
request.user = AnonymousUser()
data = ReviewRequestPageData(review_request=review_request,
request=request)
data.review_comments[review.pk] = [comment]
entry = StatusUpdatesEntryMixin()
entry.collapsed = True
entry.data = data
entry.populate_status_updates(status_updates)
self.assertTrue(entry.collapsed)
self.assertEqual(entry.status_updates, status_updates)
status_update = entry.status_updates[0]
self.assertIsNone(status_update.review)
self.assertEqual(
status_update.comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [],
})
status_update = entry.status_updates[1]
self.assertEqual(status_update.review, review)
self.assertEqual(
status_update.comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [comment],
})
@add_fixtures(['test_users'])
def test_populate_status_updates_with_draft_replies(self):
"""Testing StatusUpdatesEntryMixin.populate_status_updates with
draft replies
"""
review_request = self.create_review_request()
review = self.create_review(review_request, public=True)
comment = self.create_general_comment(review)
reply = self.create_reply(review)
reply_comment = self.create_general_comment(reply, reply_to=comment)
# This state is normally set in ReviewRequestPageData.
comment._type = 'general_comments'
comment.review_obj = review
status_updates = [
StatusUpdate(state=StatusUpdate.PENDING),
StatusUpdate(state=StatusUpdate.DONE_FAILURE,
review=review)
]
request = RequestFactory().get('/r/1/')
request.user = AnonymousUser()
data = ReviewRequestPageData(review_request=review_request,
request=request)
data.review_comments[review.pk] = [comment]
data.draft_reply_comments[review.pk] = [reply_comment]
entry = StatusUpdatesEntryMixin()
entry.data = data
entry.populate_status_updates(status_updates)
self.assertEqual(entry.status_updates, status_updates)
status_update = entry.status_updates[0]
self.assertIsNone(status_update.review)
self.assertEqual(
status_update.comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [],
})
status_update = entry.status_updates[1]
self.assertEqual(status_update.review, review)
self.assertEqual(
status_update.comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [comment],
})
class InitialStatusUpdatesEntryTests(TestCase):
"""Unit tests for InitialStatusUpdatesEntry."""
fixtures = ['test_users']
def setUp(self):
super(InitialStatusUpdatesEntryTests, self).setUp()
self.request = RequestFactory().get('/r/1/')
self.request.user = AnonymousUser()
self.review_request = self.create_review_request(
time_added=datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
self.review = self.create_review(
self.review_request,
public=True,
timestamp=datetime(2017, 9, 14, 15, 40, 0, tzinfo=utc))
self.general_comment = self.create_general_comment(self.review,
issue_opened=False)
self.status_update = self.create_status_update(
self.review_request,
review=self.review,
timestamp=datetime(2017, 9, 14, 15, 40, 0, tzinfo=utc),
state=StatusUpdate.DONE_FAILURE)
self.data = ReviewRequestPageData(
review_request=self.review_request,
request=self.request,
last_visited=self.review_request.time_added + timedelta(days=10))
def test_added_timestamp(self):
"""Testing InitialStatusUpdatesEntry.added_timestamp"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertEqual(entry.added_timestamp,
datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
def test_updated_timestamp(self):
"""Testing InitialStatusUpdatesEntry.updated_timestamp"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertEqual(entry.updated_timestamp,
datetime(2017, 9, 14, 15, 40, 0, tzinfo=utc))
def test_build_entries(self):
"""Testing InitialStatusUpdatesEntry.build_entries"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entries = list(InitialStatusUpdatesEntry.build_entries(self.data))
self.assertEqual(len(entries), 1)
entry = entries[0]
self.assertEqual(entry.added_timestamp,
datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
self.assertEqual(entry.updated_timestamp,
datetime(2017, 9, 14, 15, 40, 0, tzinfo=utc))
self.assertEqual(entry.status_updates, [self.status_update])
self.assertEqual(
entry.status_updates_by_review,
{
self.review.pk: self.status_update,
})
self.assertEqual(
entry.status_updates[0].comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [self.general_comment],
})
def test_build_entries_with_changedesc(self):
"""Testing InitialStatusUpdatesEntry.build_entries with
ChangeDescription following this entry
"""
self.review_request.changedescs.create(public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entries = list(InitialStatusUpdatesEntry.build_entries(self.data))
self.assertEqual(len(entries), 1)
entry = entries[0]
self.assertEqual(entry.status_updates, [self.status_update])
self.assertEqual(
entry.status_updates_by_review,
{
self.review.pk: self.status_update,
})
status_update = entry.status_updates[0]
self.assertEqual(status_update.review, self.review)
self.assertIsNone(status_update.change_description)
self.assertEqual(
status_update.comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [self.general_comment],
})
def test_is_entry_new_with_timestamp(self):
"""Testing InitialStatusUpdatesEntry.is_entry_new"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
user = User.objects.create_user(username='test-user',
email='user@example.com')
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.is_entry_new(
last_visited=self.review_request.last_updated - timedelta(days=1),
user=user))
def test_collapsed_with_no_changedescs_and_last_visited(self):
"""Testing InitialStatusUpdatesEntry.collapsed with no Change
Descriptions and page previously visited
"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertTrue(len(self.data.changedescs) == 0)
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertTrue(entry.collapsed)
def test_collapsed_with_no_changedescs_and_not_last_visited(self):
"""Testing InitialStatusUpdatesEntry.collapsed with no Change
Descriptions and page not previously visited
"""
self.data.last_visited = None
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertTrue(len(self.data.changedescs) == 0)
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
def test_collapsed_with_changedescs_and_last_visited(self):
"""Testing InitialStatusUpdatesEntry.collapsed with Change Descriptions
and page previously visited
"""
self.review_request.changedescs.create(public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertTrue(len(self.data.changedescs) > 0)
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertTrue(entry.collapsed)
def test_collapsed_with_changedescs_and_no_last_visited(self):
"""Testing InitialStatusUpdatesEntry.collapsed with Change Descriptions
and page not previously visited
"""
self.data.last_visited = None
self.review_request.changedescs.create(public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertTrue(len(self.data.changedescs) > 0)
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
def test_collapsed_with_pending_status_updates(self):
"""Testing InitialStatusUpdatesEntry.collapsed with pending status
updates
"""
self.status_update.state = StatusUpdate.PENDING
self.status_update.review = None
self.status_update.save(update_fields=('state', 'review'))
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
def test_collapsed_with_not_yet_run_status_updates(self):
"""Testing InitialStatusUpdatesEntry.collapsed with not yet run status
updates
"""
self.status_update.state = StatusUpdate.NOT_YET_RUN
self.status_update.review = None
self.status_update.save(update_fields=('state', 'review'))
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_update_timestamp_gt_last_visited(self):
"""Testing InitialStatusUpdatesEntry.collapsed with status update
timestamp newer than last visited
"""
# To update the status update's timestamp, we need to perform an
# update() call on the queryset and reload.
StatusUpdate.objects.filter(pk=self.status_update.pk).update(
timestamp=self.data.last_visited + timedelta(days=1))
self.status_update = StatusUpdate.objects.get(pk=self.status_update.pk)
self.assertTrue(self.status_update.timestamp > self.data.last_visited)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_update_timestamp_lt_last_visited(self):
"""Testing InitialStatusUpdatesEntry.collapsed with status update
timestamp newer than last visited
"""
# To update the status update's timestamp, we need to perform an
# update() call on the queryset and reload.
StatusUpdate.objects.filter(pk=self.status_update.pk).update(
timestamp=self.data.last_visited - timedelta(days=1))
self.status_update = StatusUpdate.objects.get(pk=self.status_update.pk)
self.assertTrue(self.status_update.timestamp < self.data.last_visited)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertTrue(entry.collapsed)
def test_collapsed_with_status_updates_and_no_reviews(self):
"""Testing InitialStatusUpdatesEntry.collapsed with status updates
and no reviews
"""
self.status_update.state = StatusUpdate.DONE_SUCCESS
self.status_update.review = None
self.status_update.save(update_fields=('state', 'review'))
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertTrue(entry.collapsed)
def test_collapsed_with_status_updates_and_draft_comment_replies(self):
"""Testing InitialStatusUpdatesEntry.collapsed with status updates
containing draft comment replies
"""
self.request.user = self.review_request.submitter
self.assertEqual(self.status_update.state, StatusUpdate.DONE_FAILURE)
reply = self.create_reply(self.review, user=self.request.user)
self.create_general_comment(reply, reply_to=self.general_comment)
self.review_request.changedescs.create(public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(self.review.pk, self.data.draft_reply_comments)
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_updates_and_draft_body_top_replies(self):
"""Testing InitialStatusUpdatesEntry.collapsed with status updates
containing draft replies to body_top
"""
self.request.user = self.review_request.submitter
self.assertEqual(self.status_update.state, StatusUpdate.DONE_FAILURE)
self.create_reply(self.review,
user=self.request.user,
body_top_reply_to=self.review)
self.review_request.changedescs.create(public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(self.review.pk, self.data.draft_body_top_replies)
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_updates_and_draft_body_bottom_replies(self):
"""Testing InitialStatusUpdatesEntry.collapsed with status updates
containing draft replies to body_bottom
"""
self.request.user = self.review_request.submitter
self.assertEqual(self.status_update.state, StatusUpdate.DONE_FAILURE)
self.create_reply(self.review,
user=self.request.user,
body_bottom_reply_to=self.review)
self.review_request.changedescs.create(public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(self.review.pk, self.data.draft_body_bottom_replies)
entry = InitialStatusUpdatesEntry(data=self.data)
self.assertFalse(entry.collapsed)
class ReviewEntryTests(TestCase):
"""Unit tests for ReviewEntry."""
fixtures = ['test_users']
def setUp(self):
super(ReviewEntryTests, self).setUp()
self.request = RequestFactory().get('/r/1/')
self.request.user = AnonymousUser()
self.review_request = self.create_review_request(publish=True)
self.review = self.create_review(
self.review_request,
id=123,
public=True,
timestamp=datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
self.changedesc = self.review_request.changedescs.create(
timestamp=self.review.timestamp + timedelta(days=10),
public=True)
self.data = ReviewRequestPageData(
review_request=self.review_request,
request=self.request,
last_visited=self.changedesc.timestamp)
def test_added_timestamp(self):
"""Testing ReviewEntry.added_timestamp"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertEqual(entry.added_timestamp,
datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
def test_updated_timestamp(self):
"""Testing ReviewEntry.updated_timestamp"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertEqual(entry.updated_timestamp,
datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
def test_updated_timestamp_with_replies(self):
"""Testing ReviewEntry.updated_timestamp with replies"""
self.create_reply(self.review,
timestamp=datetime(2017, 9, 14, 15, 40, 0,
tzinfo=utc),
publish=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertEqual(entry.updated_timestamp,
datetime(2017, 9, 14, 15, 40, 0, tzinfo=utc))
def test_get_dom_element_id(self):
"""Testing ReviewEntry.get_dom_element_id"""
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertEqual(entry.get_dom_element_id(), 'review123')
def test_collapsed_with_open_issues(self):
"""Testing ReviewEntry.collapsed with open issues"""
self.create_general_comment(self.review,
issue_opened=True,
issue_status=BaseComment.OPEN)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.collapsed)
def test_collapsed_with_open_issues_verifying_resolved(self):
"""Testing ReviewEntry.collapsed with open issues marked Verifying
Resolved
"""
self.create_general_comment(
self.review,
issue_opened=True,
issue_status=BaseComment.VERIFYING_RESOLVED)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.collapsed)
def test_collapsed_with_open_issues_verifying_dropped(self):
"""Testing ReviewEntry.collapsed with open issues marked Verifying
Dropped
"""
self.create_general_comment(self.review,
issue_opened=True,
issue_status=BaseComment.VERIFYING_DROPPED)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.collapsed)
def test_collapsed_with_dropped_issues(self):
"""Testing ReviewEntry.collapsed with dropped issues"""
self.create_general_comment(self.review,
issue_opened=True,
issue_status=BaseComment.DROPPED)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertTrue(entry.collapsed)
def test_collapsed_with_resolved_issues(self):
"""Testing ReviewEntry.collapsed with resolved issues"""
self.create_general_comment(self.review,
issue_opened=True,
issue_status=BaseComment.RESOLVED)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertTrue(entry.collapsed)
def test_collapsed_with_draft_reply_comments(self):
"""Testing ReviewEntry.collapsed with draft reply comments"""
self.request.user = self.review_request.submitter
comment = self.create_general_comment(self.review)
reply = self.create_reply(self.review, user=self.request.user)
self.create_general_comment(reply, reply_to=comment)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(self.review.pk, self.data.draft_reply_comments)
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.collapsed)
def test_collapsed_with_draft_body_top_replies(self):
"""Testing ReviewEntry.collapsed with draft replies to body_top"""
self.request.user = self.review_request.submitter
self.create_reply(self.review,
user=self.request.user,
body_top_reply_to=self.review)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(self.review.pk, self.data.draft_body_top_replies)
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.collapsed)
def test_collapsed_with_draft_body_bottom_replies(self):
"""Testing ReviewEntry.collapsed with draft replies to body_bottom"""
self.request.user = self.review_request.submitter
self.create_reply(self.review,
user=self.request.user,
body_bottom_reply_to=self.review)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(self.review.pk, self.data.draft_body_bottom_replies)
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.collapsed)
def test_collapsed_with_reply_older_than_last_visited(self):
"""Testing ReviewEntry.collapsed with reply older than last visited"""
reply = self.create_reply(
self.review,
publish=True,
timestamp=self.review.timestamp + timedelta(days=2))
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.data.last_visited = reply.timestamp + timedelta(days=1)
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertTrue(entry.collapsed)
def test_collapsed_with_reply_newer_than_last_visited(self):
"""Testing ReviewEntry.collapsed with reply newer than last visited"""
reply = self.create_reply(
self.review,
publish=True,
timestamp=self.review.timestamp + timedelta(days=2))
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.data.last_visited = reply.timestamp - timedelta(days=1)
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.collapsed)
def test_get_js_model_data(self):
"""Testing ReviewEntry.get_js_model_data"""
self.review.ship_it = True
self.review.publish()
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertEqual(entry.get_js_model_data(), {
'reviewData': {
'authorName': 'dopey',
'id': self.review.pk,
'bodyTop': 'Test Body Top',
'bodyBottom': 'Test Body Bottom',
'public': True,
'shipIt': True,
},
})
@add_fixtures(['test_scmtools'])
def test_get_js_model_data_with_diff_comments(self):
"""Testing ReviewEntry.get_js_model_data with diff comments"""
self.review_request.repository = self.create_repository()
diffset = self.create_diffset(self.review_request)
filediff = self.create_filediff(diffset)
comment1 = self.create_diff_comment(self.review, filediff)
comment2 = self.create_diff_comment(self.review, filediff)
self.review.publish()
# This is needed by the entry's add_comment(). It's normally built when
# creating the entries and their data.
comment1.review_obj = self.review
comment2.review_obj = self.review
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ReviewEntry(data=self.data,
review=self.review)
entry.add_comment('diff_comments', comment1)
entry.add_comment('diff_comments', comment2)
self.assertEqual(entry.get_js_model_data(), {
'reviewData': {
'authorName': 'dopey',
'id': self.review.pk,
'bodyTop': 'Test Body Top',
'bodyBottom': 'Test Body Bottom',
'public': True,
'shipIt': False,
},
'diffCommentsData': [
(str(comment1.pk), str(filediff.pk)),
(str(comment2.pk), str(filediff.pk)),
],
})
def test_add_comment_with_no_open_issues(self):
"""Testing ReviewEntry.add_comment with comment not opening an issue"""
self.request.user = self.review_request.submitter
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.has_issues)
self.assertEqual(entry.issue_open_count, 0)
entry.add_comment('general_comments', GeneralComment())
self.assertFalse(entry.has_issues)
self.assertEqual(entry.issue_open_count, 0)
def test_add_comment_with_open_issues(self):
"""Testing ReviewEntry.add_comment with comment opening an issue"""
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.has_issues)
self.assertEqual(entry.issue_open_count, 0)
entry.add_comment('general_comments',
GeneralComment(issue_opened=True,
issue_status=GeneralComment.OPEN))
self.assertTrue(entry.has_issues)
self.assertEqual(entry.issue_open_count, 1)
def test_add_comment_with_open_issues_and_viewer_is_owner(self):
"""Testing ReviewEntry.add_comment with comment opening an issue and
the review request owner is viewing the page
"""
self.request.user = self.review_request.submitter
entry = ReviewEntry(data=self.data,
review=self.review)
self.assertFalse(entry.has_issues)
self.assertEqual(entry.issue_open_count, 0)
entry.add_comment('general_comments',
GeneralComment(issue_opened=True,
issue_status=GeneralComment.OPEN))
self.assertTrue(entry.has_issues)
self.assertEqual(entry.issue_open_count, 1)
def test_build_entries(self):
"""Testing ReviewEntry.build_entries"""
review1 = self.create_review(
self.review_request,
timestamp=self.review.timestamp - timedelta(days=2),
public=True)
review2 = self.review
comment = self.create_general_comment(review1)
# These shouldn't show up in the results.
self.create_review(
self.review_request,
timestamp=self.review.timestamp - timedelta(days=1),
public=False)
self.create_reply(review1)
status_update_review = self.create_review(self.review_request,
public=True)
self.create_general_comment(status_update_review)
self.create_status_update(self.review_request,
review=status_update_review,
state=StatusUpdate.DONE_FAILURE)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entries = list(ReviewEntry.build_entries(self.data))
self.assertEqual(len(entries), 2)
# These will actually be in database query order (newest to oldest),
# not the order shown on the page.
entry = entries[0]
self.assertEqual(entry.review, review2)
self.assertEqual(
entry.comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [],
})
entry = entries[1]
self.assertEqual(entry.review, review1)
self.assertEqual(
entry.comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [comment],
})
class ChangeEntryTests(TestCase):
"""Unit tests for ChangeEntry."""
fixtures = ['test_users']
def setUp(self):
super(ChangeEntryTests, self).setUp()
self.request = RequestFactory().get('/r/1/')
self.request.user = AnonymousUser()
self.review_request = self.create_review_request(publish=True)
self.changedesc = ChangeDescription.objects.create(
id=123,
public=True,
timestamp=datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
self.review_request.changedescs.add(self.changedesc)
self.data = ReviewRequestPageData(review_request=self.review_request,
request=self.request)
def test_added_timestamp(self):
"""Testing ChangeEntry.added_timestamp"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertEqual(entry.added_timestamp,
datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
def test_updated_timestamp(self):
"""Testing ChangeEntry.updated_timestamp"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertEqual(entry.updated_timestamp,
datetime(2017, 9, 7, 17, 0, 0, tzinfo=utc))
def test_updated_timestamp_with_status_update(self):
"""Testing ChangeEntry.updated_timestamp with status updates"""
self.create_status_update(
self.review_request,
change_description=self.changedesc,
timestamp=datetime(2017, 9, 14, 15, 40, 0, tzinfo=utc))
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertEqual(entry.updated_timestamp,
datetime(2017, 9, 14, 15, 40, 0, tzinfo=utc))
def test_get_dom_element_id(self):
"""Testing ChangeEntry.get_dom_element_id"""
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertEqual(entry.get_dom_element_id(), 'changedesc123')
def test_collapsed_with_older_than_latest_changedesc(self):
"""Testing ChangeEntry.collapsed with older than latest Change
Description
"""
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertTrue(entry.collapsed)
def test_collapsed_with_latest_changedesc(self):
"""Testing ChangeEntry.collapsed with older than latest Change
Description
"""
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertEqual(self.changedesc.timestamp,
self.data.latest_changedesc_timestamp)
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_updates_and_no_reviews(self):
"""Testing ChangeEntry.collapsed with status updates and no reviews"""
self.create_status_update(self.review_request,
change_description=self.changedesc,
state=StatusUpdate.DONE_SUCCESS)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertTrue(entry.collapsed)
def test_collapsed_with_status_updates_and_draft_comment_replies(self):
"""Testing ChangeEntry.collapsed with status updates containing draft
comment replies
"""
self.request.user = self.review_request.submitter
review = self.create_review(self.review_request, publish=True)
comment = self.create_general_comment(review)
self.create_status_update(self.review_request,
review=review,
change_description=self.changedesc,
state=StatusUpdate.DONE_FAILURE)
reply = self.create_reply(review, user=self.request.user)
self.create_general_comment(reply, reply_to=comment)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(review.pk, self.data.draft_reply_comments)
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertFalse(entry.collapsed)
def test_collapsed_with_pending_status_updates(self):
"""Testing ChangeEntry.collapsed with pending status updates"""
self.request.user = self.review_request.submitter
self.create_status_update(self.review_request,
change_description=self.changedesc,
state=StatusUpdate.PENDING)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertFalse(entry.collapsed)
def test_collapsed_with_not_yet_run_status_updates(self):
"""Testing ChangeEntry.collapsed with not yet run status updates"""
self.request.user = self.review_request.submitter
self.create_status_update(self.review_request,
change_description=self.changedesc,
state=StatusUpdate.NOT_YET_RUN)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_update_timestamp_gt_last_visited(self):
"""Testing ChangeEntry.collapsed with status update timestamp newer
than last visited
"""
self.request.user = self.review_request.submitter
self.data.last_visited = self.changedesc.timestamp + timedelta(days=1)
status_update = self.create_status_update(
self.review_request,
change_description=self.changedesc,
state=StatusUpdate.DONE_SUCCESS,
timestamp=self.data.last_visited + timedelta(days=1))
self.assertTrue(status_update.timestamp > self.data.last_visited)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_update_timestamp_lt_last_visited(self):
"""Testing ChangeEntry.collapsed with status update timestamp older
than last visited
"""
self.request.user = self.review_request.submitter
self.data.last_visited = self.changedesc.timestamp + timedelta(days=1)
status_update = self.create_status_update(
self.review_request,
change_description=self.changedesc,
state=StatusUpdate.DONE_SUCCESS,
timestamp=self.data.last_visited - timedelta(days=1))
self.assertTrue(status_update.timestamp < self.data.last_visited)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertTrue(entry.collapsed)
def test_collapsed_with_status_updates_and_draft_body_top_replies(self):
"""Testing ChangeEntry.collapsed with status updates containing draft
comment replies to body_top
"""
self.request.user = self.review_request.submitter
review = self.create_review(self.review_request, publish=True)
self.create_status_update(self.review_request,
review=review,
change_description=self.changedesc,
state=StatusUpdate.DONE_FAILURE)
self.create_reply(review,
user=self.request.user,
body_top_reply_to=review)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(review.pk, self.data.draft_body_top_replies)
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertFalse(entry.collapsed)
def test_collapsed_with_status_updates_and_draft_body_bottom_replies(self):
"""Testing ChangeEntry.collapsed with status updates containing draft
comment replies to body_bottom
"""
self.request.user = self.review_request.submitter
review = self.create_review(self.review_request, publish=True)
self.create_status_update(self.review_request,
review=review,
change_description=self.changedesc,
state=StatusUpdate.DONE_FAILURE)
self.create_reply(review,
user=self.request.user,
body_bottom_reply_to=review)
self.review_request.changedescs.create(
timestamp=self.changedesc.timestamp + timedelta(days=1),
public=True)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
self.assertIn(review.pk, self.data.draft_body_bottom_replies)
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertFalse(entry.collapsed)
def test_get_js_model_data(self):
"""Testing ChangeEntry.get_js_model_data for standard ChangeDescription
"""
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
self.assertEqual(entry.get_js_model_data(), {
'pendingStatusUpdates': False,
})
@add_fixtures(['test_scmtools'])
def test_get_js_model_data_with_status_updates(self):
"""Testing ChangeEntry.get_js_model_data for ChangeDescription with
status updates
"""
self.review_request.repository = self.create_repository()
diffset = self.create_diffset(self.review_request)
filediff = self.create_filediff(diffset)
review = self.create_review(self.review_request,
body_top='Body top',
body_bottom='Body bottom',
ship_it=True)
comment1 = self.create_diff_comment(review, filediff)
comment2 = self.create_diff_comment(review, filediff)
review.publish()
# This is needed by the entry's add_comment(). It's normally built when
# creating the entries and their data.
comment1.review_obj = review
comment2.review_obj = review
status_update = self.create_status_update(
self.review_request,
review=review,
change_description=self.changedesc)
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
entry.add_update(status_update)
entry.add_comment('diff_comments', comment1)
entry.add_comment('diff_comments', comment2)
self.assertEqual(entry.get_js_model_data(), {
'reviewsData': [
{
'authorName': 'dopey',
'id': review.pk,
'bodyTop': 'Body top',
'bodyBottom': 'Body bottom',
'public': True,
'shipIt': True,
},
],
'diffCommentsData': [
(str(comment1.pk), str(filediff.pk)),
(str(comment2.pk), str(filediff.pk)),
],
'pendingStatusUpdates': False,
})
def test_build_entries(self):
"""Testing ChangeEntry.build_entries"""
changedesc1 = self.changedesc
changedesc2 = self.review_request.changedescs.create(
timestamp=changedesc1.timestamp + timedelta(days=1),
public=True)
review = self.create_review(self.review_request, public=True)
comment = self.create_general_comment(review)
status_update = self.create_status_update(
self.review_request,
review=review,
change_description=changedesc2)
self.data.query_data_pre_etag()
self.data.query_data_post_etag()
entries = list(ChangeEntry.build_entries(self.data))
# These will actually be in database query order (newest to oldest),
# not the order shown on the page.
entry = entries[0]
self.assertEqual(entry.changedesc, changedesc2)
self.assertFalse(entry.collapsed)
self.assertEqual(entry.status_updates, [status_update])
self.assertEqual(
entry.status_updates_by_review,
{
review.pk: status_update,
})
self.assertEqual(
entry.status_updates[0].comments,
{
'diff_comments': [],
'screenshot_comments': [],
'file_attachment_comments': [],
'general_comments': [comment],
})
entry = entries[1]
self.assertEqual(entry.changedesc, changedesc1)
self.assertTrue(entry.collapsed)
self.assertEqual(entry.status_updates, [])
def test_is_entry_new_with_timestamp(self):
"""Testing ChangeEntry.is_entry_new with timestamp"""
entry = ChangeEntry(data=self.data,
changedesc=self.changedesc)
user = User.objects.create_user(username='test-user',
email='user@example.com')
self.assertTrue(entry.is_entry_new(
last_visited=self.changedesc.timestamp - timedelta(days=1),
user=user))
self.assertFalse(entry.is_entry_new(
last_visited=self.changedesc.timestamp,
user=user))
self.assertFalse(entry.is_entry_new(
last_visited=self.changedesc.timestamp + timedelta(days=1),
user=user))
|
2dfce237148ecfc7c41c742fb26e8fa5a788621e
|
ec85250addb7357dfe7bb3e0680d53fc7b0fd8fb
|
/examples/docs_snippets/docs_snippets_tests/concepts_tests/assets_tests/test_asset_definition_metadata.py
|
e088501c66356539a3199bb863e050930846afed
|
[
"Apache-2.0"
] |
permissive
|
dagster-io/dagster
|
6adb5deee8bcf3ea1866a6a64f2ed81e1db5e73a
|
fe21995e0402878437a828c6a4244025eac8c43b
|
refs/heads/master
| 2023-09-05T20:46:08.203794
| 2023-09-05T19:54:52
| 2023-09-05T19:54:52
| 131,619,646
| 8,565
| 1,154
|
Apache-2.0
| 2023-09-14T21:57:37
| 2018-04-30T16:30:04
|
Python
|
UTF-8
|
Python
| false
| false
| 138
|
py
|
test_asset_definition_metadata.py
|
from docs_snippets.concepts.assets.asset_definition_metadata import my_asset
def test():
assert my_asset.op.outs["result"].metadata
|
6c111646bfe46fb79ca65f79baeac703b41cd7de
|
10ddfb2d43a8ec5d47ce35dc0b8acf4fd58dea94
|
/Python/special-binary-string.py
|
bd11d040bbabb5a129109282a49801bdcafd1bb2
|
[
"MIT"
] |
permissive
|
kamyu104/LeetCode-Solutions
|
f54822059405ef4df737d2e9898b024f051fd525
|
4dc4e6642dc92f1983c13564cc0fd99917cab358
|
refs/heads/master
| 2023-09-02T13:48:26.830566
| 2023-08-28T10:11:12
| 2023-08-28T10:11:12
| 152,631,182
| 4,549
| 1,651
|
MIT
| 2023-05-31T06:10:33
| 2018-10-11T17:38:35
|
C++
|
UTF-8
|
Python
| false
| false
| 628
|
py
|
special-binary-string.py
|
# Time: f(n) = k * f(n/k) + n/k * klogk <= O(logn * nlogk) <= O(n^2)
# n is the length of S, k is the max number of special strings in each depth
# Space: O(n)
class Solution(object):
def makeLargestSpecial(self, S):
"""
:type S: str
:rtype: str
"""
result = []
anchor = count = 0
for i, v in enumerate(S):
count += 1 if v == '1' else -1
if count == 0:
result.append("1{}0".format(self.makeLargestSpecial(S[anchor+1:i])))
anchor = i+1
result.sort(reverse = True)
return "".join(result)
|
84af2add2bc09117bd2afc0ae36f042c163cc380
|
1299ffaa8bb1cd13db0ed53598b638ec36c555ac
|
/benedict/core/standardize.py
|
d255ad664024ffe2af7f74e615da8fd4b32cb880
|
[
"MIT"
] |
permissive
|
fabiocaccamo/python-benedict
|
c93240bf526696c7b11043fef898a461d3fd6f14
|
27d76331a00fff1fffe7890a77ffd93c8833aeda
|
refs/heads/main
| 2023-08-31T04:06:24.451591
| 2023-08-22T20:59:04
| 2023-08-22T20:59:04
| 187,202,744
| 1,118
| 51
|
MIT
| 2023-09-08T12:43:04
| 2019-05-17T11:13:40
|
Python
|
UTF-8
|
Python
| false
| false
| 515
|
py
|
standardize.py
|
import re
from slugify import slugify
from benedict.core.rename import rename
from benedict.core.traverse import traverse
from benedict.utils import type_util
def _standardize_item(d, key, value):
if type_util.is_string(key):
# https://stackoverflow.com/a/12867228/2096218
norm_key = re.sub(r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))", r"_\1", key)
norm_key = slugify(norm_key, separator="_")
rename(d, key, norm_key)
def standardize(d):
traverse(d, _standardize_item)
|
9ba4d7f86dcf6f16099333521d1467b598bc4ff1
|
c024edfeda471bd2bdb42531f59c6b5fa0621ea0
|
/config/settings.template.py
|
6ff4c9ec2e418ede24ea547317e1bf42b0a376c6
|
[
"MIT"
] |
permissive
|
aellerton/demo-allauth-bootstrap
|
58bffa2a6620dc614a07fad9c353531fbb010762
|
d55590a008288490f81cb4f128ab8b93b8810678
|
refs/heads/master
| 2022-07-03T23:24:09.622983
| 2022-06-04T02:06:28
| 2022-06-04T02:06:28
| 20,476,587
| 226
| 60
|
NOASSERTION
| 2022-04-22T23:37:16
| 2014-06-04T08:57:11
|
Python
|
UTF-8
|
Python
| false
| false
| 5,145
|
py
|
settings.template.py
|
"""
Django settings for allauthdemo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
from os.path import dirname, join
BASE_DIR = dirname(dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '{{ secret_key }}'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap4', # optional module for making bootstrap forms easier
'allauth',
'allauth.account',
'allauth.socialaccount',
{% if facebook or google %}
{% if facebook %}
'allauth.socialaccount.providers.facebook', # enabled by configure
{% endif %}
{% if google %}
'allauth.socialaccount.providers.google', # enabled by configure
{% endif %}
#'allauth.socialaccount.providers.dropbox',
#'allauth.socialaccount.providers.github',
#'allauth.socialaccount.providers.linkedin',
# etc
{% endif %}
'allauthdemo.auth',
'allauthdemo.demo',
)
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'allauthdemo.urls'
WSGI_APPLICATION = 'allauthdemo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
# Authentication
AUTHENTICATION_BACKENDS = (
"allauth.account.auth_backends.AuthenticationBackend",
)
TEMPLATES = [
{
#'TEMPLATE_DEBUG': True,
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
# allauth templates: you could copy this directory into your
# project and tweak it according to your needs
# join(PROJECT_ROOT, 'templates', 'uniform', 'allauth'),
# example project specific templates
join(BASE_DIR, 'allauthdemo', 'templates', 'plain', 'example'),
#join(BASE_DIR, 'allauthdemo', 'templates', 'bootstrap', 'allauth'),
join(BASE_DIR, 'allauthdemo', 'templates', 'allauth'),
join(BASE_DIR, 'allauthdemo', 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
# needed for admin templates
'django.contrib.auth.context_processors.auth',
# these *may* not be needed
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# allauth needs this from django
'django.template.context_processors.request',
# allauth specific context processors
#'allauth.account.context_processors.account',
#'allauth.socialaccount.context_processors.socialaccount',
],
},
}
]
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
STATICFILES_DIRS = (
join(BASE_DIR, "static"),
)
SITE_ID = 1
AUTH_USER_MODEL = 'allauthdemo_auth.User'
LOGIN_REDIRECT_URL = '/member/'
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_USERNAME_MIN_LENGTH = 3
# ACCOUNT_EMAIL_VERIFICATION = 'none' # testing...
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
SOCIALACCOUNT_AUTO_SIGNUP = False # require social accounts to use the signup form ... I think
# For custom sign-up form:
# http://stackoverflow.com/questions/12303478/how-to-customize-user-profile-when-using-django-allauth
SOCIALACCOUNT_PROVIDERS = {
'facebook': {
'SCOPE': ['email'], #, 'publish_stream'],
'METHOD': 'oauth2' # 'js_sdk' # instead of 'oauth2'
},
'google':
{ 'SCOPE': ['profile', 'email'],
'AUTH_PARAMS': { 'access_type': 'online' }
},
}
|
db405d09976453920f6311ee0d23f8104eaf4c85
|
08ee04ae665dcb930ed4b98ca7b91b2dac2cc3b0
|
/src/rayoptics/mpl/__init__.py
|
f4a523b2bd1ba8d263ee719e7e7a734698f4a495
|
[
"BSD-3-Clause"
] |
permissive
|
mjhoptics/ray-optics
|
6bad622f7bb9b3485823b9cc511a6d2b679f7048
|
41ea6d618a93fe14f8bee45fb3efff6a6762bcce
|
refs/heads/master
| 2023-07-09T18:03:36.621685
| 2023-05-08T22:46:36
| 2023-05-08T22:46:36
| 109,168,474
| 195
| 49
|
BSD-3-Clause
| 2023-08-10T16:53:28
| 2017-11-01T18:34:12
|
Python
|
UTF-8
|
Python
| false
| false
| 584
|
py
|
__init__.py
|
""" package implementing useful rayoptics graphics using matplotlib
The :mod:`~.mpl` subpackage provides useful basic optical graphics
using the matplotlib plotting package. Particular features include:
- 2D lens layout, :mod:`~.interactivelayout`
- |ybar| and |nubar| paraxial ray diagrams, :mod:`~.interactivediagram`
- ray aberration and wavefront pupil/field plots,
:mod:`~.analysisfigure`, :mod:`~.axisarrayfigure` and
:mod:`~.analysisplots`
- base class to manage light and dark UI styles, :mod:`~.styledfigure`
"""
|
b0590e0f091ad5d73a5b7e86fc491325f1932c1a
|
831c6ac1fa3253a5ef49b493ab211590d39e220e
|
/generation/builtins_templates/render.tmpl.pyi
|
9080bda1cd3e1c2f70b37a0c3633183adb7eab28
|
[
"MIT",
"CC-BY-3.0"
] |
permissive
|
touilleMan/godot-python
|
a61bd686e929f62962b196243c0edf17b662271f
|
b9757da859a4d9fae86c330224881738d6b97392
|
refs/heads/master
| 2023-09-04T15:02:44.858077
| 2022-08-20T13:22:02
| 2022-08-20T13:22:02
| 69,164,674
| 1,766
| 168
|
NOASSERTION
| 2023-02-11T21:26:19
| 2016-09-25T13:03:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,249
|
pyi
|
render.tmpl.pyi
|
{#- `render_target` must be defined by calling context -#}
{% set get_target_method_spec = get_target_method_spec_factory(render_target) %}
{#- Define rendering macros -#}
{% macro render_method(method_name, py_name=None, default_args={}) %}
{% set spec = get_target_method_spec(method_name) %}
def {{ py_name or spec.py_name }}(self{%- if spec.args -%},{%- endif -%}
{%- for arg in spec.args %}
{{ arg.name }}: {{ arg.type.py_type }}
,
{%- endfor -%}
) -> {{ spec.return_type.py_type }}: ...
{% endmacro %}
{% macro render_operator_eq() %}
def __eq__(self, other) -> bool: ...
{% endmacro %}
{% macro render_operator_ne() %}
def __ne__(self, other) -> bool: ...
{% endmacro %}
{% macro render_operator_lt() %}
def __lt__(self, other) -> bool: ...
{% endmacro %}
{% macro render_property(py_name, getter, setter=None) %}
{{ pyname }}: {{ getter.return_type.py_type }}
{% endmacro %}
{#- Overwrite blocks to be ignored -#}
{% block python_defs %}
pass
{% endblock %}
{% block pxd_header %}{% endblock %}
{% block pyx_header %}{% endblock %}
{% block python_consts %}{% endblock %}
{% block cdef_attributes %}{% endblock %}
{#- Now the template will be generated with the context -#}
{% extends render_target_to_template(render_target) %}
|
dea68b5132fe3ee8081ac4810ba96c73fcbebc46
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/Validation/HGCalValidation/test/python/standalone_fromRECO_HARVESTING.py
|
86bdcd97b1666e61d8f976c6674e0286d4342c30
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 1,585
|
py
|
standalone_fromRECO_HARVESTING.py
|
import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Era_Phase2_cff import Phase2
process = cms.Process('HARVESTING',Phase2)
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.Geometry.GeometryExtended2023D17Reco_cff')
process.load('Configuration.StandardSequences.DQMSaverAtRunEnd_cff')
process.load('Configuration.StandardSequences.Harvesting_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
# Input source
process.source = cms.Source("DQMRootSource",
fileNames = cms.untracked.vstring('file:step3_inDQM.root')
)
process.options = cms.untracked.PSet(
Rethrow = cms.untracked.vstring('ProductNotFound'),
fileMode = cms.untracked.string('FULLMERGE')
)
# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('step4 nevts:100'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)
# Output definition
# Additional output definition
# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:phase2_realistic', '')
# Path and EndPath definitions
process.dqmsave_step = cms.Path(process.DQMSaver)
# Schedule definition
process.schedule = cms.Schedule(process.dqmsave_step)
|
881e03a2fbd0f26fee88b8357d36c62182838f38
|
bad6940b201987d55edb3d7128f2ce3718d3b40f
|
/examples/add_singleton_misc_feature.py
|
b24b9e7d0972a26a6fe63acc319d89593c680f6c
|
[
"MIT"
] |
permissive
|
pyconll/pyconll
|
ff985ef2e80820a0dd86b5d7c81914712f5dd5cd
|
1bb8cd31fe18bfb048e6e26bc583004952094bff
|
refs/heads/master
| 2023-06-23T12:17:10.758862
| 2023-06-21T03:20:45
| 2023-06-21T03:20:45
| 107,349,042
| 148
| 11
|
MIT
| 2023-06-21T03:13:56
| 2017-10-18T02:30:31
|
Python
|
UTF-8
|
Python
| false
| false
| 821
|
py
|
add_singleton_misc_feature.py
|
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_singleton_misc_feature.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma.lower() == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
|
95c2efa374c5b56a7a4077a64a446ab5f7c14b73
|
8f267fe1157904023004aa1fcee8cdcaf1d69f74
|
/tempest/tests/lib/services/network/test_extensions_client.py
|
27eb4858db1eb8dfe6308ea69aaac776bb5f2fcc
|
[
"Apache-2.0"
] |
permissive
|
openstack/tempest
|
a65737f3e62d4ebeb7e387feac7bcc636d3f5fe0
|
3932a799e620a20d7abf7b89e21b520683a1809b
|
refs/heads/master
| 2023-08-28T15:04:21.241805
| 2023-08-28T10:16:57
| 2023-08-28T10:16:57
| 2,356,406
| 270
| 407
|
Apache-2.0
| 2022-06-29T15:52:45
| 2011-09-09T15:56:02
|
Python
|
UTF-8
|
Python
| false
| false
| 7,776
|
py
|
test_extensions_client.py
|
# Copyright 2017 AT&T Corporation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.network import extensions_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestExtensionsClient(base.BaseServiceTest):
FAKE_EXTENSIONS = {
"extensions": [
{
"updated": "2013-01-20T00:00:00-00:00",
"name": "Neutron Service Type Management",
"links": [],
"alias": "service-type",
"description": "API for retrieving service providers for"
" Neutron advanced services"
},
{
"updated": "2012-10-05T10:00:00-00:00",
"name": "security-group",
"links": [],
"alias": "security-group",
"description": "The security groups extension."
},
{
"updated": "2013-02-07T10:00:00-00:00",
"name": "L3 Agent Scheduler",
"links": [],
"alias": "l3_agent_scheduler",
"description": "Schedule routers among l3 agents"
},
{
"updated": "2013-02-07T10:00:00-00:00",
"name": "Loadbalancer Agent Scheduler",
"links": [],
"alias": "lbaas_agent_scheduler",
"description": "Schedule pools among lbaas agents"
},
{
"updated": "2013-03-28T10:00:00-00:00",
"name": "Neutron L3 Configurable external gateway mode",
"links": [],
"alias": "ext-gw-mode",
"description":
"Extension of the router abstraction for specifying whether"
" SNAT should occur on the external gateway"
},
{
"updated": "2014-02-03T10:00:00-00:00",
"name": "Port Binding",
"links": [],
"alias": "binding",
"description": "Expose port bindings of a virtual port to"
" external application"
},
{
"updated": "2012-09-07T10:00:00-00:00",
"name": "Provider Network",
"links": [],
"alias": "provider",
"description": "Expose mapping of virtual networks to"
" physical networks"
},
{
"updated": "2013-02-03T10:00:00-00:00",
"name": "agent",
"links": [],
"alias": "agent",
"description": "The agent management extension."
},
{
"updated": "2012-07-29T10:00:00-00:00",
"name": "Quota management support",
"links": [],
"alias": "quotas",
"description": "Expose functions for quotas management per"
" tenant"
},
{
"updated": "2013-02-07T10:00:00-00:00",
"name": "DHCP Agent Scheduler",
"links": [],
"alias": "dhcp_agent_scheduler",
"description": "Schedule networks among dhcp agents"
},
{
"updated": "2013-06-27T10:00:00-00:00",
"name": "Multi Provider Network",
"links": [],
"alias": "multi-provider",
"description": "Expose mapping of virtual networks to"
" multiple physical networks"
},
{
"updated": "2013-01-14T10:00:00-00:00",
"name": "Neutron external network",
"links": [],
"alias": "external-net",
"description": "Adds external network attribute to network"
" resource."
},
{
"updated": "2012-07-20T10:00:00-00:00",
"name": "Neutron L3 Router",
"links": [],
"alias": "router",
"description": "Router abstraction for basic L3 forwarding"
" between L2 Neutron networks and access to external"
" networks via a NAT gateway."
},
{
"updated": "2013-07-23T10:00:00-00:00",
"name": "Allowed Address Pairs",
"links": [],
"alias": "allowed-address-pairs",
"description": "Provides allowed address pairs"
},
{
"updated": "2013-03-17T12:00:00-00:00",
"name": "Neutron Extra DHCP opts",
"links": [],
"alias": "extra_dhcp_opt",
"description": "Extra options configuration for DHCP. For"
" example PXE boot options to DHCP clients can be specified"
" (e.g. tftp-server, server-ip-address, bootfile-name)"
},
{
"updated": "2012-10-07T10:00:00-00:00",
"name": "LoadBalancing service",
"links": [],
"alias": "lbaas",
"description": "Extension for LoadBalancing service"
},
{
"updated": "2013-02-01T10:00:00-00:00",
"name": "Neutron Extra Route",
"links": [],
"alias": "extraroute",
"description": "Extra routes configuration for L3 router"
},
{
"updated": "2016-01-24T10:00:00-00:00",
"name": "Neutron Port Data Plane Status",
"links": [],
"alias": "data-plane-status",
"description": "Status of the underlying data plane."
}
]
}
FAKE_EXTENSION_ALIAS = "service-type"
def setUp(self):
super(TestExtensionsClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.extensions_client = extensions_client.ExtensionsClient(
fake_auth, "network", "regionOne")
def _test_list_extensions(self, bytes_body=False):
self.check_service_client_function(
self.extensions_client.list_extensions,
"tempest.lib.common.rest_client.RestClient.get",
self.FAKE_EXTENSIONS,
bytes_body,
200)
def _test_show_extension(self, bytes_body=False):
self.check_service_client_function(
self.extensions_client.show_extension,
"tempest.lib.common.rest_client.RestClient.get",
{"extension": self.FAKE_EXTENSIONS["extensions"][0]},
bytes_body,
200,
ext_alias=self.FAKE_EXTENSION_ALIAS)
def test_list_extensions_with_str_body(self):
self._test_list_extensions()
def test_list_extensions_with_bytes_body(self):
self._test_list_extensions(bytes_body=True)
def test_show_extension_with_str_body(self):
self._test_show_extension()
def test_show_extension_with_bytes_body(self):
self._test_show_extension(bytes_body=True)
|
b7fcd102005c088ac99343450f2026addf24f43a
|
ef2c1a0ae0f1746e58fcc160844788ab92a8d488
|
/archai/discrete_search/search_spaces/nlp/__init__.py
|
e027cac5946ea6c7392a4b9ba9bdcc930972e988
|
[
"MIT",
"LicenseRef-scancode-free-unknown",
"LGPL-2.1-or-later",
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
microsoft/archai
|
4d04476ef6a434148638ef91df0ef3bf2c948422
|
95d6e19a1523a701b3fbc249dd1a7d1e7ba44aee
|
refs/heads/main
| 2023-09-03T13:23:48.576626
| 2023-07-27T01:30:01
| 2023-07-27T01:30:01
| 245,036,506
| 439
| 97
|
MIT
| 2023-05-09T21:10:10
| 2020-03-05T00:54:29
|
Python
|
UTF-8
|
Python
| false
| false
| 258
|
py
|
__init__.py
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from archai.discrete_search.search_spaces.nlp.transformer_flex.search_space import TransformerFlexSearchSpace
from archai.discrete_search.search_spaces.nlp.tfpp import TfppSearchSpace
|
a6d896b596c18256c72dd56f3df6017bbea4aac7
|
92d97211aebc18d62fdde441ea775198a231c362
|
/tests/unit/small_text/utils/test_labels.py
|
14e5a24763080c301a5c1988ff02b80047744ff1
|
[
"MIT"
] |
permissive
|
webis-de/small-text
|
4a510bc4dd9a2110976121603bcc859581a5141c
|
2bb16b7413f85f3b933887c7054db45b5652d3a2
|
refs/heads/main
| 2023-09-03T06:00:20.976398
| 2023-08-19T18:28:43
| 2023-08-19T18:28:43
| 370,275,343
| 476
| 58
|
MIT
| 2023-08-23T20:54:25
| 2021-05-24T08:06:41
|
Python
|
UTF-8
|
Python
| false
| false
| 6,155
|
py
|
test_labels.py
|
import unittest
import numpy as np
from numpy.testing import assert_array_equal
from scipy.sparse import csr_matrix
from small_text.base import LABEL_IGNORED, LABEL_UNLABELED
from small_text.utils.labels import (
concatenate,
csr_to_list,
get_flattened_unique_labels,
get_ignored_labels_mask,
get_num_labels,
list_to_csr,
remove_by_index
)
from tests.utils.testing import assert_csr_matrix_equal
from tests.utils.datasets import random_sklearn_dataset
class LabelUtilsTest(unittest.TestCase):
def test_get_num_labels_dense(self):
self.assertEqual(4, get_num_labels(np.array([3, 2, 1, 0])))
self.assertEqual(4, get_num_labels(np.array([3])))
with self.assertRaisesRegex(ValueError, 'Invalid labeling'):
self.assertEqual(0, get_num_labels(np.array([])))
def test_get_num_labels_sparse(self):
mat = csr_matrix(np.array([
[1, 1],
[0, 1],
[1, 0],
[0, 0]
]))
self.assertEqual(2, get_num_labels(mat))
mat = csr_matrix(np.array([
[1, 1]
]))
self.assertEqual(2, get_num_labels(mat))
mat = csr_matrix((0, 0), dtype=np.int64)
with self.assertRaisesRegex(ValueError, 'Invalid labeling'):
self.assertEqual(0, get_num_labels(mat))
def test_concatenate_dense(self):
x = np.array([1, 2, 3])
y = np.array([3, 2, 1])
result = concatenate(x, y)
expected = np.array([1, 2, 3, 3, 2, 1])
assert_array_equal(expected, result)
def test_concatenate_sparse(self):
x = csr_matrix(np.array([[0, 1], [1, 0], [1, 1]]))
y = csr_matrix(np.array([[1, 1], [1, 0], [0, 1]]))
result = concatenate(x, y)
expected = csr_matrix(
np.array([
[0, 1], [1, 0], [1, 1], [1, 1], [1, 0], [0, 1]
])
)
assert_csr_matrix_equal(expected, result)
def test_get_ignored_labels_mask_dense(self):
y = np.array([1, LABEL_IGNORED, 3, 2])
mask = get_ignored_labels_mask(y, LABEL_IGNORED)
assert_array_equal(np.array([False, True, False, False]), mask)
def test_get_ignored_labels_mask_sparse(self):
y = csr_matrix(np.array([[1, 1], [LABEL_IGNORED, 0], [LABEL_IGNORED, LABEL_IGNORED], [1, 0]]))
mask = get_ignored_labels_mask(y, LABEL_IGNORED)
assert_array_equal(np.array([False, True, True, False]), mask)
def test_remove_by_index_dense(self):
y = np.array([3, 2, 1, 2, 1])
y_new = remove_by_index(y, 3)
expected = np.array([3, 2, 1, 1])
assert_array_equal(expected, y_new)
def test_remove_by_index_list_dense(self):
y = np.array([3, 2, 1, 2, 1])
y_new = remove_by_index(y, [3, 4])
expected = np.array([3, 2, 1])
assert_array_equal(expected, y_new)
def test_remove_by_index_sparse(self):
y = csr_matrix(np.array([[1, 1], [1, 0], [0, 1], [1, 1]]))
y_new = remove_by_index(y, 2)
expected = csr_matrix(
np.array([
[1, 1], [1, 0], [1, 1]
])
)
assert_csr_matrix_equal(expected, y_new)
def test_remove_by_index_list_sparse(self):
y = csr_matrix(np.array([[1, 1], [1, 0], [0, 1], [1, 1]]))
y_new = remove_by_index(y, [2, 3])
expected = csr_matrix(
np.array([
[1, 1], [1, 0]
])
)
assert_csr_matrix_equal(expected, y_new)
def test_csr_to_list(self):
mat = csr_matrix(np.array([
[1, 1],
[0, 1],
[1, 0],
[0, 0]
]))
label_list = csr_to_list(mat)
self.assertEqual([[0, 1], [1], [0], []], label_list)
def test_list_to_csr(self):
label_list = [[], [0, 1], [1, 2, 3], [1], [], [0]]
result = list_to_csr(label_list, (6, 4))
self.assertTrue(isinstance(result, csr_matrix))
self.assertEqual(np.int64, result.dtype)
self.assertEqual(np.int64, result.data.dtype)
self.assertEqual(np.int32, result.indices.dtype)
self.assertEqual(np.int32, result.indices.dtype)
def test_list_to_csr_all_empty(self):
label_list = [[], [], [], [], [], []]
result = list_to_csr(label_list, (6, 4), dtype=np.float64)
self.assertTrue(isinstance(result, csr_matrix))
self.assertEqual(np.float64, result.dtype)
self.assertEqual(np.float64, result.data.dtype)
self.assertEqual(np.int32, result.indices.dtype)
self.assertEqual(np.int32, result.indices.dtype)
def test_list_to_csr_float(self):
label_list = [[], [0, 1], [1, 2, 3], [1], [], [0]]
result = list_to_csr(label_list, (6, 4), dtype=np.float64)
self.assertTrue(isinstance(result, csr_matrix))
self.assertEqual(np.float64, result.dtype)
self.assertEqual(np.float64, result.data.dtype)
self.assertEqual(np.int32, result.indices.dtype)
self.assertEqual(np.int32, result.indices.dtype)
def test_get_flattened_unique_labels(self):
dataset = random_sklearn_dataset(10)
labels = get_flattened_unique_labels(dataset)
assert_array_equal(np.array([0, 1]), labels)
def test_get_flattened_unique_labels_no_labels(self):
dataset = random_sklearn_dataset(10)
dataset.y = np.array([LABEL_UNLABELED] * len(dataset))
labels = get_flattened_unique_labels(dataset)
self.assertEqual((0,), labels.shape)
def test_get_flattened_unique_labels_multi_label(self):
num_classes = 3
dataset = random_sklearn_dataset(10, multi_label=True, num_classes=num_classes)
labels = get_flattened_unique_labels(dataset)
assert_array_equal(np.array([0, 1, 2]), labels)
def test_get_flattened_unique_labels_multi_label_no_labels(self):
num_classes = 3
dataset = random_sklearn_dataset(10, multi_label=True, num_classes=num_classes)
dataset.y = csr_matrix((10, num_classes))
labels = get_flattened_unique_labels(dataset)
self.assertEqual((0,), labels.shape)
|
c9075456414d1497e144ab8f237c0b47001a17b2
|
300b0e36434201d7796d5ddd5b2d1789321379d4
|
/Addons/PolyQuilt/__init__.py
|
f41d7d3310731f843e54cbe9acdb9b7a34ba0c99
|
[] |
no_license
|
sakana3/PolyQuilt
|
bdf6101da46c49511734b34664c01cf2ec949697
|
5ed6c9ac20b04008e726299ea4e3a36080024f11
|
refs/heads/master
| 2023-07-28T16:50:21.154579
| 2021-12-05T11:26:11
| 2021-12-05T11:26:11
| 180,507,265
| 489
| 40
| null | 2022-09-27T19:28:10
| 2019-04-10T05:20:42
|
Python
|
UTF-8
|
Python
| false
| false
| 2,776
|
py
|
__init__.py
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTIBILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
bl_info = {
"name" : "PolyQuilt",
"author" : "Sakana3",
"version": (1, 3, 1),
"blender" : (2, 83, 0),
"location": "View3D > Mesh > PolyQuilt",
"description": "Lowpoly Tool",
"warning" : "",
"wiki_url": "",
"category": "Mesh",
}
import bpy
from bpy.utils.toolsystem import ToolDef
from .pq_operator import *
from .pq_operator_add_empty_object import *
from .pq_icon import *
from .pq_tool import PolyQuiltTools
from .pq_tool_ui import VIEW3D_PT_tools_polyquilt_options
from .pq_keymap_editor import PQ_OT_DirtyKeymap
from .gizmo_preselect import *
from .pq_preferences import *
from .translation import pq_translation_dict
classes = (
MESH_OT_poly_quilt ,
MESH_OT_poly_quilt_brush_size ,
MESH_OT_poly_quilt_daemon ,
PQ_OT_SetupUnityLikeKeymap ,
PolyQuiltPreferences ,
PQ_OT_CheckAddonUpdate ,
PQ_OT_UpdateAddon ,
VIEW3D_PT_tools_polyquilt_options ,
PQ_OT_DirtyKeymap ,
) + gizmo_preselect.all_gizmos
def register():
bpy.app.translations.register(__name__, pq_translation_dict)
register_icons()
register_updater(bl_info)
# 空メッシュ追加
bpy.utils.register_class(pq_operator_add_empty_object.OBJECT_OT_add_object)
bpy.utils.register_manual_map(pq_operator_add_empty_object.add_object_manual_map)
bpy.types.VIEW3D_MT_mesh_add.append(pq_operator_add_empty_object.add_object_button)
for cls in classes:
bpy.utils.register_class(cls)
for tool in PolyQuiltTools :
bpy.utils.register_tool(tool['tool'] , after = tool['after'] , group = tool['group'] )
def unregister():
for tool in PolyQuiltTools :
bpy.utils.unregister_tool(tool['tool'])
for cls in reversed(classes):
bpy.utils.unregister_class(cls)
bpy.utils.unregister_class(pq_operator_add_empty_object.OBJECT_OT_add_object)
bpy.utils.unregister_manual_map(pq_operator_add_empty_object.add_object_manual_map)
bpy.types.VIEW3D_MT_mesh_add.remove(pq_operator_add_empty_object.add_object_button)
unregister_icons()
bpy.app.translations.unregister(__name__)
if __name__ == "__main__":
register()
|
0e55ed35a61a5c3d488886c94b37a2947d95e206
|
8f76cee606ca901d6b9c1ecdcadaa172aa861c00
|
/languages/python/web_urllib2_basic2.py
|
5e9746fc8bb2856787c71bb29af51f76a92eea66
|
[
"BSD-3-Clause"
] |
permissive
|
uthcode/learntosolveit
|
b0cdc386ab17dadcefef9867aacc5ef0326b7215
|
88b1cbfea313fdca50f48573c396bed9ba38c354
|
refs/heads/master
| 2023-05-24T14:00:34.115585
| 2023-05-15T00:18:02
| 2023-05-15T00:18:02
| 14,986,557
| 171
| 1,714
|
NOASSERTION
| 2022-09-13T13:37:07
| 2013-12-06T15:44:38
|
Java
|
UTF-8
|
Python
| false
| false
| 2,469
|
py
|
web_urllib2_basic2.py
|
import urllib.request, urllib.error, urllib.parse
URL = 'http://localhost/basic.html'
ah = urllib.request.HTTPBasicAuthHandler()
ah.add_password('Realm','http://localhost/','username','veryverylongpassword')
urllib.request.install_opener(urllib.request.build_opener(ah))
r = urllib.request.Request(URL)
obj = urllib.request.urlopen(r)
print(obj.read())
print('*********************************************************')
import urllib.request, urllib.error, urllib.parse
import sys
import re
import base64
from urllib.parse import urlparse
theurl = 'http://localhost/basic.html'
# if you want to run this example you'll need to supply
# a protected page with your username and password
username = 'username'
password = 'veryverylongpassword' # a very bad password
req = urllib.request.Request(theurl)
try:
handle = urllib.request.urlopen(req)
except IOError as e:
# here we *want* to fail
pass
else:
# If we don't fail then the page isn't protected
print("This page isn't protected by authentication.")
sys.exit(1)
if not hasattr(e, 'code') or e.code != 401:
# we got an error - but not a 401 error
print("This page isn't protected by authentication.")
print('But we failed for another reason.')
sys.exit(1)
authline = e.headers['www-authenticate']
# this gets the www-authenticate line from the headers
# which has the authentication scheme and realm in it
authobj = re.compile(
r'''(?:\s*www-authenticate\s*:)?\s*(\w*)\s+realm=['"]([^'"]+)['"]''',
re.IGNORECASE)
# this regular expression is used to extract scheme and realm
matchobj = authobj.match(authline)
if not matchobj:
# if the authline isn't matched by the regular expression
# then something is wrong
print('The authentication header is badly formed.')
print(authline)
sys.exit(1)
scheme = matchobj.group(1)
realm = matchobj.group(2)
# here we've extracted the scheme
# and the realm from the header
if scheme.lower() != 'basic':
print('This example only works with BASIC authentication.')
sys.exit(1)
base64string = base64.encodestring(
'%s:%s' % (username, password))[:-1]
authheader = "Basic %s" % base64string
req.add_header("Authorization", authheader)
try:
handle = urllib.request.urlopen(req)
except IOError as e:
# here we shouldn't fail if the username/password is right
print("It looks like the username or password is wrong.")
sys.exit(1)
thepage = handle.read()
|
053f774817dfa4d681c0f88ccf13a6322c63d4fc
|
2ad93a1cf25a580fe980482d2d17a657de3b2523
|
/django-stubs/contrib/gis/gdal/field.pyi
|
6d1a27a3b09fc0385b28f2164db68d3fb42a208a
|
[
"MIT"
] |
permissive
|
typeddjango/django-stubs
|
f35dfcb001e54694a0a1e8c0afcc6e6a3d130c32
|
0117348c3c7713f25f96b46e53ebdeed7bdba544
|
refs/heads/master
| 2023-08-25T19:42:52.707151
| 2023-08-23T15:13:25
| 2023-08-23T15:13:25
| 142,779,680
| 1,133
| 376
|
MIT
| 2023-09-13T19:05:06
| 2018-07-29T17:08:50
|
Python
|
UTF-8
|
Python
| false
| false
| 1,460
|
pyi
|
field.pyi
|
from typing import Any
from django.contrib.gis.gdal.base import GDALBase
class Field(GDALBase):
ptr: Any
def __init__(self, feat: Any, index: Any) -> None: ...
def as_double(self) -> float | None: ...
def as_int(self, is_64: bool = ...) -> int | None: ...
def as_string(self) -> str | None: ...
def as_datetime(self) -> tuple[int, int, int, int, int, int, int] | None: ...
@property
def is_set(self) -> bool: ...
@property
def name(self) -> str: ...
@property
def precision(self) -> int: ...
@property
def type(self) -> int: ...
@property
def type_name(self) -> bytes: ...
@property
def value(self) -> Any: ...
@property
def width(self) -> int: ...
class OFTInteger(Field):
@property
def value(self) -> Any: ...
@property
def type(self) -> Any: ...
class OFTReal(Field):
@property
def value(self) -> Any: ...
class OFTString(Field): ...
class OFTWideString(Field): ...
class OFTBinary(Field): ...
class OFTDate(Field):
@property
def value(self) -> Any: ...
class OFTDateTime(Field):
@property
def value(self) -> Any: ...
class OFTTime(Field):
@property
def value(self) -> Any: ...
class OFTInteger64(OFTInteger): ...
class OFTIntegerList(Field): ...
class OFTRealList(Field): ...
class OFTStringList(Field): ...
class OFTWideStringList(Field): ...
class OFTInteger64List(Field): ...
OGRFieldTypes: Any
ROGRFieldTypes: Any
|
a1426ef921ed5bde5016381c41f38ca40ae6de12
|
73305ddcc6dc9775b1e9a71506e2f3c74f678edc
|
/examples/anonymize_query_example.py
|
f8d699aa58d5125a212681c8a549f0855d88e9ed
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
google/starthinker
|
ef359557da4140275a8524d0d813eecf022ece9e
|
b596df09c52511e2e0c0987f6245aa4607190dd0
|
refs/heads/master
| 2023-08-25T21:16:45.578012
| 2023-07-17T22:19:18
| 2023-07-17T22:20:10
| 123,017,995
| 167
| 64
|
Apache-2.0
| 2023-08-02T01:24:51
| 2018-02-26T19:15:09
|
Python
|
UTF-8
|
Python
| false
| false
| 3,994
|
py
|
anonymize_query_example.py
|
###########################################################################
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
#
# This code generated (see scripts folder for possible source):
# - Command: "python starthinker_ui/manage.py example"
#
###########################################################################
import argparse
import textwrap
from starthinker.util.configuration import Configuration
from starthinker.task.anonymize.run import anonymize
def recipe_anonymize_query(config, auth_read, from_project, from_dataset, from_query, to_project, to_dataset, to_table):
"""Runs a query and anynonamizes all rows. Used to create sample table for
dashboards.
Args:
auth_read (authentication) - Credentials used.
from_project (string) - Original project to read from.
from_dataset (string) - Original dataset to read from.
from_query (string) - Query to read data.
to_project (string) - Anonymous data will be writen to.
to_dataset (string) - Anonymous data will be writen to.
to_table (string) - Anonymous data will be writen to.
"""
anonymize(config, {
'auth':auth_read,
'bigquery':{
'from':{
'project':from_project,
'dataset':from_dataset,
'query':from_query
},
'to':{
'project':to_project,
'dataset':to_dataset,
'table':to_table
}
}
})
if __name__ == "__main__":
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent("""
Runs a query and anynonamizes all rows. Used to create sample table for dashboards.
1. Ensure you have user access to both datasets.
2. Provide the source project, dataset and query.
3. Provide the destination project, dataset, and table.
"""))
parser.add_argument("-project", help="Cloud ID of Google Cloud Project.", default=None)
parser.add_argument("-key", help="API Key of Google Cloud Project.", default=None)
parser.add_argument("-client", help="Path to CLIENT credentials json file.", default=None)
parser.add_argument("-user", help="Path to USER credentials json file.", default=None)
parser.add_argument("-service", help="Path to SERVICE credentials json file.", default=None)
parser.add_argument("-verbose", help="Print all the steps as they happen.", action="store_true")
parser.add_argument("-auth_read", help="Credentials used.", default='service')
parser.add_argument("-from_project", help="Original project to read from.", default=None)
parser.add_argument("-from_dataset", help="Original dataset to read from.", default=None)
parser.add_argument("-from_query", help="Query to read data.", default=None)
parser.add_argument("-to_project", help="Anonymous data will be writen to.", default=None)
parser.add_argument("-to_dataset", help="Anonymous data will be writen to.", default=None)
parser.add_argument("-to_table", help="Anonymous data will be writen to.", default=None)
args = parser.parse_args()
config = Configuration(
project=args.project,
user=args.user,
service=args.service,
client=args.client,
key=args.key,
verbose=args.verbose
)
recipe_anonymize_query(config, args.auth_read, args.from_project, args.from_dataset, args.from_query, args.to_project, args.to_dataset, args.to_table)
|
585350d43516b2471732c7e90d2ddf793c28a99b
|
069c2295076c482afadfe6351da5ae02be8e18e6
|
/tests/defer_regress/tests.py
|
3dfe96ddb3a3536022249ec811f071df83f8df97
|
[
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"GPL-1.0-or-later",
"Python-2.0.1",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-other-permissive",
"Python-2.0"
] |
permissive
|
django/django
|
5eb557f57053631cd4f566f451e43197309dbeeb
|
c74a6fad5475495756a5bdb18b2cab2b68d429bc
|
refs/heads/main
| 2023-09-01T03:43:44.033530
| 2023-08-31T08:27:32
| 2023-08-31T08:27:32
| 4,164,482
| 73,530
| 38,187
|
BSD-3-Clause
| 2023-09-14T20:03:48
| 2012-04-28T02:47:18
|
Python
|
UTF-8
|
Python
| false
| false
| 13,969
|
py
|
tests.py
|
from operator import attrgetter
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import Count
from django.test import TestCase
from .models import (
Base,
Child,
Derived,
Feature,
Item,
ItemAndSimpleItem,
Leaf,
Location,
OneToOneItem,
Proxy,
ProxyRelated,
RelatedItem,
Request,
ResolveThis,
SimpleItem,
SpecialFeature,
)
class DeferRegressionTest(TestCase):
def test_basic(self):
# Deferred fields should really be deferred and not accidentally use
# the field's default value just because they aren't passed to __init__
Item.objects.create(name="first", value=42)
obj = Item.objects.only("name", "other_value").get(name="first")
# Accessing "name" doesn't trigger a new database query. Accessing
# "value" or "text" should.
with self.assertNumQueries(0):
self.assertEqual(obj.name, "first")
self.assertEqual(obj.other_value, 0)
with self.assertNumQueries(1):
self.assertEqual(obj.value, 42)
with self.assertNumQueries(1):
self.assertEqual(obj.text, "xyzzy")
with self.assertNumQueries(0):
self.assertEqual(obj.text, "xyzzy")
# Regression test for #10695. Make sure different instances don't
# inadvertently share data in the deferred descriptor objects.
i = Item.objects.create(name="no I'm first", value=37)
items = Item.objects.only("value").order_by("-value")
self.assertEqual(items[0].name, "first")
self.assertEqual(items[1].name, "no I'm first")
RelatedItem.objects.create(item=i)
r = RelatedItem.objects.defer("item").get()
self.assertEqual(r.item_id, i.id)
self.assertEqual(r.item, i)
# Some further checks for select_related() and inherited model
# behavior (regression for #10710).
c1 = Child.objects.create(name="c1", value=42)
c2 = Child.objects.create(name="c2", value=37)
Leaf.objects.create(name="l1", child=c1, second_child=c2)
obj = Leaf.objects.only("name", "child").select_related()[0]
self.assertEqual(obj.child.name, "c1")
self.assertQuerySetEqual(
Leaf.objects.select_related().only("child__name", "second_child__name"),
[
"l1",
],
attrgetter("name"),
)
# Models instances with deferred fields should still return the same
# content types as their non-deferred versions (bug #10738).
ctype = ContentType.objects.get_for_model
c1 = ctype(Item.objects.all()[0])
c2 = ctype(Item.objects.defer("name")[0])
c3 = ctype(Item.objects.only("name")[0])
self.assertTrue(c1 is c2 is c3)
# Regression for #10733 - only() can be used on a model with two
# foreign keys.
results = Leaf.objects.only("name", "child", "second_child").select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
results = Leaf.objects.only(
"name", "child", "second_child", "child__name", "second_child__name"
).select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
# Regression for #16409 - make sure defer() and only() work with annotate()
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list
)
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).only("name")), list
)
def test_ticket_16409(self):
# Regression for #16409 - make sure defer() and only() work with annotate()
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list
)
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).only("name")), list
)
def test_ticket_23270(self):
d = Derived.objects.create(text="foo", other_text="bar")
with self.assertNumQueries(1):
obj = Base.objects.select_related("derived").defer("text")[0]
self.assertIsInstance(obj.derived, Derived)
self.assertEqual("bar", obj.derived.other_text)
self.assertNotIn("text", obj.__dict__)
self.assertEqual(d.pk, obj.derived.base_ptr_id)
def test_only_and_defer_usage_on_proxy_models(self):
# Regression for #15790 - only() broken for proxy models
proxy = Proxy.objects.create(name="proxy", value=42)
msg = "QuerySet.only() return bogus results with proxy models"
dp = Proxy.objects.only("other_value").get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
# also test things with .defer()
msg = "QuerySet.defer() return bogus results with proxy models"
dp = Proxy.objects.defer("name", "text", "value").get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
def test_resolve_columns(self):
ResolveThis.objects.create(num=5.0, name="Foobar")
qs = ResolveThis.objects.defer("num")
self.assertEqual(1, qs.count())
self.assertEqual("Foobar", qs[0].name)
def test_reverse_one_to_one_relations(self):
# Refs #14694. Test reverse relations which are known unique (reverse
# side has o2ofield or unique FK) - the o2o case
item = Item.objects.create(name="first", value=42)
o2o = OneToOneItem.objects.create(item=item, name="second")
self.assertEqual(len(Item.objects.defer("one_to_one_item__name")), 1)
self.assertEqual(len(Item.objects.select_related("one_to_one_item")), 1)
self.assertEqual(
len(
Item.objects.select_related("one_to_one_item").defer(
"one_to_one_item__name"
)
),
1,
)
self.assertEqual(
len(Item.objects.select_related("one_to_one_item").defer("value")), 1
)
# Make sure that `only()` doesn't break when we pass in a unique relation,
# rather than a field on the relation.
self.assertEqual(len(Item.objects.only("one_to_one_item")), 1)
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item")[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item").defer(
"value", "one_to_one_item__name"
)[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.name, "first")
with self.assertNumQueries(1):
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
self.assertEqual(i.value, 42)
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item").only(
"name", "one_to_one_item__item"
)[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.name, "first")
with self.assertNumQueries(1):
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
self.assertEqual(i.value, 42)
def test_defer_with_select_related(self):
item1 = Item.objects.create(name="first", value=47)
item2 = Item.objects.create(name="second", value=42)
simple = SimpleItem.objects.create(name="simple", value="23")
ItemAndSimpleItem.objects.create(item=item1, simple=simple)
obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get()
self.assertEqual(obj.item, item1)
self.assertEqual(obj.item_id, item1.id)
obj.item = item2
obj.save()
obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get()
self.assertEqual(obj.item, item2)
self.assertEqual(obj.item_id, item2.id)
def test_proxy_model_defer_with_select_related(self):
# Regression for #22050
item = Item.objects.create(name="first", value=47)
RelatedItem.objects.create(item=item)
# Defer fields with only()
obj = ProxyRelated.objects.select_related().only("item__name")[0]
with self.assertNumQueries(0):
self.assertEqual(obj.item.name, "first")
with self.assertNumQueries(1):
self.assertEqual(obj.item.value, 47)
def test_only_with_select_related(self):
# Test for #17485.
item = SimpleItem.objects.create(name="first", value=47)
feature = Feature.objects.create(item=item)
SpecialFeature.objects.create(feature=feature)
qs = Feature.objects.only("item__name").select_related("item")
self.assertEqual(len(qs), 1)
qs = SpecialFeature.objects.only("feature__item__name").select_related(
"feature__item"
)
self.assertEqual(len(qs), 1)
def test_defer_annotate_select_related(self):
location = Location.objects.create()
Request.objects.create(location=location)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.only("profile", "location")
),
list,
)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.only("profile__profile1", "location__location1")
),
list,
)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.defer("request1", "request2", "request3", "request4")
),
list,
)
def test_common_model_different_mask(self):
child = Child.objects.create(name="Child", value=42)
second_child = Child.objects.create(name="Second", value=64)
Leaf.objects.create(child=child, second_child=second_child)
with self.assertNumQueries(1):
leaf = (
Leaf.objects.select_related("child", "second_child")
.defer("child__name", "second_child__value")
.get()
)
self.assertEqual(leaf.child, child)
self.assertEqual(leaf.second_child, second_child)
self.assertEqual(leaf.child.get_deferred_fields(), {"name"})
self.assertEqual(leaf.second_child.get_deferred_fields(), {"value"})
with self.assertNumQueries(0):
self.assertEqual(leaf.child.value, 42)
self.assertEqual(leaf.second_child.name, "Second")
with self.assertNumQueries(1):
self.assertEqual(leaf.child.name, "Child")
with self.assertNumQueries(1):
self.assertEqual(leaf.second_child.value, 64)
def test_defer_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
with self.assertNumQueries(1):
self.assertEqual(Request.objects.defer("items").get(), request)
def test_only_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
with self.assertNumQueries(1):
self.assertEqual(Request.objects.only("items").get(), request)
def test_defer_reverse_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
item = Item.objects.create(value=1)
request.items.add(item)
with self.assertNumQueries(1):
self.assertEqual(Item.objects.defer("request").get(), item)
def test_only_reverse_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
item = Item.objects.create(value=1)
request.items.add(item)
with self.assertNumQueries(1):
self.assertEqual(Item.objects.only("request").get(), item)
class DeferDeletionSignalsTests(TestCase):
senders = [Item, Proxy]
@classmethod
def setUpTestData(cls):
cls.item_pk = Item.objects.create(value=1).pk
def setUp(self):
self.pre_delete_senders = []
self.post_delete_senders = []
for sender in self.senders:
models.signals.pre_delete.connect(self.pre_delete_receiver, sender)
models.signals.post_delete.connect(self.post_delete_receiver, sender)
def tearDown(self):
for sender in self.senders:
models.signals.pre_delete.disconnect(self.pre_delete_receiver, sender)
models.signals.post_delete.disconnect(self.post_delete_receiver, sender)
def pre_delete_receiver(self, sender, **kwargs):
self.pre_delete_senders.append(sender)
def post_delete_receiver(self, sender, **kwargs):
self.post_delete_senders.append(sender)
def test_delete_defered_model(self):
Item.objects.only("value").get(pk=self.item_pk).delete()
self.assertEqual(self.pre_delete_senders, [Item])
self.assertEqual(self.post_delete_senders, [Item])
def test_delete_defered_proxy_model(self):
Proxy.objects.only("value").get(pk=self.item_pk).delete()
self.assertEqual(self.pre_delete_senders, [Proxy])
self.assertEqual(self.post_delete_senders, [Proxy])
|
3fd45369f24f9c4f2be78ffc5ba3a31c49191784
|
abe6c00f9790df7e6ef20dc02d0b1b225b5020cb
|
/src/prefect/server/database/migrations/versions/postgresql/2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py
|
4626dba6df6dfb82bd9405fa43d6f47116bfb5ac
|
[
"Apache-2.0"
] |
permissive
|
PrefectHQ/prefect
|
000e6c5f7df80f76a181f0a30f8661c96417c8bd
|
2c50d2b64c811c364cbc5faa2b5c80a742572090
|
refs/heads/main
| 2023-09-05T20:25:42.965208
| 2023-09-05T18:58:06
| 2023-09-05T18:58:06
| 139,199,684
| 12,917
| 1,539
|
Apache-2.0
| 2023-09-14T20:25:45
| 2018-06-29T21:59:26
|
Python
|
UTF-8
|
Python
| false
| false
| 523
|
py
|
2023_04_04_132534_3bf47e3ce2dd_add_index_on_log.py
|
"""add_index_on_log
Revision ID: 3bf47e3ce2dd
Revises: 46bd82c6279a
Create Date: 2023-04-04 13:25:34.694078
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "3bf47e3ce2dd"
down_revision = "46bd82c6279a"
branch_labels = None
depends_on = None
def upgrade():
op.create_index(
"ix_log__flow_run_id_timestamp",
"log",
["flow_run_id", "timestamp"],
unique=False,
)
def downgrade():
op.drop_index("ix_log__flow_run_id_timestamp", table_name="log")
|
f6b69c558da3c18e9cf126cccebea8e9a3bd293f
|
833d4cc8ec460902d0a8beb7f2e1ab13ba9114d0
|
/powermon/dto/resultDTO.py
|
28ad951b8f8613d1dd3157b572870005092554a4
|
[
"MIT"
] |
permissive
|
jblance/mpp-solar
|
3d834e88715591ec63c2abbff97b41417286f451
|
d541a7ec05754c570c44db21f271f5628c7f19e3
|
refs/heads/master
| 2023-08-31T10:09:36.558171
| 2023-08-24T22:42:36
| 2023-08-24T22:42:36
| 102,808,429
| 284
| 141
|
MIT
| 2023-09-11T20:47:11
| 2017-09-08T02:35:31
|
Python
|
UTF-8
|
Python
| false
| false
| 164
|
py
|
resultDTO.py
|
from typing import Iterable
from pydantic import BaseModel
# TODO: update
class ResultDTO(BaseModel):
device_identifier: str
command: str
data: dict
|
96386627e903984bd5c2ccce2fcd7d56b39a97ec
|
6eb0ba72a576b18873e53b0ff4f86fb581c6c806
|
/docker/credentials/__init__.py
|
a1247700d3ed8208a4e89f9384f1e685afc6fa6f
|
[
"Apache-2.0"
] |
permissive
|
docker/docker-py
|
566f9dd69c71ef79fbe2b9dd2745c905e1c613df
|
c38656dc7894363f32317affecc3e4279e1163f8
|
refs/heads/main
| 2023-08-31T14:13:48.087317
| 2023-08-21T13:31:57
| 2023-08-21T13:31:57
| 10,247,874
| 6,473
| 1,943
|
Apache-2.0
| 2023-09-08T18:24:21
| 2013-05-23T16:15:07
|
Python
|
UTF-8
|
Python
| false
| false
| 197
|
py
|
__init__.py
|
from .store import Store
from .errors import StoreError, CredentialsNotFound
from .constants import (
DEFAULT_LINUX_STORE,
DEFAULT_OSX_STORE,
DEFAULT_WIN32_STORE,
PROGRAM_PREFIX,
)
|
c6c022bbcca9e7d99d7ed86a9ffff789df0f7855
|
7ae9aa9e9d359e2182bbab6ae7e083fc2c7fa815
|
/rapidsms/router/celery/router.py
|
bef5019d6e91120ab02a7175a3197ae242c89fc4
|
[
"BSD-3-Clause"
] |
permissive
|
rapidsms/rapidsms
|
8ce6d3f46002146e76cf68fdca3288865578b17a
|
aaa2ddab68e19d979525c3823c3ec0e646e92c83
|
refs/heads/develop
| 2023-08-15T16:44:27.206841
| 2022-03-16T15:09:36
| 2022-03-16T15:09:36
| 132,857
| 409
| 196
|
BSD-3-Clause
| 2023-09-11T20:41:56
| 2009-02-19T22:21:40
|
Python
|
UTF-8
|
Python
| false
| false
| 2,063
|
py
|
router.py
|
import logging
from rapidsms.router.blocking import BlockingRouter
from rapidsms.router.celery.tasks import receive_async, send_async
logger = logging.getLogger(__name__)
class CeleryRouter(BlockingRouter):
"""Skeleton router only used to execute the Celery task."""
def is_eager(self, backend_name):
"""Return whether this backend is eager, meaning it runs
tasks synchronously rather than queueing them to celery.
A backend configures its eagerness by setting the backend
configuration value ``router.celery.eager`` to True or
False. The default is False.
"""
try:
backend = self.backends[backend_name]
except KeyError:
return False
return backend._config.get('router.celery.eager', False)
def receive_incoming(self, msg):
"""Queue incoming message to be processed in the background."""
eager = self.is_eager(msg.connections[0].backend.name)
if eager:
logger.debug('Executing in current process')
receive_async(msg.text, msg.connections[0].pk, msg.id,
msg.fields)
else:
logger.debug('Executing asynchronously')
receive_async.delay(msg.text, msg.connections[0].pk, msg.id,
msg.fields)
def backend_preparation(self, msg):
"""Queue outbound message to be processed in the background."""
context = msg.extra_backend_context()
grouped_identities = self.group_outgoing_identities(msg)
for backend_name, identities in grouped_identities.items():
eager = self.is_eager(backend_name)
if eager:
logger.debug('Executing in current process')
send_async(backend_name, msg.id, msg.text, identities,
context)
else:
logger.debug('Executing asynchronously')
send_async.delay(backend_name, msg.id, msg.text, identities,
context)
|
3f48d0c4369b15390f793086be26eb47235f0900
|
620323fc090cebaf7aca456ff3f7fbbe1e210394
|
/job_compassplus/job_report/report_person.py
|
34ba7f93fd09058eec8ed703b2bfa31288f76017
|
[
"CC-BY-4.0"
] |
permissive
|
gil9red/SimplePyScripts
|
bd2733372728bf9b9f00570e90316fa12116516b
|
773c2c9724edd8827a1dbd91694d780e03fcb05a
|
refs/heads/master
| 2023-08-31T04:26:09.120173
| 2023-08-30T17:22:59
| 2023-08-30T17:22:59
| 22,650,442
| 157
| 46
| null | 2023-09-08T17:51:33
| 2014-08-05T16:19:52
|
Python
|
UTF-8
|
Python
| false
| false
| 2,931
|
py
|
report_person.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "ipetrash"
from functools import total_ordering
class ReportPerson:
"""Класс для описания сотрудника в отчете."""
def __init__(self, tags: list[str]):
# ФИО
self.second_name, self.first_name, self.middle_name = tags[0].split(maxsplit=2)
# Невыходов на работу
self.absence_from_work = int(tags[1])
# По календарю (смен / ч:мин)
# Для точного значения посещенных дней, может быть указано как "3 = 4- (1 О)", поэтому
# отсекаем правую, от знака равно, сторону, удаляем пробелы и переводим в число
self.need_to_work_days = self.get_work_day(tags[2])
self.need_to_work_on_time = self.get_work_time(tags[3])
# Фактически (смен / ч:мин)
self.worked_days = self.get_work_day(tags[4])
self.worked_time = self.get_work_time(tags[5])
# Отклонение (смен / ч:мин)
self.deviation_of_day = self.get_work_day(tags[6])
self.deviation_of_time = self.get_work_time(tags[7])
@property
def full_name(self):
return self.second_name + " " + self.first_name + " " + self.middle_name
@staticmethod
def get_work_day(day_str):
return (
int(day_str) if "=" not in day_str else int(day_str.split("=")[0].strip())
)
@total_ordering
class Time:
"""Простой класс для хранения даты работы."""
def __init__(self, time_str: str):
# TODO: supports self._seconds
self._hours, self._minutes, self._seconds = map(int, time_str.split(":"))
@property
def total(self) -> int:
"""Всего минут"""
return self._hours * 60 + self._minutes
def __repr__(self):
return f"{self._hours:0>2}:{self._minutes:0>2}"
def __eq__(self, other):
return self.total == other.total
def __lt__(self, other):
return self.total < other.total
@staticmethod
def get_work_time(time_str):
return ReportPerson.Time(time_str)
def __hash__(self):
return hash(self.full_name)
def __eq__(self, other):
return self.full_name == other.full_name
def __repr__(self):
return (
f"{self.full_name}. Невыходов на работу: {self.absence_from_work}. По календарю ({self.need_to_work_days} смен / {self.need_to_work_on_time} ч:мин). "
f"Фактически ({self.worked_days} смен / {self.worked_time} ч:мин) Отклонение ({self.deviation_of_day} смен / {self.deviation_of_time} ч:мин)"
)
|
2b9e2c7ea55d63f10d6ecdfbe255a1064e52c693
|
8ca19f1a31070738b376c0370c4bebf6b7efcb43
|
/examples/sharepoint/files/get_properties.py
|
47f86cd93094ef89b8a9bf17b9e54ce7d9aeed23
|
[
"MIT"
] |
permissive
|
vgrem/Office365-REST-Python-Client
|
2ef153d737c6ed5445ba1e446aeaec39c4ef4ed3
|
cbd245d1af8d69e013c469cfc2a9851f51c91417
|
refs/heads/master
| 2023-09-02T14:20:40.109462
| 2023-08-31T19:14:05
| 2023-08-31T19:14:05
| 51,305,798
| 1,006
| 326
|
MIT
| 2023-08-28T05:38:02
| 2016-02-08T15:24:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,185
|
py
|
get_properties.py
|
import json
from office365.sharepoint.client_context import ClientContext
from tests import test_team_site_url, test_client_credentials
ctx = ClientContext(test_team_site_url).with_credentials(test_client_credentials)
file_url = '/sites/team/Shared Documents/big_buck_bunny.mp4'
file = ctx.web.get_file_by_server_relative_url(file_url).get().execute_query()
#file = ctx.web.get_file_by_server_relative_url(file_url).expand(["ModifiedBy"]).get().execute_query()
#file = ctx.web.get_file_by_server_relative_url(file_url).expand(["ListItemAllFields"]).get().execute_query()
# print all file properties
#print(json.dumps(file.properties, indent=4))
# or directly via object properties
print("File size: ", file.length)
print("File name: ", file.name)
#print("File modified by: {0}".format(file.modified_by.properties.get('UserPrincipalName')))
#print("File modified by: {0}".format(file.listItemAllFields))
if file.properties.get('CheckOutType') == 0:
print("The file is checked out for editing on the server")
elif file.properties.get('CheckOutType') == 1:
print("The file is checked out for editing on the local computer.")
else:
print("The file is not checked out.")
|
341dfa3872468426cc8c0b526c251ccac593f65a
|
b049a961f100444dde14599bab06a0a4224d869b
|
/sdk/python/pulumi_azure_native/storsimple/v20170601/get_backup_policy.py
|
acf9365dc6fe5aa1dfd6b011cf1381040834fee7
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-azure-native
|
b390c88beef8381f9a71ab2bed5571e0dd848e65
|
4c499abe17ec6696ce28477dde1157372896364e
|
refs/heads/master
| 2023-08-30T08:19:41.564780
| 2023-08-28T19:29:04
| 2023-08-28T19:29:04
| 172,386,632
| 107
| 29
|
Apache-2.0
| 2023-09-14T13:17:00
| 2019-02-24T20:30:21
|
Python
|
UTF-8
|
Python
| false
| false
| 8,830
|
py
|
get_backup_policy.py
|
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetBackupPolicyResult',
'AwaitableGetBackupPolicyResult',
'get_backup_policy',
'get_backup_policy_output',
]
@pulumi.output_type
class GetBackupPolicyResult:
"""
The backup policy.
"""
def __init__(__self__, backup_policy_creation_type=None, id=None, kind=None, last_backup_time=None, name=None, next_backup_time=None, scheduled_backup_status=None, schedules_count=None, ssm_host_name=None, type=None, volume_ids=None):
if backup_policy_creation_type and not isinstance(backup_policy_creation_type, str):
raise TypeError("Expected argument 'backup_policy_creation_type' to be a str")
pulumi.set(__self__, "backup_policy_creation_type", backup_policy_creation_type)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if last_backup_time and not isinstance(last_backup_time, str):
raise TypeError("Expected argument 'last_backup_time' to be a str")
pulumi.set(__self__, "last_backup_time", last_backup_time)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if next_backup_time and not isinstance(next_backup_time, str):
raise TypeError("Expected argument 'next_backup_time' to be a str")
pulumi.set(__self__, "next_backup_time", next_backup_time)
if scheduled_backup_status and not isinstance(scheduled_backup_status, str):
raise TypeError("Expected argument 'scheduled_backup_status' to be a str")
pulumi.set(__self__, "scheduled_backup_status", scheduled_backup_status)
if schedules_count and not isinstance(schedules_count, float):
raise TypeError("Expected argument 'schedules_count' to be a float")
pulumi.set(__self__, "schedules_count", schedules_count)
if ssm_host_name and not isinstance(ssm_host_name, str):
raise TypeError("Expected argument 'ssm_host_name' to be a str")
pulumi.set(__self__, "ssm_host_name", ssm_host_name)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if volume_ids and not isinstance(volume_ids, list):
raise TypeError("Expected argument 'volume_ids' to be a list")
pulumi.set(__self__, "volume_ids", volume_ids)
@property
@pulumi.getter(name="backupPolicyCreationType")
def backup_policy_creation_type(self) -> str:
"""
The backup policy creation type. Indicates whether this was created through SaaS or through StorSimple Snapshot Manager.
"""
return pulumi.get(self, "backup_policy_creation_type")
@property
@pulumi.getter
def id(self) -> str:
"""
The path ID that uniquely identifies the object.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
The Kind of the object. Currently only Series8000 is supported
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="lastBackupTime")
def last_backup_time(self) -> str:
"""
The time of the last backup for the backup policy.
"""
return pulumi.get(self, "last_backup_time")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the object.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nextBackupTime")
def next_backup_time(self) -> str:
"""
The time of the next backup for the backup policy.
"""
return pulumi.get(self, "next_backup_time")
@property
@pulumi.getter(name="scheduledBackupStatus")
def scheduled_backup_status(self) -> str:
"""
Indicates whether at least one of the schedules in the backup policy is active or not.
"""
return pulumi.get(self, "scheduled_backup_status")
@property
@pulumi.getter(name="schedulesCount")
def schedules_count(self) -> float:
"""
The count of schedules the backup policy contains.
"""
return pulumi.get(self, "schedules_count")
@property
@pulumi.getter(name="ssmHostName")
def ssm_host_name(self) -> str:
"""
If the backup policy was created by StorSimple Snapshot Manager, then this field indicates the hostname of the StorSimple Snapshot Manager.
"""
return pulumi.get(self, "ssm_host_name")
@property
@pulumi.getter
def type(self) -> str:
"""
The hierarchical type of the object.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="volumeIds")
def volume_ids(self) -> Sequence[str]:
"""
The path IDs of the volumes which are part of the backup policy.
"""
return pulumi.get(self, "volume_ids")
class AwaitableGetBackupPolicyResult(GetBackupPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetBackupPolicyResult(
backup_policy_creation_type=self.backup_policy_creation_type,
id=self.id,
kind=self.kind,
last_backup_time=self.last_backup_time,
name=self.name,
next_backup_time=self.next_backup_time,
scheduled_backup_status=self.scheduled_backup_status,
schedules_count=self.schedules_count,
ssm_host_name=self.ssm_host_name,
type=self.type,
volume_ids=self.volume_ids)
def get_backup_policy(backup_policy_name: Optional[str] = None,
device_name: Optional[str] = None,
manager_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetBackupPolicyResult:
"""
Gets the properties of the specified backup policy name.
:param str backup_policy_name: The name of backup policy to be fetched.
:param str device_name: The device name
:param str manager_name: The manager name
:param str resource_group_name: The resource group name
"""
__args__ = dict()
__args__['backupPolicyName'] = backup_policy_name
__args__['deviceName'] = device_name
__args__['managerName'] = manager_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:storsimple/v20170601:getBackupPolicy', __args__, opts=opts, typ=GetBackupPolicyResult).value
return AwaitableGetBackupPolicyResult(
backup_policy_creation_type=pulumi.get(__ret__, 'backup_policy_creation_type'),
id=pulumi.get(__ret__, 'id'),
kind=pulumi.get(__ret__, 'kind'),
last_backup_time=pulumi.get(__ret__, 'last_backup_time'),
name=pulumi.get(__ret__, 'name'),
next_backup_time=pulumi.get(__ret__, 'next_backup_time'),
scheduled_backup_status=pulumi.get(__ret__, 'scheduled_backup_status'),
schedules_count=pulumi.get(__ret__, 'schedules_count'),
ssm_host_name=pulumi.get(__ret__, 'ssm_host_name'),
type=pulumi.get(__ret__, 'type'),
volume_ids=pulumi.get(__ret__, 'volume_ids'))
@_utilities.lift_output_func(get_backup_policy)
def get_backup_policy_output(backup_policy_name: Optional[pulumi.Input[str]] = None,
device_name: Optional[pulumi.Input[str]] = None,
manager_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetBackupPolicyResult]:
"""
Gets the properties of the specified backup policy name.
:param str backup_policy_name: The name of backup policy to be fetched.
:param str device_name: The device name
:param str manager_name: The manager name
:param str resource_group_name: The resource group name
"""
...
|
2e7743bc576f9c88579d9a279412464a52871c94
|
1aaddad1e1c99c222f6a9ba4907a535e0e1bbbae
|
/broadlink/exceptions.py
|
2343ad6ed79158238f3bb8523da0cea188791dfe
|
[
"MIT"
] |
permissive
|
mjg59/python-broadlink
|
6b07e721637e1e487bdfdf6c6d8265e00dde01c3
|
3c183eaaef6cbaf9c1154b232116bc130cd2113f
|
refs/heads/master
| 2023-08-15T22:59:01.997876
| 2022-11-20T18:48:08
| 2022-11-20T18:48:08
| 68,303,993
| 1,323
| 552
|
MIT
| 2023-09-01T07:28:45
| 2016-09-15T15:00:33
|
Python
|
UTF-8
|
Python
| false
| false
| 4,472
|
py
|
exceptions.py
|
"""Exceptions for Broadlink devices."""
import collections
import struct
class BroadlinkException(Exception):
"""Base class common to all Broadlink exceptions."""
def __init__(self, *args, **kwargs):
"""Initialize the exception."""
super().__init__(*args, **kwargs)
if len(args) >= 2:
self.errno = args[0]
self.strerror = ": ".join(str(arg) for arg in args[1:])
elif len(args) == 1:
self.errno = None
self.strerror = str(args[0])
else:
self.errno = None
self.strerror = ""
def __str__(self):
"""Return str(self)."""
if self.errno is not None:
return "[Errno %s] %s" % (self.errno, self.strerror)
return self.strerror
def __eq__(self, other):
"""Return self==value."""
# pylint: disable=unidiomatic-typecheck
return type(self) == type(other) and self.args == other.args
def __hash__(self):
"""Return hash(self)."""
return hash((type(self), self.args))
class MultipleErrors(BroadlinkException):
"""Multiple errors."""
def __init__(self, *args, **kwargs):
"""Initialize the exception."""
errors = args[0][:] if args else []
counter = collections.Counter(errors)
strerror = "Multiple errors occurred: %s" % counter
super().__init__(strerror, **kwargs)
self.errors = errors
def __repr__(self):
"""Return repr(self)."""
return "MultipleErrors(%r)" % self.errors
def __str__(self):
"""Return str(self)."""
return self.strerror
class AuthenticationError(BroadlinkException):
"""Authentication error."""
class AuthorizationError(BroadlinkException):
"""Authorization error."""
class CommandNotSupportedError(BroadlinkException):
"""Command not supported error."""
class ConnectionClosedError(BroadlinkException):
"""Connection closed error."""
class StructureAbnormalError(BroadlinkException):
"""Structure abnormal error."""
class DeviceOfflineError(BroadlinkException):
"""Device offline error."""
class ReadError(BroadlinkException):
"""Read error."""
class SendError(BroadlinkException):
"""Send error."""
class SSIDNotFoundError(BroadlinkException):
"""SSID not found error."""
class StorageError(BroadlinkException):
"""Storage error."""
class WriteError(BroadlinkException):
"""Write error."""
class NetworkTimeoutError(BroadlinkException):
"""Network timeout error."""
class DataValidationError(BroadlinkException):
"""Data validation error."""
class UnknownError(BroadlinkException):
"""Unknown error."""
BROADLINK_EXCEPTIONS = {
# Firmware-related errors are generated by the device.
-1: (AuthenticationError, "Authentication failed"),
-2: (ConnectionClosedError, "You have been logged out"),
-3: (DeviceOfflineError, "The device is offline"),
-4: (CommandNotSupportedError, "Command not supported"),
-5: (StorageError, "The device storage is full"),
-6: (StructureAbnormalError, "Structure is abnormal"),
-7: (AuthorizationError, "Control key is expired"),
-8: (SendError, "Send error"),
-9: (WriteError, "Write error"),
-10: (ReadError, "Read error"),
-11: (SSIDNotFoundError, "SSID could not be found in AP configuration"),
# SDK related errors are generated by this module.
-2040: (DataValidationError, "Device information is not intact"),
-4000: (NetworkTimeoutError, "Network timeout"),
-4007: (DataValidationError, "Received data packet length error"),
-4008: (DataValidationError, "Received data packet check error"),
-4009: (DataValidationError, "Received data packet information type error"),
-4010: (DataValidationError, "Received encrypted data packet length error"),
-4011: (DataValidationError, "Received encrypted data packet check error"),
-4012: (AuthorizationError, "Device control ID error"),
}
def exception(err_code: int) -> BroadlinkException:
"""Return exception corresponding to an error code."""
try:
exc, msg = BROADLINK_EXCEPTIONS[err_code]
return exc(err_code, msg)
except KeyError:
return UnknownError(err_code, "Unknown error")
def check_error(error: bytes) -> None:
"""Raise exception if an error occurred."""
error_code = struct.unpack("h", error)[0]
if error_code:
raise exception(error_code)
|
8f250f0e9c470597164187c61f21212c7d221965
|
dcd772f567ef8a8a1173a9f437cd68f211fb9362
|
/tests/framework/user_guide/optimizing/Constrain/constraint.py
|
e1879a739d5fe988f754b6cee5ffaebfd3a71aba
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
idaholab/raven
|
39cdce98ad916c638399232cdc01a9be00e200a2
|
2b16e7aa3325fe84cab2477947a951414c635381
|
refs/heads/devel
| 2023-08-31T08:40:16.653099
| 2023-08-29T16:21:51
| 2023-08-29T16:21:51
| 85,989,537
| 201
| 126
|
Apache-2.0
| 2023-09-13T21:55:43
| 2017-03-23T19:29:27
|
C++
|
UTF-8
|
Python
| false
| false
| 104
|
py
|
constraint.py
|
def constrain(self):
if (self.x-1.)**2 + self.y**2 < 1.0:
return False
else:
return True
|
271eeafd5d402c6a8b9d5c3ac91548d56e54dde1
|
6ba5116e37f67c613c855efd33a1615c8c851054
|
/openaerostruct/structures/vonmises_wingbox.py
|
052bfbfef2a3392775bd81c84a5e61aa4c7f8469
|
[
"Apache-2.0"
] |
permissive
|
mdolab/OpenAeroStruct
|
08dc28fc12df9927c78469ea6188f4cee87a9d5b
|
f2f974fb06a34244a3bed6f99b486769256353fe
|
refs/heads/main
| 2023-08-04T02:04:58.875285
| 2023-07-29T19:31:22
| 2023-07-29T19:31:22
| 53,821,266
| 154
| 100
|
Apache-2.0
| 2023-07-29T19:31:23
| 2016-03-14T02:37:30
|
Python
|
UTF-8
|
Python
| false
| false
| 6,588
|
py
|
vonmises_wingbox.py
|
import numpy as np
import openmdao.api as om
from openaerostruct.structures.utils import norm, unit
class VonMisesWingbox(om.ExplicitComponent):
"""Compute the von Mises stresses for each element.
See Chauhan et al. (https://doi.org/10.1007/978-3-319-97773-7_38) for more.
Parameters
----------
nodes[ny, 3] : numpy array
Flattened array with coordinates for each FEM node.
disp[ny, 6] : numpy array
Displacements of each FEM node.
Qz[ny-1] : numpy array
First moment of area above the neutral axis parallel to the local
z-axis (for each wingbox segment).
J[ny-1] : numpy array
Torsion constants for each wingbox segment.
A_enc[ny-1] : numpy array
Cross-sectional enclosed area (measured using the material midlines) of
each wingbox segment.
spar_thickness[ny-1] : numpy array
Material thicknesses of the front and rear spars for each wingbox segment.
htop[ny-1] : numpy array
Distance to the point on the top skin that is the farthest away from
the local-z neutral axis (for each wingbox segment).
hbottom[ny-1] : numpy array
Distance to the point on the bottom skin that is the farthest away from
the local-z neutral axis (for each wingbox segment).
hfront[ny-1] : numpy array
Distance to the point on the front spar that is the farthest away from
the local-y neutral axis (for each wingbox segment).
hrear[ny-1] : numpy array
Distance to the point on the rear spar that is the farthest away
from the local-y neutral axis (for each wingbox segment).
Returns
-------
vonmises[ny-1, 4] : numpy array
von Mises stresses for 4 stress combinations for each FEM element.
"""
def initialize(self):
self.options.declare("surface", types=dict)
def setup(self):
self.surface = surface = self.options["surface"]
self.ny = surface["mesh"].shape[1]
self.add_input("nodes", val=np.zeros((self.ny, 3)), units="m")
self.add_input("disp", val=np.zeros((self.ny, 6)), units="m")
self.add_input("Qz", val=np.zeros((self.ny - 1)), units="m**3")
self.add_input("J", val=np.zeros((self.ny - 1)), units="m**4")
self.add_input("A_enc", val=np.zeros((self.ny - 1)), units="m**2")
self.add_input("spar_thickness", val=np.zeros((self.ny - 1)), units="m")
self.add_input("htop", val=np.zeros((self.ny - 1)), units="m")
self.add_input("hbottom", val=np.zeros((self.ny - 1)), units="m")
self.add_input("hfront", val=np.zeros((self.ny - 1)), units="m")
self.add_input("hrear", val=np.zeros((self.ny - 1)), units="m")
self.add_output("vonmises", val=np.zeros((self.ny - 1, 4)), units="N/m**2")
self.E = surface["E"]
self.G = surface["G"]
self.tssf = surface["strength_factor_for_upper_skin"]
self.declare_partials("*", "*", method="cs")
def compute(self, inputs, outputs):
disp = inputs["disp"]
nodes = inputs["nodes"]
A_enc = inputs["A_enc"]
Qy = inputs["Qz"]
J = inputs["J"]
htop = inputs["htop"]
hbottom = inputs["hbottom"]
hfront = inputs["hfront"]
hrear = inputs["hrear"]
spar_thickness = inputs["spar_thickness"]
vonmises = outputs["vonmises"]
# Only use complex type for these arrays if we're using cs to check derivs
dtype = type(disp[0, 0])
T = np.zeros((3, 3), dtype=dtype)
x_gl = np.array([1, 0, 0], dtype=dtype)
E = self.E
G = self.G
num_elems = self.ny - 1
for ielem in range(num_elems):
P0 = nodes[ielem, :]
P1 = nodes[ielem + 1, :]
L = norm(P1 - P0)
x_loc = unit(P1 - P0)
y_loc = unit(np.cross(x_loc, x_gl))
z_loc = unit(np.cross(x_loc, y_loc))
T[0, :] = x_loc
T[1, :] = y_loc
T[2, :] = z_loc
u0x, u0y, u0z = T.dot(disp[ielem, :3])
r0x, r0y, r0z = T.dot(disp[ielem, 3:])
u1x, u1y, u1z = T.dot(disp[ielem + 1, :3])
r1x, r1y, r1z = T.dot(disp[ielem + 1, 3:])
# this is stress = modulus * strain; positive is tensile
axial_stress = E * (u1x - u0x) / L
# this is Torque / (2 * thickness_min * Area_enclosed)
torsion_stress = G * J[ielem] / L * (r1x - r0x) / 2 / spar_thickness[ielem] / A_enc[ielem]
# this is moment * h / I
top_bending_stress = E / (L**2) * (6 * u0y + 2 * r0z * L - 6 * u1y + 4 * r1z * L) * htop[ielem]
# this is moment * h / I
bottom_bending_stress = -E / (L**2) * (6 * u0y + 2 * r0z * L - 6 * u1y + 4 * r1z * L) * hbottom[ielem]
# this is moment * h / I
front_bending_stress = -E / (L**2) * (-6 * u0z + 2 * r0y * L + 6 * u1z + 4 * r1y * L) * hfront[ielem]
# this is moment * h / I
rear_bending_stress = E / (L**2) * (-6 * u0z + 2 * r0y * L + 6 * u1z + 4 * r1y * L) * hrear[ielem]
# shear due to bending (VQ/It) note: the I used to get V cancels the other I
vertical_shear = (
E
/ (L**3)
* (-12 * u0y - 6 * r0z * L + 12 * u1y - 6 * r1z * L)
* Qy[ielem]
/ (2 * spar_thickness[ielem])
)
# print("==========",ielem,"================")
# print("vertical_shear", vertical_shear)
# print("top",top_bending_stress)
# print("bottom",bottom_bending_stress)
# print("front",front_bending_stress)
# print("rear",rear_bending_stress)
# print("axial", axial_stress)
# print("torsion", torsion_stress)
# The 4 stress combinations:
vonmises[ielem, 0] = (
np.sqrt((top_bending_stress + rear_bending_stress + axial_stress) ** 2 + 3 * torsion_stress**2)
/ self.tssf
)
vonmises[ielem, 1] = np.sqrt(
(bottom_bending_stress + front_bending_stress + axial_stress) ** 2 + 3 * torsion_stress**2
)
vonmises[ielem, 2] = np.sqrt(
(front_bending_stress + axial_stress) ** 2 + 3 * (torsion_stress - vertical_shear) ** 2
)
vonmises[ielem, 3] = (
np.sqrt((rear_bending_stress + axial_stress) ** 2 + 3 * (torsion_stress + vertical_shear) ** 2)
/ self.tssf
)
|
bb12d06f9700d729398f1bc8b01d3d8b4b6ac99e
|
302ce5ab1045ee93845608c96580c63d54d730af
|
/src/spikeinterface/extractors/tests/test_toy_example.py
|
448494f975d7e644fa4b2306b89a48b67035411a
|
[
"MIT"
] |
permissive
|
SpikeInterface/spikeinterface
|
f900b62720860b2881d2e6b5fa4441e0e560f625
|
ee2237b3f5ce2347b2ec9df90e97b0ee6c738dcf
|
refs/heads/main
| 2023-09-02T11:27:54.687021
| 2023-09-01T13:48:29
| 2023-09-01T13:48:29
| 196,581,117
| 295
| 133
|
MIT
| 2023-09-14T19:12:16
| 2019-07-12T13:07:46
|
Python
|
UTF-8
|
Python
| false
| false
| 588
|
py
|
test_toy_example.py
|
import pytest
import numpy as np
from spikeinterface.extractors import toy_example
def test_toy_example():
rec, sorting = toy_example(num_segments=2, num_units=10)
assert rec.get_num_segments() == 2
assert sorting.get_num_segments() == 2
assert sorting.get_num_units() == 10
rec, sorting = toy_example(num_segments=1, num_channels=16, num_columns=2)
assert rec.get_num_segments() == 1
assert sorting.get_num_segments() == 1
print(rec)
print(sorting)
probe = rec.get_probe()
print(probe)
if __name__ == "__main__":
test_toy_example()
|
c7d1d42ea5732bd7208293e45c88f2972547974a
|
551eabacd926e7f9146177ef15664424bd4e3c9b
|
/numexpr/__init__.py
|
7946f8522d7c5e536d6def85e69fd7b84aa0373d
|
[
"MIT"
] |
permissive
|
pydata/numexpr
|
69b0dc6e1bdf6450081ccee687f930616ebbf842
|
7d377e38f1c69f629a7e2b536761e7c0373a8b2d
|
refs/heads/master
| 2023-09-05T04:08:51.739737
| 2023-08-18T19:06:13
| 2023-08-18T19:06:13
| 14,829,536
| 1,938
| 235
|
MIT
| 2023-09-13T09:14:04
| 2013-11-30T22:33:48
|
Python
|
UTF-8
|
Python
| false
| false
| 2,279
|
py
|
__init__.py
|
###################################################################
# Numexpr - Fast numerical array expression evaluator for NumPy.
#
# License: MIT
# Author: See AUTHORS.txt
#
# See LICENSE.txt and LICENSES/*.txt for details about copyright and
# rights to use.
####################################################################
"""
Numexpr is a fast numerical expression evaluator for NumPy. With it,
expressions that operate on arrays (like "3*a+4*b") are accelerated
and use less memory than doing the same calculation in Python.
See:
https://github.com/pydata/numexpr
for more info about it.
"""
from numexpr.interpreter import MAX_THREADS, use_vml, __BLOCK_SIZE1__
is_cpu_amd_intel = False # DEPRECATION WARNING: WILL BE REMOVED IN FUTURE RELEASE
# cpuinfo imports were moved into the test submodule function that calls them
# to improve import times.
import os, os.path
import platform
from numexpr.expressions import E
from numexpr.necompiler import (NumExpr, disassemble, evaluate, re_evaluate,
validate)
from numexpr.utils import (_init_num_threads,
get_vml_version, set_vml_accuracy_mode, set_vml_num_threads,
set_num_threads, get_num_threads,
detect_number_of_cores, detect_number_of_threads)
# Detect the number of cores
ncores = detect_number_of_cores()
# Initialize the number of threads to be used
nthreads = _init_num_threads()
# The default for VML is 1 thread (see #39)
# set_vml_num_threads(1)
from . import version
__version__ = version.version
def print_versions():
"""Print the versions of software that numexpr relies on."""
try:
import numexpr.tests
return numexpr.tests.print_versions()
except ImportError:
# To maintain Python 2.6 compatibility we have simple error handling
raise ImportError('`numexpr.tests` could not be imported, likely it was excluded from the distribution.')
def test(verbosity=1):
"""Run all the tests in the test suite."""
try:
import numexpr.tests
return numexpr.tests.test(verbosity=verbosity)
except ImportError:
# To maintain Python 2.6 compatibility we have simple error handling
raise ImportError('`numexpr.tests` could not be imported, likely it was excluded from the distribution.')
|
b5a02429f292fe82fc4834988135de72d6eb923f
|
5b6ba0f288b1e2ac236af846a9bf546a63228476
|
/mmtbx/refinement/targets.py
|
0c2af359f1d417f05b55614b93fa26884769afe1
|
[
"BSD-3-Clause-LBNL"
] |
permissive
|
cctbx/cctbx_project
|
5b547b416cadbdf95cca21dace9f54272a08d98a
|
7f4dfb6c873fd560920f697cbfd8a5ff6eed82fa
|
refs/heads/master
| 2023-08-17T17:44:05.077010
| 2023-08-16T22:40:22
| 2023-08-16T22:40:22
| 39,508,026
| 206
| 131
|
NOASSERTION
| 2023-09-14T17:12:55
| 2015-07-22T13:36:27
|
Python
|
UTF-8
|
Python
| false
| false
| 13,238
|
py
|
targets.py
|
from __future__ import absolute_import, division, print_function
import libtbx.load_env
if(not libtbx.env.has_module(name="phaser")):
phaser = None
else:
import phaser.phenix_adaptors.sad_target
from cctbx.array_family import flex
from cctbx import xray
import boost_adaptbx.boost.python as bp
from libtbx.utils import Sorry, user_plus_sys_time
from cctbx.eltbx.xray_scattering import wk1995
from cctbx import adptbx
from libtbx import adopt_init_args
ext = bp.import_ext("mmtbx_f_model_ext")
time_bulk_solvent_and_scale = 0.0
time_mask = 0.0
time_f_calc = 0.0
time_alpha_beta = 0.0
time_target = 0.0
time_gradients_wrt_atomic_parameters = 0.0
time_fmodel_core_data = 0.0
time_r_factors = 0.0
time_phase_errors = 0.0
time_foms = 0.0
time_show = 0.0
class target_attributes(object):
def __init__(self, family, specialization=None):
adopt_init_args(self, locals())
assert self.validate()
def validate(self):
if (self.family == "lsm"):
self.family = "ls"
self.pseudo_ml = True
else:
self.pseudo_ml = False
if (self.family == "ls"):
return self.specialization is None
elif (self.family == "ml"):
return self.specialization in [None, "hl", "sad", "i", "f"]
return False
def requires_experimental_phases(self):
return (self.family == "ml" and self.specialization == "hl")
target_names = {
"ls_wunit_k1": target_attributes("ls"),
"ls_wunit_kunit": target_attributes("ls"),
"ls_wunit_k1_fixed": target_attributes("ls"),
"ls_wunit_k1ask3_fixed": target_attributes("ls"),
"ls_wexp_k1": target_attributes("ls"),
"ls_wexp_kunit": target_attributes("ls"),
"ls_wff_k1": target_attributes("ls"),
"ls_wff_kunit": target_attributes("ls"),
"ls_wff_k1_fixed": target_attributes("ls"),
"ls_wff_k1ask3_fixed": target_attributes("ls"),
"lsm_k1": target_attributes("lsm"),
"lsm_kunit": target_attributes("lsm"),
"lsm_k1_fixed": target_attributes("lsm"),
"lsm_k1ask3_fixed": target_attributes("lsm"),
"ml": target_attributes("ml", "f"),
"mli": target_attributes("ml","i"),
"mlhl": target_attributes("ml", "hl"),
"ml_sad": target_attributes("ml", "sad")}
class phaser_sad_target_functor(object):
def __init__(self,
f_obs,
r_free_flags,
xray_structure,
f_calc,
target_memory):
self.f_obs = f_obs
self.r_free_flags = r_free_flags
self.xray_structure = xray_structure
self.f_calc = f_calc
if (target_memory is None): # XXX could be more elegant!
den = self.f_obs.data()
num = flex.abs(self.f_calc.data())
denom = flex.sum(num*den)
numerator = flex.sum(den*den)
if (denom == 0):
raise RuntimeError("Zero denominator in scale calculation.")
previous_overall_scaleK = numerator/denom
previous_overall_scaleU = 0.
previous_variances = None
adaptor = phaser.phenix_adaptors.sad_target.data_adaptor(
f_obs=f_obs,
r_free_flags=r_free_flags,
verbose=True)
self.refine_sad_object = adaptor.target(
xray_structure=xray_structure,
previous_overall_scaleK=previous_overall_scaleK,
previous_overall_scaleU=previous_overall_scaleU,
previous_variances=previous_variances)
self.refine_sad_object.set_f_calc(f_calc=f_calc)
target_memory = self.target_memory()
assert len(target_memory) == 4
assert target_memory[0] == "ml_sad"
previous_overall_scaleK = target_memory[1]
previous_overall_scaleU = target_memory[2]
previous_variances = target_memory[3]
adaptor = phaser.phenix_adaptors.sad_target.data_adaptor(
f_obs=f_obs,
r_free_flags=r_free_flags,
verbose=True)
self.refine_sad_object = adaptor.target(
xray_structure=xray_structure,
previous_overall_scaleK=previous_overall_scaleK,
previous_overall_scaleU=previous_overall_scaleU,
previous_variances=previous_variances)
self.refine_sad_object.set_f_calc(f_calc=f_calc)
self.refine_sad_object.reject_outliers()
def prepare_for_minimization(self):
rso = self.refine_sad_object
rso.refine_variance_terms()
self.refined_overall_b_iso = adptbx.u_as_b(
rso.refine_sad_instance.get_refined_scaleU())
rso.refine_sad_instance.set_scaleU(0.)
def target_memory(self):
rsi = self.refine_sad_object.refine_sad_instance
return ("ml_sad", rsi.get_refined_scaleK(),
rsi.get_refined_scaleU(),rsi.get_variance_array())
def __call__(self, f_calc, compute_gradients):
self.refine_sad_object.set_f_calc(f_calc=f_calc)
rso = self.refine_sad_object
target_work = rso.functional(use_working_set=True)
da_db, daa_dbb_dab = rso.derivatives(curvs=True)
target_test = rso.functional(use_working_set=False)
return xray.targets_common_results(
target_per_reflection=flex.double(),
target_work=target_work,
target_test=target_test,
gradients_work=da_db.data(),
hessians_work=daa_dbb_dab.data())
class target_functor(object):
def __init__(self, manager, alpha_beta=None):
self.manager = manager
target_name = manager.target_name
assert target_name is not None
attr = manager.target_attributes()
if (target_name == "ml_sad"):
if (phaser is None):
raise Sorry(
"ml_sad target requires phaser extension, which is not available"
" in this installation.")
self.core = phaser_sad_target_functor(
f_obs=manager.f_obs(),
r_free_flags=manager.r_free_flags(),
xray_structure=manager.xray_structure,
f_calc=manager.f_model(),
target_memory=manager._target_memory)
manager._target_memory = self.core.target_memory()
elif (attr.family == "ml"):
if (attr.requires_experimental_phases()):
experimental_phases = manager.hl_coeffs()
else:
experimental_phases = None
if(alpha_beta is None): alpha_beta = manager.alpha_beta()
self.core = xray.target_functors.max_like(
f_obs = manager.f_obs(),
r_free_flags = manager.r_free_flags(),
experimental_phases = experimental_phases,
alpha_beta = alpha_beta,
scale_factor = manager.scale_ml_wrapper(),
epsilons = manager.epsilons,
spacialization = attr.specialization,
integration_step_size = 5.0)
else:
if (attr.pseudo_ml):
f_obs, weights = manager.f_star_w_star()
weights = weights.data()
if (target_name == "lsm_k1"):
scale_factor = 0
elif (target_name == "lsm_k1ask3_fixed"):
scale_factor = manager.scale_k3_w()
elif (target_name == "lsm_k1_fixed"):
scale_factor = manager.scale_k1_w()
elif (target_name == "lsm_kunit"):
scale_factor = 1.0
else:
raise RuntimeError
else:
f_obs = manager.f_obs()
if (target_name.startswith("ls_wunit_")):
weights = flex.double(f_obs.data().size(), 1.0)
if (target_name == "ls_wunit_k1"):
scale_factor = 0
elif (target_name == "ls_wunit_k1_fixed"):
scale_factor = manager.scale_k1_w()
elif (target_name == "ls_wunit_kunit"):
scale_factor = 1.0
elif (target_name == "ls_wunit_k1ask3_fixed"):
scale_factor = manager.scale_k3_w()
else:
raise RuntimeError
elif (target_name.startswith("ls_wexp_")):
weights = ls_sigma_weights(f_obs)
if (target_name == "ls_wexp_k1"):
scale_factor = 0
elif (target_name == "ls_wexp_kunit"):
scale_factor = 1.0
else:
raise RuntimeError
elif (target_name.startswith("ls_wff_")):
weights = ls_ff_weights(f_obs, "N", 25.0)
if (target_name == "ls_wff_k1"):
scale_factor = 0
elif (target_name == "ls_wff_k1_fixed"):
scale_factor = manager.scale_k1_w()
elif (target_name == "ls_wff_k1ask3_fixed"):
scale_factor = manager.scale_k3_w()
elif (target_name == "ls_wff_kunit"):
scale_factor = 1.0
else:
raise RuntimeError
else:
raise RuntimeError
self.core = xray.target_functors.least_squares(
compute_scale_using_all_data=False,
f_obs=f_obs,
r_free_flags=manager.r_free_flags(),
weights=weights,
scale_factor=scale_factor)
def prepare_for_minimization(self):
if (self.manager.target_name == "ml_sad"):
self.core.prepare_for_minimization()
def target_function_is_invariant_under_allowed_origin_shifts(self):
return (self.manager.target_name != "mlhl")
def __call__(self, compute_gradients=False):
try:
result = target_result(
manager=self.manager,
core_result=self.core(
f_calc=self.manager.f_model(),
compute_gradients=compute_gradients))
except RuntimeError as e:
if str(e) == "mli target is not implemented (yet)!":
raise Sorry("mli target is not implemented (yet). Pick another target.")
else:
raise e
target_memory = getattr(self.core, "target_memory", None)
if (target_memory is not None):
self.manager._target_memory = target_memory()
return result
class target_result_mixin(object):
def gradients_wrt_atomic_parameters(self,
selection=None,
site=False,
u_iso=False,
u_aniso=False,
occupancy=False,
tan_b_iso_max=None,
u_iso_refinable_params=None):
if (tan_b_iso_max is not None and tan_b_iso_max != 0):
raise RuntimeError("Not implemented:\n"
" See CVS revision 1.87, 2007/03/03 01:53:05\n"
" method: manager.gradient_wrt_atomic_parameters()")
global time_gradients_wrt_atomic_parameters
timer = user_plus_sys_time()
manager = self.manager
xray_structure = manager.xray_structure
if (selection is not None):
xray_structure = xray_structure.select(selection)
d_target_d_f_calc = self.d_target_d_f_calc_work()
result = None
if (u_aniso):
result = manager.structure_factor_gradients_w(
u_iso_refinable_params=None,
d_target_d_f_calc=d_target_d_f_calc.data(),
xray_structure=xray_structure,
n_parameters=0,
miller_set=d_target_d_f_calc,
algorithm=manager.sfg_params.algorithm).d_target_d_u_cart()
elif(u_iso):
result = manager.structure_factor_gradients_w(
u_iso_refinable_params=None,
d_target_d_f_calc=d_target_d_f_calc.data(),
xray_structure=xray_structure,
n_parameters=0,
miller_set=d_target_d_f_calc,
algorithm=manager.sfg_params.algorithm).d_target_d_u_iso()
elif(occupancy):
result = manager.structure_factor_gradients_w(
u_iso_refinable_params=None,
d_target_d_f_calc=d_target_d_f_calc.data(),
xray_structure=xray_structure,
n_parameters=0,
miller_set=d_target_d_f_calc,
algorithm=manager.sfg_params.algorithm).d_target_d_occupancy()
else:
result = manager.structure_factor_gradients_w(
u_iso_refinable_params=u_iso_refinable_params,
d_target_d_f_calc=d_target_d_f_calc.data(),
xray_structure=xray_structure,
n_parameters=xray_structure.n_parameters(),
miller_set=d_target_d_f_calc,
algorithm=manager.sfg_params.algorithm)
time_gradients_wrt_atomic_parameters += timer.elapsed()
return result
def d_target_d_site_cart(self):
manager = self.manager
xray.set_scatterer_grad_flags(
scatterers=manager.xray_structure.scatterers(),
site=True)
return flex.vec3_double(
self.gradients_wrt_atomic_parameters().packed())
class target_result(target_result_mixin):
def __init__(self, manager, core_result):
self.manager = manager
self.core_result = core_result
def target_per_reflection(self):
return self.core_result.target_per_reflection()
def target_work(self):
return self.core_result.target_work()
def target_test(self):
return self.core_result.target_test()
def d_target_d_f_model_work(self):
return self.manager.f_obs_work().array(
data=self.core_result.gradients_work())
def d_target_d_f_calc_work(self):
return self.manager.f_obs_work().array(
data=self.core_result.gradients_work()
*self.manager.k_anisotropic_work()*self.manager.k_isotropic_work())
def ls_ff_weights(f_obs, atom, B):
d_star_sq_data = f_obs.d_star_sq().data()
table = wk1995(atom).fetch()
ff = table.at_d_star_sq(d_star_sq_data) * flex.exp(-B/4.0*d_star_sq_data)
weights = 1.0/flex.pow2(ff)
return weights
def ls_sigma_weights(f_obs):
if(f_obs.sigmas() is not None):
sigmas_squared = flex.pow2(f_obs.sigmas())
else:
sigmas_squared = flex.double(f_obs.data().size(), 1.0)
assert sigmas_squared.all_gt(0)
weights = 1 / sigmas_squared
return weights
|
cef53d89feed07e41ee5d844cef05fd57a206d43
|
4674b8088ffdf55905d44995f08a0792a3e4cd5c
|
/tests/hwsim/hwsim.py
|
e21c814f2e8e6928a61ac09d34b72d14884b2bdb
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
vanhoefm/krackattacks-scripts
|
41daca791638a92aa4cfa68a582e46119037560e
|
4b78669686f74efe664c6543b1b5b1616b22f902
|
refs/heads/research
| 2022-10-29T20:21:11.512335
| 2022-10-16T18:44:41
| 2022-10-16T18:44:41
| 107,408,514
| 2,184
| 577
|
NOASSERTION
| 2021-07-06T12:43:49
| 2017-10-18T12:58:08
|
C
|
UTF-8
|
Python
| false
| false
| 4,640
|
py
|
hwsim.py
|
#
# HWSIM generic netlink controller code
# Copyright (c) 2014 Intel Corporation
#
# Author: Johannes Berg <johannes.berg@intel.com>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import netlink, os
# constants
HWSIM_CMD_CREATE_RADIO = 4
HWSIM_CMD_DESTROY_RADIO = 5
HWSIM_ATTR_CHANNELS = 9
HWSIM_ATTR_RADIO_ID = 10
HWSIM_ATTR_SUPPORT_P2P_DEVICE = 14
HWSIM_ATTR_USE_CHANCTX = 15
# the controller class
class HWSimController(object):
def __init__(self):
self._conn = netlink.Connection(netlink.NETLINK_GENERIC)
self._fid = netlink.genl_controller.get_family_id('MAC80211_HWSIM')
def create_radio(self, n_channels=None, use_chanctx=False,
use_p2p_device=False):
attrs = []
if n_channels:
attrs.append(netlink.U32Attr(HWSIM_ATTR_CHANNELS, n_channels))
if use_chanctx:
attrs.append(netlink.FlagAttr(HWSIM_ATTR_USE_CHANCTX))
if use_p2p_device:
attrs.append(netlink.FlagAttr(HWSIM_ATTR_SUPPORT_P2P_DEVICE))
msg = netlink.GenlMessage(self._fid, HWSIM_CMD_CREATE_RADIO,
flags = netlink.NLM_F_REQUEST |
netlink.NLM_F_ACK,
attrs = attrs)
return msg.send_and_recv(self._conn).ret
def destroy_radio(self, radio_id):
attrs = [netlink.U32Attr(HWSIM_ATTR_RADIO_ID, radio_id)]
msg = netlink.GenlMessage(self._fid, HWSIM_CMD_DESTROY_RADIO,
flags = netlink.NLM_F_REQUEST |
netlink.NLM_F_ACK,
attrs = attrs)
msg.send_and_recv(self._conn)
class HWSimRadio(object):
def __init__(self, n_channels=None, use_chanctx=False,
use_p2p_device=False):
self._controller = HWSimController()
self._n_channels = n_channels
self._use_chanctx = use_chanctx
self._use_p2p_dev = use_p2p_device
def __enter__(self):
self._radio_id = self._controller.create_radio(
n_channels=self._n_channels,
use_chanctx=self._use_chanctx,
use_p2p_device=self._use_p2p_dev)
if self._radio_id < 0:
raise Exception("Failed to create radio (err:%d)" % self._radio_id)
try:
iface = os.listdir('/sys/class/mac80211_hwsim/hwsim%d/net/' % self._radio_id)[0]
except Exception,e:
self._controller.destroy_radio(self._radio_id)
raise e
return self._radio_id, iface
def __exit__(self, type, value, traceback):
self._controller.destroy_radio(self._radio_id)
def create(args):
print 'Created radio %d' % c.create_radio(n_channels=args.channels,
use_chanctx=args.chanctx)
def destroy(args):
print c.destroy_radio(args.radio)
if __name__ == '__main__':
import argparse
c = HWSimController()
parser = argparse.ArgumentParser(description='send hwsim control commands')
subparsers = parser.add_subparsers(help="Commands", dest='command')
parser_create = subparsers.add_parser('create', help='create a radio')
parser_create.add_argument('--channels', metavar='<number_of_channels>', type=int,
default=0,
help='Number of concurrent channels supported ' +
'by the radio. If not specified, the number ' +
'of channels specified in the ' +
'mac80211_hwsim.channels module parameter is ' +
'used')
parser_create.add_argument('--chanctx', action="store_true",
help='Use channel contexts, regardless of ' +
'whether the number of channels is 1 or ' +
'greater. By default channel contexts are ' +
'only used if the number of channels is ' +
'greater than 1.')
parser_create.set_defaults(func=create)
parser_destroy = subparsers.add_parser('destroy', help='destroy a radio')
parser_destroy.add_argument('radio', metavar='<radio>', type=int,
default=0,
help='The number of the radio to be ' +
'destroyed (i.e., 0 for phy0, 1 for phy1...)')
parser_destroy.set_defaults(func=destroy)
args = parser.parse_args()
args.func(args)
|
8b8474c018550ba69de560c56678301a60f9eff9
|
c19bcbc98555ef06276f9f0dcffc9ac35942a7c4
|
/tests/test_proc_net_route.py
|
177d58478cad83efc12c761b7c06497f453f4a7f
|
[
"MIT"
] |
permissive
|
kellyjonbrazil/jc
|
4e81a5421cd20be5965baf375f4a5671c2ef0410
|
4cd721be8595db52b620cc26cd455d95bf56b85b
|
refs/heads/master
| 2023-08-30T09:53:18.284296
| 2023-07-30T17:08:39
| 2023-07-30T17:08:39
| 215,404,927
| 6,278
| 185
|
MIT
| 2023-09-08T14:52:22
| 2019-10-15T22:04:52
|
Python
|
UTF-8
|
Python
| false
| false
| 1,281
|
py
|
test_proc_net_route.py
|
import os
import unittest
import json
from typing import Dict
import jc.parsers.proc_net_route
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
class MyTests(unittest.TestCase):
f_in: Dict = {}
f_json: Dict = {}
@classmethod
def setUpClass(cls):
fixtures = {
'proc_net_route': (
'fixtures/linux-proc/net_route',
'fixtures/linux-proc/net_route.json')
}
for file, filepaths in fixtures.items():
with open(os.path.join(THIS_DIR, filepaths[0]), 'r', encoding='utf-8') as a, \
open(os.path.join(THIS_DIR, filepaths[1]), 'r', encoding='utf-8') as b:
cls.f_in[file] = a.read()
cls.f_json[file] = json.loads(b.read())
def test_proc_net_route_nodata(self):
"""
Test 'proc_net_route' with no data
"""
self.assertEqual(jc.parsers.proc_net_route.parse('', quiet=True), [])
def test_proc_net_route(self):
"""
Test '/proc/net/route'
"""
self.assertEqual(jc.parsers.proc_net_route.parse(self.f_in['proc_net_route'], quiet=True),
self.f_json['proc_net_route'])
if __name__ == '__main__':
unittest.main()
|
0fbc2b6d137e24412a656515ef777211e749dcfd
|
ef3f32be7b34d7f3cbb166cd3f66200ef33f4268
|
/test/new_tests/test_admin_set_password.py
|
0e0d5d400edbeb5d670a4317b3ad359040187afe
|
[
"Apache-2.0"
] |
permissive
|
aerospike/aerospike-client-python
|
8fa67b82d0d699b5c06e5b408d4b06985e6b3935
|
03853b63b824da488f651e0a375a7ed90730ed8e
|
refs/heads/master
| 2023-08-23T15:39:01.169857
| 2023-08-21T19:12:16
| 2023-08-21T19:12:16
| 21,751,897
| 121
| 106
|
Apache-2.0
| 2023-09-14T21:34:29
| 2014-07-11T21:24:56
|
Python
|
UTF-8
|
Python
| false
| false
| 3,566
|
py
|
test_admin_set_password.py
|
# -*- coding: utf-8 -*-
import pytest
import time
from .test_base_class import TestBaseClass
from aerospike import exception as e
import aerospike
class TestSetPassword(TestBaseClass):
pytestmark = pytest.mark.skipif(
not TestBaseClass.auth_in_use(), reason="No user specified, may be not secured cluster."
)
def setup_method(self, method):
"""
Setup method
"""
config = TestBaseClass.get_connection_config()
TestSetPassword.Me = self
self.client = aerospike.client(config).connect(config["user"], config["password"])
try:
self.client.admin_drop_user("testsetpassworduser")
time.sleep(2)
except e.InvalidUser:
pass
try:
self.client.admin_create_user("testsetpassworduser", "aerospike", ["read"])
except e.UserExistsError:
pass
time.sleep(2)
self.delete_users = []
def teardown_method(self, method):
"""
Teardown method
"""
try:
self.client.admin_drop_user("testsetpassworduser")
time.sleep(2)
except e.InvalidUser:
pass
self.client.close()
def test_set_password_without_any_parameters(self):
with pytest.raises(TypeError) as typeError:
self.client.admin_set_password()
assert "argument 'user' (pos 1)" in str(typeError.value)
def test_set_password_with_proper_parameters(self):
user = "testsetpassworduser"
password = "newpassword"
status = self.client.admin_set_password(user, password)
assert status == 0
def test_set_password_with_invalid_timeout_policy_value(self):
policy = {"timeout": 0.1}
user = "testsetpassworduser"
password = "newpassword"
try:
self.client.admin_set_password(user, password, policy)
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "timeout is invalid"
def test_set_password_with_proper_timeout_policy_value(self):
policy = {"timeout": 180000}
user = "testsetpassworduser"
password = "newpassword"
status = self.client.admin_set_password(user, password, policy)
assert status == 0
def test_set_password_with_none_username(self):
user = None
password = "newpassword"
try:
self.client.admin_set_password(user, password)
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "Username should be a string"
def test_set_password_with_none_password(self):
user = "testsetpassworduser"
password = None
try:
self.client.admin_set_password(user, password)
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "Password should be a string"
def test_set_password_with_non_existent_user(self):
user = "new_user"
password = "newpassword"
try:
self.client.admin_set_password(user, password)
except e.InvalidUser as exception:
assert exception.code == 60
assert exception.msg == "AEROSPIKE_INVALID_USER"
def test_set_password_with_too_long_password(self):
user = "testsetpassworduser"
password = "newpassword$" * 1000
with pytest.raises(e.ClientError):
self.client.admin_set_password(user, password)
|
6034069deee21816810934712805b50384adfe26
|
2f3d66965dbec4021c3819dce093a8b40724af33
|
/mirage/libs/wireless_utils/device.py
|
35708ec36ff92ccd18ca638fa6c03eec384557c1
|
[
"MIT"
] |
permissive
|
RCayre/mirage
|
92bfa2c2822c06238976dbba6df993b10f2dc25d
|
f73f6c4442e4bfd239eb5caf5e1283c125d37db9
|
refs/heads/master
| 2023-02-04T06:23:05.985200
| 2022-11-24T19:16:53
| 2022-11-24T19:16:53
| 203,883,338
| 199
| 35
|
MIT
| 2023-01-26T03:06:32
| 2019-08-22T22:36:00
|
Python
|
UTF-8
|
Python
| false
| false
| 8,961
|
py
|
device.py
|
import mirage.libs.io as io
from mirage.libs.utils import exitMirage,booleanArg
class Device:
'''
This class is used to communicate with a specific hardware component.
Every class communicating with a given hardware component must inherits from this class, and implements the following methods :
* ``init()`` : this method initializes the communication with the hardware component
* ``isUp()`` : this method allows to check if the initialization was successful and if the device is usable
* ``send(packet)`` : this method allows to send data (as a raw representation, e.g. bytes array or scapy frame)
* ``recv()`` : this method allows to receive data (as a raw representation)
* ``close()`` : this method closes the communication with the hardware component
Every device is unique and identified by an interface name : this is a string stored in the ``interface`` attribute.
Some devices may provide some additional features, such as address configuration, multiple modes, etc. In order to implement this specific behaviours, some additional methods can be implemented in the child classes, and their name may be appended to the class attribute ``sharedMethods`` (list of strings). Every shared method will be callable by user using the corresponding Emitter (``mirage.libs.wireless.Emitter``) and/or the corresponding Receiver (``mirage.libs.wireless.Receiver``) : they will expose these additional methods thanks to the Proxy design pattern.
Finally, a simple mechanism allows to attach capabilities to a specific Device class : the capabilities are listed in an attribute ``capabilities`` (list of strings), filled during the initialization of the Device. From a module, an user can check if the device selected has the right capabilities by calling ``hasCapabilities`` on the corresponding Emitter and / or Receiver.
'''
sharedMethods = ["hasCapabilities"]
'''
This class attribute allows to provide some methods' names in order to make them callable from the corresponding Emitter / Receiver.
'''
instances = {}
@classmethod
def get(cls, interface):
'''
This class method implements the Register device pattern.
According to the interface parameter, only one instance of a given specific device will be instanciated if multiple Emitters and/or Receivers tries to access it.
'''
if interface not in cls.instances:
cls.instances[interface] = cls(interface)
cls.instances[interface].init()
if not cls.instances[interface].isUp():
io.fail("An error occured during device initialization (interface : "+str(interface)+")")
exitMirage()
return None
return cls.instances[interface]
def __init__(self,interface):
self.capabilities = []
self.interface = interface
self.subscribers = []
def subscribe(self,subscriber):
'''
This method allows to register a subscriber, according to the design pattern named Publish/subscribe.
It allows a Device to call a method of the corresponding Emitter / Receiver, subscribers by default.
:param subscriber: instance of an object subscribing to the device
:type subscriber: Object
'''
self.subscribers.append(subscriber)
def publish(self,event,*args, **kwargs):
'''
This method allows to publish an event. It may be used to call from the device a method implemented on the corresponding Emitters / Receivers, subscribers by default.
:param event: method's name to call
:type event: str
:Example:
>>> device.publish("stop")
'''
for subscriber in self.subscribers:
if hasattr(subscriber,event) and callable(getattr(subscriber,event)):
return getattr(subscriber,event)(*args,**kwargs)
def hasCapabilities(self, *capability):
'''
This method allows to check if the device implements some specific capabilities.
:param `*capability`: capabilities to check
:type `*capability`: str (multiple)
:return: boolean indicating if the device implements the capabilities provided
:rtype: bool
:Example:
>>> device.capabilities = ["SNIFFING", "INJECTING", "CHANGING_ADDRESS"]
>>> device.hasCapabilities("SNIFFING", "INJECTING")
True
>>> device.hasCapabilities("MAKING_COFFEE")
False
'''
out = True
for cap in capability:
if cap in self.capabilities:
out = out and True
else:
out = out and False
return out
def isUp(self):
'''
This method allows to check if the device is initialized and available for use.
:return: boolean indicating if the device is up
:rtype: bool
'''
return False
def init(self):
'''
This method initializes the device.
'''
pass
def close(self):
'''
This method closes the device.
'''
pass
def send(self,data):
'''
This method sends some datas.
:param data: raw representation of the data to send
'''
pass
def recv(self):
'''
This method receives some datas.
If no data is available, this method returns `None`.
:param data: raw representation of the received data
'''
pass
class SDRDevice(Device):
'''
This class is used to communicate with a specific Sofware Defined Radio (SDR) device.
Every device based on a Software Defined Radio must inherit from this class and implements the following methods:
* ``buildReceivePipeline(interface)`` : this method allows to build the receive pipeline
* ``buildTransmitPieline(interface)`` : this method allows to build the transmit pipeline
* ``setExperimentalDemodulator(enable)`` (optional) : this optional method allow to modify the receive pipeline to use an experimental demodulator (if any)
Keep in mind that the child class must also implements the methods of a classic ``Device``.
'''
SDR_PARAMETERS = {
"GAIN":(["source"],"setGain",int),
"LNA_GAIN":(["source"],"setLNAGain",int),
"FREQUENCY":(["source","sink"],"setFrequency",int),
"TX_GAIN":(["sink"],"setTXGain",int),
"BANDWIDTH":(["source","sink"],"setBandwidth",int),
"SAMPLE_RATE":(["source","sink"],"setSampleRate",int),
"EXPERIMENTAL_DEMODULATOR":(["device"],"setExperimentalDemodulator",booleanArg)
}
def __init__(self,interface,sdrConfig={},sdrMode="HALF_DUPLEX"):
self.capabilities = []
self.interface = interface
self.subscribers = []
self.sdrConfig = sdrConfig
self.sdrMode = sdrMode
self.receivePipeline = self.buildReceivePipeline(interface)
self.transmitPipeline = self.buildTransmitPipeline(interface)
def close(self):
if self.transmitPipeline is not None:
self.transmitPipeline.stop()
if self.receivePipeline is not None:
self.receivePipeline.stop()
def buildReceivePipeline(self,interface):
'''
This method allows to build the receive pipeline of the current device.
:param interface: string indicating the interface to use
:type interface: str
:return: receive pipeline instance
:rtype: ``SDRPipeline``
'''
return None
def buildTransmitPipeline(self,interface):
'''
This method allows to build the transmit pipeline of the current device.
:param interface: string indicating the interface to use
:type interface: str
:return: transmit pipeline instance
:rtype: ``SDRPipeline``
'''
return None
def updateSDRConfig(self,sdrConfig):
'''
This method updates the SDR-related configuration. The supported parameters are:
* **GAIN**: Receive (RX) Gain (integer value)
* **TX_GAIN**: Tranmit (TX) Gain (integer value)
* **LNA_GAIN**: Low Noise Amplifier (LNA) Gain (integer value)
* **FREQUENCY**: Frequency (integer value)
* **BANDWIDTH**: Bandwidth (integer value)
* **SAMPLE_RATE**: Sample Rate (integer value)
* **EXPERIMENTAL_DEMODULATOR**: Use the experimental demodulator if available (boolean value)
:param sdrConfig: dictionary describing the SDR parameters name and their value as string
:type sdrConfig: dict
'''
if sdrConfig != self.sdrConfig:
for name,value in sdrConfig.items():
self.sdrConfig[name] = value
self.applySDRConfig()
def applySDRConfig(self):
'''
This method applies the SDR-related configuration.
'''
for name,value in self.sdrConfig.items():
targets,method,converter = SDRDevice.SDR_PARAMETERS[name]
for target in targets:
if target == "device":
if hasattr(self,method):
getattr(self,method)(converter(value))
if target == "source" and self.receivePipeline is not None:
src = self.receivePipeline.getSource()
if hasattr(src,method):
getattr(src,method)(converter(value))
if target == "sink" and self.transmitPipeline is not None:
sink = self.transmitPipeline.getSink()
if hasattr(sink,method):
getattr(sink,method)(converter(value))
if target == "demodulator" and self.receivePipeline is not None:
demod = self.receivePipeline.getDemodulator()
if hasattr(demod,method):
getattr(demod,method)(converter(value))
if target == "modulator" and self.transmitPipeline is not None:
mod = self.transmitPipeline.getModulator()
if hasattr(mod,method):
getattr(mod,method)(converter(value))
|
097fe26ddcab3b510c554797a24a03782a74118e
|
d6712c5008277098937ac90cfe42533ed9ee55a0
|
/docs/conf.py
|
9807ade8244974bb323a21f749fc1d764597925b
|
[
"MIT"
] |
permissive
|
AnalogJ/lexicon
|
50492bccc304c6d703d29382d6d18359a8a171eb
|
55914350aa28851b0c4df487bc5176b5a4841b88
|
refs/heads/master
| 2023-08-17T00:52:46.893312
| 2023-08-16T14:11:46
| 2023-08-16T14:11:46
| 50,903,853
| 1,397
| 402
|
MIT
| 2023-09-14T14:41:41
| 2016-02-02T07:53:04
|
Python
|
UTF-8
|
Python
| false
| false
| 233
|
py
|
conf.py
|
from os.path import abspath, dirname, join
import toml
metadata = toml.load(join(dirname(dirname(abspath(__file__))), "pyproject.toml"))["tool"]["poetry"]
master_doc = 'index'
project = "DNS-Lexicon"
release = metadata["version"]
|
150b9ee0836d921eb357d290c4c6d15fc82977d3
|
795984ca2c91f62ac5bc3d07ef759e202b688969
|
/pages/tests/test_regression.py
|
687fa9a0f09ccd102405d4565d86162bb4e5e732
|
[
"BSD-3-Clause"
] |
permissive
|
batiste/django-page-cms
|
914b615c0ab3dc9ebc5aa2dfa718569484918c23
|
cde7eab82a69dd5d4bd3da22b583d2de5eb7afdd
|
refs/heads/master
| 2023-09-03T00:04:45.855102
| 2023-08-01T06:50:10
| 2023-08-01T06:50:10
| 403,432
| 154
| 71
|
BSD-3-Clause
| 2023-08-01T06:50:11
| 2009-12-07T11:59:13
|
Python
|
UTF-8
|
Python
| false
| false
| 11,945
|
py
|
test_regression.py
|
# -*- coding: utf-8 -*-
"""Django page CMS test suite module"""
from django.template import Context
from django.template import TemplateDoesNotExist
from django.core.files.uploadedfile import SimpleUploadedFile
from django.template import loader
from django.urls import reverse
from pages.placeholders import PlaceholderNode, get_filename
from pages.utils import get_placeholders
from pages.phttp import get_request_mock
import django
from pages.models import Page, Content
from pages.tests.testcase import TestCase
add_url = reverse("admin:pages_page_add")
class RegressionTestCase(TestCase):
"""Django page CMS test suite class"""
def test_calculated_status_bug(self):
"""
Test the issue 100
http://code.google.com/p/django-page-cms/issues/detail?id=100
"""
self.set_setting("PAGE_SHOW_START_DATE", True)
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['slug'] = 'page1'
# create a page for the example otherwise you will get a Http404 error
response = c.post(add_url, page_data)
page1 = Content.objects.get_content_slug_by_slug('page1').page
page1.status = Page.DRAFT
page1.save()
self.assertEqual(page1.calculated_status, Page.DRAFT)
def test_slug_bug(self):
"""
Test the issue 97
http://code.google.com/p/django-page-cms/issues/detail?id=97
"""
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['slug'] = 'page1'
# create a page for the example otherwise you will get a Http404 error
response = c.post(add_url, page_data)
response = c.get('/pages/page1/')
self.assertEqual(response.status_code, 200)
try:
response = c.get(self.get_page_url('toto/page1/'))
except TemplateDoesNotExist as e:
if e.args != ('404.html',):
raise
def test_bug_152(self):
"""Test bug 152
http://code.google.com/p/django-page-cms/issues/detail?id=152"""
self.assertEqual(
str(get_placeholders('pages/tests/test1.html')),
"[<Placeholder Node: body>]"
)
def test_bug_block_super(self):
"""{{ block.super }} doesn't work"""
self.assertEqual(
str(get_placeholders('pages/tests/block2.html')),
"[<Placeholder Node: body>, <Placeholder Node: body2>]"
)
def test_bug_block_without_super(self):
"""Without the block the placeholder should not be there"""
self.assertEqual(
str(get_placeholders('pages/tests/block3.html')),
"[<Placeholder Node: test>]"
)
def test_bug_162(self):
"""Test bug 162
http://code.google.com/p/django-page-cms/issues/detail?id=162"""
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['title'] = 'test-162-title'
page_data['slug'] = 'test-162-slug'
response = c.post(add_url, page_data)
self.assertRedirects(response, reverse("admin:pages_page_changelist"))
request = get_request_mock()
temp = loader.get_template('pages/tests/test2.html')
render = temp.render({})
self.assertTrue('test-162-slug' in render)
def test_bug_172(self):
"""Test bug 167
http://code.google.com/p/django-page-cms/issues/detail?id=172"""
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['title'] = 'title-en-us'
page_data['slug'] = 'slug'
response = c.post(add_url, page_data)
page = Content.objects.get_content_slug_by_slug('slug').page
Content(page=page, type='title', language='fr',
body="title-fr-ch").save()
request = get_request_mock()
temp = loader.get_template('pages/tests/test3.html')
render = temp.render({'page':page})
self.assertTrue('title-en-us' in render)
render = temp.render({'page':page, 'lang':'fr'})
self.assertTrue('title-fr-ch' in render)
def test_page_id_in_template(self):
"""Get a page in the templates via the page id."""
page = self.create_new_page()
request = get_request_mock()
temp = loader.get_template('pages/tests/test4.html')
render = temp.render({})
self.assertTrue(page.title() in render)
def test_bug_178(self):
"""http://code.google.com/p/django-page-cms/issues/detail?id=178"""
request = get_request_mock()
temp = loader.get_template('pages/tests/test5.html')
render = temp.render({'page':None})
def test_language_fallback_bug(self):
"""Language fallback doesn't work properly."""
page = self.create_new_page()
c = Content(page=page, type='new_type', body='toto', language='en')
c.save()
self.assertEqual(
Content.objects.get_content(page, 'en', 'new_type'),
'toto'
)
self.assertEqual(
Content.objects.get_content(page, 'fr', 'new_type'),
''
)
self.assertEqual(
Content.objects.get_content(page, 'fr', 'new_type', True),
'toto'
)
def test_bug_156(self):
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['slug'] = 'page1'
page_data['title'] = 'title &'
response = c.post(add_url, page_data)
page1 = Content.objects.get_content_slug_by_slug('page1').page
page1.invalidate()
c = Content.objects.get_content(page1, 'en', 'title')
self.assertEqual(c, page_data['title'])
def test_bug_181(self):
c = self.get_admin_client()
page_data = self.get_new_page_data(draft=True)
page_data['slug'] = 'page1'
# create a draft page and ensure we can view it
response = c.post(add_url, page_data)
response = c.get(self.get_page_url('page1/'))
self.assertEqual(response.status_code, 200)
# logout and we should get a 404
c.logout()
def func():
return c.get(self.get_page_url('page1/'))
self.assert404(func)
# login as a non staff user and we should get a 404
c.login(username= 'nonstaff', password='b')
def func():
return c.get(self.get_page_url('page1/'))
self.assert404(func)
def test_urls_in_templates(self):
"""Test different ways of displaying urls in templates."""
page = self.create_new_page()
request = get_request_mock()
temp = loader.get_template('pages/tests/test7.html')
temp = loader.get_template('pages/tests/test6.html')
render = temp.render({'current_page':page})
self.assertTrue('t1_'+page.get_url_path() in render)
self.assertTrue('t2_'+page.get_url_path() in render)
self.assertTrue('t3_'+page.get_url_path() in render)
self.assertTrue('t4_'+page.slug() in render)
self.assertTrue('t5_'+page.slug() in render)
def test_placeholder_cache_bug(self):
"""There was an bad bug caused when the page cache was filled
the first time."""
from pages.placeholders import PlaceholderNode
page = self.new_page()
placeholder = PlaceholderNode('test', page=page)
placeholder.save(page, 'fr', 'fr', True)
placeholder.save(page, 'en', 'en', True)
self.assertEqual(
Content.objects.get_content(page, 'fr', 'test'),
'fr'
)
def test_placeholder_name_space_bug(self):
"""Cache key cannot us space."""
from pages.placeholders import PlaceholderNode
page = self.new_page()
placeholder = PlaceholderNode('test space', page=page)
placeholder.save(page, 'fr', 'fr', True)
self.assertEqual(
Content.objects.get_content(page, 'fr', placeholder.ctype),
'fr'
)
def test_placeholder_need_iterable_nodelist_attr(self):
"""Should have a iterable nodelist."""
from pages.placeholders import PlaceholderNode
page = self.new_page()
placeholder = PlaceholderNode('test itrable', page=page)
try:
_ = (e for e in placeholder.nodelist)
except TypeError:
assert(False, 'is not iterable')
def test_placeholder_name_space_bug_with_template(self):
"""
Template space test
"""
from pages.placeholders import PlaceholderNode
page = self.new_page()
placeholder = PlaceholderNode('hello world', page=page)
placeholder.save(page, 'fr', 'hello!', True)
context = Context({'current_page': page, 'lang':'fr'})
pl1 = """{% load pages_tags %}{% placeholder "hello world" %}"""
template = self.get_template_from_string(pl1)
self.assertEqual(template.render(context), 'hello!')
def test_pages_dynamic_tree_menu_bug(self):
"""
Test a bug with the dynamic tree template tag doesn't occur anymore.
http://code.google.com/p/django-page-cms/issues/detail?id=209
"""
page = self.new_page()
context = Context({'current_page': page, 'lang':'en'})
pl1 = """{% load pages_tags %}{% pages_dynamic_tree_menu "wrong-slug" %}"""
template = self.get_template_from_string(pl1)
self.assertEqual(template.render(context), '\n')
def test_placeholder_bug(self):
"""Test placeholder with django template inheritance works prepoerly.
http://code.google.com/p/django-page-cms/issues/detail?id=210
"""
p1 = self.new_page(content={'slug':'test', 'one':'one', 'two': 'two'})
template = django.template.loader.get_template('pages/tests/extends.html')
context = {'current_page': p1, 'lang':'en'}
renderer = template.render(context)
self.assertTrue('one' in renderer)
self.assertTrue('two' in renderer)
from pages.utils import get_placeholders
self.assertEqual(
str(get_placeholders('pages/tests/extends.html')),
'[<Placeholder Node: one>, <Placeholder Node: two>]')
def test_param_position(self):
"""There was a typo in the change_form.html"""
c = self.get_admin_client()
page = self.create_new_page(c)
response = c.get(reverse("admin:pages_page_change", args=[page.id]) + '?position=1')
self.assertContains(response, "position=1", status_code=200)
def test_language_and_redirect(self):
"""Language choice in the admin is not kept between redirects"""
c = self.get_admin_client()
page = self.create_new_page(c)
page_url = reverse("admin:pages_page_change", args=[page.id]) + '?language=en'
page_data = self.get_new_page_data()
page_data['_continue'] = 'true'
response = c.post(page_url, page_data)
self.assertRedirects(response, page_url)
def test_get_filename_encoding_bug(self):
"""Problem with encoding file names"""
placeholder = PlaceholderNode("placeholdername")
page = self.new_page({'slug': 'page1'})
fakefile = SimpleUploadedFile(name=u"АБВГДЕЖ.pdf", content=b'blop')
filename = get_filename(page, placeholder.ctype, fakefile)
self.assertTrue(fakefile.name.lower() in filename)
self.assertTrue("page_%d" % page.id in filename)
self.assertTrue(placeholder.name in filename)
def test_str_method(self):
"""Problem with encoding __str__ method"""
page = self.new_page({'title': u'АБВГДЕЖ'})
content = Content(page=page, type='title', language='fr',
body=u"АБВГДЕЖ")
content.save()
try:
str(content)
except:
self.fail("Cyrilic characters in content should not raise any error")
|
3bb4c1be0f4bcf1953f790d186604b1a8eee2200
|
060c559cdfe39e3ff37bcc4f5113f30901a605d7
|
/examples/end2end_tfhub.py
|
25e5251a4a8d8931f393469f78eacceef5c42e24
|
[
"Apache-2.0"
] |
permissive
|
onnx/tensorflow-onnx
|
4402bc3416b1e191b122120aafcce5a5f396b160
|
d5b7f39de66f3b4ff8731fd23b3f379ae731e601
|
refs/heads/main
| 2023-09-01T08:47:00.417130
| 2023-08-29T03:57:16
| 2023-08-29T03:57:16
| 125,098,252
| 2,068
| 454
|
Apache-2.0
| 2023-08-29T03:57:18
| 2018-03-13T18:39:56
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,389
|
py
|
end2end_tfhub.py
|
# SPDX-License-Identifier: Apache-2.0
"""
This example retrieves a model from tensorflowhub.
It is converted into ONNX. Predictions are compared to
the predictions from tensorflow to check there is no
discrepencies. Inferencing time is also compared between
*onnxruntime*, *tensorflow* and *tensorflow.lite*.
"""
from onnxruntime import InferenceSession
import os
import sys
import subprocess
import timeit
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import Input
try:
import tensorflow_hub as tfhub
except ImportError:
# no tensorflow_hub
print("tensorflow_hub not installed.")
sys.exit(0)
########################################
# Downloads the model.
hub_layer = tfhub.KerasLayer(
"https://tfhub.dev/google/efficientnet/b0/classification/1")
model = keras.Sequential()
model.add(Input(shape=(224, 224, 3), dtype=tf.float32))
model.add(hub_layer)
print(model.summary())
########################################
# Saves the model.
if not os.path.exists("efficientnetb0clas"):
os.mkdir("efficientnetb0clas")
tf.keras.models.save_model(model, "efficientnetb0clas")
input_names = [n.name for n in model.inputs]
output_names = [n.name for n in model.outputs]
print('inputs:', input_names)
print('outputs:', output_names)
########################################
# Testing the model.
input = np.random.randn(2, 224, 224, 3).astype(np.float32)
expected = model.predict(input)
print(expected)
########################################
# Run the command line.
proc = subprocess.run(
'python -m tf2onnx.convert --saved-model efficientnetb0clas '
'--output efficientnetb0clas.onnx --opset 12'.split(),
capture_output=True)
print(proc.returncode)
print(proc.stdout.decode('ascii'))
print(proc.stderr.decode('ascii'))
########################################
# Runs onnxruntime.
session = InferenceSession("efficientnetb0clas.onnx")
got = session.run(None, {'input_1': input})
print(got[0])
########################################
# Measures the differences.
print(np.abs(got[0] - expected).max())
########################################
# Measures processing time.
print('tf:', timeit.timeit('model.predict(input)',
number=10, globals=globals()))
print('ort:', timeit.timeit("session.run(None, {'input_1': input})",
number=10, globals=globals()))
|
173b1cc175a40676326626b97d6c1107b683d927
|
14078605a0e9a0a958c6c521a90a6e2df380e924
|
/tests/test_util.py
|
eb8f7e91b648204f5c7919765385b1f61ee83596
|
[
"MIT"
] |
permissive
|
westonplatter/fast_arrow
|
f175a7c76ed6d96b00404cd5887baf2861f88d82
|
1b7bd00c9151f0bb7c1a746219ecda3c3060363d
|
refs/heads/master
| 2021-06-09T17:53:16.463612
| 2020-04-18T18:05:37
| 2020-04-18T18:05:37
| 141,639,575
| 147
| 46
|
MIT
| 2021-03-29T19:26:02
| 2018-07-19T23:15:25
|
Python
|
UTF-8
|
Python
| false
| false
| 573
|
py
|
test_util.py
|
import vcr
from fast_arrow import Client
def gen_vcr():
return vcr.VCR(
cassette_library_dir='tests/fixtures_vcr',
record_mode='none',
match_on=['method', 'scheme', 'host', 'port', 'path', 'query'],
)
def gen_client():
auth_data = gen_auth_data()
client = Client(auth_data)
return client
def gen_auth_data():
auth_data = {
"account_id": 123,
"access_token": "123",
"refresh_token": "xxx_refresh_token",
"device_token": "eeced862-f819-4c51-ad8d-969ae2bb5ddf",
}
return auth_data
|
9374f1d27bd4f3a4174da3d958944baa04bec439
|
39164ede111f154b31cbb61663ea837f16f8aa4f
|
/odps/lib/tzlocal/__init__.py
|
883c2dda8e0b827e5f67aa0b3f9c20d9f74ed0eb
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
aliyun/aliyun-odps-python-sdk
|
217631252e6d52e75354a2a19faab5b9ff40e272
|
c5b897f03759b1a9851505eea3858a96d628f105
|
refs/heads/master
| 2023-08-16T22:42:12.441717
| 2023-07-19T06:28:25
| 2023-07-19T06:28:25
| 45,234,875
| 437
| 116
|
Apache-2.0
| 2023-08-03T06:45:34
| 2015-10-30T07:07:59
|
Python
|
UTF-8
|
Python
| false
| false
| 154
|
py
|
__init__.py
|
import sys
if sys.platform == 'win32':
from .win32 import get_localzone, reload_localzone
else:
from .unix import get_localzone, reload_localzone
|
69a58f0a006b12f5e05ce6ac51fd29c6dc94b6df
|
f576f0ea3725d54bd2551883901b25b863fe6688
|
/sdk/storage/azure-storage-blob/samples/blob_samples_enumerate_blobs_async.py
|
e98180583c57e7eeb59578e72c224f0eb29166db
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
Azure/azure-sdk-for-python
|
02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
|
c2ca191e736bb06bfbbbc9493e8325763ba990bb
|
refs/heads/main
| 2023-09-06T09:30:13.135012
| 2023-09-06T01:08:06
| 2023-09-06T01:08:06
| 4,127,088
| 4,046
| 2,755
|
MIT
| 2023-09-14T21:48:49
| 2012-04-24T16:46:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,287
|
py
|
blob_samples_enumerate_blobs_async.py
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: blob_samples_enumerate_blobs_async.py
DESCRIPTION:
This sample demos how to enumerate a container and print all blobs.
USAGE: python blob_samples_enumerate_blobs_async.py
Set the environment variables with your own values before running the sample:
1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account
"""
from __future__ import print_function
import os
import sys
import asyncio
from azure.storage.blob.aio import ContainerClient
async def main():
try:
CONNECTION_STRING = os.environ['AZURE_STORAGE_CONNECTION_STRING']
except KeyError:
print("AZURE_STORAGE_CONNECTION_STRING must be set.")
sys.exit(1)
container = ContainerClient.from_connection_string(CONNECTION_STRING, container_name="mycontainer")
async with container:
async for blob in container.list_blobs():
print(blob.name + '\n')
if __name__ == "__main__":
asyncio.run(main())
|
7d2f6ff92ff23658f3174b346d9a8ed59ba58cc1
|
360328d098a74581d0822fba489dd15e0d4e7ab3
|
/tests/apps/core/test_error_view.py
|
7f22a12420342b3b4dc3b4711a4cb9ebf994fd4b
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
openfun/richie
|
0cef545486267bfb40e75e5fb2ce2a74f85a53ff
|
f2d46fc46b271eb3b4d565039a29c15ba15f027c
|
refs/heads/master
| 2023-08-31T23:51:37.714179
| 2023-08-29T15:25:04
| 2023-08-29T15:48:39
| 111,388,461
| 238
| 96
|
MIT
| 2023-09-13T12:48:53
| 2017-11-20T09:23:40
|
Python
|
UTF-8
|
Python
| false
| false
| 3,246
|
py
|
test_error_view.py
|
"""
Tests for error views
"""
from django.contrib.auth.models import AnonymousUser
from django.test import TestCase
from django.test.client import RequestFactory
from cms.api import create_page
from richie.apps.core.views import error
class ErrorViewHandlersTestCase(TestCase):
"""Test suite for the error view handlers"""
def test_400_error_view_handler(self):
"""
When a request is malformed,
the 400 error view should be displayed
"""
page = create_page("Test", "richie/single_column.html", "en", published=True)
request = RequestFactory().get("/")
request.current_page = page
request.user = AnonymousUser()
with self.assertTemplateUsed("richie/error.html"):
response = error.error_400_view_handler(request, Exception)
self.assertContains(response, "400 - Bad request", status_code=400)
def test_403_error_view_handler(self):
"""
When access to page is not allowed,
the 403 error view should be displayed
"""
page = create_page("Test", "richie/single_column.html", "en", published=True)
request = RequestFactory().get("/")
request.current_page = page
request.user = AnonymousUser()
with self.assertTemplateUsed("richie/error.html"):
response = error.error_403_view_handler(request, Exception)
self.assertContains(response, "403 - Forbidden", status_code=403)
def test_404_error_view_handler(self):
"""
When a request does not found resource,
the 404 error view should be displayed
"""
page = create_page("Test", "richie/single_column.html", "en", published=True)
request = RequestFactory().get("/")
request.current_page = page
request.user = AnonymousUser()
with self.assertTemplateUsed("richie/error.html"):
response = error.error_404_view_handler(request, Exception)
self.assertContains(response, "404 - Page not found", status_code=404)
def test_500_error_view_handler(self):
"""
When an internal server occured,
the 500 error view should be displayed
"""
page = create_page("Test", "richie/single_column.html", "en", published=True)
request = RequestFactory().get("/")
request.current_page = page
request.user = AnonymousUser()
with self.assertTemplateUsed("richie/error.html"):
response = error.error_500_view_handler(request, Exception)
self.assertContains(response, "500 - Server bad request", status_code=500)
def test_error_view_handler_with_unsupported_status_code(self):
"""
When an unsupported status code is used,
the 500 error view should be displayed
"""
page = create_page("Test", "richie/single_column.html", "en", published=True)
request = RequestFactory().get("/")
request.current_page = page
request.user = AnonymousUser()
with self.assertTemplateUsed("richie/error.html"):
response = error.error_view_handler(request, Exception, 405)
self.assertContains(response, "500 - Server bad request", status_code=500)
|
19f3fa2af77779f9f1e1d5b90210f5351befe3b4
|
6408f4b02543a55209b62a470c9d98ae782deac3
|
/pandas_market_calendars/exchange_calendar_bmf.py
|
10b2487fa7ec496e1668be51e5520b950d850d8e
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
rsheftel/pandas_market_calendars
|
c0d6de741abf66365704004c730a8df3ddb645d4
|
4a824d98cc3f62b8a6bff9c2ba044054790e805d
|
refs/heads/master
| 2023-09-05T03:58:07.799842
| 2023-08-22T22:41:44
| 2023-08-22T22:41:44
| 75,791,177
| 649
| 154
|
MIT
| 2023-09-06T21:29:43
| 2016-12-07T02:28:15
|
Python
|
UTF-8
|
Python
| false
| false
| 5,357
|
py
|
exchange_calendar_bmf.py
|
#
# Copyright 2016 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import time
from pandas import Timestamp
from pandas.tseries.holiday import AbstractHolidayCalendar, Day, Easter, GoodFriday, Holiday
from pytz import timezone
from .market_calendar import (FRIDAY, MarketCalendar)
# Universal Confraternization (new years day)
ConfUniversal = Holiday(
'Dia da Confraternizacao Universal',
month=1,
day=1,
)
# Sao Paulo city birthday
AniversarioSaoPaulo = Holiday(
'Aniversario de Sao Paulo',
month=1,
day=25,
end_date='2021-12-31'
)
# Carnival Monday
CarnavalSegunda = Holiday(
'Carnaval Segunda',
month=1,
day=1,
offset=[Easter(), Day(-48)]
)
# Carnival Tuesday
CarnavalTerca = Holiday(
'Carnaval Terca',
month=1,
day=1,
offset=[Easter(), Day(-47)]
)
# Ash Wednesday (short day)
QuartaCinzas = Holiday(
'Quarta Cinzas',
month=1,
day=1,
offset=[Easter(), Day(-46)]
)
# Good Friday
SextaPaixao = GoodFriday
# Feast of the Most Holy Body of Christ
CorpusChristi = Holiday(
'Corpus Christi',
month=1,
day=1,
offset=[Easter(), Day(60)]
)
# Tiradentes Memorial
Tiradentes = Holiday(
'Tiradentes',
month=4,
day=21,
)
# Labor Day
DiaTrabalho = Holiday(
'Dia Trabalho',
month=5,
day=1,
)
# Constitutionalist Revolution
Constitucionalista = Holiday(
'Constitucionalista',
month=7,
day=9,
start_date='1997-01-01',
end_date='2019-12-31'
)
# Independence Day
Independencia = Holiday(
'Independencia',
month=9,
day=7,
)
# Our Lady of Aparecida
Aparecida = Holiday(
'Nossa Senhora de Aparecida',
month=10,
day=12,
)
# All Souls' Day
Finados = Holiday(
'Dia dos Finados',
month=11,
day=2,
)
# Proclamation of the Republic
ProclamacaoRepublica = Holiday(
'Proclamacao da Republica',
month=11,
day=15,
)
# Day of Black Awareness
ConscienciaNegra = Holiday(
'Dia da Consciencia Negra',
month=11,
day=20,
start_date='2004-01-01',
end_date='2019-12-31'
)
# Christmas Eve
VesperaNatal = Holiday(
'Vespera Natal',
month=12,
day=24,
)
# Christmas
Natal = Holiday(
'Natal',
month=12,
day=25,
)
# New Year's Eve
AnoNovo = Holiday(
'Ano Novo',
month=12,
day=31,
)
# New Year's Eve falls on Saturday
AnoNovoSabado = Holiday(
'Ano Novo Sabado',
month=12,
day=30,
days_of_week=(FRIDAY,),
)
##########################
# Non-recurring holidays
##########################
Constitucionalista2021 = Timestamp('2021-07-09', tz='UTC')
ConscienciaNegra2021 = Timestamp('2021-11-20', tz='UTC')
class BMFExchangeCalendar(MarketCalendar):
"""
Exchange calendar for BM&F BOVESPA
Open Time: 10:00 AM, Brazil/Sao Paulo
Close Time: 4:00 PM, Brazil/Sao Paulo
Regularly-Observed Holidays:
- Universal Confraternization (New year's day, Jan 1)
- Sao Paulo City Anniversary (Jan 25 until 2021)
- Carnaval Monday (48 days before Easter)
- Carnaval Tuesday (47 days before Easter)
- Passion of the Christ (Good Friday, 2 days before Easter)
- Corpus Christi (60 days after Easter)
- Tiradentes (April 21)
- Labor day (May 1)
- Constitutionalist Revolution (July 9 after 1997 until 2021, skipping 2020)
- Independence Day (September 7)
- Our Lady of Aparecida Feast (October 12)
- All Souls' Day (November 2)
- Proclamation of the Republic (November 15)
- Day of Black Awareness (November 20 after 2004 until 2021, skipping 2020)
- Christmas (December 24 and 25)
- Day before New Year's Eve (December 30 if NYE falls on a Saturday)
- New Year's Eve (December 31)
"""
aliases = ['BMF', 'B3']
regular_market_times = {
"market_open": ((None, time(10,1)),),
"market_close": ((None, time(16)),)
}
@property
def name(self):
return "BMF"
@property
def tz(self):
return timezone("America/Sao_Paulo")
@property
def regular_holidays(self):
return AbstractHolidayCalendar(rules=[
ConfUniversal,
AniversarioSaoPaulo,
CarnavalSegunda,
CarnavalTerca,
SextaPaixao,
CorpusChristi,
Tiradentes,
DiaTrabalho,
Constitucionalista,
Independencia,
Aparecida,
Finados,
ProclamacaoRepublica,
ConscienciaNegra,
VesperaNatal,
Natal,
AnoNovo,
AnoNovoSabado,
])
@property
def adhoc_holidays(self):
return [
Constitucionalista2021,
ConscienciaNegra2021
]
@property
def special_opens(self):
return [
(time(13, 1), AbstractHolidayCalendar(rules=[QuartaCinzas]))
]
|
28e733ed2bb1801fcf6d7712b2ca19aaaf06732e
|
23652304566b1869ca65b95b116ee43d16e134f3
|
/tests/h/feeds/atom_test.py
|
86720eb0ea8efecc3afe998ffcd25f8d604c6ea8
|
[
"BSD-2-Clause",
"BSD-3-Clause",
"BSD-2-Clause-Views"
] |
permissive
|
hypothesis/h
|
29399a26990856c336b05022e827541dd8aeedab
|
232446d776fdb906d2fb253cf0a409c6813a08d6
|
refs/heads/main
| 2023-08-30T16:21:33.754658
| 2023-08-30T09:26:50
| 2023-08-30T09:40:48
| 3,910,945
| 2,558
| 452
|
BSD-2-Clause
| 2023-09-14T11:25:06
| 2012-04-02T19:56:59
|
Python
|
UTF-8
|
Python
| false
| false
| 6,203
|
py
|
atom_test.py
|
from datetime import datetime, timedelta
from unittest import mock
import pytest
from h.feeds import atom
def test_feed_id():
feed = atom.feed_from_annotations([], "atom_url", mock.Mock())
assert feed["id"] == "atom_url"
def test_feed_title():
feed = atom.feed_from_annotations([], mock.Mock(), mock.Mock(), title="foo")
assert feed["title"] == "foo"
def test_feed_subtitle():
feed = atom.feed_from_annotations([], mock.Mock(), mock.Mock(), subtitle="bar")
assert feed["subtitle"] == "bar"
@mock.patch("h.feeds.atom._feed_entry_from_annotation")
def test_feed_contains_entries(_feed_entry_from_annotation, factories):
"""The feed should contain an entry for each annotation."""
annotations = [
factories.Annotation(),
factories.Annotation(),
factories.Annotation(),
]
annotations_url_function = mock.Mock()
annotations_api_url_function = mock.Mock()
entries = [
"feed entry for annotation 1",
"feed entry for annotation 2",
"feed entry for annotation 3",
]
def pop(*args, **kwargs): # pylint:disable=unused-argument
return entries.pop(0)
_feed_entry_from_annotation.side_effect = pop
feed = atom.feed_from_annotations(
annotations, annotations_url_function, annotations_api_url_function
)
assert feed["entries"] == [
"feed entry for annotation 1",
"feed entry for annotation 2",
"feed entry for annotation 3",
]
def test_atom_url_link():
"""The feed should contain a link to its Atom URL."""
feed = atom.feed_from_annotations([], "atom_url", mock.Mock())
assert feed["links"][0] == {
"rel": "self",
"type": "application/atom+xml",
"href": "atom_url",
}
def test_html_url_link():
"""The feed should contain a link to its corresponding HTML page."""
feed = atom.feed_from_annotations([], mock.Mock(), mock.Mock(), html_url="html_url")
assert feed["links"][1] == {
"rel": "alternate",
"type": "text/html",
"href": "html_url",
}
@mock.patch("h.feeds.util")
def test_entry_id(util, factories):
"""The ids of feed entries should come from tag_uri_for_annotation()."""
annotation = factories.Annotation()
feed = atom.feed_from_annotations(
[annotation], "atom_url", lambda _: "annotation url"
)
util.tag_uri_for_annotation.assert_called_once()
assert feed["entries"][0]["id"] == util.tag_uri_for_annotation.return_value
@pytest.mark.parametrize(
"userid,name",
(
("acct:username@hypothes.is", "username"),
("malformed", "malformed"),
),
)
def test_entry_author(factories, userid, name):
"""The authors of entries should come from the annotation usernames."""
annotation = factories.Annotation(userid=userid)
feed = atom.feed_from_annotations(
[annotation], "atom_url", lambda _: "annotation url"
)
assert feed["entries"][0]["author"]["name"] == name
def test_entry_title(factories):
"""The titles of feed entries should come from annotation.title."""
with mock.patch(
"h.feeds.atom.presenters.AnnotationHTMLPresenter.title",
new_callable=mock.PropertyMock,
) as mock_title:
annotation = factories.Annotation()
feed = atom.feed_from_annotations(
[annotation], "atom_url", lambda _: "annotation url"
)
mock_title.assert_called_once_with()
assert feed["entries"][0]["title"] == mock_title.return_value
def test_entry_dates(factories):
annotation = factories.Annotation(
created=datetime.utcnow(), updated=datetime.utcnow() + timedelta(hours=1)
)
feed = atom.feed_from_annotations(
[annotation], "atom_url", lambda annotation: "annotation url"
)
assert feed["entries"][0]["published"] == f"utc_iso8601_return:{annotation.created}"
assert feed["entries"][0]["updated"] == f"utc_iso8601_return:{annotation.updated}"
def test_entry_content(factories):
"""The contents of entries come from annotation.description."""
with mock.patch(
"h.feeds.atom.presenters.AnnotationHTMLPresenter.description",
new_callable=mock.PropertyMock,
) as mock_description:
annotation = factories.Annotation()
feed = atom.feed_from_annotations(
[annotation], "atom_url", lambda annotation: "annotation url"
)
mock_description.assert_called_once_with()
assert feed["entries"][0]["content"] == mock_description.return_value
@mock.patch("h.feeds.util")
def test_annotation_url_links(_, factories):
"""Entries should contain links to the HTML pages for the annotations."""
annotation = factories.Annotation()
annotation_url = mock.Mock()
feed = atom.feed_from_annotations([annotation], "atom_url", annotation_url)
annotation_url.assert_called_once_with(annotation)
assert feed["entries"][0]["links"][0] == {
"rel": "alternate",
"type": "text/html",
"href": annotation_url.return_value,
}
@mock.patch("h.feeds.util")
def test_annotation_api_url_links(_, factories):
"""Entries should contain links to the JSON pages for the annotations."""
annotation = factories.Annotation()
annotation_api_url = mock.Mock()
feed = atom.feed_from_annotations(
[annotation], "atom_url", mock.Mock(), annotation_api_url=annotation_api_url
)
annotation_api_url.assert_called_once_with(annotation)
assert feed["entries"][0]["links"][1] == {
"rel": "alternate",
"type": "application/json",
"href": annotation_api_url.return_value,
}
def test_feed_updated(factories):
annotations = factories.Annotation.build_batch(3)
annotations[0].updated = datetime.utcnow()
feed = atom.feed_from_annotations(
annotations, "atom_url", lambda annotation: "annotation url"
)
assert feed["updated"] == f"utc_iso8601_return:{annotations[0].updated}"
@pytest.fixture(autouse=True)
def utc_iso8601(patch):
utc_iso8601 = patch("h.feeds.atom.utc_iso8601")
utc_iso8601.side_effect = lambda date: f"utc_iso8601_return:{date}"
return utc_iso8601
|
5b379d6b85f475a3331ecdb9b10a2441cd101923
|
a72cedaa0599bc8d42ed711b31bb01863549b3b1
|
/src/klein/_request.py
|
1a511902a44aaced46ded5e3a6cca7c1bb6d0954
|
[
"MIT"
] |
permissive
|
twisted/klein
|
74d7282707b1c210c78b10e92ea4eae398c81b24
|
1e099333d08bda360e7d9c9d7aeeb77804b56a27
|
refs/heads/trunk
| 2023-08-31T10:47:01.550050
| 2023-08-15T00:41:44
| 2023-08-15T00:41:44
| 3,409,513
| 674
| 141
|
NOASSERTION
| 2023-09-11T23:05:09
| 2012-02-10T17:55:04
|
Python
|
UTF-8
|
Python
| false
| false
| 1,130
|
py
|
_request.py
|
# -*- test-case-name: klein.test.test_request -*-
# Copyright (c) 2011-2021. See LICENSE for details.
"""
HTTP request API.
"""
from typing import Union
from attr import Factory, attrib, attrs
from attr.validators import instance_of, provides
from hyperlink import DecodedURL
from tubes.itube import IFount
from zope.interface import implementer
from ._imessage import IHTTPHeaders, IHTTPRequest
from ._message import MessageState, bodyAsBytes, bodyAsFount, validateBody
__all__ = ()
@implementer(IHTTPRequest)
@attrs(frozen=True)
class FrozenHTTPRequest:
"""
Immutable HTTP request.
"""
method: str = attrib(validator=instance_of(str))
uri: DecodedURL = attrib(validator=instance_of(DecodedURL))
headers: IHTTPHeaders = attrib(validator=provides(IHTTPHeaders))
_body: Union[bytes, IFount] = attrib(validator=validateBody)
_state: MessageState = attrib(default=Factory(MessageState), init=False)
def bodyAsFount(self) -> IFount:
return bodyAsFount(self._body, self._state)
async def bodyAsBytes(self) -> bytes:
return await bodyAsBytes(self._body, self._state)
|
79c2c264dd8499ef4d261bc958676cbc69db9c6c
|
9b3ed66b0a1647a87c6ff38b6cef2354fb790ece
|
/assets/17_convexopt/testcvx.py
|
05f4603349c61bb49c1f119727a55269edd32695
|
[] |
no_license
|
tiepvupsu/tiepvupsu.github.io
|
a117f82b0702b5f5a9422679aa20b8ba59d54d24
|
19c653816f9748537bf403d3adfa9b391c5321e0
|
refs/heads/master
| 2023-08-02T08:29:54.409971
| 2023-01-12T04:21:35
| 2023-01-12T05:11:18
| 83,335,474
| 675
| 773
| null | 2023-01-12T05:11:19
| 2017-02-27T17:05:56
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 205
|
py
|
testcvx.py
|
from cvxopt import matrix, solvers
A = matrix([[-1.0, -1.0, 0., 1.0], [1., -1., -1., -1.]])
b = matrix([1.0, -2.0, 0., 4.])
c = matrix([[2.],[ 1.]])
sol = solvers.lp(c, A, b)
print(sol['x'])
import cvxopt
|
f7c2c0bc951fe80ec7605975a81ecaf706520b73
|
ecca79bf1491492befcccf5af27c1653d4c34685
|
/grappa/reporters/information.py
|
7391852b3be28816cbc5abcca4d1ec8597f1300b
|
[
"MIT"
] |
permissive
|
grappa-py/grappa
|
d1545e9c9cbc161b3f7f068962b6c78a15707320
|
f1861e1572e68f031977e86a5d9eba1957bd164e
|
refs/heads/master
| 2021-06-01T10:31:17.896919
| 2020-11-23T18:18:27
| 2020-11-23T18:18:27
| 81,199,115
| 143
| 17
|
MIT
| 2020-11-23T23:01:27
| 2017-02-07T11:04:41
|
Python
|
UTF-8
|
Python
| false
| false
| 207
|
py
|
information.py
|
# -*- coding: utf-8 -*-
from .base import BaseReporter
class InformationReporter(BaseReporter):
title = 'Information'
def run(self, error):
return self.from_operator('information', None)
|
9097b9a4fe1cf86fc575640130ec274475e2212a
|
5ef6c8d47864f471e26b9902d61f8c687e941f05
|
/src/genie/libs/parser/ios/tests/ShowIpEigrpNeighborsDetail/cli/equal/device_output_6_expected.py
|
98ecc64dea204e990d4a9c21aa19462e7288bfbb
|
[
"Apache-2.0"
] |
permissive
|
CiscoTestAutomation/genieparser
|
169c196558f1c1a0f0d10650876096f993224917
|
b531eff760b2e44cd69d7a2716db6f866907c239
|
refs/heads/master
| 2023-09-03T08:56:18.831340
| 2023-08-29T22:32:02
| 2023-08-29T22:32:02
| 131,621,824
| 247
| 409
|
Apache-2.0
| 2023-08-29T22:32:04
| 2018-04-30T16:51:50
|
Python
|
UTF-8
|
Python
| false
| false
| 3,275
|
py
|
device_output_6_expected.py
|
expected_output = {
"eigrp_instance": {
"100": {
"vrf": {
"default": {
"address_family": {
"ipv4": {
"name": "test",
"named_mode": True,
"eigrp_interface": {
"GigabitEthernet2.90": {
"eigrp_nbr": {
"10.12.90.2": {
"peer_handle": 1,
"hold": 12,
"uptime": "2d10h",
"srtt": 1283.0,
"rto": 5000,
"q_cnt": 0,
"last_seq_number": 5,
"topology_advert_to_peer": "base",
"nbr_sw_ver": {
"os_majorver": 3,
"os_minorver": 3,
"tlv_majorrev": 2,
"tlv_minorrev": 0,
},
"retransmit_count": 0,
"retry_count": 0,
"prefixes": 3,
"topology_ids_from_peer": 0,
}
}
},
"GigabitEthernet3.90": {
"eigrp_nbr": {
"10.13.90.3": {
"peer_handle": 0,
"hold": 10,
"uptime": "2d10h",
"srtt": 6.0,
"rto": 100,
"q_cnt": 0,
"last_seq_number": 9,
"topology_advert_to_peer": "base",
"nbr_sw_ver": {
"os_majorver": 8,
"os_minorver": 0,
"tlv_majorrev": 1,
"tlv_minorrev": 2,
},
"retransmit_count": 1,
"retry_count": 0,
"prefixes": 3,
"topology_ids_from_peer": 0,
}
}
},
},
}
}
}
}
}
}
}
|
e0725e51530a4c9d2e1926679db904d23880777a
|
3a50c0712e0a31b88d0a5e80a0c01dbefc6a6e75
|
/thrift/compiler/test/fixtures/transitive-deps/gen-python/s/thrift_types.py
|
b48271ca75adbc39c8ce56715f6bd859aaa05b1e
|
[
"Apache-2.0"
] |
permissive
|
facebook/fbthrift
|
3b7b94a533666c965ce69cfd6054041218b1ea6f
|
53cf6f138a7648efe5aef9a263aabed3d282df91
|
refs/heads/main
| 2023-08-24T12:51:32.367985
| 2023-08-24T08:28:35
| 2023-08-24T08:28:35
| 11,131,631
| 2,347
| 666
|
Apache-2.0
| 2023-09-01T01:44:39
| 2013-07-02T18:15:51
|
C++
|
UTF-8
|
Python
| false
| false
| 1,103
|
py
|
thrift_types.py
|
#
# Autogenerated by Thrift
#
# DO NOT EDIT
# @generated
#
from __future__ import annotations
import folly.iobuf as _fbthrift_iobuf
import thrift.python.types as _fbthrift_python_types
import thrift.python.exceptions as _fbthrift_python_exceptions
import b.thrift_types
import c.thrift_types
# This unfortunately has to be down here to prevent circular imports
import s.thrift_metadata
_fbthrift_all_enums = [
]
_fbthrift_all_structs = [
]
class _fbthrift_TestService_test_args(metaclass=_fbthrift_python_types.StructMeta):
_fbthrift_SPEC = (
)
class _fbthrift_TestService_test_result(metaclass=_fbthrift_python_types.StructMeta):
_fbthrift_SPEC = (
(
1, # id
_fbthrift_python_types.FieldQualifier.Optional, # qualifier
"ex", # name
lambda: _fbthrift_python_types.StructTypeInfo(c.thrift_types.E), # typeinfo
None, # default value
None, # adapter info
),
)
_fbthrift_python_types.fill_specs(
_fbthrift_TestService_test_args,
_fbthrift_TestService_test_result,
)
|
1d501b446b54802820db4645e97a208bc18bbd4d
|
d110546d747d7e3865ce5742d5fca09f404623c0
|
/doc/_ext/saltautodoc.py
|
d475d34b83db40870b8f746c5e41a368965fc74e
|
[
"Apache-2.0",
"MIT",
"BSD-2-Clause"
] |
permissive
|
saltstack/salt
|
354fc86a7be1f69514b3dd3b2edb9e6f66844c1d
|
1ef90cbdc7203f97775edb7666db86a41eb9fc15
|
refs/heads/master
| 2023-07-19T20:56:20.210556
| 2023-06-29T23:12:28
| 2023-07-19T11:47:47
| 1,390,248
| 11,026
| 6,296
|
Apache-2.0
| 2023-09-14T20:45:37
| 2011-02-20T20:16:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,459
|
py
|
saltautodoc.py
|
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
saltautodoc.py
~~~~~~~~~~~~~~
Properly handle ``__func_alias__``
"""
from sphinx.ext.autodoc import FunctionDocumenter
class SaltFunctionDocumenter(FunctionDocumenter):
"""
Simple override of sphinx.ext.autodoc.FunctionDocumenter to properly render
salt's aliased function names.
"""
def format_name(self):
"""
Format the function name
"""
if not hasattr(self.module, "__func_alias__"):
# Resume normal sphinx.ext.autodoc operation
return super(FunctionDocumenter, self).format_name()
if not self.objpath:
# Resume normal sphinx.ext.autodoc operation
return super(FunctionDocumenter, self).format_name()
if len(self.objpath) > 1:
# Resume normal sphinx.ext.autodoc operation
return super(FunctionDocumenter, self).format_name()
# Use the salt func aliased name instead of the real name
return self.module.__func_alias__.get(self.objpath[0], self.objpath[0])
def setup(app):
def add_documenter(app, env, docnames):
app.add_autodocumenter(SaltFunctionDocumenter)
# add_autodocumenter() must be called after the initial setup and the
# 'builder-inited' event, as sphinx.ext.autosummary will restore the
# original documenter on 'builder-inited'
app.connect("env-before-read-docs", add_documenter)
|
289c1388df8e1d4e1d2d325d68f0156eb4a7548e
|
e993a7972529f60210d9dd6d7c4097c62c37bcdf
|
/data_loaders/humanml/utils/get_opt.py
|
c331b4dde8cc71c2ce33916945d75a43fc32308f
|
[
"MIT"
] |
permissive
|
GuyTevet/motion-diffusion-model
|
64756013105a80ea2a3180a73ac86519b361e53b
|
8139dda55d90a58aa5a257ebf159b2ecfb78c632
|
refs/heads/main
| 2023-09-01T05:00:14.156745
| 2023-06-06T23:42:33
| 2023-06-06T23:42:33
| 543,082,997
| 2,302
| 265
|
MIT
| 2023-08-29T09:27:54
| 2022-09-29T11:24:35
|
Python
|
UTF-8
|
Python
| false
| false
| 2,548
|
py
|
get_opt.py
|
import os
from argparse import Namespace
import re
from os.path import join as pjoin
from data_loaders.humanml.utils.word_vectorizer import POS_enumerator
def is_float(numStr):
flag = False
numStr = str(numStr).strip().lstrip('-').lstrip('+') # 去除正数(+)、负数(-)符号
try:
reg = re.compile(r'^[-+]?[0-9]+\.[0-9]+$')
res = reg.match(str(numStr))
if res:
flag = True
except Exception as ex:
print("is_float() - error: " + str(ex))
return flag
def is_number(numStr):
flag = False
numStr = str(numStr).strip().lstrip('-').lstrip('+') # 去除正数(+)、负数(-)符号
if str(numStr).isdigit():
flag = True
return flag
def get_opt(opt_path, device):
opt = Namespace()
opt_dict = vars(opt)
skip = ('-------------- End ----------------',
'------------ Options -------------',
'\n')
print('Reading', opt_path)
with open(opt_path) as f:
for line in f:
if line.strip() not in skip:
# print(line.strip())
key, value = line.strip().split(': ')
if value in ('True', 'False'):
opt_dict[key] = bool(value)
elif is_float(value):
opt_dict[key] = float(value)
elif is_number(value):
opt_dict[key] = int(value)
else:
opt_dict[key] = str(value)
# print(opt)
opt_dict['which_epoch'] = 'latest'
opt.save_root = pjoin(opt.checkpoints_dir, opt.dataset_name, opt.name)
opt.model_dir = pjoin(opt.save_root, 'model')
opt.meta_dir = pjoin(opt.save_root, 'meta')
if opt.dataset_name == 't2m':
opt.data_root = './dataset/HumanML3D'
opt.motion_dir = pjoin(opt.data_root, 'new_joint_vecs')
opt.text_dir = pjoin(opt.data_root, 'texts')
opt.joints_num = 22
opt.dim_pose = 263
opt.max_motion_length = 196
elif opt.dataset_name == 'kit':
opt.data_root = './dataset/KIT-ML'
opt.motion_dir = pjoin(opt.data_root, 'new_joint_vecs')
opt.text_dir = pjoin(opt.data_root, 'texts')
opt.joints_num = 21
opt.dim_pose = 251
opt.max_motion_length = 196
else:
raise KeyError('Dataset not recognized')
opt.dim_word = 300
opt.num_classes = 200 // opt.unit_length
opt.dim_pos_ohot = len(POS_enumerator)
opt.is_train = False
opt.is_continue = False
opt.device = device
return opt
|
eaaf6ab90f9d126f4e0419a8bad30fcc2ed7ce08
|
0faf534ebb6db6f32279e5bee25b968bd425ce3a
|
/veriloggen/dataflow/dataflow.py
|
7874fc356606d0ff95240b994622ea290fc33f7b
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
PyHDI/veriloggen
|
e8647cb2d40737d84e31d6b89c5799bab9cbd583
|
f2b1b9567150af097eed1b5e79ba2b412854ef43
|
refs/heads/develop
| 2023-08-09T10:02:35.626403
| 2023-08-09T00:50:14
| 2023-08-09T00:50:14
| 37,813,184
| 282
| 60
|
Apache-2.0
| 2023-07-20T03:03:29
| 2015-06-21T15:05:30
|
Python
|
UTF-8
|
Python
| false
| false
| 7,531
|
py
|
dataflow.py
|
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import copy
import collections
import functools
import veriloggen.core.vtypes as vtypes
from veriloggen.core.module import Module
from veriloggen.seq.seq import Seq
from . import visitor
from . import dtypes
from . import mul
from . import scheduler
from . import allocator
from . import graph
# ID counter for 'Dataflow'
_dataflow_counter = 0
def reset():
global _dataflow_counter
_dataflow_counter = 0
dtypes._object_counter = 0
mul.reset()
def DataflowManager(module, clock, reset,
aswire=True, no_hook=False):
return Dataflow(module=module, clock=clock, reset=reset,
aswire=aswire, no_hook=no_hook)
class Dataflow(object):
def __init__(self, *nodes, **opts):
# ID for manager reuse and merge
global _dataflow_counter
self.object_id = _dataflow_counter
_dataflow_counter += 1
self.nodes = set(nodes)
self.max_stage = 0
self.last_input = None
self.last_output = None
self.module = opts['module'] if 'module' in opts else None
self.clock = opts['clock'] if 'clock' in opts else None
self.reset = opts['reset'] if 'reset' in opts else None
self.aswire = opts['aswire'] if 'aswire' in opts else True
self.seq = None
if (self.module is not None and
self.clock is not None and self.reset is not None):
no_hook = opts['no_hook'] if 'no_hook' in opts else False
if not no_hook:
self.module.add_hook(self.implement)
seq_name = (opts['seq_name'] if 'seq_name' in opts else
'_dataflow_seq_%d' % self.object_id)
self.seq = Seq(self.module, seq_name, self.clock, self.reset)
# -------------------------------------------------------------------------
def add(self, *nodes):
self.nodes.update(set(nodes))
# -------------------------------------------------------------------------
def to_module(self, name, clock='CLK', reset='RST', aswire=False, seq_name=None):
""" generate a Module definion """
m = Module(name)
clk = m.Input(clock)
rst = m.Input(reset)
m = self.implement(m, clk, rst, aswire=aswire, seq_name=seq_name)
return m
# -------------------------------------------------------------------------
def implement(self, m=None, clock=None, reset=None, aswire=None, seq_name=None):
""" implemente actual registers and operations in Verilog """
if m is None:
m = self.module
if clock is None:
clock = self.clock
if reset is None:
reset = self.reset
if self.seq is None or self.seq.done:
if seq_name is None:
seq_name = '_dataflow_seq_%d' % self.object_id
seq = Seq(m, seq_name, clock, reset)
else:
seq = self.seq
if aswire is None:
aswire = self.aswire
# for mult and div
m._clock = clock
m._reset = reset
dataflow_nodes = self.nodes
input_visitor = visitor.InputVisitor()
input_vars = set()
for node in sorted(dataflow_nodes, key=lambda x: x.object_id):
input_vars.update(input_visitor.visit(node))
output_visitor = visitor.OutputVisitor()
output_vars = set()
for node in sorted(dataflow_nodes, key=lambda x: x.object_id):
output_vars.update(output_visitor.visit(node))
# add input ports
for input_var in sorted(input_vars, key=lambda x: x.object_id):
input_var._implement_input(m, seq, aswire)
# schedule
sched = scheduler.ASAPScheduler()
sched.schedule(output_vars)
# balance output stage depth
max_stage = 0
for output_var in sorted(output_vars, key=lambda x: x.object_id):
max_stage = dtypes._max(max_stage, output_var.end_stage)
self.max_stage = max_stage
output_vars = sched.balance_output(output_vars, max_stage)
# get all vars
all_visitor = visitor.AllVisitor()
all_vars = set()
for output_var in sorted(output_vars, key=lambda x: x.object_id):
all_vars.update(all_visitor.visit(output_var))
# allocate (implement signals)
alloc = allocator.Allocator()
alloc.allocate(m, seq, all_vars)
# set default module information
for var in sorted(all_vars, key=lambda x: x.object_id):
var._set_module(m)
var._set_df(self)
if var.seq is not None:
seq.update(var.seq)
var._set_seq(seq)
# add output ports
for output_var in sorted(output_vars, key=lambda x: x.object_id):
output_var._implement_output(m, seq, aswire)
# save schedule result
self.last_input = input_vars
self.last_output = output_vars
return m
# -------------------------------------------------------------------------
def draw_graph(self, filename='out.png', prog='dot', rankdir='LR', approx=False):
if self.last_output is None:
self.to_module()
graph.draw_graph(self.last_output, filename=filename, prog=prog,
rankdir=rankdir, approx=approx)
def enable_draw_graph(self, filename='out.png', prog='dot', rankdir='LR', approx=False):
self.module.add_hook(self.draw_graph,
kwargs={'filename': filename, 'prog': prog,
'rankdir': rankdir, 'approx': approx})
# -------------------------------------------------------------------------
def get_input(self):
if self.last_input is None:
return collections.OrderedDict()
ret = collections.OrderedDict()
for input_var in sorted(self.last_input, key=lambda x: x.object_id):
key = str(input_var.input_data)
value = input_var
ret[key] = value
return ret
def get_output(self):
if self.last_output is None:
return collections.OrderedDict()
ret = collections.OrderedDict()
for output_var in sorted(self.last_output, key=lambda x: x.object_id):
key = str(output_var.output_data)
value = output_var
ret[key] = value
return ret
# -------------------------------------------------------------------------
def pipeline_depth(self):
return self.max_stage
# -------------------------------------------------------------------------
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError as e:
if attr.startswith('__') or attr not in dir(dtypes):
raise e
func = getattr(dtypes, attr)
@functools.wraps(func)
def wrapper(*args, **kwargs):
v = func(*args, **kwargs)
if isinstance(v, (tuple, list)):
for item in v:
self._set_info(item)
else:
self._set_info(v)
return v
return wrapper
def _set_info(self, v):
if isinstance(v, dtypes._Numeric):
v._set_module(self.module)
v._set_df(self)
v._set_seq(self.seq)
|
7f4dd69417c79bf27f57d3ebf5893dfbec33c9fd
|
b04c5ad9a8019f2b936ce933293402c98eb25ba7
|
/src/ontogpt/utils/model_utils.py
|
eed44047e239bf73bbc801ba701e55eafba137e9
|
[
"BSD-3-Clause"
] |
permissive
|
monarch-initiative/ontogpt
|
ede671ced22e8b00d2f33bbe09a3595ed04e508c
|
4849f4ce7c54f0394c263fdcad832f7a9eabd52b
|
refs/heads/main
| 2023-08-31T07:20:06.912334
| 2023-08-25T21:20:17
| 2023-08-25T21:20:17
| 584,843,480
| 322
| 43
|
BSD-3-Clause
| 2023-09-14T20:06:46
| 2023-01-03T16:53:25
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 572
|
py
|
model_utils.py
|
"""Utilities for retrieving and applying prebuild models."""
import pystow
import logging
from pathlib import PosixPath
ONTOGPT_MODULE = pystow.module("ontogpt")
def get_model(url: str) -> PosixPath:
"""Retrieve a model from a given URL.
Returns the Path for the retrieved file,
or the path to where it already exists.
"""
logging.info(f"Retrieving model from {url} if needed...")
mod_path = ONTOGPT_MODULE.ensure(url=url, force=False, download_kwargs={"backend": "requests"})
logging.info(f"Model now at {mod_path}")
return mod_path
|
d3e07e30ba9355a5de6fc6eec847ac4dfa5f3d7a
|
0ba9681b235b377b3f57d52532ab7212d4d4cd8a
|
/saw-remote-api/python/tests/saw_low_level/test_swap_low_level.py
|
6c0891425fdd0ebd3ec807861b8785850bb78e65
|
[
"BSD-3-Clause"
] |
permissive
|
GaloisInc/saw-script
|
d9a3eb7b05c1bcbcc319987223cd53b903b55b5d
|
79ddd800bec59528958ed6d7593304e2b17b7dfb
|
refs/heads/master
| 2023-09-01T09:47:31.415255
| 2023-08-30T11:26:08
| 2023-08-30T11:26:08
| 34,082,065
| 458
| 82
|
BSD-3-Clause
| 2023-09-14T16:23:09
| 2015-04-16T21:39:32
|
Haskell
|
UTF-8
|
Python
| false
| false
| 1,959
|
py
|
test_swap_low_level.py
|
from pathlib import Path
import unittest
import saw_client as saw
from saw_client.proofscript import *
class SwapLowLevelTest(unittest.TestCase):
def test_swap(self):
c = saw.connection.connect(reset_server=True)
if __name__ == "__main__": saw.view(saw.LogResults())
swap_bc = str(Path('tests','saw','test-files', 'swap.bc'))
c.llvm_load_module('m', swap_bc).result()
i32 = {"type": "primitive type", "primitive": "integer", "size": 32}
# ServerNames
xp_name = {"name": "xp"}
yp_name = {"name": "yp"}
# SetupVals
xp = {"setup value": "named", "name": "xp"}
yp = {"setup value": "named", "name": "yp"}
x = {"setup value": "Cryptol", "expression": "x" }
y = {"setup value": "Cryptol", "expression": "x" }
contract = {
"pre vars": [
{"server name": "x", "name": "x", "type": i32},
{"server name": "y", "name": "y", "type": i32}
],
"pre conds": [],
"pre allocated": [
{"server name": "xp",
"type": i32,
"mutable": True,
"alignment": None},
{"server name": "yp",
"type": i32,
"mutable": True,
"alignment": None}
],
"pre points tos": [{"pointer": xp, "points to": x},
{"pointer": yp, "points to": y}],
"argument vals": [xp, yp],
"post vars": [],
"post conds": [],
"post allocated": [],
"post points tos": [{"pointer": xp, "points to": y},
{"pointer": yp, "points to": x}],
"return val": None
}
prover = ProofScript([abc]).to_json()
c.llvm_verify('m', 'swap', [], False, contract, prover, 'ok').result()
if __name__ == "__main__":
unittest.main()
|
25ac02f8a2b6634ceaa1b7e485d5827436b10df0
|
bfc42c114f652012b6cfd14e7cccf52cb6b9ac7e
|
/src/spdx_tools/spdx/jsonschema/checksum_converter.py
|
9ffb39b11e6feadc567bb3f4865a56fd94a5a3a5
|
[
"Apache-2.0",
"GPL-2.0-only"
] |
permissive
|
spdx/tools-python
|
05a952501af2ac608678cb1737f7c661f6091fa2
|
777bd274dd06cb24342738df7da5ab285d652350
|
refs/heads/main
| 2023-08-31T09:39:52.930063
| 2023-08-24T06:39:48
| 2023-08-24T10:22:33
| 32,761,058
| 147
| 136
|
Apache-2.0
| 2023-09-14T15:50:59
| 2015-03-23T21:54:39
|
Python
|
UTF-8
|
Python
| false
| false
| 1,232
|
py
|
checksum_converter.py
|
# SPDX-FileCopyrightText: 2022 spdx contributors
#
# SPDX-License-Identifier: Apache-2.0
from beartype.typing import Type
from spdx_tools.spdx.jsonschema.checksum_properties import ChecksumProperty
from spdx_tools.spdx.jsonschema.converter import TypedConverter
from spdx_tools.spdx.jsonschema.json_property import JsonProperty
from spdx_tools.spdx.model import Checksum, ChecksumAlgorithm, Document
class ChecksumConverter(TypedConverter[Checksum]):
def get_data_model_type(self) -> Type[Checksum]:
return Checksum
def get_json_type(self) -> Type[JsonProperty]:
return ChecksumProperty
def _get_property_value(
self, checksum: Checksum, checksum_property: ChecksumProperty, _document: Document = None
) -> str:
if checksum_property == ChecksumProperty.ALGORITHM:
return algorithm_to_json_string(checksum.algorithm)
elif checksum_property == ChecksumProperty.CHECKSUM_VALUE:
return checksum.value
def algorithm_to_json_string(algorithm: ChecksumAlgorithm) -> str:
name_with_dash: str = algorithm.name.replace("_", "-")
if "BLAKE2B" in name_with_dash:
return name_with_dash.replace("BLAKE2B", "BLAKE2b")
return name_with_dash
|
5bc187ceb676bfc52755d2e66381fc19ee1d70cc
|
ac0deeda807797e445ee1f5fd2bfd52c7687843e
|
/MayaSublime.py
|
71a3011666691ac3d4d8c66fcd9c928cec8233bd
|
[
"MIT"
] |
permissive
|
justinfx/MayaSublime
|
7e9ea5e63ca78a9dc4d2dd91ec1e7dd4e1b9e187
|
6b7cd30117940c95e2b94e671b81be28de56647f
|
refs/heads/master
| 2021-12-24T11:00:41.808879
| 2021-09-30T19:02:27
| 2021-09-30T19:02:27
| 4,867,118
| 105
| 27
|
MIT
| 2021-09-26T20:05:36
| 2012-07-03T02:10:59
|
Python
|
UTF-8
|
Python
| false
| false
| 14,191
|
py
|
MayaSublime.py
|
# ST2/ST3 compat
from __future__ import print_function
import re
import sys
import time
import uuid
import socket
import textwrap
import threading
import traceback
from telnetlib import Telnet
import sublime, sublime_plugin
if sublime.version() < '3000':
# we are on ST2 and Python 2.X
_ST3 = False
else:
_ST3 = True
# Our default plugin state
_settings = {
# State of plugin settings
'host': '127.0.0.1',
'mel_port': 7001,
'py_port': 7002,
'strip_comments': True,
'no_collisions': True,
'maya_output': False,
'undo': False,
# Internal state
'_t_reader': None,
}
# A place to globally store a reference to our Thread
_ATTR_READER_THREAD = '_MayaSublime_Reader_Thread'
def plugin_unloaded():
"""
Hook called by ST3 when the plugin is unloaded
"""
# Clean up our thread
reader = _settings['_t_reader']
if reader is not None:
reader.shutdown()
_settings['_t_reader'] = None
class enable_maya_output(sublime_plugin.ApplicationCommand):
def run(self, *args):
_settings['maya_output'] = True
MayaReader.set_maya_output_enabled(True)
class disable_maya_output(sublime_plugin.ApplicationCommand):
def run(self, *args):
_settings['maya_output'] = False
MayaReader.set_maya_output_enabled(False)
class send_to_mayaCommand(sublime_plugin.TextCommand):
# Match single-line comments in MEL/Python
RX_COMMENT = re.compile(r'^\s*(//|#)')
def run(self, edit):
# Do we have a valid source language?
syntax = self.view.settings().get('syntax')
if re.search(r'python', syntax, re.I):
lang = 'python'
sep = '\n'
elif re.search(r'mel', syntax, re.I):
lang = 'mel'
sep = '\r'
else:
print('No Maya-Recognized Language Found')
return
# Apparently ST3 doesn't always sync up its latest
# plugin settings?
if not _settings['host']:
sync_settings()
# Check the current selection size to determine
# how we will send the source to be executed.
selections = self.view.sel() # Returns type sublime.RegionSet
selSize = 0
for sel in selections:
if not sel.empty():
selSize += 1
snips = []
# If nothing is selected, we will use an approach that sends an
# entire source file, and tell Maya to execute it.
if selSize == 0:
execType = 'execfile'
print("Nothing Selected, Attempting to exec entire file")
if self.view.is_dirty():
sublime.error_message("Save Changes Before Maya Source/Import")
return
file_path = self.view.file_name()
if file_path is None:
sublime.error_message("File must be saved before sending to Maya")
return
plat = sublime_plugin.sys.platform
if plat == 'win32':
file_path = file_path.replace('\\','\\\\')
print("FILE PATH:",file_path)
if lang == 'python':
snips.append(file_path)
else:
snips.append('rehash; source "{0}";'.format(file_path))
# Otherwise, we are sending snippets of code to be executed
else:
execType = 'exec'
file_path = ''
substr = self.view.substr
match = self.RX_COMMENT.match
stripComments = _settings['strip_comments']
# Build up all of the selected lines, while removing single-line comments
# to simplify the amount of data being sent.
for sel in selections:
if stripComments:
snips.extend(line for line in substr(sel).splitlines() if not match(line))
else:
snips.extend(substr(sel).splitlines())
mCmd = str(sep.join(snips))
if not mCmd:
return
print('Sending {0}:\n{1!r}\n...'.format(lang, mCmd[:200]))
if lang == 'python':
# We need to wrap our source string into a template
# so that it gets executed properly on the Maya side
no_collide = _settings['no_collisions']
create_undo = _settings["undo"]
opts = dict(
xtype=execType, cmd=mCmd, fp=file_path,
ns=no_collide, undo=create_undo,
)
mCmd = PY_CMD_TEMPLATE.format(**opts)
if _settings["maya_output"]:
# In case maya was restarted, we can make sure the
# callback is always installed
MayaReader.set_maya_output_enabled(_settings["maya_output"])
_send_to_maya(mCmd, lang, wrap=False)
def _send_to_maya(cmd, lang='python', wrap=True, quiet=False):
"""
Send stringified Python code to Maya, to be executed.
"""
if not _settings['host']:
sync_settings()
host = _settings['host']
port = _settings['py_port'] if lang=='python' else _settings['mel_port']
if lang == 'python' and wrap:
no_collide = _settings['no_collisions']
create_undo = _settings["undo"]
opts = dict(xtype='exec', cmd=cmd, fp='', ns=no_collide, undo=create_undo)
cmd = PY_CMD_TEMPLATE.format(**opts)
c = None
try:
c = Telnet(host, int(port), timeout=3)
c.write(_py_str(cmd))
except Exception:
e = sys.exc_info()[1]
err = str(e)
msg = "Failed to communicate with Maya (%(host)s:%(port)s)):\n%(err)s" % locals()
if quiet:
print(msg)
return False
sublime.error_message(msg)
raise
else:
time.sleep(.1)
finally:
if c is not None:
c.close()
return True
def _py_str(s):
"""Encode a py3 string if needed"""
if _ST3:
return s.encode(encoding='UTF-8')
return s
def settings_obj():
return sublime.load_settings("MayaSublime.sublime-settings")
_IS_SYNCING = False
def sync_settings():
global _IS_SYNCING
if _IS_SYNCING:
return
_IS_SYNCING = True
try:
_sync_settings()
finally:
_IS_SYNCING = False
def _sync_settings():
so = settings_obj()
_settings['host'] = so.get('maya_hostname', _settings['host'])
_settings['py_port'] = so.get('python_command_port', _settings['py_port'])
_settings['mel_port'] = so.get('mel_command_port', _settings['mel_port'] )
_settings['strip_comments'] = so.get('strip_sending_comments', _settings['strip_comments'])
_settings['no_collisions'] = so.get('no_collisions', _settings['no_collisions'])
_settings['maya_output'] = so.get('receive_maya_output', _settings['maya_output'])
_settings['undo'] = so.get('create_undo', _settings['undo'] )
MayaReader._st2_remove_reader()
if _settings['maya_output'] is not None:
MayaReader.set_maya_output_enabled(_settings["maya_output"])
# A template wrapper for sending Python source safely
# over the socket.
# Executes in a private namespace to avoid collisions
# with the main environment in Maya.
# Also handles catches and printing exceptions so that
# they are not masked.
PY_CMD_TEMPLATE = textwrap.dedent('''
import traceback
import __main__
import maya.cmds
def _open(f):
try:
return open(f, encoding='utf-8')
except TypeError:
return open(f)
namespace = __main__.__dict__.get('_sublime_SendToMaya_plugin')
if not namespace:
namespace = __main__.__dict__.copy()
__main__.__dict__['_sublime_SendToMaya_plugin'] = namespace
try:
if {undo}:
maya.cmds.undoInfo(openChunk=True, chunkName="MayaSublime Code")
if {ns}:
namespace['__file__'] = {fp!r}
else:
namespace = __main__.__dict__
if {xtype!r} == "exec":
exec({cmd!r}, namespace, namespace)
else:
with _open({fp!r}) as _fp:
_code = compile(_fp.read(), {fp!r}, 'exec')
exec(_code, namespace, namespace)
except:
traceback.print_exc()
finally:
if {undo}:
maya.cmds.undoInfo(closeChunk=True)
''')
PY_MAYA_CALLBACK = textwrap.dedent(r'''
import sys
import errno
import socket
import maya.OpenMaya
try:
from io import StringIO
except ImportError:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
if '_MayaSublime_ScriptEditorOutput_CID' not in globals():
_MayaSublime_ScriptEditorOutput_CID = None
if '_MayaSublime_SOCK' not in globals():
_MayaSublime_SOCK = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def _MayaSublime_streamScriptEditor(enable, host="127.0.0.1", port=5123, quiet=False):
om = maya.OpenMaya
global _MayaSublime_ScriptEditorOutput_CID
cid = _MayaSublime_ScriptEditorOutput_CID
# Only print if we are really changing state
if enable and cid is None:
sys.stdout.write("[MayaSublime] Enable Streaming ScriptEditor " \
"({0}:{1})\n".format(host, port))
elif not enable and cid is not None:
sys.stdout.write("[MayaSublime] Disable Streaming ScriptEditor\n")
if cid is not None:
om.MMessage.removeCallback(cid)
_MayaSublime_ScriptEditorOutput_CID = None
if not enable:
return
buf = StringIO()
def _enc(s):
try:
return s.encode('utf-8')
except AttributeError:
return s
def _streamToMayaSublime(msg, msgType, *args):
buf.seek(0)
buf.truncate()
if msgType != om.MCommandMessage.kDisplay:
buf.write('[MayaSublime] ')
if msgType == om.MCommandMessage.kWarning:
buf.write('# Warning: ')
buf.write(msg)
buf.write(' #\n')
elif msgType == om.MCommandMessage.kError:
buf.write('// Error: ')
buf.write(msg)
buf.write(' //\n')
elif msgType == om.MCommandMessage.kResult:
buf.write('# Result: ')
buf.write(msg)
buf.write(' #\n')
else:
buf.write(msg)
buf.seek(0)
# Start with trying to send 8kb packets
bufsize = 8*1024
# Loop until the buffer is empty
while True:
while bufsize > 0:
# Save our position in case we error
# and need to roll back
pos = buf.tell()
part = buf.read(bufsize)
if not part:
# Buffer is empty. Nothing else to send
return
try:
_MayaSublime_SOCK.sendto(_enc(part), (host, port))
except Exception as e:
if e.errno == errno.EMSGSIZE:
# We have hit a message size limit.
# Scale down and try the packet again
bufsize /= 2
if bufsize < 1:
raise
buf.seek(pos)
continue
# Some other error
raise
# Message sent without error
break
cid = om.MCommandMessage.addCommandOutputCallback(_streamToMayaSublime)
_MayaSublime_ScriptEditorOutput_CID = cid
''')
class MayaReader(threading.Thread):
"""
A threaded reader that monitors for published ScriptEditor
output from Maya.
Installs a ScriptEditor callback to Maya to produce messages.
"""
# Max number of bytes to read from each packet.
BUFSIZE = 64 * 1024 # 64KB is max UDP packet size
# Signal to stop a receiving MayaReader
STOP_MSG = _py_str('MayaSublime::MayaReader::{0}'.format(uuid.uuid4()))
# # Stringified ScriptEditor callback code to install in Maya
# PY_MAYA_CALLBACK = open(os.path.join(os.path.dirname(__file__),
# "lib/pubScriptEditor.py")).read()
PY_MAYA_CALLBACK = PY_MAYA_CALLBACK
def __init__(self, host='127.0.0.1', port=0):
super(MayaReader, self).__init__()
self.daemon = True
self._running = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind((host, port))
def port(self):
"""Get the port number being used by the socket"""
_, port = self.sock.getsockname()
return port
def is_running(self):
"""Return true if the thread is running"""
return self._running.is_set()
def shutdown(self):
"""Stop the monitoring of Maya output"""
self._running.clear()
# Send shutdown message to local UDP
self.sock.sendto(self.STOP_MSG, self.sock.getsockname())
def run(self):
prefix = '[MayaSublime] '
print("{0}started on port {1}".format(prefix, self.port()))
fails = 0
self._running.set()
while self._running.is_set():
try:
msg, addr = self.sock.recvfrom(self.BUFSIZE)
except Exception as e:
print("Failed while reading output from Maya:")
traceback.print_exc()
# Prevent runaway failures from spinning
fails += 1
if fails >= 10:
# After too many failures in a row
# wait a bit
fails = 0
time.sleep(5)
continue
fails = 0
if msg == self.STOP_MSG:
break
if _ST3:
msg = msg.decode()
sys.stdout.write(msg)
print("{0}MayaReader stopped".format(prefix))
def _set_maya_callback_enabled(self, enable, quiet=False):
"""
Enable or disable the actual publishing of ScriptEditor output from Maya
"""
host, port = self.sock.getsockname()
cmd = "_MayaSublime_streamScriptEditor({0}, host={1!r}, port={2})".format(enable, host, port)
return _send_to_maya(cmd, quiet=quiet, wrap=_settings['no_collisions'])
@classmethod
def _st2_remove_reader(cls):
"""
A hack to work around SublimeText2 not having a
module level hook for when the plugin is loaded
and unloaded.
Need to store a reference to our thread that doesn't
get blown away when the plugin reloads, so that we
can clean it up.
"""
if _ST3:
return
import __main__
reader = getattr(__main__, _ATTR_READER_THREAD, None)
if reader:
reader.shutdown()
setattr(__main__, _ATTR_READER_THREAD, None)
@classmethod
def _st2_replace_reader(cls, reader):
"""
A hack to work around SublimeText2 not having a
module level hook for when the plugin is loaded
and unloaded.
Need to store a reference to our thread that doesn't
get blown away when the plugin reloads, so that we
can clean it up and replace it with another.
"""
if _ST3:
return
cls._st2_remove_reader()
import __main__
setattr(__main__, _ATTR_READER_THREAD, reader)
@classmethod
def install_maya_callback(cls):
"""Send the callback logic to Maya"""
return _send_to_maya(cls.PY_MAYA_CALLBACK, quiet=True, wrap=_settings['no_collisions'])
@classmethod
def set_maya_output_enabled(cls, enable):
# Make sure the Maya filtering callback code
# is set up already
ok = cls.install_maya_callback()
quiet = not ok
reader = _settings.get('_t_reader')
# handle disabling the reader
if not enable:
if reader:
reader.shutdown()
reader._set_maya_callback_enabled(False, quiet)
return
# handle enabling the reader
if reader and reader.is_alive():
# The reader is already running
reader._set_maya_callback_enabled(True, quiet)
return
# Start the reader
reader = cls()
reader.start()
_settings['_t_reader'] = reader
cls._st2_replace_reader(reader)
reader._set_maya_callback_enabled(True, quiet)
# Add callbacks for monitoring setting changes
settings_obj().clear_on_change("MayaSublime.settings")
settings_obj().add_on_change("MayaSublime.settings", sync_settings)
sync_settings()
|
ee970f8573c61210cadeffcbe8d62fc229e59f5d
|
6fdb4eaf5b0e6dbd7db4bf947547541e9aebf110
|
/api/src/opentrons/protocol_api/core/__init__.py
|
150697d611833661babf835043a853869c191a67
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
Opentrons/opentrons
|
874321e01149184960eeaeaa31b1d21719a1ceda
|
026b523c8c9e5d45910c490efb89194d72595be9
|
refs/heads/edge
| 2023-09-02T02:51:49.579906
| 2023-08-31T16:02:45
| 2023-08-31T16:02:45
| 38,644,841
| 326
| 174
|
Apache-2.0
| 2023-09-14T21:47:20
| 2015-07-06T20:41:01
|
Python
|
UTF-8
|
Python
| false
| false
| 736
|
py
|
__init__.py
|
"""Core protocol logic interfaces and implementations.
This module provide facades to different protocol execution cores,
and is an internal implementation detail of the Python Protocol API.
Nothing defined in `opentrons.protocol_api.core` may be considered public.
"""
# TODO(mc, 2022-08-22): uncomment when import cycles can be resolved
# from .protocol import AbstractProtocol
# from .labware_offset_provider import (
# AbstractLabwareOffsetProvider,
# LabwareOffsetProvider,
# NullLabwareOffsetProvider,
# ProvidedLabwareOffset,
# )
# __all__ = [
# "AbstractProtocol",
# "AbstractLabwareOffsetProvider",
# "LabwareOffsetProvider",
# "NullLabwareOffsetProvider",
# "ProvidedLabwareOffset",
# ]
|
c0934b5c2ae84ddb92e9e65bc678adb8f4799e38
|
559f3dec0964d2e0f86c6c871371fe779cf3726c
|
/EISeg/eiseg/util/regularization/rs_regularization.py
|
9fff9b8f99927d9bf2603ae20906d142cf7d4a33
|
[
"Apache-2.0"
] |
permissive
|
PaddlePaddle/PaddleSeg
|
319ab26665ea492527a1949671650135123ffc39
|
2c8c35a8949fef74599f5ec557d340a14415f20d
|
refs/heads/release/2.8
| 2023-08-31T09:08:06.724717
| 2023-08-18T01:59:56
| 2023-08-18T01:59:56
| 204,380,779
| 8,531
| 1,866
|
Apache-2.0
| 2023-09-12T02:30:42
| 2019-08-26T02:32:22
|
Python
|
UTF-8
|
Python
| false
| false
| 11,832
|
py
|
rs_regularization.py
|
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This code is based on https://github.com/PaddlePaddle/PaddleRS
Ths copyright of PaddlePaddle/PaddleRS is as follows:
Apache License [see LICENSE for details]
"""
import math
import cv2
import numpy as np
from .utils import calc_distance
S = 20
TD = 3
D = TD + 1
ALPHA = math.degrees(math.pi / 6)
BETA = math.degrees(math.pi * 17 / 18)
DELTA = math.degrees(math.pi / 12)
THETA = math.degrees(math.pi / 4)
def boundary_regularization(contour, mask_shape, W: int=32) -> np.ndarray:
new_contour = _coarse(contour, mask_shape) # coarse
if new_contour is not None:
contour = _fine(new_contour, W) # fine
return contour
def _coarse(contour, img_shape):
def _inline_check(point, shape, eps=5):
x, y = point[0]
iH, iW = shape
if x < eps or x > iH - eps or y < eps or y > iW - eps:
return False
else:
return True
area = cv2.contourArea(contour)
# S = 20
if area < S: # remove polygons whose area is below a threshold S
return None
# D = 0.3 if area < 200 else 1.0
# TD = 0.5 if area < 200 else 0.9
epsilon = 0.005 * cv2.arcLength(contour, True)
contour = cv2.approxPolyDP(contour, epsilon, True) # DP
p_number = contour.shape[0]
idx = 0
while idx < p_number:
last_point = contour[idx - 1]
current_point = contour[idx]
next_idx = (idx + 1) % p_number
next_point = contour[next_idx]
# remove edges whose lengths are below a given side length TD
# that varies with the area of a building.
distance = calc_distance(current_point, next_point)
if distance < TD and not _inline_check(next_point, img_shape):
contour = np.delete(contour, next_idx, axis=0)
p_number -= 1
continue
# remove over-sharp angles with threshold α.
# remove over-smooth angles with threshold β.
angle = _calc_angle(last_point, current_point, next_point)
if (ALPHA > angle or angle > BETA) and _inline_check(current_point,
img_shape):
contour = np.delete(contour, idx, axis=0)
p_number -= 1
continue
idx += 1
if p_number > 2:
return contour
else:
return None
def _fine(contour, W):
# area = cv2.contourArea(contour)
# W = 6 if area < 200 else 8
# TD = 0.5 if area < 200 else 0.9
# D = TD + 0.3
nW = W
p_number = contour.shape[0]
distance_list = []
azimuth_list = []
indexs_list = []
for idx in range(p_number):
current_point = contour[idx]
next_idx = (idx + 1) % p_number
next_point = contour[next_idx]
distance_list.append(calc_distance(current_point, next_point))
azimuth_list.append(_calc_azimuth(current_point, next_point))
indexs_list.append((idx, next_idx))
# add the direction of the longest edge to the list of main direction.
longest_distance_idx = np.argmax(distance_list)
main_direction_list = [azimuth_list[longest_distance_idx]]
max_dis = distance_list[longest_distance_idx]
if max_dis <= nW:
nW = max_dis - 1e-6
# Add other edges’ direction to the list of main directions
# according to the angle threshold δ between their directions
# and directions in the list.
for distance, azimuth in zip(distance_list, azimuth_list):
for mdir in main_direction_list:
abs_dif_ang = abs(mdir - azimuth)
if distance > nW and THETA <= abs_dif_ang <= (180 - THETA):
main_direction_list.append(azimuth)
contour_by_lines = []
md_used_list = [main_direction_list[0]]
for distance, azimuth, (idx, next_idx) in zip(distance_list, azimuth_list,
indexs_list):
p1 = contour[idx]
p2 = contour[next_idx]
pm = (p1 + p2) / 2
# find long edges with threshold W that varies with building’s area.
if distance > nW:
rotate_ang = main_direction_list[0] - azimuth
for main_direction in main_direction_list:
r_ang = main_direction - azimuth
if abs(r_ang) < abs(rotate_ang):
rotate_ang = r_ang
md_used_list.append(main_direction)
abs_rotate_ang = abs(rotate_ang)
# adjust long edges according to the list and angles.
if abs_rotate_ang < DELTA or abs_rotate_ang > (180 - DELTA):
rp1 = _rotation(p1, pm, rotate_ang)
rp2 = _rotation(p2, pm, rotate_ang)
elif (90 - DELTA) < abs_rotate_ang < (90 + DELTA):
rp1 = _rotation(p1, pm, rotate_ang - 90)
rp2 = _rotation(p2, pm, rotate_ang - 90)
else:
rp1, rp2 = p1, p2
# adjust short edges (judged by a threshold θ) according to the list and angles.
else:
rotate_ang = md_used_list[-1] - azimuth
abs_rotate_ang = abs(rotate_ang)
if abs_rotate_ang < THETA or abs_rotate_ang > (180 - THETA):
rp1 = _rotation(p1, pm, rotate_ang)
rp2 = _rotation(p2, pm, rotate_ang)
else:
rp1 = _rotation(p1, pm, rotate_ang - 90)
rp2 = _rotation(p2, pm, rotate_ang - 90)
# contour_by_lines.extend([rp1, rp2])
contour_by_lines.append([rp1[0], rp2[0]])
correct_points = np.array(contour_by_lines)
# merge (or connect) parallel lines if the distance between
# two lines is less than (or larger than) a threshold D.
final_points = []
final_points.append(correct_points[0][0].reshape([1, 2]))
lp_number = correct_points.shape[0] - 1
for idx in range(lp_number):
next_idx = (idx + 1) if idx < lp_number else 0
cur_edge_p1 = correct_points[idx][0]
cur_edge_p2 = correct_points[idx][1]
next_edge_p1 = correct_points[next_idx][0]
next_edge_p2 = correct_points[next_idx][1]
L1 = _line(cur_edge_p1, cur_edge_p2)
L2 = _line(next_edge_p1, next_edge_p2)
A1 = _calc_azimuth([cur_edge_p1], [cur_edge_p2])
A2 = _calc_azimuth([next_edge_p1], [next_edge_p2])
dif_azi = abs(A1 - A2)
# find intersection point if not parallel
if (90 - DELTA) < dif_azi < (90 + DELTA):
point_intersection = _intersection(L1, L2)
if point_intersection is not None:
final_points.append(point_intersection)
# move or add lines when parallel
elif dif_azi < 1e-6:
marg = _calc_distance_between_lines(L1, L2)
if marg < D:
# move
point_move = _calc_project_in_line(next_edge_p1, cur_edge_p1,
cur_edge_p2)
final_points.append(point_move)
# update next
correct_points[next_idx][0] = point_move
correct_points[next_idx][1] = _calc_project_in_line(
next_edge_p2, cur_edge_p1, cur_edge_p2)
else:
# add line
add_mid_point = (cur_edge_p2 + next_edge_p1) / 2
rp1 = _calc_project_in_line(add_mid_point, cur_edge_p1,
cur_edge_p2)
rp2 = _calc_project_in_line(add_mid_point, next_edge_p1,
next_edge_p2)
final_points.extend([rp1, rp2])
else:
final_points.extend(
[cur_edge_p1[np.newaxis, :], cur_edge_p2[np.newaxis, :]])
final_points = np.array(final_points)
return final_points
def _get_priority(hierarchy):
if hierarchy[3] < 0:
return 1
if hierarchy[2] < 0:
return 2
return 3
def _fill(img, coarse_conts):
result = np.zeros_like(img)
sorted(coarse_conts, key=lambda x: x[1])
for contour, priority in coarse_conts:
if priority == 2:
cv2.fillPoly(result, [contour.astype(np.int32)], (0, 0, 0))
else:
cv2.fillPoly(result, [contour.astype(np.int32)], (255, 255, 255))
return result
def _calc_angle(p1, vertex, p2):
x1, y1 = p1[0]
xv, yv = vertex[0]
x2, y2 = p2[0]
a = ((xv - x2) * (xv - x2) + (yv - y2) * (yv - y2))**0.5
b = ((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2))**0.5
c = ((x1 - xv) * (x1 - xv) + (y1 - yv) * (y1 - yv))**0.5
return math.degrees(math.acos((b**2 - a**2 - c**2) / (-2 * a * c)))
def _calc_azimuth(p1, p2):
x1, y1 = p1[0]
x2, y2 = p2[0]
if y1 == y2:
return 0.0
if x1 == x2:
return 90.0
elif x1 < x2:
if y1 < y2:
ang = math.atan((y2 - y1) / (x2 - x1))
return math.degrees(ang)
else:
ang = math.atan((y1 - y2) / (x2 - x1))
return 180 - math.degrees(ang)
else: # x1 > x2
if y1 < y2:
ang = math.atan((y2 - y1) / (x1 - x2))
return 180 - math.degrees(ang)
else:
ang = math.atan((y1 - y2) / (x1 - x2))
return math.degrees(ang)
def _rotation(point, center, angle):
if angle == 0:
return point
x, y = point[0]
cx, cy = center[0]
radian = math.radians(abs(angle))
if angle > 0: # clockwise
rx = (x - cx) * math.cos(radian) - (y - cy) * math.sin(radian) + cx
ry = (x - cx) * math.sin(radian) + (y - cy) * math.cos(radian) + cy
else:
rx = (x - cx) * math.cos(radian) + (y - cy) * math.sin(radian) + cx
ry = (y - cy) * math.cos(radian) - (x - cx) * math.sin(radian) + cy
return np.array([[rx, ry]])
def _line(p1, p2):
A = (p1[1] - p2[1])
B = (p2[0] - p1[0])
C = (p1[0] * p2[1] - p2[0] * p1[1])
return A, B, -C
def _intersection(L1, L2):
D = L1[0] * L2[1] - L1[1] * L2[0]
Dx = L1[2] * L2[1] - L1[1] * L2[2]
Dy = L1[0] * L2[2] - L1[2] * L2[0]
if D != 0:
x = Dx / D
y = Dy / D
return np.array([[x, y]])
else:
return None
def _calc_distance_between_lines(L1, L2):
eps = 1e-16
A1, _, C1 = L1
A2, B2, C2 = L2
new_C1 = C1 / (A1 + eps)
new_A2 = 1
new_B2 = B2 / (A2 + eps)
new_C2 = C2 / (A2 + eps)
dist = (np.abs(new_C1 - new_C2)) / (
np.sqrt(new_A2 * new_A2 + new_B2 * new_B2) + eps)
return dist
def _calc_project_in_line(point, line_point1, line_point2):
eps = 1e-16
m, n = point
x1, y1 = line_point1
x2, y2 = line_point2
F = (x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1)
x = (m * (x2 - x1) * (x2 - x1) + n * (y2 - y1) * (x2 - x1) +
(x1 * y2 - x2 * y1) * (y2 - y1)) / (F + eps)
y = (m * (x2 - x1) * (y2 - y1) + n * (y2 - y1) * (y2 - y1) +
(x2 * y1 - x1 * y2) * (x2 - x1)) / (F + eps)
return np.array([[x, y]])
|
e0bd30c05a071236c6ff743bff715c38e023c9b9
|
a1c7055f3e66fb802ae4c3ecdb952ff45579914f
|
/tests_isolated/test_commandset/test_categories.py
|
71f1db8e238e73885ebef0fb40912cf480eb40f9
|
[
"MIT"
] |
permissive
|
python-cmd2/cmd2
|
3e4ef2a1804554e8acd50898636a5685eab292c1
|
9886b82c71face043e1fac871a6cdbebbf0e864c
|
refs/heads/master
| 2023-09-05T14:55:50.702311
| 2023-09-04T16:44:01
| 2023-09-04T16:44:01
| 51,442,999
| 571
| 154
|
MIT
| 2023-09-04T16:44:03
| 2016-02-10T13:34:49
|
Python
|
UTF-8
|
Python
| false
| false
| 4,417
|
py
|
test_categories.py
|
#!/usr/bin/env python3
# coding=utf-8
"""
Simple example demonstrating basic CommandSet usage.
"""
from typing import (
Any,
)
import cmd2
from cmd2 import (
CommandSet,
with_default_category,
)
@with_default_category('Default Category')
class MyBaseCommandSet(CommandSet):
"""Defines a default category for all sub-class CommandSets"""
def __init__(self, _: Any):
super(MyBaseCommandSet, self).__init__()
class ChildInheritsParentCategories(MyBaseCommandSet):
"""
This subclass doesn't declare any categories so all commands here are also categorized under 'Default Category'
"""
def do_hello(self, _: cmd2.Statement):
self._cmd.poutput('Hello')
def do_world(self, _: cmd2.Statement):
self._cmd.poutput('World')
@with_default_category('Non-Heritable Category', heritable=False)
class ChildOverridesParentCategoriesNonHeritable(MyBaseCommandSet):
"""
This subclass overrides the 'Default Category' from the parent, but in a non-heritable fashion. Sub-classes of this
CommandSet will not inherit this category and will, instead, inherit 'Default Category'
"""
def do_goodbye(self, _: cmd2.Statement):
self._cmd.poutput('Goodbye')
class GrandchildInheritsGrandparentCategory(ChildOverridesParentCategoriesNonHeritable):
"""
This subclass's parent class declared its default category non-heritable. Instead, it inherits the category defined
by the grandparent class.
"""
def do_aloha(self, _: cmd2.Statement):
self._cmd.poutput('Aloha')
@with_default_category('Heritable Category')
class ChildOverridesParentCategories(MyBaseCommandSet):
"""
This subclass is decorated with a default category that is heritable. This overrides the parent class's default
category declaration.
"""
def do_bonjour(self, _: cmd2.Statement):
self._cmd.poutput('Bonjour')
class GrandchildInheritsHeritable(ChildOverridesParentCategories):
"""
This subclass's parent declares a default category that overrides its parent. As a result, commands in this
CommandSet will be categorized under 'Heritable Category'
"""
def do_monde(self, _: cmd2.Statement):
self._cmd.poutput('Monde')
class ExampleApp(cmd2.Cmd):
"""
Example to demonstrate heritable default categories
"""
def __init__(self):
super(ExampleApp, self).__init__(auto_load_commands=False)
def do_something(self, arg):
self.poutput('this is the something command')
def test_heritable_categories():
app = ExampleApp()
base_cs = MyBaseCommandSet(0)
assert getattr(base_cs, cmd2.constants.CLASS_ATTR_DEFAULT_HELP_CATEGORY, None) == 'Default Category'
child1 = ChildInheritsParentCategories(1)
assert getattr(child1, cmd2.constants.CLASS_ATTR_DEFAULT_HELP_CATEGORY, None) == 'Default Category'
app.register_command_set(child1)
assert getattr(app.cmd_func('hello').__func__, cmd2.constants.CMD_ATTR_HELP_CATEGORY, None) == 'Default Category'
app.unregister_command_set(child1)
child_nonheritable = ChildOverridesParentCategoriesNonHeritable(2)
assert getattr(child_nonheritable, cmd2.constants.CLASS_ATTR_DEFAULT_HELP_CATEGORY, None) != 'Non-Heritable Category'
app.register_command_set(child_nonheritable)
assert getattr(app.cmd_func('goodbye').__func__, cmd2.constants.CMD_ATTR_HELP_CATEGORY, None) == 'Non-Heritable Category'
app.unregister_command_set(child_nonheritable)
grandchild1 = GrandchildInheritsGrandparentCategory(3)
assert getattr(grandchild1, cmd2.constants.CLASS_ATTR_DEFAULT_HELP_CATEGORY, None) == 'Default Category'
app.register_command_set(grandchild1)
assert getattr(app.cmd_func('aloha').__func__, cmd2.constants.CMD_ATTR_HELP_CATEGORY, None) == 'Default Category'
app.unregister_command_set(grandchild1)
child_overrides = ChildOverridesParentCategories(4)
assert getattr(child_overrides, cmd2.constants.CLASS_ATTR_DEFAULT_HELP_CATEGORY, None) == 'Heritable Category'
app.register_command_set(child_overrides)
assert getattr(app.cmd_func('bonjour').__func__, cmd2.constants.CMD_ATTR_HELP_CATEGORY, None) == 'Heritable Category'
app.unregister_command_set(child_overrides)
grandchild2 = GrandchildInheritsHeritable(5)
assert getattr(grandchild2, cmd2.constants.CLASS_ATTR_DEFAULT_HELP_CATEGORY, None) == 'Heritable Category'
|
4a23646282f2e84935d077be648013ef91c7fc3a
|
b3a693cb2c15f95133876f74a640ec585b7a0f62
|
/CompetitiveProgramming/BinarySearch/adjusting_rectangles.py
|
081c289074bdfcada26ee7d1750c427b5eea1bc9
|
[] |
no_license
|
singhsanket143/CppCompetitiveRepository
|
1a7651553ef69fa407d85d789c7c342f9a4bd8e9
|
6e69599ff57e3c9dce4c4d35e60c744f8837c516
|
refs/heads/master
| 2022-06-23T01:42:38.811581
| 2022-06-16T13:17:15
| 2022-06-16T13:17:15
| 138,698,312
| 349
| 148
| null | 2021-03-06T18:46:58
| 2018-06-26T07:06:16
|
C++
|
UTF-8
|
Python
| false
| false
| 631
|
py
|
adjusting_rectangles.py
|
"""
There are 𝑛 rectangles of the same size: 𝑤 in width and ℎ in length. It is required to find a square of the smallest size into which these rectangles can be packed. Rectangles cannot be rotated.
https://codeforces.com/edu/course/2/lesson/6/2/practice/contest/283932/problem/A
"""
def good(mid, n, w, h):
return (mid//w) * (mid//h) >= n
def adjust_rectangle(w, h, n):
lo = 0
hi = max(w, h)*n
ans = -1
while(lo <= hi):
mid = lo + (hi - lo) // 2
if(good(mid, n, w, h)):
hi = mid - 1
ans = mid
else:
lo = mid + 1
return ans
w, h, n = [int(x) for x in input().split()]
print(adjust_rectangle(w, h, n))
|
c2d5329ef55209d74620838cf0f8f6027d758ea6
|
6bc58f290bc3ecd1f4d2a492f0abc5fd4b8ff9c0
|
/tests/test_aruba.py
|
79186b52c334dc48bccb656eb08d5fde039b693a
|
[
"Apache-2.0",
"CC0-1.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] |
permissive
|
splunk/splunk-connect-for-syslog
|
c5821e025ef1b5d1312a6ac71b822262f560aa99
|
472f0b7a0bcbb29cb00a141e4fe4aa6193c49fde
|
refs/heads/main
| 2023-08-25T03:57:40.245846
| 2023-08-21T08:55:12
| 2023-08-21T08:55:12
| 194,185,530
| 188
| 149
|
Apache-2.0
| 2023-09-14T18:31:04
| 2019-06-28T01:27:07
|
Python
|
UTF-8
|
Python
| false
| false
| 3,309
|
py
|
test_aruba.py
|
# Copyright 2019 Splunk, Inc.
#
# Use of this source code is governed by a BSD-2-clause-style
# license that can be found in the LICENSE-BSD2 file or at
# https://opensource.org/licenses/BSD-2-Clause
from jinja2 import Environment
from .sendmessage import *
from .splunkutils import *
from .timeutils import *
import pytest
env = Environment()
# time format for Apr 5 22:51:54 2021
# <187>{{ arubadate }} {{ host }} authmgr[4130]: <124198> <4130> <ERRS> <{{ host }} 10.10.10.10> {00:00:00:00:00:00-??} Missing server in attribute list, auth=VPN, utype=L3.
# <187>{{ arubadate }} {{ host }} stm[4133]: <399803> <4133> <ERRS> <{{ host }} 10.10.10.10> An internal system error has occurred at file sapm_ap_mgmt.c function sapm_get_img_build_version_str line 11853 error stat /mswitch/sap/mips64.ari failed: No such file or directory.
# <188>{{ arubadate }} {{ host }} wms[4096]: <126005> <4096> <WARN> <{{ host }} 10.10.10.10> |ids| Interfering AP: The system classified an access point (BSSID 00:0e:8e:96:f4:32 and SSID on CHANNEL 36) as interfering. Additional Info: Detector-AP-Name:00:0b:86:9e:6b:5f; Detector-AP-MAC:24:de:c6:70:2c:90; Detector-AP-Radio:1.
# <191>{{ arubadate }} 10.10.10.10 dnsmasq: reading /etc/resolv.conf
testdata = [
"<187>{{ arubadate }} {{ host }} authmgr[4130]: <124198> <4130> <ERRS> <{{ host }} 10.10.10.10> {00:00:00:00:00:00-??} Missing server in attribute list, auth=VPN, utype=L3.",
"<187>{{ arubadate }} {{ host }} stm[4133]: <399803> <4133> <ERRS> <{{ host }} 10.10.10.10> An internal system error has occurred at file sapm_ap_mgmt.c function sapm_get_img_build_version_str line 11853 error stat /mswitch/sap/mips64.ari failed: No such file or directory.",
"<188>{{ arubadate }} {{ host }} wms[4096]: <126005> <4096> <WARN> <{{ host }} 10.10.10.10> |ids| Interfering AP: The system classified an access point (BSSID 00:0e:8e:96:f4:32 and SSID on CHANNEL 36) as interfering. Additional Info: Detector-AP-Name:00:0b:86:9e:6b:5f; Detector-AP-MAC:24:de:c6:70:2c:90; Detector-AP-Radio:1.",
"<188>{{ arubadate }} {{ host }} sapd[1362]: <127037> <WARN> |AP 00:0b:86:eb:4e:32@10.10.10.10 sapd| |ids-ap| AP(04:bd:88:8a:3a:60): Station Associated to Rogue AP: An AP detected a client a4:8d:3b:ae:68:68 associated to a rogue access point (BSSID 98:1e:19:31:63:b6 and SSID MySpectrumWiFib0-2G on CHANNEL 11).",
]
@pytest.mark.parametrize("event", testdata)
def test_aruba(
record_property, setup_wordlist, get_host_key, setup_splunk, setup_sc4s, event
):
host = get_host_key
dt = datetime.datetime.now()
iso, bsd, time, date, tzoffset, tzname, epoch = time_operations(dt)
arubadate = dt.strftime("%b %d %H:%M:%S %Y")
# Tune time functions
epoch = epoch[:-7]
mt = env.from_string(event + "\n")
message = mt.render(mark="<188>", bsd=bsd, host=host, arubadate=arubadate)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string(
'search index=netops _time={{ epoch }} sourcetype="aruba:syslog" host={{ host }}'
)
search = st.render(epoch=epoch, host=host)
resultCount, eventCount = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", resultCount)
record_property("message", message)
assert resultCount == 1
|
96c5e1a31366de3e83c8a7dc19a618a94ae388fd
|
675ecac2016ba9f8db611f6688a46ac4d2095447
|
/Interview Questions solutions/group anagrams/index.py
|
78026ac25e4902357e718e3745d1361446fdfc77
|
[] |
no_license
|
BitPunchZ/Leetcode-in-python-50-Algorithms-Coding-Interview-Questions
|
87a9619ba011aa8c30fe33e5e94037fea3144d1a
|
86a0ceefa9c8416c17010fe90eb372daf82256db
|
refs/heads/master
| 2023-08-17T05:37:59.467046
| 2023-08-09T08:59:01
| 2023-08-09T08:59:01
| 247,604,188
| 144
| 125
| null | 2021-04-19T03:04:00
| 2020-03-16T03:42:10
|
Python
|
UTF-8
|
Python
| false
| false
| 436
|
py
|
index.py
|
class Solution:
def findHash(self,s):
return ''.join(sorted(s))
def groupAnagrams(self, strs: List[str]) -> List[List[str]]:
answers = []
m = {}
for s in strs:
hashed = self.findHash(s)
if(hashed not in m):
m[hashed] = []
m[hashed].append(s)
for p in m.values():
answers.append(p)
return answers
|
174f7c8c728d1f3f88fbc80d954a37e833f26ee5
|
914faa10e5423efc87d0079248b3eb7df72ed83e
|
/test/calls/call8.py
|
0b78f3d3a8473650025a42b5c240d03f26528292
|
[
"MIT"
] |
permissive
|
MagicStack/MagicPython
|
cf7b7ae8290b0e997adf6a197b2f5be300391a0a
|
7d0f2b22a5ad8fccbd7341bc7b7a715169283044
|
refs/heads/master
| 2023-08-26T04:16:54.672649
| 2022-10-18T07:43:20
| 2022-10-19T23:20:38
| 43,982,620
| 1,564
| 146
|
MIT
| 2023-02-23T19:40:57
| 2015-10-09T22:13:24
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 4,250
|
py
|
call8.py
|
foo. __class__(foo=bar)
foo. __class__ (foo=bar)
foo. __add__ (foo=bar)
foo. __add__(foo=bar)
foo : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
__class__ : meta.function-call.python, meta.member.access.python, source.python, support.variable.magic.python
( : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
bar : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
) : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.end.python, source.python
foo : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
__class__ : meta.function-call.python, meta.member.access.python, source.python, support.variable.magic.python
: meta.function-call.python, meta.member.access.python, source.python
( : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
bar : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
) : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.end.python, source.python
foo : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
__add__ : meta.function-call.python, meta.member.access.python, source.python, support.function.magic.python
: meta.function-call.python, meta.member.access.python, source.python
( : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
bar : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
) : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.end.python, source.python
foo : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
: meta.member.access.python, source.python
__add__ : meta.function-call.python, meta.member.access.python, source.python, support.function.magic.python
( : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
bar : meta.function-call.arguments.python, meta.function-call.python, meta.member.access.python, source.python
) : meta.function-call.python, meta.member.access.python, punctuation.definition.arguments.end.python, source.python
|
29453d3335187eeb54b33e0a775fe7109fdb934b
|
d3b468ef0938ec32edf71ea1ceeb5b5d06ebf171
|
/swig/python/gdal-utils/scripts/gdal_edit.py
|
32e2b9dca8c492546914936a4441b0629f56789a
|
[
"LicenseRef-scancode-warranty-disclaimer",
"SunPro",
"LicenseRef-scancode-info-zip-2005-02",
"BSD-3-Clause",
"MIT",
"ISC",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] |
permissive
|
OSGeo/gdal
|
30a1e1fb0909d758d4f636d481bf03fcd7affe3c
|
1e7746b2546b8c4878f4bfdb20c87f87e561745b
|
refs/heads/master
| 2023-09-03T19:37:50.027999
| 2023-09-03T18:29:31
| 2023-09-03T18:29:31
| 6,148,317
| 4,100
| 2,611
|
NOASSERTION
| 2023-09-14T20:23:19
| 2012-10-09T21:39:58
|
C++
|
UTF-8
|
Python
| false
| false
| 315
|
py
|
gdal_edit.py
|
#!/usr/bin/env python3
import sys
from osgeo.gdal import UseExceptions, deprecation_warn
# import osgeo_utils.gdal_edit as a convenience to use as a script
from osgeo_utils.gdal_edit import * # noqa
from osgeo_utils.gdal_edit import main
UseExceptions()
deprecation_warn("gdal_edit")
sys.exit(main(sys.argv))
|
a47f80d8ba6c8825aa93c72a576024a13997c32b
|
6afed2f82f1a33ee48104fdef154f05147398645
|
/setup.py
|
2bbb8e69b64a4d5e0413f826b3c9babe93fdb2c3
|
[
"Apache-2.0"
] |
permissive
|
MAIF/eurybia
|
0d8e6936caf19e6c3028db8c8d6c17df0efcf48b
|
31ab98bebadadc2b31a77653fb6a5d4171010207
|
refs/heads/master
| 2023-05-31T03:22:11.867851
| 2023-03-02T16:49:18
| 2023-03-02T16:49:18
| 487,858,444
| 166
| 21
|
Apache-2.0
| 2023-08-31T14:21:56
| 2022-05-02T13:36:59
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 3,126
|
py
|
setup.py
|
#!/usr/bin/env python
"""The setup script."""
import os
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
with open("README.md", encoding="utf8") as readme_file:
long_description = readme_file.read()
# Load the package's __version__.py module as a dictionary.
version_d: dict = {}
with open(os.path.join(here, "eurybia", "__version__.py")) as f:
exec(f.read(), version_d)
requirements = [
"catboost>=0.22",
"datapane==0.14.0",
"ipywidgets>=7.4.2",
"jinja2>=2.11.0",
"scipy>=1.4.0",
"seaborn>=0.10.1",
"shapash>=2.0.0",
"jupyter",
]
setup_requirements = [
"pytest-runner",
]
test_requirements = [
"pytest",
]
setup(
name="eurybia", # Replace with your own username
version=version_d["__version__"],
python_requires=">3.6, < 3.11",
url="https://github.com/MAIF/eurybia",
author="Nicolas Roux, Johann Martin, Thomas Bouché",
author_email="thomas.bouche@maif.fr",
description="Eurybia monitor model drift over time and securize model deployment with data validation",
long_description=long_description,
long_description_content_type="text/markdown",
classifiers=[
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
install_requires=requirements,
license="Apache Software License 2.0",
keywords="eurybia",
package_dir={
"eurybia": "eurybia",
"eurybia.data": "eurybia/data",
"eurybia.core": "eurybia/core",
"eurybia.report": "eurybia/report",
"eurybia.assets": "eurybia/assets",
"eurybia.style": "eurybia/style",
"eurybia.utils": "eurybia/utils",
},
packages=["eurybia", "eurybia.data", "eurybia.core", "eurybia.report", "eurybia.style", "eurybia.utils"],
data_files=[
("data", ["eurybia/data/house_prices_dataset.csv"]),
("data", ["eurybia/data/house_prices_labels.json"]),
("data", ["eurybia/data/titanicdata.csv"]),
("data", ["eurybia/data/project_info_car_accident.yml"]),
("data", ["eurybia/data/project_info_house_price.yml"]),
("data", ["eurybia/data/project_info_titanic.yml"]),
("data", ["eurybia/data/titanic_altered.csv"]),
("data", ["eurybia/data/titanic_original.csv"]),
("data", ["eurybia/data/US_Accidents_extract.csv"]),
("style", ["eurybia/style/colors.json"]),
(
"assets",
[
"eurybia/assets/local-report-base.css",
"eurybia/assets/local-report-base.js",
"eurybia/assets/logo_eurybia_dp.png",
"eurybia/assets/report_template.html",
],
),
],
include_package_data=True,
setup_requires=setup_requirements,
test_suite="tests",
tests_require=test_requirements,
zip_safe=False,
)
|
0bf71366402b3bd62274259e2cd72e69f0fc3c92
|
e4c5238c86c8a114d49b7ba3ecc5ef9d5157e152
|
/toqito/matrix_props/is_block_positive.py
|
cf7bdd18f22acc82846e8aa8cecf59b7a88b7f77
|
[
"MIT"
] |
permissive
|
vprusso/toqito
|
64a9963c02b73127836b76d886543a0642b93664
|
7e6869d783f98cb241579ea89e0f9ff61eff9d9b
|
refs/heads/master
| 2023-07-22T17:08:18.392204
| 2023-07-19T07:27:37
| 2023-07-19T07:27:37
| 235,493,396
| 116
| 53
|
MIT
| 2023-09-12T13:35:38
| 2020-01-22T03:47:16
|
Python
|
UTF-8
|
Python
| false
| false
| 4,184
|
py
|
is_block_positive.py
|
"""Is matrix block positive."""
from __future__ import annotations
import numpy as np
from toqito.matrix_props.is_hermitian import is_hermitian
from toqito.matrix_props.is_positive_semidefinite import is_positive_semidefinite
from toqito.matrix_props.sk_norm import sk_operator_norm
def is_block_positive(
mat: np.ndarray,
k: int = 1,
dim: int | list[int] = None,
effort: int = 2,
rtol: float = 1e-5,
) -> bool:
r"""
Check if matrix is block positive [1]_.
Examples
==========
The swap operator is always block positive, since it is the Choi
matrix of the transpose map.
>>> from toqito.matrix_props import is_block_positive
>>> from toqito.perms import swap_operator
>>>
>>> mat = swap_operator(3)
>>> is_block_positive(mat)
True
However, it's not 2 - block positive.
>>> from toqito.matrix_props import is_block_positive
>>> from toqito.perms import swap_operator
>>>
>>> mat = swap_operator(3)
>>> is_block_positive(mat, k=2)
False
References
==========
.. [1] "N. Johnston. Norms and Cones in the Theory of Quantum Entanglement. PhD thesis"
arXiv:1207.1479
:raises ValueError: Unable to determine block positive property.
:param mat: A bipartite Hermitian operator.
:param k: A positive integer indicating that the function should determine whether or not
the input operator is k-block positive, i.e., whether or not it remains nonnegative
under left and right multiplication by vectors with Schmidt rank <= k (default 1).
:param dim: The dimension of the two sub-systems. By default it's assumed to be equal.
:param effort: An integer value indicating the amount of computation you want to devote to
determine block positivity before giving up.
:param rtol: The relative tolerance parameter (default 1e-05).
:return: Return :code:`True` if matrix is k-block positive definite,
:code:`False` if not, or raise a runtime error if we are unable to determine
whether or not the operator is block positive.
"""
if not is_hermitian(mat):
return False
dim_xy = mat.shape[0]
# Set default dimension if none was provided.
if dim is None:
dim = int(np.round(np.sqrt(dim_xy)))
# Allow the user to enter in a single integer for dimension.
if isinstance(dim, int):
dim = np.array([dim, dim_xy / dim])
if np.abs(dim[1] - np.round(dim[1])) >= 2 * dim_xy * np.finfo(float).eps:
raise ValueError(
"If `dim` is a scalar, it must evenly divide the length of the matrix."
)
dim[1] = int(np.round(dim[1]))
dim = np.array(dim, dtype=int)
# When a local dimension is small, block positivity is trivial.
if min(dim) <= k:
return is_positive_semidefinite(mat)
op_norm = np.linalg.norm(mat, ord=2)
# We compute the S(k)-norm of this operator since
# X k-block positive iff:
# c >= S(k)-norm of(c*I - X)
# See Corollary 4.2.9. of [1].
c_mat = op_norm * np.eye(dim_xy) - mat
lower_bound, upper_bound = sk_operator_norm(c_mat, k, dim, op_norm, effort)
# block positive
# Note that QETLAB is more conservative here and multiplies
# by (1 - rtol). After some experiments though, I found out
# that probably due to numerical inaccuracies of CVXPY the check
# upper_bound <= op_norm * (1 - rtol)
# would fail even for k - block positive matrices. So, we choose to
# relax this inequality by increasing RHS. Additionally, the check
# upper_bound <= op_norm * (1 - rtol)
# has the "undesired" property that increasing tolerance makes the
# inequality more difficult to satisfy but usually the reverse holds,
# i.e increased tolerance parameter relaxes the problem.
if upper_bound <= op_norm * (1 + rtol):
return True
# not block positive
if lower_bound >= op_norm * (1 - rtol):
return False
return RuntimeError(
"Unable to determine k-block positivity. Please consider increasing the relative tolerance or the effort level."
)
|
f47b385e5f918a6ef70f369fa7cfefa24678b77a
|
95b4a15808b9c412c8364db80fd619a65dd587e0
|
/tests/compas/geometry/test_curves_parabola.py
|
40e7bf6eeb8989fcb555f0563abbcf7e99258a60
|
[
"MIT"
] |
permissive
|
compas-dev/compas
|
11d5c4d9afd554833297b4a5dbe6a975e6940ce3
|
486e2e9332553240bcbd80e100d26bff58071709
|
refs/heads/main
| 2023-08-31T15:49:32.430570
| 2023-08-17T10:19:52
| 2023-08-17T10:19:52
| 104,857,648
| 286
| 116
|
MIT
| 2023-09-12T13:53:36
| 2017-09-26T08:28:01
|
Python
|
UTF-8
|
Python
| false
| false
| 3,259
|
py
|
test_curves_parabola.py
|
import pytest
import json
import compas
from compas.geometry import allclose
from compas.geometry import Frame
from compas.geometry import Parabola
def test_parabola_create():
parabola = Parabola(focal=1)
assert parabola.focal == 1
assert parabola.frame == Frame.worldXY()
assert allclose(parabola.point_at(0.0), parabola.point_at(0.0, world=False), tol=1e-12)
assert allclose(parabola.point_at(0.5), parabola.point_at(0.5, world=False), tol=1e-12)
assert allclose(parabola.point_at(1.0), parabola.point_at(1.0, world=False), tol=1e-12)
def test_parabola_create_with_frame():
frame = Frame.worldZX()
parabola = Parabola(focal=1, frame=frame)
assert parabola.focal == 1
assert parabola.frame == frame
assert allclose(parabola.point_at(0.0), parabola.point_at(0.0, world=False), tol=1e-12)
assert not allclose(parabola.point_at(0.5), parabola.point_at(0.5, world=False), tol=1e-12)
assert not allclose(parabola.point_at(1.0), parabola.point_at(1.0, world=False), tol=1e-12)
assert allclose(
parabola.point_at(0.0),
parabola.point_at(0.0, world=False).transformed(parabola.transformation),
tol=1e-12,
)
assert allclose(
parabola.point_at(0.5),
parabola.point_at(0.5, world=False).transformed(parabola.transformation),
tol=1e-12,
)
assert allclose(
parabola.point_at(1.0),
parabola.point_at(1.0, world=False).transformed(parabola.transformation),
tol=1e-12,
)
# =============================================================================
# Data
# =============================================================================
def test_parabola_data():
parabola = Parabola(focal=1)
other = Parabola.from_data(json.loads(json.dumps(parabola.data)))
assert parabola.focal == other.focal
assert parabola.frame.point == other.frame.point
assert allclose(parabola.frame.xaxis, other.frame.xaxis, tol=1e-12)
assert allclose(parabola.frame.yaxis, other.frame.yaxis, tol=1e-12)
if not compas.IPY:
assert Parabola.validate_data(parabola.data)
assert Parabola.validate_data(other.data)
# =============================================================================
# Constructors
# =============================================================================
# =============================================================================
# Properties and Geometry
# =============================================================================
def test_parabola_properties():
parabola = Parabola(focal=1.0)
assert parabola.focal == 1.0
parabola._focal = None
with pytest.raises(ValueError):
parabola.focal
# =============================================================================
# Accessors
# =============================================================================
# =============================================================================
# Comparison
# =============================================================================
# =============================================================================
# Other Methods
# =============================================================================
|
68ecb2b96db868ad97cb47033dc005e55e9584ff
|
e9869359c839c8c175ae7877bc35dcfdfe4058f8
|
/kornia/metrics/mean_iou.py
|
26173c8ba54645c7b72f484c9a7ab861ddd8f770
|
[
"Apache-2.0"
] |
permissive
|
kornia/kornia
|
80f93eae6a70b8bc0c9784f92a842ab9a6ab54ae
|
1e0f8baa7318c05b17ea6dbb48605691bca8972f
|
refs/heads/master
| 2023-08-31T06:32:45.960859
| 2023-08-30T21:59:41
| 2023-08-30T21:59:41
| 145,693,916
| 7,351
| 833
|
Apache-2.0
| 2023-09-12T21:59:29
| 2018-08-22T10:31:37
|
Python
|
UTF-8
|
Python
| false
| false
| 4,594
|
py
|
mean_iou.py
|
import torch
from .confusion_matrix import confusion_matrix
def mean_iou(input: torch.Tensor, target: torch.Tensor, num_classes: int, eps: float = 1e-6) -> torch.Tensor:
r"""Calculate mean Intersection-Over-Union (mIOU).
The function internally computes the confusion matrix.
Args:
input : tensor with estimated targets returned by a
classifier. The shape can be :math:`(B, *)` and must contain integer
values between 0 and K-1.
target: tensor with ground truth (correct) target
values. The shape can be :math:`(B, *)` and must contain integer
values between 0 and K-1, where targets are assumed to be provided as
one-hot vectors.
num_classes: total possible number of classes in target.
Returns:
a tensor representing the mean intersection-over union
with shape :math:`(B, K)` where K is the number of classes.
Example:
>>> logits = torch.tensor([[0, 1, 0]])
>>> target = torch.tensor([[0, 1, 0]])
>>> mean_iou(logits, target, num_classes=3)
tensor([[1., 1., 1.]])
"""
if not torch.is_tensor(input) and input.dtype is not torch.int64:
raise TypeError(f"Input input type is not a torch.Tensor with torch.int64 dtype. Got {type(input)}")
if not torch.is_tensor(target) and target.dtype is not torch.int64:
raise TypeError(f"Input target type is not a torch.Tensor with torch.int64 dtype. Got {type(target)}")
if not input.shape == target.shape:
raise ValueError(f"Inputs input and target must have the same shape. Got: {input.shape} and {target.shape}")
if not input.device == target.device:
raise ValueError(f"Inputs must be in the same device. Got: {input.device} - {target.device}")
if not isinstance(num_classes, int) or num_classes < 2:
raise ValueError(f"The number of classes must be an integer bigger than two. Got: {num_classes}")
# we first compute the confusion matrix
conf_mat: torch.Tensor = confusion_matrix(input, target, num_classes)
# compute the actual intersection over union
sum_over_row = torch.sum(conf_mat, dim=1)
sum_over_col = torch.sum(conf_mat, dim=2)
conf_mat_diag = torch.diagonal(conf_mat, dim1=-2, dim2=-1)
denominator = sum_over_row + sum_over_col - conf_mat_diag
# NOTE: we add epsilon so that samples that are neither in the
# prediction or ground truth are taken into account.
ious = (conf_mat_diag + eps) / (denominator + eps)
return ious
def mean_iou_bbox(boxes_1: torch.Tensor, boxes_2: torch.Tensor) -> torch.Tensor:
"""Compute the IoU of the cartesian product of two sets of boxes.
Each box in each set shall be (x1, y1, x2, y2).
Args:
boxes_1: a tensor of bounding boxes in :math:`(B1, 4)`.
boxes_2: a tensor of bounding boxes in :math:`(B2, 4)`.
Returns:
a tensor in dimensions :math:`(B1, B2)`, representing the
intersection of each of the boxes in set 1 with respect to each of the boxes in set 2.
Example:
>>> boxes_1 = torch.tensor([[40, 40, 60, 60], [30, 40, 50, 60]])
>>> boxes_2 = torch.tensor([[40, 50, 60, 70], [30, 40, 40, 50]])
>>> mean_iou_bbox(boxes_1, boxes_2)
tensor([[0.3333, 0.0000],
[0.1429, 0.2500]])
"""
# TODO: support more box types. e.g. xywh,
if not (((boxes_1[:, 2] - boxes_1[:, 0]) > 0).all() or ((boxes_1[:, 3] - boxes_1[:, 1]) > 0).all()):
raise AssertionError("Boxes_1 does not follow (x1, y1, x2, y2) format.")
if not (((boxes_2[:, 2] - boxes_2[:, 0]) > 0).all() or ((boxes_2[:, 3] - boxes_2[:, 1]) > 0).all()):
raise AssertionError("Boxes_2 does not follow (x1, y1, x2, y2) format.")
# find intersection
lower_bounds = torch.max(boxes_1[:, :2].unsqueeze(1), boxes_2[:, :2].unsqueeze(0)) # (n1, n2, 2)
upper_bounds = torch.min(boxes_1[:, 2:].unsqueeze(1), boxes_2[:, 2:].unsqueeze(0)) # (n1, n2, 2)
intersection_dims = torch.clamp(upper_bounds - lower_bounds, min=0) # (n1, n2, 2)
intersection = intersection_dims[:, :, 0] * intersection_dims[:, :, 1] # (n1, n2)
# Find areas of each box in both sets
areas_set_1 = (boxes_1[:, 2] - boxes_1[:, 0]) * (boxes_1[:, 3] - boxes_1[:, 1]) # (n1)
areas_set_2 = (boxes_2[:, 2] - boxes_2[:, 0]) * (boxes_2[:, 3] - boxes_2[:, 1]) # (n2)
# Find the union
# PyTorch auto-broadcasts singleton dimensions
union = areas_set_1.unsqueeze(1) + areas_set_2.unsqueeze(0) - intersection # (n1, n2)
return intersection / union # (n1, n2)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.